#!/usr/bin/env bash
# Daily job: merge UC activation "other data" (ucbes/ucoppo variants) into the
# DMP install list, publish the day's partition to Hive, and retire the
# 61-day-old partition. Requires $ScheduleTime plus env/helpers from the
# sourced files below (DM_INSTALL_LIST, UC_LAHUO_TMP_DAILY_TO_S3, JAR,
# check_await, mount_partition, unmount_partition, hive_func).

source ../../dmp_env.sh
source ././../../ga_rawdata_analysis/common/tools.sh

# Date stamps derived from the scheduler-supplied $ScheduleTime.
dt_today=$(date -d "$ScheduleTime" +"%Y%m%d")
dt_oneday_ago=$(date -d "$ScheduleTime 1 days ago" +"%Y%m%d")
dt_slash_today=$(date -d "$ScheduleTime" +"%Y/%m/%d")
update=$(date -d "$ScheduleTime" +"%Y-%m-%d")
dt_slash_oneday=$(date -d "$ScheduleTime 1 days ago" +"%Y/%m/%d")
# Partitions older than 61 days are unmounted (retention window).
expire_date=$(date -d "$ScheduleTime 61 days ago" +"%Y%m%d")
expire_date_path=$(date -d "$ScheduleTime 61 days ago" +"%Y/%m/%d")

# Block until yesterday's upstream dsp_req partition is complete.
check_await "${DM_INSTALL_LIST}_v2/${dt_slash_oneday}/dsp_req/_SUCCESS"

DMP_INSTALL_LIST_OUTPUT_PATH="${DM_INSTALL_LIST}_v2/${dt_slash_today}/uc_activation"
EXPIRE_OUTPUT_PATH="${UC_LAHUO_TMP_DAILY_TO_S3}/${expire_date_path}/uc_activation_other_data"
OUTPUT_PATH01="${UC_LAHUO_TMP_DAILY_TO_S3}/${dt_slash_today}/uc_activation_other_data/4b5a58_ucbes"
OUTPUT_PATH02="${UC_LAHUO_TMP_DAILY_TO_S3}/${dt_slash_today}/uc_activation_other_data/d3f521_ucbes"
OUTPUT_PATH03="${UC_LAHUO_TMP_DAILY_TO_S3}/${dt_slash_today}/uc_activation_other_data/4b5a58_ucoppo"
OUTPUT_PATH04="${UC_LAHUO_TMP_DAILY_TO_S3}/${dt_slash_today}/uc_activation_other_data/d3f521_ucoppo"
OUTPUT_PATH="${UC_LAHUO_TMP_DAILY_TO_S3}/${dt_slash_today}/uc_activation_other_data"

# Clear any partial output from a previous attempt. On the very first run the
# path does not exist and -rm reports an error; that is harmless, so tolerate it.
hadoop fs -rm -r "${OUTPUT_PATH}" || true

spark-submit --class mobvista.dmp.datasource.taobao.UCOtherDataToDmp \
  --conf spark.network.timeout=720s \
  --conf spark.default.parallelism=3000 \
  --conf spark.sql.shuffle.partitions=3000 \
  --conf spark.sql.broadcastTimeout=1200 \
  --conf spark.sql.autoBroadcastJoinThreshold=31457280 \
  --master yarn --deploy-mode cluster --executor-memory 8g --driver-memory 4g --executor-cores 5 --num-executors 150 \
  ../../"${JAR}" \
  -output01 "${OUTPUT_PATH01}" -output02 "${OUTPUT_PATH02}" \
  -output03 "${OUTPUT_PATH03}" -output04 "${OUTPUT_PATH04}" \
  -dt_today "${dt_today}" -dt_oneday_ago "${dt_oneday_ago}" -update "${update}"

# BUG FIX: previously the Spark exit status was never checked, so a failed job
# still mounted the partition and touched _SUCCESS, publishing bad/empty data.
if [ $? -ne 0 ]; then
  exit 255
fi

# Publish today's partition and retire the expired one.
mount_partition "uc_lahuo_tmp_daily_to_s3" "dt='${dt_today}', business='uc_activation_other_data'" "${OUTPUT_PATH}"
unmount_partition "uc_lahuo_tmp_daily_to_s3" "dt='${expire_date}', business='uc_activation_other_data'" "${EXPIRE_OUTPUT_PATH}"

HIVE_CMD=$(hive_func)
$HIVE_CMD -v -hivevar dt_today "${dt_today}" -f uc_other_data_to_dmp.sql
if [ $? -ne 0 ]; then
  exit 255
fi

# Signal downstream consumers that today's install-list output is complete.
hadoop fs -touchz "${DMP_INSTALL_LIST_OUTPUT_PATH}/_SUCCESS"