#!/usr/bin/env bash

source ../../dmp_env.sh

# Date variables derived from the scheduler-provided $ScheduleTime.
dt_today=$(date -d "$ScheduleTime 1 days ago" +"%Y%m%d")
dt_slash_today=$(date -d "$ScheduleTime 1 days ago" +"%Y/%m/%d")
dt_dash_rec7day=$(date -d "$ScheduleTime 7 days ago" +"%Y-%m-%d")
dt_dash_rec15day=$(date -d "$ScheduleTime 15 days ago" +"%Y-%m-%d")

OUTPUT_PATH="${ETL_COM_TENCENT_NEWS_DAILY}/${dt_slash_today}"

# Wait for the upstream install-list partition's _SUCCESS flag before reading it.
check_await "${DM_INSTALL_LIST}_v2/${dt_slash_today}/dsp_req/_SUCCESS"

# Remove any previous output so the job can be re-run idempotently.
hadoop fs -rm -r "${OUTPUT_PATH}"

spark-submit --class mobvista.dmp.datasource.taobao.EtlComTencentNewsDaily \
  --conf spark.network.timeout=720s \
  --conf spark.default.parallelism=3000 \
  --conf spark.sql.shuffle.partitions=3000 \
  --conf spark.sql.broadcastTimeout=1200 \
  --conf spark.sql.autoBroadcastJoinThreshold=31457280 \
  --master yarn --deploy-mode cluster \
  --executor-memory 8g --driver-memory 6g --executor-cores 5 --num-executors 80 \
  "../../${JAR}" \
  -output "${OUTPUT_PATH}" -coalesce 500 \
  -dt_today "${dt_today}" -dt_dash_rec7day "${dt_dash_rec7day}" -dt_dash_rec15day "${dt_dash_rec15day}"

# Abort if the Spark job failed.
if [ $? -ne 0 ]; then
  exit 255
fi

# Register the new output as a partition of the Hive table and publish a success marker.
mount_partition "etl_com_tencent_news_daily" "dt='${dt_today}'" "$OUTPUT_PATH"

hadoop fs -touchz "${OUTPUT_PATH}/_SUCCESS"
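
# NOTE: check_await and mount_partition are helper functions provided by dmp_env.sh
# (sourced above); their real implementations are not shown in this script. As a rough,
# hypothetical sketch only, mount_partition is assumed to attach the HDFS output
# directory as a Hive partition, roughly along these lines:
#
#   mount_partition() {
#       local table="$1" partition_spec="$2" location="$3"
#       hive -e "ALTER TABLE ${table} ADD IF NOT EXISTS PARTITION (${partition_spec}) LOCATION '${location}';"
#   }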