#!/usr/bin/env bash


source ../dmp_env.sh
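
# ScheduleTime is assumed to be exported by dmp_env.sh (or the scheduler).
# today/yesterday (YYYY/MM/DD) are used to build HDFS paths, while
# dt_today/dt_yesterday (YYYYMMDD) are passed as partition values below.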

today=$(date -d "$ScheduleTime 1 days ago" +"%Y/%m/%d")
yesterday=$(date -d "$ScheduleTime 2 days ago" +"%Y/%m/%d")
dt_today=$(date -d "$ScheduleTime 1 days ago" +"%Y%m%d")
dt_yesterday=$(date -d "$ScheduleTime 2 days ago" +"%Y%m%d")
echo "${today}"
echo "${yesterday}"

# Disabled: previous logic that derived last Sunday's date from $today.
: '
# Get last Sunday's date from the processing date
week=$(date -d "$today" +%w)
echo "week=$week"
if [ "$week" -eq "0" ];then
 week=7 # Sunday itself counts as 7
fi
if [ "$week" -eq "1" ];then
 week=8 # Monday counts as 8, so the Sunday before last is used
fi
last_sunday=$(date +%Y%m%d -d "-$week day $today")
echo "last_sunday=$last_sunday"
'
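
# Output directory for this run, plus the _SUCCESS markers of the upstream
# inputs: 3S postback install/event logs for hour 23 in the virginia region,
# and yesterday's device-id MD5 match data. check_await is assumed to be a
# helper from dmp_env.sh that blocks until the given path exists.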

OUTPUT_PATH="${ETL_3S_POSTBACK_DAILY_PATH}/${today}"
POSTBACK_INSTALL_3S="${POSTBACK_INSTALL_3S_PATH}/${today}/virginia/23/_SUCCESS"
POSTBACK_EVENT_3S="${POSTBACK_EVENT_3S_PATH}/${today}/virginia/23/_SUCCESS"

check_await "${POSTBACK_INSTALL_3S}"
check_await "${POSTBACK_EVENT_3S}"
check_await "${DEVICE_ID_MD5_MATCH_PATH}/${yesterday}/_SUCCESS"
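
# Remove any previous output for today so the Spark job writes into a clean directory.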

hadoop fs -rm -r "$OUTPUT_PATH/"
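
# Run the PostBackDaily Spark job. Note that -last_sunday is currently fed the
# previous day (dt_yesterday) because the weekly last-Sunday calculation above
# is disabled; the hive-hcatalog-core jar is shipped for the job's Hive/HCatalog
# serde dependencies (an assumption based on the jar name).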

spark-submit --class mobvista.dmp.datasource.postback_3s.PostBackDaily \
 --conf spark.network.timeout=720s \
 --conf spark.default.parallelism=2000 \
 --conf spark.sql.shuffle.partitions=1000 \
 --conf spark.sql.broadcastTimeout=1200 \
 --conf spark.sql.autoBroadcastJoinThreshold=31457280 \
 --jars s3://mob-emr-test/dataplatform/DataWareHouse/offline/myjar/hive-hcatalog-core-2.3.3.jar \
 --master yarn --deploy-mode cluster  --executor-memory 6g --driver-memory 4g  --executor-cores 3  --num-executors 100 \
 ../${JAR} -output ${OUTPUT_PATH}  -coalesce 100 \
 -today ${dt_today}  -last_sunday ${dt_yesterday}



# Abort if the Spark job failed; $? still holds spark-submit's exit status.
if [ $? -ne 0 ]; then
  exit 255
fi
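
# Register today's output as partition dt='${dt_today}' of the
# etl_3s_postback_daily Hive table; mount_partition is assumed to be a helper
# defined in dmp_env.sh.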

mount_partition "etl_3s_postback_daily" "dt='${dt_today}'" "$OUTPUT_PATH"
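
# Publish a _SUCCESS marker so downstream consumers can detect completion.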

hadoop fs -touchz ${OUTPUT_PATH}/_SUCCESS