#!/bin/bash

source ../dmp_env.sh

# Partition dates derived from the scheduler timestamp:
# "today" = ScheduleTime - 1 day, "yesterday" = ScheduleTime - 2 days.
today=$(date -d "$ScheduleTime 1 days ago" +"%Y/%m/%d")
yesterday=$(date -d "$ScheduleTime 2 days ago" +"%Y/%m/%d")
dt_today=$(date -d "$ScheduleTime 1 days ago" +"%Y%m%d")
dt=$(date -d "$ScheduleTime 1 days ago" +"%Y-%m-%d")
dt_yesterday=$(date -d "$ScheduleTime 2 days ago" +"%Y%m%d")

echo "${today}"
echo "${yesterday}"

INPUT_PATH="${ETL_3S_POSTBACK_DAILY_PATH}/${today}"
OLD_INPUT_PATH="${DM_INSTALL_LIST}/${yesterday}/allpb"
OUTPUT_PATH="${DM_INSTALL_LIST}/${today}/allpb"

# Wait for the upstream _SUCCESS markers before reading either input.
check_await "$INPUT_PATH/_SUCCESS"
check_await "$OLD_INPUT_PATH/_SUCCESS"

# Remove any stale output from a previous run.
hadoop fs -rm -r "$OUTPUT_PATH/"

spark-submit --class mobvista.dmp.datasource.postback_3s.PostBackInstallList \
  --conf spark.yarn.executor.memoryOverhead=2048 \
  --conf spark.network.timeout=720s \
  --conf spark.default.parallelism=1000 \
  --master yarn --deploy-mode cluster --name PostBackInstallList \
  --executor-memory 8g --driver-memory 4g --executor-cores 4 --num-executors 20 \
  ../"${JAR}" -input "${INPUT_PATH}" -oldInput "${OLD_INPUT_PATH}" -output "${OUTPUT_PATH}" -date "${dt}" -parallelism 1000 -coalesce 400

if [ $? -ne 0 ]; then
  exit 255
fi

# Register the new output directory as the allpb partition of dm_install_list.
mount_partition "dm_install_list" "year='${dt_today:0:4}', month='${dt_today:4:2}', day='${dt_today:6:2}', business='allpb'" "$OUTPUT_PATH"
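
# ---------------------------------------------------------------------------
# Reference only: check_await and mount_partition come from ../dmp_env.sh and
# are not defined here. The sketches below are assumptions about their shape,
# not the actual implementations.
#
#   check_await <hdfs_path>
#     Presumably blocks until the given marker file exists, along the lines of:
#       until hadoop fs -test -e "$1"; do sleep 60; done
#
#   mount_partition <table> <partition_spec> <location>
#     Presumably registers the directory as a Hive partition, e.g.:
#       hive -e "ALTER TABLE $1 ADD IF NOT EXISTS PARTITION ($2) LOCATION '$3'"
# ---------------------------------------------------------------------------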