#!/usr/bin/env bash

# Daily Alipay "LaHuo" (user re-acquisition) job.
# Builds the imeimd5 request dataset for the Alipay campaign via a Spark job,
# then prunes output older than 61 days.
#
# Required environment (provided by dmp_env.sh / the scheduler):
#   ScheduleTime, ALIPAY_LAHUO_DAILY_PATH, ODS_DMP_USER_INFO,
#   HIVE_SITE_PATH, JAR, and the check_await helper.

source ../../dmp_env.sh

# Date stamps derived from the schedule time.
# NOTE(review): despite the name, dt_today is the schedule date minus one day
# (the job processes the previous day's data) — confirm with the Spark driver.
dt_today=$(date -d "$ScheduleTime 1 days ago" +"%Y%m%d")
last_req_day=$(date -d "$ScheduleTime 1 days ago" +"%Y-%m-%d")
dt_slash_today=$(date -d "$ScheduleTime" +"%Y/%m/%d")
dt_after_one_day=$(date -d "$ScheduleTime" +"%Y%m%d")
dt_one_day_ago=$(date -d "$ScheduleTime 1 days ago" +"%Y/%m/%d")

# Output path for today's imeimd5 request data.
ALIPAY_IMEIMD5_OUTPUT_PATH="${ALIPAY_LAHUO_DAILY_PATH}/${dt_slash_today}/imeimd5_request_data"

# Previous four days' request-data paths, fed back into the job as history.
dt_slash_one_day=$(date -d "$ScheduleTime 1 days ago" +"%Y/%m/%d")
dt_slash_two_day=$(date -d "$ScheduleTime 2 days ago" +"%Y/%m/%d")
dt_slash_three_day=$(date -d "$ScheduleTime 3 days ago" +"%Y/%m/%d")
dt_slash_four_day=$(date -d "$ScheduleTime 4 days ago" +"%Y/%m/%d")

INPUT_ONE_DAY="${ALIPAY_LAHUO_DAILY_PATH}/${dt_slash_one_day}/imeimd5_request_data"
INPUT_TWO_DAY="${ALIPAY_LAHUO_DAILY_PATH}/${dt_slash_two_day}/imeimd5_request_data"
INPUT_THREE_DAY="${ALIPAY_LAHUO_DAILY_PATH}/${dt_slash_three_day}/imeimd5_request_data"
INPUT_FOUR_DAY="${ALIPAY_LAHUO_DAILY_PATH}/${dt_slash_four_day}/imeimd5_request_data"

# Block until upstream datasets have landed.
check_await "${ODS_DMP_USER_INFO}/${dt_one_day_ago}/adn_request/_SUCCESS"
check_await "${ODS_DMP_USER_INFO}/${dt_one_day_ago}/dsp_req/_SUCCESS"
#check_await "${ALIPAY_ACTIVATION_DAILY_PATH}/${dt_slash_today}/04/_SUCCESS"
#check_await "${ALIPAY_ACQUISITION_DAILY_PATH}/${dt_slash_today}/04/_SUCCESS"

# Clear any partial output from a previous run (idempotent re-run support).
hadoop fs -rm -r "${ALIPAY_IMEIMD5_OUTPUT_PATH}"

# BUG FIX: the original had `${dt_after_one_day}\ -input_one_day ...` — the
# backslash escaped the following space, fusing the date value and the
# -input_one_day flag into one malformed argument. Each continuation now
# ends with ` \` and every expansion is quoted.
spark-submit --class mobvista.dmp.datasource.taobao.AlipayLaHuoDaily \
  --conf spark.network.timeout=720s \
  --conf spark.default.parallelism=2000 \
  --conf spark.sql.shuffle.partitions=2000 \
  --conf spark.sql.broadcastTimeout=1200 \
  --conf spark.yarn.executor.memoryOverhead=4096 \
  --conf spark.sql.autoBroadcastJoinThreshold=31457280 \
  --files "${HIVE_SITE_PATH}" \
  --master yarn --deploy-mode cluster \
  --executor-memory 8g --driver-memory 4g --executor-cores 6 --num-executors 120 \
  "../../${JAR}" \
  -imeioutput "${ALIPAY_IMEIMD5_OUTPUT_PATH}" \
  -today "${dt_today}" -last_req_day "${last_req_day}" \
  -dt_after_one_day "${dt_after_one_day}" \
  -input_one_day "${INPUT_ONE_DAY}" -input_two_day "${INPUT_TWO_DAY}" \
  -input_three_day "${INPUT_THREE_DAY}" -input_four_day "${INPUT_FOUR_DAY}"

if [ $? -ne 0 ]; then
  exit 255
fi

# Retention: delete output older than 61 days.
# ${VAR:?} aborts instead of expanding to a near-root path if the base
# directory variable is unset/empty (protects against `hadoop fs -rm -r /...`).
expire_date_path=$(date -d "$ScheduleTime 61 days ago" +"%Y/%m/%d")
EXPIRE_PATH="${ALIPAY_LAHUO_DAILY_PATH:?}/${expire_date_path}"
hadoop fs -rm -r "${EXPIRE_PATH}"

echo "good job~~"