#!/bin/bash

source ../dmp_env.sh

# All dates derive from ScheduleTime (provided by dmp_env.sh):
# LOG_TIME / date_path cover the previous day; yesterday_path is the day
# before LOG_TIME, i.e. two days before ScheduleTime.
LOG_TIME=$(date +%Y%m%d -d "-1 day ${ScheduleTime}")
date_path=$(date +%Y/%m/%d -d "-1 day ${ScheduleTime}")
yesterday_path=$(date +%Y/%m/%d -d "-1 day ${LOG_TIME}")

DAILY_PATH="${ETL_MPARTICLE_ORG_DAILY}/yyyymmdd=${LOG_TIME}"

# Block until the upstream daily extract has published its _SUCCESS flag.
check_await "${DAILY_PATH}/_SUCCESS"

# Attach the day's output directory to the Hive table as a new partition.
mount_partition "etl_mparticle_king_audience_org" "dt='${LOG_TIME}'" "${DAILY_PATH}"

# Give the metastore a moment to register the partition before reading it.
sleep 60

business="mparticle"

# Build the daily install list for this business. The trailing numbers are
# positional tuning parameters consumed by installListJob (see dmp_env.sh).
installListJob "${LOG_TIME}" "${date_path}" "${yesterday_path}" "${business}" 20 10 4 4 5

# The block below is the older standalone spark-submit version of this job,
# disabled via the `: '...'` no-op idiom and kept for reference.
: '
INSTALL_PATH="${DMP_INSTALL_LIST}/${yesterday_path}/${business}"

check_await "${INSTALL_PATH}/_SUCCESS"

OUTPUT="${DMP_INSTALL_LIST}/${date_path}/${business}"

expire_date=$(date +%Y%m%d -d "-15 day ${LOG_TIME}")
expire_date_path=$(date +%Y/%m/%d -d "-15 day ${LOG_TIME}")
EXPIRE_OUTPUT_PATH="${DMP_INSTALL_LIST}/${expire_date_path}/${business}"

spark-submit --class mobvista.dmp.common.InstallListLogic \
    --name "DmpInstallList.${business}.${LOG_TIME}" \
    --conf spark.sql.shuffle.partitions=200 \
    --conf spark.default.parallelism=200 \
    --conf spark.kryoserializer.buffer.max=256m \
    --conf spark.executor.extraJavaOptions="-XX:+UseG1GC" \
    --files ${HIVE_SITE_PATH} \
    --jars ${SPARK_HOME}/auxlib/Common-SerDe-1.0-SNAPSHOT.jar \
    --master yarn --deploy-mode cluster \
    --executor-memory 4g --driver-memory 4g --executor-cores 2 --num-executors 10 \
    ../${JAR} -date ${LOG_TIME} -business ${business} -output ${OUTPUT} -coalesce 40

if [[ $? -ne 0 ]]; then
    exit 255
fi
'

# mount_partition "dmp_install_list" "dt='$LOG_TIME', business='$business'" "$OUTPUT"

# Drop the expired (15-day-old) partition and delete its backing path.
# unmount_partition "dmp_install_list" "dt='${expire_date}', business='${business}'" "${EXPIRE_OUTPUT_PATH}"
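
# ---------------------------------------------------------------------------
# Reference sketches of the helpers used above. These are NOT the real
# implementations (those live in ../dmp_env.sh and are not shown here); the
# bodies below are assumptions inferred from the call sites, kept commented
# out so they can never shadow the sourced definitions.
#
# check_await: poll HDFS until the given _SUCCESS flag exists, i.e. until
# the upstream job has finished writing its output.
#
# check_await() {
#     local flag="$1"
#     until hadoop fs -test -e "${flag}"; do
#         echo "$(date '+%F %T') waiting for ${flag} ..."
#         sleep 60
#     done
# }
#
# mount_partition: register an existing HDFS path as a Hive partition.
#
# mount_partition() {
#     local table="$1" part_spec="$2" location="$3"
#     hive -e "ALTER TABLE ${table} ADD IF NOT EXISTS PARTITION (${part_spec}) LOCATION '${location}';"
# }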
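#
# unmount_partition: the assumed inverse of mount_partition, dropping the
# Hive partition and removing its backing HDFS path; this is what the
# commented-out 15-day retention step above would call.
#
# unmount_partition() {
#     local table="$1" part_spec="$2" location="$3"
#     hive -e "ALTER TABLE ${table} DROP IF EXISTS PARTITION (${part_spec});"
#     hadoop fs -rm -r -f "${location}"
# }
# ---------------------------------------------------------------------------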