#!/bin/bash

# Load shared environment helpers (ScheduleTime, check_await, mount_partition, unmount_partition).
source ../dmp_env.sh

# Process the hour before the scheduled time.
today=${ScheduleTime}
date_time=$(date +"%Y%m%d%H" -d "-1 hour $today")
date_path=$(date +%Y/%m/%d/%H -d "-1 hour $today")

INPUT="s3://mob-emr-test/dataplatform/rtdmp_pre/${date_path}"

# The previous hour's merged output is required as the old input for the incremental merge.
old_date_time=$(date +"%Y%m%d%H" -d "-2 hour $today")
old_date_path=$(date +%Y/%m/%d/%H -d "-2 hour $today")
OLD_MERGE_INPUT="s3://mob-emr-test/dataplatform/DataWareHouse/data/dwh/audience_merge/${old_date_path}"

# Wait for the upstream merge's _SUCCESS marker, then give S3 listings time to settle.
check_await "${OLD_MERGE_INPUT}/_SUCCESS"
sleep 120

OUTPUT="s3://mob-emr-test/dataplatform/DataWareHouse/data/dwh/audience_merge/${date_path}"

spark-submit --class mobvista.dmp.datasource.rtdmp.RTDmpMain \
  --name "RTDmpMain.${date_time}" \
  --conf spark.sql.shuffle.partitions=2000 \
  --conf spark.default.parallelism=2000 \
  --conf spark.kryoserializer.buffer.max=512m \
  --conf spark.kryoserializer.buffer=64m \
  --master yarn --deploy-mode cluster \
  --executor-memory 8g --driver-memory 6g --executor-cores 4 --num-executors 100 \
  .././DMP.jar \
  -datetime ${date_time} -old_datetime ${old_date_time} -input ${INPUT} -output ${OUTPUT} -coalesce 400

if [[ $? -ne 0 ]]; then
  exit 255
fi

# Register the new hourly partition in the warehouse table.
mount_partition "audience_merge" "dt='${date_time}'" "$OUTPUT"

# Drop the partition that is now 24 hours old and clean up its data.
expire_time=$(date +"%Y%m%d%H" -d "-24 hour $today")
expire_date_path=$(date +%Y/%m/%d/%H -d "-24 hour $today")
EXPIRE_OUTPUT_PATH="s3://mob-emr-test/dataplatform/DataWareHouse/data/dwh/audience_merge/${expire_date_path}"
unmount_partition "audience_merge" "dt='${expire_time}'" "${EXPIRE_OUTPUT_PATH}"
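
# ---------------------------------------------------------------------------
# Illustrative sketch (assumption): check_await, mount_partition and
# unmount_partition are provided by ../dmp_env.sh, whose source is not shown
# here. The definitions below are NOT the real implementations; they only
# sketch the semantics this script appears to rely on (poll for an S3 success
# marker, add/drop a Hive partition). They sit after every call site, so they
# do not affect this run. Table/database resolution and cleanup behaviour are
# assumptions for illustration only.
# ---------------------------------------------------------------------------
check_await() {
  # Poll S3 until the given _SUCCESS marker of the upstream job exists.
  local marker="$1"
  until aws s3 ls "${marker}" > /dev/null 2>&1; do
    echo "waiting for ${marker} ..."
    sleep 60
  done
}

mount_partition() {
  # Attach the hourly S3 directory as a Hive partition of the given table.
  local table="$1" partition_spec="$2" location="$3"
  hive -e "ALTER TABLE ${table} ADD IF NOT EXISTS PARTITION (${partition_spec}) LOCATION '${location}';"
}

unmount_partition() {
  # Drop the expired Hive partition and (assumption) remove its data from S3.
  local table="$1" partition_spec="$2" location="$3"
  hive -e "ALTER TABLE ${table} DROP IF EXISTS PARTITION (${partition_spec});"
  aws s3 rm --recursive "${location}" > /dev/null 2>&1
}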