#!/bin/bash
# # # # # # # # # # # # # # # # # # # # # #
# @file    : 3s_trackingnew_install_daily.sh
# @author  : wangjf
# @revision: 2019-04-30 16:20:25
# @purpose : Daily ETL driver — waits for the 3s tracking-install CSV drop on S3,
#            runs the TrackingInstallDaily Spark job, then mounts the Hive partition.
# # # # # # # # # # # # # # # # # # # # # #

# Provides check_await, mount_partition, ${JAR}, etc.
source ../../dmp_env.sh

# Schedule date: injected by the scheduler via ScheduleTime, or passed as $1.
today=${ScheduleTime:-$1}

# Process yesterday's data relative to the schedule date.
date=$(date +"%Y%m%d" -d "-1 day $today")
date_path=$(date +%Y/%m/%d -d "-1 day $today")

# NOTE(review): expire_date/expire_date_path are computed but never used below —
# possibly a leftover from a removed cleanup step; confirm before deleting.
expire_date=$(date +%Y%m%d -d "-4 day $today")
expire_date_path=$(date +%Y/%m/%d -d "-4 day $today")

PATH_3S_EVENT="s3://trackingcsv-3s/trackingcsv/install"
# No trailing slash here: S3 keys are literal, so a double slash would point at
# a different (nonexistent) object than the one the upstream job writes.
INPUT_PATH_3S="${PATH_3S_EVENT}/${date_path}"

# Block until the last hourly drop (23/45) has its _SUCCESS marker.
check_await "${INPUT_PATH_3S}/23/45/_SUCCESS"

output_path="s3://mob-emr-test/dataplatform/DataWareHouse/data/dwh/etl_tracking_install_daily/${date_path}"

spark-submit --class mobvista.dmp.datasource.datatory.TrackingInstallDaily \
  --conf spark.sql.shuffle.partitions=10 \
  --conf spark.default.parallelism=10 \
  --conf spark.sql.files.maxPartitionBytes=536870912 \
  --conf spark.executor.extraJavaOptions="-XX:+UseG1GC" \
  --master yarn --deploy-mode cluster \
  --executor-memory 4g --driver-memory 4g --executor-cores 2 --num-executors 50 \
  ../../"${JAR}" -date "${date}" -output "${output_path}"

# Propagate Spark job failure to the scheduler.
if [[ $? -ne 0 ]]; then
  exit 255
fi

# Mount the Hive partition over the freshly written output.
mount_partition "etl_tracking_install_daily" "dt='${date}'" "${output_path}"