#!/bin/bash
# # # # # # # # # # # # # # # # # # # # # #
# @author : wangjf
# @date   : 2019-08-28 18:06:59
#
# Daily ETL job: runs the AliDaily Spark job for the schedule date and
# mounts the resulting Hive partition.
#
# Required environment (provided by ../dmp_env.sh or the scheduler):
#   ScheduleTime    - schedule timestamp, parseable by `date -d`
#   ALI_DAILY_PATH  - HDFS/S3 base path for daily output
#   JAR             - job jar filename (relative to parent dir)
#   mount_partition - shell function defined in dmp_env.sh
# # # # # # # # # # # # # # # # # # # # # #

# NOTE: shebang changed from /bin/sh to /bin/bash — the script relies on
# bashisms (`source`, `[[ … ]]`) that are not guaranteed under POSIX sh.
source ../dmp_env.sh

LOG_TIME=$(date -d "$ScheduleTime" +"%Y%m%d")
date_path=$(date -d "$ScheduleTime" +"%Y/%m/%d")

OUTPUT_PATH="${ALI_DAILY_PATH}/${date_path}"

# ClickHouse/ES sink coordinates passed through to the Spark job.
host="ip-172-31-20-35.ec2.internal"
cluster="cluster_1st"
database="dwh"
table="etl_baichuan_daily"

spark-submit --class mobvista.dmp.datasource.baichuan.AliDaily \
  --name "mobvista.dmp.datasource.baichuan.AliDaily_wangjf_${LOG_TIME}" \
  --conf spark.sql.shuffle.partitions=100 \
  --conf spark.default.parallelism=100 \
  --conf spark.kryoserializer.buffer.max=256m \
  --conf spark.driver.extraJavaOptions="-XX:+UseG1GC" \
  --conf spark.executor.extraJavaOptions="-XX:+UseG1GC" \
  --master yarn --deploy-mode cluster \
  --executor-memory 4g --driver-memory 4g --executor-cores 3 --num-executors 5 \
  "../${JAR}" -date "${LOG_TIME}" -partNum 10 -output "${OUTPUT_PATH}" \
  -host "${host}" -cluster "${cluster}" -database "${database}" -table "${table}"

# Abort (non-zero, scheduler-visible) if the Spark job failed; do not
# mount a partition over missing/partial output.
if [[ $? -ne 0 ]]; then
  exit 255
fi

mount_partition "etl_baichuan_daily" "dt='${LOG_TIME}'" "$OUTPUT_PATH"