#!/bin/bash
# # # # # # # # # # # # # # # # # # # # # #
# @author : wangjf
# @date : 2019-08-28 18:06:59
#
# Daily launcher for the BaiChuan ETL Spark job.
# Waits for the upstream ODS user-info daily partition to be complete,
# then submits mobvista.dmp.datasource.baichuan.BaiChuanJob on YARN,
# targeting table dwh.etl_baichuan_daily.
#
# Requires (provided by ../dmp_env.sh):
#   ScheduleTime, ODS_DMP_USER_INFO_DAILY, HIVE_SITE_PATH, JAR,
#   and the check_await helper function.
#
# NOTE: shebang changed from /bin/sh to /bin/bash — the script uses
# bashisms (`source`, `[[ ]]`) that are not valid under dash/ash sh.
# # # # # # # # # # # # # # # # # # # # # #

source ../dmp_env.sh

# Target partition date: one day before the schedule time.
LOG_TIME=$(date -d "$ScheduleTime 1 days ago" +"%Y%m%d")
# NOTE(review): date_path is never used below — presumably consumed by
# sourced code or a later edit; kept for compatibility. TODO confirm.
date_path=$(date -d "$ScheduleTime 1 days ago" +"%Y/%m/%d")

ODS_USER_INFO_PATH="${ODS_DMP_USER_INFO_DAILY}/${LOG_TIME}"

# Block until the upstream daily partition has its _SUCCESS marker.
check_await "${ODS_USER_INFO_PATH}/_SUCCESS"

host="ip-172-31-20-35.ec2.internal"
cluster="cluster_1st"
database="dwh"
table="etl_baichuan_daily"

# Submit the Spark job on YARN (cluster deploy mode). All expansions are
# quoted so paths/values containing spaces or glob chars survive intact.
spark-submit --class mobvista.dmp.datasource.baichuan.BaiChuanJob \
  --name "mobvista.dmp.datasource.baichuan.BaiChuanJob_wangjf_${LOG_TIME}" \
  --conf spark.sql.shuffle.partitions=1000 \
  --conf spark.default.parallelism=100 \
  --conf spark.kryoserializer.buffer.max=256m \
  --conf spark.sql.files.maxPartitionBytes=268435456 \
  --files "${HIVE_SITE_PATH}" \
  --master yarn --deploy-mode cluster --executor-memory 6g --driver-memory 6g --executor-cores 2 --num-executors 20 \
  "../${JAR}" -date "${LOG_TIME}" -host "${host}" -cluster "${cluster}" -database "${database}" -table "${table}"

# Propagate job failure with exit code 255 so the scheduler marks this run
# as failed (the specific code appears to be part of the scheduler contract,
# so no blanket `set -e` is introduced here).
if [[ $? -ne 0 ]]; then
  exit 255
fi