#!/usr/bin/env bash
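# AlipayLaHuoDF daily job: submits the mobvista.dmp.datasource.taobao.AlipayLaHuoDF
# Spark job, which writes the hourly IMEI-MD5 output under ALIPAY_LAHUO_DAILY_TMP_PATH.
# ScheduleTime, ALIPAY_LAHUO_DAILY_TMP_PATH and JAR are expected to be provided by
# the scheduler and by dmp_env.sh sourced below.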
source ../../dmp_env.sh
LOG_TIME=$(date -d "$ScheduleTime" +"%Y%m%d")                # schedule day in yyyyMMdd form
date_path=$(date -d "$ScheduleTime tomorrow" +"%Y/%m/%d")    # day after ScheduleTime, used in the output path
dt_end_days=$(date -d "$ScheduleTime" +"%Y-%m-%d")           # schedule day in yyyy-MM-dd form, passed to the job
hour="04"                                                    # fixed hour used in the output path and passed to the job
IMEIMD5_OUTPUT_PATH="${ALIPAY_LAHUO_DAILY_TMP_PATH}/$date_path/imeimd5/${hour}"
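# Remove any previous output so the Spark job can write a fresh result.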
hadoop fs -rm -r "${IMEIMD5_OUTPUT_PATH}"
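# Submit the Spark job on YARN; the arguments after the jar are passed to AlipayLaHuoDF.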
spark-submit --class mobvista.dmp.datasource.taobao.AlipayLaHuoDF \
    --name "AlipayLaHuoDF.${LOG_TIME}" \
    --conf spark.sql.shuffle.partitions=1000 \
    --conf spark.default.parallelism=1000 \
    --conf spark.kryoserializer.buffer.max=256m \
    --conf spark.driver.extraJavaOptions="-XX:+UseG1GC" \
    --conf spark.executor.extraJavaOptions="-XX:+UseG1GC" \
    --master yarn --deploy-mode cluster \
    --executor-memory 8g --driver-memory 4g --executor-cores 5 --num-executors 10 \
    "../../${JAR}" -date "${LOG_TIME}" -dt_end_days "${dt_end_days}" -partNum 60 \
    -imeiOutput "${IMEIMD5_OUTPUT_PATH}" -cluster 'cluster_1st' -hour "${hour}"
if [[ $? -ne 0 ]]; then
    # Propagate the Spark job failure to the scheduler.
    exit 255
fi