#!/bin/bash

# # # # # # # # # # # # # # # # # # # # # #
# @author : 冯亮
# @date : 2018-04-25
# @desc : merge the Toutiao daily launch data into the cumulative total data
# # # # # # # # # # # # # # # # # # # # # #

source ../dmp_env.sh

# Partition dates derived from the schedule time:
#   dt / date_path       -> the day being processed (T-1)
#   old_date / old_path  -> the previous total partition (T-2)
#   update_date          -> update date written into the output (T-1, dashed format)
date_path=$(date +"%Y/%m/%d" -d "-1 day $ScheduleTime")
dt=$(date +"%Y%m%d" -d "-1 day $ScheduleTime")
old_date=$(date +"%Y%m%d" -d "-2 day $ScheduleTime")
old_path=$(date +"%Y/%m/%d" -d "-2 day $ScheduleTime")
update_date=$(date +"%Y-%m-%d" -d "-1 day $ScheduleTime")

DAILY_PATH="${ETL_TOUTIAO_LAUNCH_DAILY}/${date_path}"
YESTERDAY_PATH="${DM_TOUTIAO_LAUNCH_TOTAL}/${old_path}"
OUTPUT_PATH="${DM_TOUTIAO_LAUNCH_TOTAL}/${date_path}"
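
# Optional input guard (assumes the upstream daily ETL and the previous total job have
# already written these paths; skip if the scheduler enforces that dependency).
# `hadoop fs -test -e` returns a non-zero status when the path is absent.
hadoop fs -test -e "${DAILY_PATH}" || { echo "missing input: ${DAILY_PATH}"; exit 255; }
hadoop fs -test -e "${YESTERDAY_PATH}" || { echo "missing input: ${YESTERDAY_PATH}"; exit 255; }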

# Size the shuffle from the combined size of the daily and previous-total inputs
reduce_num=$(calculate_reduce_num "${DAILY_PATH};${YESTERDAY_PATH}")
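
# Fallback (assumption: calculate_reduce_num, provided by dmp_env.sh, may print nothing
# if it cannot stat the inputs); keep spark.sql.shuffle.partitions valid by falling back
# to the parallelism used below.
if [ -z "${reduce_num}" ]; then
  reduce_num=200
fi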

# Clear any previous output for this date so the job can overwrite it
hadoop fs -rm -r "${OUTPUT_PATH}"

# Merge the daily launch data with the previous total and write the new total partition
spark-submit --class mobvista.dmp.datasource.toutiao.DmToutiaoTotal \
    --conf spark.sql.shuffle.partitions=${reduce_num} \
    --conf spark.default.parallelism=200 \
    --deploy-mode cluster --executor-memory 8g --driver-memory 4g --executor-cores 4 --num-executors 50 \
    ../${JAR} -output ${OUTPUT_PATH} -date ${dt} -yestoday ${old_date} -updateDate ${update_date}
# Abort if the Spark job failed
if [ $? -ne 0 ]; then
  exit 255
fi

# Mount the new output as the dt partition of the dm_toutiao_launch_total table
mount_partition "dm_toutiao_launch_total" "dt='$dt'" "$OUTPUT_PATH"
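
# Abort with a failure code if the partition mount did not succeed (assumes
# mount_partition, defined in dmp_env.sh, returns a non-zero exit status on failure).
if [ $? -ne 0 ]; then
  exit 255
fi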