#!/usr/bin/env bash
#
# Alipay "other data" daily ETL:
#   1. wait for yesterday's upstream install-list dsp_req _SUCCESS flag,
#   2. run the AlipayOtherDataToDmp Spark job writing the oppo / bes
#      activation feeds under .../other_data/,
#   3. mount the Hive partition for today's run,
#   4. drop a _SUCCESS marker for downstream consumers.
#
# Required environment (provided by dmp_env.sh):
#   ScheduleTime, DM_INSTALL_LIST, ALIPAY_ACTIVATION_DAILY_PATH, JAR
# Helpers (provided by tools.sh): check_await, mount_partition

source ../../dmp_env.sh
source ../../ga_rawdata_analysis/common/tools.sh

dt_today=$(date -d "$ScheduleTime tomorrow" +"%Y%m%d")
dt_oneday_ago=$(date -d "$ScheduleTime 1 days ago" +"%Y%m%d")
dt_slash_today=$(date -d "$ScheduleTime tomorrow" +"%Y/%m/%d")
hour="01"

dt_slash_oneday=$(date -d "$ScheduleTime 1 days ago" +"%Y/%m/%d")
# NOTE(review): expire_date / expire_date_path are computed but not used
# anywhere below — presumably leftovers from a 61-day expiry-cleanup step;
# confirm with the job owner before removing.
expire_date=$(date -d "$ScheduleTime 61 days ago" +"%Y%m%d")
expire_date_path=$(date -d "$ScheduleTime 61 days ago" +"%Y/%m/%d")

# Block until the upstream dsp_req partition for yesterday is published.
check_await "${DM_INSTALL_LIST}_v2/${dt_slash_oneday}/dsp_req/_SUCCESS"

OUTPUT_PATH01="${ALIPAY_ACTIVATION_DAILY_PATH}/${dt_slash_today}/${hour}/other_data/foractivation_l00016_alipayoppo"
OUTPUT_PATH02="${ALIPAY_ACTIVATION_DAILY_PATH}/${dt_slash_today}/${hour}/other_data/foractivation_l00016_alipaybes"

OUTPUT_PATH="${ALIPAY_ACTIVATION_DAILY_PATH}/${dt_slash_today}/${hour}/other_data"

# Clear any previous run's output; failure (path absent) is benign and
# intentionally not checked.
hadoop fs -rm -r "${OUTPUT_PATH}"

spark-submit --class mobvista.dmp.datasource.taobao.AlipayOtherDataToDmp \
 --conf spark.network.timeout=720s \
 --conf spark.default.parallelism=3000 \
 --conf spark.sql.shuffle.partitions=3000 \
 --conf spark.sql.broadcastTimeout=1200 \
 --conf spark.sql.autoBroadcastJoinThreshold=31457280 \
 --master yarn --deploy-mode cluster --executor-memory 8g --driver-memory 4g --executor-cores 5 --num-executors 140 \
 "../../${JAR}" \
 -output01 "${OUTPUT_PATH01}" -output02 "${OUTPUT_PATH02}" \
 -dt_today "${dt_today}" -dt_oneday_ago "${dt_oneday_ago}" -hour "${hour}"

if [ $? -ne 0 ]; then
  exit 255
fi

# Register the freshly written output as a Hive partition; the status check
# sits directly on the command so intervening edits cannot break the guard.
if ! mount_partition "etl_alipay_activation_daily" "dt='${dt_today}', hh='${hour}', business='other_data'" "${OUTPUT_PATH}"; then
  exit 255
fi

# Legacy Hive path, kept for reference only.
#HIVE_CMD=$(hive_func)
#$HIVE_CMD	-v  -hivevar  dt_today ${dt_today}   hour ${hour}    -f	alipay_other_data_to_dmp.sql

# Signal downstream jobs that this hour's other_data output is complete.
hadoop fs -touchz "${ALIPAY_ACTIVATION_DAILY_PATH}/${dt_slash_today}/${hour}/_SUCCESS"