#!/usr/bin/env bash
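# Daily Youku user-acquisition (laxin) pipeline:
#   1. wait for the schedule day's imeimd5/oaidmd5 device dumps;
#   2. run two Spark jobs (DMP conversion, polling deduplication);
#   3. mount the outputs as Hive partitions and run the follow-up SQL;
#   4. publish a _SUCCESS flag and prune staging data older than 61 days.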

source ../../dmp_env.sh
source ../../ga_rawdata_analysis/common/tools.sh

echo "job  begin!!!"

# All date stamps derive from $ScheduleTime, which the scheduler injects.
# Note: dt_begin_days currently equals dt_today, so the polling-deduplication
# window below spans only the schedule day itself.
dt_today=$(date -d "$ScheduleTime" +"%Y%m%d")
dt_begin_days=$(date -d "$ScheduleTime" +"%Y%m%d")
dt_slash_today=$(date -d "$ScheduleTime" +"%Y/%m/%d")
update=$(date -d "$ScheduleTime" +"%Y-%m-%d")

INPUT_PATH="${YOUKU_LAXIN_DAILY_TMP_PATH}/${dt_slash_today}"
OUTPUT_PATH="${YOUKU_LAXIN_TMP_DAILY_TO_S3}/${dt_slash_today}/youku_acquisition"

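# Block until the upstream job marks both device-id inputs (IMEI-MD5 and
# OAID-MD5) complete; check_await comes from the sourced tools.sh.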
check_await "${INPUT_PATH}/imeimd5/_SUCCESS"
check_await "${INPUT_PATH}/oaidmd5/_SUCCESS"

# Clear any output from a previous run so the job can be re-executed safely;
# -f keeps the command from failing when the path does not exist yet.
hadoop fs -rm -r -f "${OUTPUT_PATH}"

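# Pass 1: convert the staged Youku device data into DMP install-list records.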
spark-submit --class mobvista.dmp.datasource.taobao.YoukuTmpDataToDmp \
 --conf spark.network.timeout=720s \
 --conf spark.default.parallelism=2000 \
 --conf spark.sql.shuffle.partitions=2000 \
 --conf spark.sql.broadcastTimeout=1200 \
 --conf spark.yarn.executor.memoryOverhead=4096 \
 --conf spark.sql.autoBroadcastJoinThreshold=31457280 \
 --files "${HIVE_SITE_PATH}" \
 --master yarn --deploy-mode cluster \
 --executor-memory 8g --driver-memory 4g --executor-cores 4 --num-executors 40 \
 "../../${JAR}" -Input "${INPUT_PATH}/*/*" -Output "${OUTPUT_PATH}" \
 -update "${update}"


if [ $? -ne 0 ];then
  exit 255
fi

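# mount_partition (from tools.sh) presumably wraps a Hive DDL along the lines
# of (hypothetical sketch, table/partition names taken from the call below):
#   ALTER TABLE dm_install_list_v2 ADD IF NOT EXISTS
#     PARTITION (dt='${dt_today}', business='youku_acquisition')
#     LOCATION '${DMP_INSTALL_LIST_ACQUISITION_PATH}';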
DMP_INSTALL_LIST_ACQUISITION_PATH="${DM_INSTALL_LIST}_v2/${dt_slash_today}/youku_acquisition"

# Check each mount separately: with a single trailing check, a failure of the
# first call would be masked by the exit status of the second.
mount_partition "dm_install_list_v2" "dt='${dt_today}', business='youku_acquisition'" "${DMP_INSTALL_LIST_ACQUISITION_PATH}"
if [ $? -ne 0 ];then
  exit 255
fi

mount_partition "youku_laxin_tmp_daily_to_s3" "dt='${dt_today}', business='youku_acquisition'" "${OUTPUT_PATH}"
if [ $? -ne 0 ];then
  exit 255
fi

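# Pass 2: deduplicate the polling data over the dt_begin_days..dt_today window
# and stage the result for the S3 export.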
ACQUISITIONOUTPUT="${YOUKU_LAXIN_TMP_DAILY_TO_S3}/${dt_slash_today}/youku_acquisition_polling"

spark-submit --class mobvista.dmp.datasource.taobao.YoukuLaXinPollingDataDeduplication \
 --conf spark.network.timeout=720s \
 --conf spark.default.parallelism=2000 \
 --conf spark.sql.shuffle.partitions=2000 \
 --conf spark.sql.broadcastTimeout=1200 \
 --conf spark.yarn.executor.memoryOverhead=4096 \
 --conf spark.sql.autoBroadcastJoinThreshold=31457280 \
 --files "${HIVE_SITE_PATH}" \
 --master yarn --deploy-mode cluster \
 --executor-memory 8g --driver-memory 4g --executor-cores 4 --num-executors 40 \
 "../../${JAR}" -dt_today "${dt_today}" -dt_begin_days "${dt_begin_days}" \
 -AcquisitionOutput "${ACQUISITIONOUTPUT}"

if [ $? -ne 0 ];then
  exit 255
fi

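# Expose the deduplicated polling output as a Hive partition as well.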
mount_partition "youku_laxin_tmp_daily_to_s3" "dt='${dt_today}', business='youku_acquisition_polling'" "${ACQUISITIONOUTPUT}"

if [ $? -ne 0 ];then
  exit 255
fi

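# hive_func (from tools.sh) resolves the Hive client invocation; dt_today is
# handed to the SQL script, which presumably reads it as ${hivevar:dt_today}.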
HIVE_CMD=$(hive_func)

$HIVE_CMD -v --hivevar dt_today="${dt_today}" -f youku_laxin_data_to_dmp.sql


if [ $? -ne 0 ];then
  exit 255
fi

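# Publish the _SUCCESS flag so downstream consumers see today's install-list
# partition as complete.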
hadoop fs -touchz "${DMP_INSTALL_LIST_ACQUISITION_PATH}/_SUCCESS"


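# Retention: remove the staging output from 61 days ago so temporary data does
# not accumulate indefinitely.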
expire_date_path=$(date -d "$ScheduleTime 61 days ago" +"%Y/%m/%d")
EXPIRE_PATH="${YOUKU_LAXIN_TMP_DAILY_TO_S3}/${expire_date_path}"
hadoop fs -rm -r -f "${EXPIRE_PATH}"

echo "good job"