#!/usr/bin/env bash

source ../dmp_env.sh

# All job dates are derived from ScheduleTime (exported by ../dmp_env.sh).
# Offsets: the job processes ScheduleTime-1 ("today") and ScheduleTime-2
# ("yesterday"); partitions 6 days old are unmounted for retention.
today=$(date -d "$ScheduleTime 1 days ago" +"%Y/%m/%d")            # slashed form for HDFS paths
yesterday=$(date -d "$ScheduleTime 2 days ago" +"%Y/%m/%d")
dt_today=$(date -d "$ScheduleTime 1 days ago" +"%Y%m%d")           # compact form for Hive dt= partitions
dt_yesterday=$(date -d "$ScheduleTime 2 days ago" +"%Y%m%d")
umount_time=$(date -d "$ScheduleTime 6 days ago" +"%Y%m%d")
umount_date_path=$(date -d "$ScheduleTime 6 days ago" +"%Y/%m/%d")

# printf with quoted expansions instead of bare `echo ${var}` (SC2086).
printf '%s\n' "${today}"
printf '%s\n' "${yesterday}"

# Historical logic (disabled): derive last Sunday's date from $today.
#   week=$(date -d "$today" +%w)
#   if [ "$week" -eq "0" ]; then week=7; fi   # Sunday itself counts as 7
#   if [ "$week" -eq "1" ]; then week=8; fi   # Monday counts as 8, i.e. use the Sunday before last
#   last_sunday=$(date +%Y%m%d -d "-$week day $today")
# The job now runs daily, so "last_sunday" is simply yesterday's partition date.
last_sunday=$dt_yesterday
echo "last_sunday=$last_sunday"

# Locate the most recent available upstream partition at or before
# dt_yesterday (get_recently_date comes from ../dmp_env.sh).
BIGMEDIA_DATE=$(get_recently_date "${BIGMEDIA_PATH}" "${dt_yesterday}")

echo "BIGMEDIA_DATE=${BIGMEDIA_DATE}"

# Re-render the compact date as a yyyy/mm/dd path segment.
BIGMEDIA_DATE_PATH=$(date -d "${BIGMEDIA_DATE}" +"%Y/%m/%d")

# Input/output locations for the Spark job below.
INPUT_PATH_DAILY="${BIGMEDIA_PATH}/${BIGMEDIA_DATE_PATH}"

OUTPUT_BIGMEDIA_DAILY_PATH="${BIGMEDIA_DAILY_PATH}/${yesterday}"

GENDER_BIG_MEDIA_OUTPUT_PATH="${GENDER_GET_BIG_MEDIA_PATH}/${yesterday}"
UMOUNT_GENDER_BIG_MEDIA_OUTPUT_PATH="${GENDER_GET_BIG_MEDIA_PATH}/${umount_date_path}"

# Block until upstream data and the device-id match output are ready.
check_await "${INPUT_PATH_DAILY}/etl_toutiao_dmp_daily.log"
check_await "${DEVICE_ID_MD5_MATCH_PATH}/${yesterday}/_SUCCESS"

# Clear any partial output from a previous run; a "path does not exist"
# failure here is expected on first run and deliberately ignored.
hadoop fs -rm -r "${OUTPUT_BIGMEDIA_DAILY_PATH}/"
hadoop fs -rm -r "${GENDER_BIG_MEDIA_OUTPUT_PATH}/"

# Build the daily domestic big-media dataset and the derived gender dataset.
#   -bigmediainput : upstream partition located above
#   -outputdaily   : data for ods_bigmedia_domestic_daily
#   -outputgender  : data for ods_gender_bm
# Failure aborts the script directly (replaces the old `if [ $? -ne 0 ]` check).
spark-submit --class mobvista.dmp.datasource.bigmedia_domestic.BigMediaDomestic \
 --conf spark.yarn.executor.memoryOverhead=2048 \
 --conf spark.network.timeout=720s \
 --conf spark.default.parallelism=400 \
 --conf spark.sql.shuffle.partitions=400 \
 --conf spark.sql.autoBroadcastJoinThreshold=31457280 \
 --jars s3://mob-emr-test/dataplatform/DataWareHouse/offline/myjar/hive-hcatalog-core-2.3.3.jar \
 --master yarn --deploy-mode cluster --name BigMediaDomestic \
 --executor-memory 4g --driver-memory 4g --executor-cores 2 --num-executors 36 \
 "../${JAR}" \
 -bigmediainput "${INPUT_PATH_DAILY}" \
 -outputdaily "${OUTPUT_BIGMEDIA_DAILY_PATH}" \
 -outputgender "${GENDER_BIG_MEDIA_OUTPUT_PATH}" \
 -coalesce 50 \
 -last_sunday "${dt_yesterday}" || exit 255

# Register the freshly written output as Hive partitions (dt=yyyymmdd).
# mount_partition / unmount_partition are provided by ../dmp_env.sh.
mount_partition "ods_bigmedia_domestic_daily" "dt='${dt_yesterday}'" "${OUTPUT_BIGMEDIA_DAILY_PATH}"

mount_partition "ods_gender_bm" "dt='${dt_yesterday}'" "${GENDER_BIG_MEDIA_OUTPUT_PATH}"
# Drop the 6-day-old gender partition so retention stays bounded.
unmount_partition "ods_gender_bm" "dt='${umount_time}'" "${UMOUNT_GENDER_BIG_MEDIA_OUTPUT_PATH}"


#######################################
# Ensure an HDFS directory exists, then write an empty _SUCCESS marker
# into it so downstream jobs can detect completion.
# Arguments: $1 - HDFS directory path
#######################################
touch_success() {
  local dir=$1
  if ! hadoop fs -test -e "${dir}"; then
    # -p also creates missing parent directories (plain -mkdir would fail)
    hadoop fs -mkdir -p "${dir}"
  fi
  hadoop fs -touchz "${dir}/_SUCCESS"
}

touch_success "${OUTPUT_BIGMEDIA_DAILY_PATH}"
touch_success "${GENDER_BIG_MEDIA_OUTPUT_PATH}"