#!/bin/bash

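# dmp_env.sh (or the scheduler) is assumed to provide ScheduleTime, ETL_DSP_REQ_DAILY,
# DSP_PROFILE_TOTAL, JAR, and the helpers check_await / mount_partition / unmount_partition used below.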
source ../dmp_env.sh

# Compute the target data date (T-1 relative to ScheduleTime) and the
# yyyy/MM/dd partition paths for T-1 and T-2.
LOG_TIME=$(date +%Y%m%d -d "-1 day $ScheduleTime")
date_path=$(date -d "-1 day $ScheduleTime" "+%Y/%m/%d")
before_date_path=$(date -d "-2 day $ScheduleTime" "+%Y/%m/%d")

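# Wait for yesterday's DSP request ETL output to be ready (check_await watches for the _SUCCESS marker).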
DAILY_INPUT="${ETL_DSP_REQ_DAILY}/${date_path}"
check_await "${DAILY_INPUT}/_SUCCESS"

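# Short pause (60s) before the next readiness check.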
sleep 60

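# Also require the previous day's (T-2) accumulated profile snapshot before running.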
BEFORE_PATH="${DSP_PROFILE_TOTAL}/${before_date_path}"
check_await "${BEFORE_PATH}/_SUCCESS"

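# Output location for today's (T-1) total profile partition.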
OUTPUT="${DSP_PROFILE_TOTAL}/${date_path}"

# The partition 10 days back is dropped after a successful run.
expire_date=$(date +%Y%m%d -d "-10 day $ScheduleTime")
expire_date_path=$(date -d "-10 day $ScheduleTime" "+%Y/%m/%d")
EXPIRE_OUTPUT_PATH="${DSP_PROFILE_TOTAL}/${expire_date_path}"

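# Run the Spark job that builds the DSP device profile for ${LOG_TIME} on YARN.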
spark-submit --class mobvista.dmp.datasource.dsp.DspDeviceProfile \
    --conf spark.sql.shuffle.partitions=4000 \
    --conf spark.default.parallelism=2000 \
    --conf spark.kryoserializer.buffer.max=512m \
    --conf spark.kryoserializer.buffer=64m \
    --master yarn --deploy-mode cluster --executor-memory 10g --driver-memory 4g --executor-cores 4 --num-executors 100 \
    ../${JAR} \
    -date ${LOG_TIME} -output ${OUTPUT}

# Abort if the Spark job failed.
if [[ $? -ne 0 ]]; then
  exit 255
fi

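# Register the newly written data as partition dt=${LOG_TIME} of table dsp_profile_total
# (mount_partition is assumed to wrap the Hive ADD PARTITION call).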
mount_partition "dsp_profile_total" "dt='$LOG_TIME'" "$OUTPUT"

# Drop the expired partition and delete its backing path.
unmount_partition "dsp_profile_total" "dt='${expire_date}'" "${EXPIRE_OUTPUT_PATH}"