# iqiyi_lahuo_df.sh 1.58 KB
# Newer Older
# wang-jinfeng committed
# (repository-viewer line-number gutter removed — not part of the script)
#!/bin/bash
# NOTE: bash (not sh) is required — this script uses [[ ]] tests below,
# which are not POSIX sh and break under dash-style /bin/sh.

# # # # # # # # # # # # # # # # # # # # # #
# @author : wangjf
# @date : 2019-08-28 18:06:59
# # # # # # # # # # # # # # # # # # # # # #

# Shared environment — presumably defines IQiYi_LAHUO_DAILY_TMP_PATH, JAR,
# and possibly ScheduleTime (TODO confirm: ScheduleTime may also come from
# the scheduler's environment).
source ../dmp_env.sh

# Date stamps derived from the scheduler-supplied ScheduleTime (GNU date -d).
LOG_TIME=$(date -d "$ScheduleTime" +"%Y%m%d")                   # e.g. 20190828
date_path=$(date -d "$ScheduleTime" +"%Y/%m/%d")                # e.g. 2019/08/28
dt_four_days=$(date -d "$ScheduleTime 4 days ago" +"%Y/%m/%d")  # same layout, 4 days earlier

# HDFS output directory for today's run.
OUTPUT_PATH="${IQiYi_LAHUO_DAILY_TMP_PATH}/${date_path}"

# Launch the Spark job that computes today's IQiYi lahuo (re-activation) data
# and writes it to OUTPUT_PATH on HDFS.
spark-submit --class mobvista.dmp.datasource.iqiyi.IQiYiLaHuoDF \
     --name "IQiYiLaHuoDF.${LOG_TIME}" \
     --conf spark.sql.shuffle.partitions=1000 \
     --conf spark.default.parallelism=1000 \
     --conf spark.kryoserializer.buffer.max=256m \
     --conf spark.driver.extraJavaOptions="-XX:+UseG1GC" \
     --conf spark.executor.extraJavaOptions="-XX:+UseG1GC" \
     --master yarn --deploy-mode cluster --executor-memory 4g --driver-memory 4g --executor-cores 3 --num-executors 10 \
     "../${JAR}" -date "${LOG_TIME}" -partNum 10 -output "${OUTPUT_PATH}" -cluster 'cluster_1st'

# Propagate failure to the scheduler with a distinctive exit code.
if [[ $? -ne 0 ]]; then
  exit 255
fi

# The IQiYi lahuo pipeline ingests the last four days of results. If today's
# count of pullable devices ("status":1) drops below HALF of the count from
# four days ago, assume today's run is broken (e.g. the upstream API returned
# false) and overwrite today's output with the four-days-ago data.
#
# Quote the glob so the local shell never expands it; the HDFS client expands
# the wildcard itself. grep -c counts matching lines directly (no wc needed);
# it prints 0 when nothing matches.
today_count=$(hadoop fs -text "${OUTPUT_PATH}/*" | grep -c '"status":1')
FOUR_DAYS_OUTPUT_PATH="${IQiYi_LAHUO_DAILY_TMP_PATH}/${dt_four_days}"
four_days_count=$(hadoop fs -text "${FOUR_DAYS_OUTPUT_PATH}/*" | grep -c '"status":1')
four_days_count=$(( four_days_count / 2 ))   # threshold = half of the old count

if [[ "$today_count" -lt "$four_days_count" ]]; then
  # Replace today's (suspect) output with the known-good older snapshot.
  hadoop distcp -m 20 -overwrite "${FOUR_DAYS_OUTPUT_PATH}" "${OUTPUT_PATH}"
fi