#!/bin/bash

# # # # # # # # # # # # # # # # # # # # # #
# @author : 冯亮
# @date   : 2017-08-15
# @desc   : merge the rush-release (clever) data into the install list
# # # # # # # # # # # # # # # # # # # # # #

source ../dmp_env.sh

LOG_TIME=$(date -d "$ScheduleTime 1 days ago" +"%Y%m%d")
dt=$(date -d "$ScheduleTime 1 days ago" +"%Y-%m-%d")
date_path=$(date -d "$ScheduleTime 1 days ago" +"%Y/%m/%d")
old_date_path=$(date -d "$ScheduleTime 2 days ago" +"%Y/%m/%d")

INPUT_PATH="${CLEVER_DAILY_PATH}/$date_path"
OLD_INPUT_PATH="${DM_INSTALL_LIST}/$old_date_path/clever"
OUTPUT_PATH="${DM_INSTALL_LIST}/$date_path/clever"

# Wait until the upstream daily data and the previous day's install list are ready.
check_await "$INPUT_PATH/_SUCCESS"
check_await "$OLD_INPUT_PATH/_SUCCESS"

# Clear any leftover output from a previous run; -f avoids failing when the path does not exist.
hadoop fs -rm -r -f "$OUTPUT_PATH/"

spark-submit --class mobvista.dmp.datasource.clever.CleverInstallList \
    --conf spark.yarn.executor.memoryOverhead=2048 \
    --conf spark.network.timeout=720s \
    --conf spark.app.tag=-1 \
    --master yarn --deploy-mode cluster \
    --executor-memory 6g --driver-memory 4g --executor-cores 2 --num-executors 20 \
    ../"${JAR}" -input "$INPUT_PATH" -oldInput "$OLD_INPUT_PATH" -output "$OUTPUT_PATH" \
    -date "$dt" -parallelism 200 -coalesce 20

if [ $? -ne 0 ]; then
    exit 255
fi

# Register the new output directory as a partition of dm_install_list.
mount_partition "dm_install_list" "year='${LOG_TIME:0:4}', month='${LOG_TIME:4:2}', day='${LOG_TIME:6:2}', business='clever'" "$OUTPUT_PATH"
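
# Note: mount_partition is a helper defined in dmp_env.sh (not shown here). As an
# illustrative sketch only, assuming it wraps a Hive ADD PARTITION statement, the
# call above would be roughly equivalent to something like the following (the
# database name "dwh" is a hypothetical placeholder):
#
#   hive -e "ALTER TABLE dwh.dm_install_list ADD IF NOT EXISTS PARTITION (
#                year='${LOG_TIME:0:4}', month='${LOG_TIME:4:2}',
#                day='${LOG_TIME:6:2}', business='clever')
#            LOCATION '$OUTPUT_PATH';"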