#!/bin/bash
# # # # # # # # # # # # # # # # # # # # # #
# @file : adn_request_other_install.sh
# @author: feng.liang
# @date : 2018-05-29
# @desc  : Merge the daily data into the install list
# # # # # # # # # # # # # # # # # # # # # #
source ../../dmp_env.sh
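# Derive the processing dates from $ScheduleTime:
# LOG_TIME / date_path / date refer to the previous day, old_path to two days ago.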
LOG_TIME=$(date -d "$ScheduleTime 1 days ago" "+%Y%m%d")
old_path=$(date -d "$ScheduleTime 2 days ago" "+%Y/%m/%d")
date_path=$(date -d "$ScheduleTime 1 days ago" "+%Y/%m/%d")
date=$(date +"%Y-%m-%d" -d "-1 day $ScheduleTime")
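# INPUT_PATH: yesterday's adn_request_other daily data.
# OLD_INPUT_PATH: the previous install-list partition.
# OUTPUT_PATH: where the newly merged install-list partition is written.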
INPUT_PATH="${ETL_ADN_REQUEST_OTHER_DAILY}/${date_path}/"
OLD_INPUT_PATH="${DM_INSTALL_LIST}/${old_path}/adn_request_other"
OUTPUT_PATH="${DM_INSTALL_LIST}/${date_path}/adn_request_other"
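# Block until the upstream daily job has published its _SUCCESS marker,
# then remove any output left over from a previous (failed) run.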
check_await "${INPUT_PATH}_SUCCESS"
hadoop fs -rm -r "$OUTPUT_PATH"
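# Run the Spark job that merges the daily data into the existing install list.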
spark-submit --class mobvista.dmp.datasource.adn_request_other.AdnRequestOtherInstall \
    --conf spark.yarn.executor.memoryOverhead=2048 \
    --conf spark.sql.shuffle.partitions=2000 \
    --files "${HIVE_SITE_PATH}" \
    --master yarn --deploy-mode cluster --executor-memory 6g --driver-memory 4g --executor-cores 2 --num-executors 200 \
    ../../${JAR} \
    -input "${INPUT_PATH}" -output "${OUTPUT_PATH}" -date "${date}" -oldInput "${OLD_INPUT_PATH}" -parallelism 2000 -coalesce 2000
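# Abort with a non-zero exit code if the Spark job failed.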
if [ $? -ne 0 ]; then
    exit 255
fi
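# Register the new output directory as the adn_request_other partition of
# dm_install_list for the processed day.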
mount_partition "dm_install_list" "year='${LOG_TIME:0:4}', month='${LOG_TIME:4:2}', day='${LOG_TIME:6:2}', business='adn_request_other'" "$OUTPUT_PATH" || exit 1
echo "[Etl adn request other install end!]"