#!/bin/bash
# # # # # # # # # # # # # # # # # # # # # #
# @file : adn_sdk_v2_install_v1.sh
# @author : jinfeng.wang
# @time : 2020-05-22 16:06:07
# # # # # # # # # # # # # # # # # # # # # #
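# Shared DMP environment: presumably defines RUID_MAPPING, DM_INSTALL_LIST, JAR,
# and the check_await / mount_partition helpers used below.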
source ../dmp_env.sh
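# Schedule time: the scheduler-provided ScheduleTime variable, falling back to the first CLI argument.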
LOG_TIME=${ScheduleTime:-$1}
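# Process the previous day's partition relative to the schedule time.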
date=$(date +%Y%m%d -d "-1 day $LOG_TIME")
date_path=$(date +"%Y/%m/%d" -d "-1 day ${LOG_TIME}")
year=${date:0:4}
month=${date:4:2}
day=${date:6:2}
BUSINESS="adn_sdk_v2"
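# Input: ruid mapping for the target day; block until its _SUCCESS marker exists.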
INPUT_MAPPING="${RUID_MAPPING}/${date_path}"
check_await "${INPUT_MAPPING}/_SUCCESS"
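# Output: install list (v1) partitioned by date and business.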
OUTPUT_PATH="${DM_INSTALL_LIST}_v1/$date_path/${BUSINESS}"
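# Submit the install-list fix job to YARN in cluster mode.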
spark-submit --class mobvista.dmp.datasource.dm.FixInstallListRuid \
  --name "FixInstallListRuid.${date}.${BUSINESS}" \
  --conf spark.sql.shuffle.partitions=1000 \
  --conf spark.default.parallelism=1000 \
  --conf spark.kryoserializer.buffer.max=512m \
  --conf spark.kryoserializer.buffer=64m \
  --conf spark.sql.adaptive.enabled=true \
  --conf spark.sql.adaptive.advisoryPartitionSizeInBytes=536870912 \
  --master yarn --deploy-mode cluster --executor-memory 8g --driver-memory 4g --executor-cores 3 --num-executors 40 \
  ../${JAR} \
  -date "${date}" -coalesce 200 -output "${OUTPUT_PATH}" -business "${BUSINESS}" -input "${INPUT_MAPPING}"
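# Abort with a non-zero status if the Spark job failed.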
if [[ $? -ne 0 ]]; then
exit 255
fi
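# Register the new partition (presumably a Hive partition) on dm_install_list_v1; abort if that fails.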
mount_partition "dm_install_list_v1" "year='$year', month='$month', day='$day', business='${BUSINESS}'" "$OUTPUT_PATH" || exit 1
echo "[Adn Sdk Pkg Total V1 End!]"