#!/usr/bin/env bash
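# Daily ADX dimension/staging job:
#   - waits for the previous day's ADX request and midway backend inputs,
#   - runs AdnTecentAdxDataMidWay to build dim_adn_adx_package and
#     ods_adn_adx_req_tmp for that day,
#   - rotates out the partitions that are seven days old.
# Helper functions (check_await, mount_partition, unmount_partition) and the
# base variables (ScheduleTime, ADN_ADX_REQ_ORG, TOUTIAO_LAUNCH_PATH,
# DIM_ADN_ADX_PKG, ODS_ADN_ADX_REQ_TMP, JAR) are expected to be provided by dmp_env.sh.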
source ../dmp_env.sh
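# Dates are derived from ScheduleTime: the job processes data for one day ago
# (slash-separated form for paths, compact form for the dt partition value)
# and later drops the partitions dated seven days ago.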
today=$(date -d "$ScheduleTime 1 days ago" +"%Y/%m/%d")
dt_today=$(date -d "$ScheduleTime 1 days ago" +"%Y%m%d")
umount_time=$(date -d "$ScheduleTime 7 days ago" +"%Y%m%d")
umount_date_path=$(date -d "$ScheduleTime 7 days ago" +"%Y/%m/%d")
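# Input paths for the day being processed, output paths for the two result
# tables, and the 7-day-old paths whose partitions get unmounted at the end.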
INPUT_ADX_REQ_PATH="${ADN_ADX_REQ_ORG}/${today}"
INPUT_ADN_MIDWAY_BACKEND_PATH="${TOUTIAO_LAUNCH_PATH}/${today}"
OUTPUT_DIM_ADN_ADX_PKG_PATH="${DIM_ADN_ADX_PKG}/${today}"
UMOUNT_DIM_ADN_ADX_PKG_PATH="${DIM_ADN_ADX_PKG}/${umount_date_path}"
OUTPUT_ODS_ADX_TMP_PATH="${ODS_ADN_ADX_REQ_TMP}/${today}"
UMOUNT_OUTPUT_ODS_ADX_TMP_PATH="${ODS_ADN_ADX_REQ_TMP}/${umount_date_path}"
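# Wait for the upstream _SUCCESS flags of hour 23 under the virginia prefix;
# check_await (from dmp_env.sh) is assumed to block until the given path exists.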
check_await "${INPUT_ADX_REQ_PATH}/virginia/23/_SUCCESS"
check_await "${INPUT_ADN_MIDWAY_BACKEND_PATH}/virginia/23/_SUCCESS"
mount_partition "dim_adn_adx_package" "dt='${dt_today}'" "${OUTPUT_DIM_ADN_ADX_PKG_PATH}"
hadoop fs -rm -r "${OUTPUT_DIM_ADN_ADX_PKG_PATH}/"
hadoop fs -rm -r "${OUTPUT_ODS_ADX_TMP_PATH}/"
spark-submit --class mobvista.dmp.datasource.adn_adx.AdnTecentAdxDataMidWay \
--conf spark.yarn.executor.memoryOverhead=2048 \
--conf spark.network.timeout=720s \
--conf spark.sql.shuffle.partitions=1000 \
--conf spark.default.parallelism=1000 \
--conf spark.sql.autoBroadcastJoinThreshold=31457280 \
--conf spark.executor.extraJavaOptions="-XX:+UseG1GC" \
--jars s3://mob-emr-test/dataplatform/DataWareHouse/offline/myjar/hive-hcatalog-core-2.3.3.jar,s3://mob-emr-test/dataplatform/DataWareHouse/offline/myjar/json-serde-1.3.7-jar-with-dependencies.jar \
--master yarn --deploy-mode cluster --name AdnTecentAdxDataMidWay --executor-memory 10g --driver-memory 4g --executor-cores 5 --num-executors 100 \
../"${JAR}" -outputadxtmp "${OUTPUT_ODS_ADX_TMP_PATH}" -dimadxpkg "${OUTPUT_DIM_ADN_ADX_PKG_PATH}" \
-coalesce 60 \
-today "${dt_today}"
if [ $? -ne 0 ]; then
    exit 255
fi
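# Register today's ods_adn_adx_req_tmp partition and drop both tables'
# partitions from seven days ago, together with their backing paths.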
mount_partition "ods_adn_adx_req_tmp" "dt='${dt_today}'" "${OUTPUT_ODS_ADX_TMP_PATH}"
unmount_partition "ods_adn_adx_req_tmp" "dt='${umount_time}'" "${UMOUNT_OUTPUT_ODS_ADX_TMP_PATH}"
unmount_partition "dim_adn_adx_package" "dt='${umount_time}'" "${UMOUNT_DIM_ADN_ADX_PKG_PATH}"
hadoop fs -touchz "${OUTPUT_ODS_ADX_TMP_PATH}/_SUCCESS"
hadoop fs -touchz "${OUTPUT_DIM_ADN_ADX_PKG_PATH}/_SUCCESS"