1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
#!/bin/bash
# # # # # # # # # # # # # # # # # # # # # #
# @author : Liu Kai
# @date   : 2018-01-16
# @desc   : extract dim_app_info_adr data
# # # # # # # # # # # # # # # # # # # # # #
# NOTE: shebang must be bash, not sh — this script uses bashisms
# ([[ ]], ${var:offset:length} substring expansion, `source`).
source ../dmp_env.sh

# Business date = one day before ScheduleTime (set by the scheduler env).
LOG_TIME=$(date -d "$ScheduleTime 1 days ago" +"%Y%m%d")
date_path=$(date -d "$ScheduleTime 1 days ago" +"%Y/%m/%d")

# DIM_APP_INFO_ADR / APP_INFO_ADR_PATH / JAR come from dmp_env.sh.
OUTPUT_PATH="${DIM_APP_INFO_ADR}/${date_path}/etl"
CHECK_PATH="${APP_INFO_ADR_PATH}/${date_path}"

# Block until the upstream partition is fully written (_SUCCESS marker).
check_await "${CHECK_PATH}/_SUCCESS"
echo "dim_app_info_adr file success exist, and then can start"

# v2 of the ETL job (v1 Etl_app_info_adr is retired).
spark-submit --class mobvista.dmp.datasource.app_info_tag.Etl_app_info_adr_v2 \
    --name "mobvista.dmp.datasource.app_info_tag.Etl_app_info_adr_v2_wangjf_${LOG_TIME}" \
    --conf spark.yarn.executor.memoryOverhead=2048 \
    --master yarn --deploy-mode cluster --executor-memory 4g --driver-memory 4g --executor-cores 2 --num-executors 2 \
    ../"${JAR}" \
    -date "${LOG_TIME}" -output "${OUTPUT_PATH}"

# Propagate Spark job failure to the scheduler.
if [[ $? -ne 0 ]]; then
    exit 255
fi

# Register the freshly written partition in Hive (project helper from dmp_env.sh).
mount_partition "dim_app_info_adr_category" "year='${LOG_TIME:0:4}', month='${LOG_TIME:4:2}', day='${LOG_TIME:6:2}', business='etl'" "$OUTPUT_PATH"