#!/usr/bin/env bash
# # # # # # # # # # # # # # # # # # # # # #
# @author : jiangfan
# @date : 2021-01-24 12:06:00
# # # # # # # # # # # # # # # # # # # # # #
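# Load the shared DMP environment (presumably the source of ScheduleTime,
# UPARPU_PLUGIN_QCC_PACKAGE, BTOP_DAILY_PATH and JAR) and helper functions
# such as check_await, check_await_hive_partition and mount_partition.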
source ../../dmp_env.sh
source ../../ga_rawdata_analysis/common/tools.sh
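# Derive date strings relative to ScheduleTime; note that dt_today is the day
# *before* ScheduleTime (yyyymmdd), matching the job's -dt_today option.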
dt_today=$(date -d "$ScheduleTime 1 days ago" +"%Y%m%d")
dt_two_days_ago=$(date -d "$ScheduleTime 2 days ago" +"%Y%m%d")
seven_days_ago=$(date -d "$ScheduleTime 7 days ago" +"%Y%m%d")
fifteen_days_ago=$(date -d "$ScheduleTime 15 days ago" +"%Y%m%d")
dt_dash_two_days=$(date -d "$ScheduleTime 2 days ago" +"%Y-%m-%d")
dt_dash_one_days=$(date -d "$ScheduleTime 1 days ago" +"%Y-%m-%d")
dt_slash_today=$(date -d "$ScheduleTime 1 days ago" +"%Y/%m/%d")
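# Split dt_today (yyyymmdd) into year/month/day partition components;
# hh=23 targets the last hourly partition of the day.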
year=${dt_today:0:4}
month=${dt_today:4:2}
day=${dt_today:6:2}
hh="23"
check_await "${UPARPU_PLUGIN_QCC_PACKAGE}/yyyy=${year}/mm=${month}/dd=${day}"
# Check that the required Hive partitions exist; if a partition is missing,
# wait up to five hours (18000 s), then exit the script.
partition_info="dt='${dt_two_days_ago}',business='btop'"
check_await_hive_partition "dwh.dm_install_list_v2 " $partition_info 18000
partition_info="yyyy='${year}',mm='${month}',dd='${day}',hh='${hh}'"
check_await_hive_partition "uparpu_main.uparpu_strategy_app_v2" $partition_info 18000
sleep 50
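# Run the BtopDaily Spark job; output lands under BTOP_DAILY_PATH/yyyy/mm/dd.
# The -dt_dash_tow_days spelling is kept as-is below, since it is presumably
# the option name defined in mobvista.dmp.datasource.btop.BtopDaily.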
OUTPUT_PATH="${BTOP_DAILY_PATH}/${dt_slash_today}"
spark-submit --class mobvista.dmp.datasource.btop.BtopDaily \
--conf spark.network.timeout=720s \
--conf spark.driver.maxResultSize=4g \
--conf spark.default.parallelism=3000 \
--conf spark.sql.shuffle.partitions=3000 \
--conf spark.sql.broadcastTimeout=1200 \
--conf spark.sql.autoBroadcastJoinThreshold=31457280 \
--master yarn --deploy-mode cluster --executor-memory 6g --driver-memory 4g --executor-cores 2 --num-executors 60 \
../../${JAR} \
-output ${OUTPUT_PATH} -coalesce 200 -dt_today ${dt_today} -dt_dash_tow_days ${dt_dash_two_days} -seven_days_ago ${seven_days_ago} -fifteen_days_ago ${fifteen_days_ago} \
-dt_two_days_ago ${dt_two_days_ago} -dt_dash_one_days ${dt_dash_one_days}
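# Abort without mounting the partition if spark-submit failed.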
if [[ $? -ne 0 ]]; then
    exit 255
fi
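# Register the new output as a Hive partition and mark the directory complete.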
mount_partition "etl_btop_daily" "dt='${dt_today}'" "$OUTPUT_PATH"
hadoop fs -touchz "${OUTPUT_PATH}/_SUCCESS"