#!/usr/bin/env bash
# # # # # # # # # # # # # # # # # # # # # #
# @author : jiangfan
# @date : 2021-01-24 12:06:00
# # # # # # # # # # # # # # # # # # # # # #
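# Load the shared DMP environment and helper functions; TO_DAILY_PATH, JAR,
# check_await_hive_partition and mount_partition are assumed to be defined in
# these sourced scripts.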
source ../../dmp_env.sh
source ../../ga_rawdata_analysis/common/tools.sh
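# Derive the schedule date in the three formats used below: compact for the
# Hive partition value, dashed for the Spark job argument, slashed for the
# HDFS output path. $ScheduleTime is assumed to be injected by the scheduler.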
dt_today=$(date -d "$ScheduleTime" +"%Y%m%d")
dt_dash_today=$(date -d "$ScheduleTime" +"%Y-%m-%d")
dt_slash_today=$(date -d "$ScheduleTime" +"%Y/%m/%d")
# Check whether the table partition already exists; if it does not, wait up to ten hours before exiting the script.
partition_info="dt='$dt_dash_today'"
check_await_hive_partition "uparpu_main.uparpu_device_active" "$partition_info" 36000
OUTPUT_PATH="${TO_DAILY_PATH}/${dt_slash_today}"
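# Submit the TODaily Spark job on YARN in cluster mode and write the result to
# $OUTPUT_PATH (coalesced to 200 output files); the job presumably consumes the
# uparpu_device_active partition verified above.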
spark-submit --class mobvista.dmp.datasource.TO.TODaily \
--conf spark.network.timeout=720s \
--conf spark.default.parallelism=3000 \
--conf spark.sql.shuffle.partitions=3000 \
--conf spark.sql.broadcastTimeout=1200 \
--conf spark.sql.autoBroadcastJoinThreshold=31457280 \
--master yarn --deploy-mode cluster --executor-memory 8g --driver-memory 4g --executor-cores 5 --num-executors 20 \
../../${JAR} \
-output ${OUTPUT_PATH} -coalesce 200 -dt_dash_today ${dt_dash_today}
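# Abort with a non-zero status if the Spark job (the most recent command) failed.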
if [[ $? -ne 0 ]]; then
    exit 255
fi
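# Register the output directory as today's partition of etl_to_daily
# (mount_partition is expected to come from the sourced env/tools scripts)
# and drop a _SUCCESS marker so downstream jobs can detect completion.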
mount_partition "etl_to_daily" "dt='${dt_today}'" "$OUTPUT_PATH"
hadoop fs -touchz "${OUTPUT_PATH}/_SUCCESS"