#!/usr/bin/env bash
# # # # # # # # # # # # # # # # # # # # # #
# @author : kehan
# @date : 2019-06-20
# # # # # # # # # # # # # # # # # # # # # #
source ../dmp_env.sh
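
# All dates below are derived from $ScheduleTime, which is presumably exported
# by the scheduler or by dmp_env.sh before this script runs.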
today=$(date -d "$ScheduleTime 1 days ago" +"%Y/%m/%d")
yesterday=$(date -d "$ScheduleTime 2 days ago" +"%Y/%m/%d")
dt_today=$(date -d "$ScheduleTime 1 days ago" +"%Y%m%d")
dt_yesterday=$(date -d "$ScheduleTime 2 days ago" +"%Y%m%d")
echo "${today}"
echo "${yesterday}"
: '
# Derive last Sunday's date from $today (this block is currently disabled).
week=`date -d "$today" +%w`
echo "week=$week"
if [ "$week" -eq "0" ];then
week=7 # Sunday is reported as 0; treat it as 7
fi
if [ "$week" -eq "1" ];then
week=8 # On Monday use 8, so data from the Sunday before last is picked up
fi
last_sunday=$(date +%Y%m%d -d "-$week day $today")
'
INPUT_PATH="${FACEBOOK_LOG_PATH}/${today}"
OUTPUT_PATH="${FACEBOOK_DAILY_PATH}/${today}"
UNMATCHED_OUTPUT_PATH="${FACEBOOK_UNMATCHED_PATH}/${today}"
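
# Block until the upstream Facebook log and device-id MD5 match data have
# landed (check_await is a helper, presumably defined in dmp_env.sh, that
# waits for the given _SUCCESS marker to appear).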
check_await "${FACEBOOK_LOG_PATH}/${today}/_SUCCESS"
check_await "${DEVICE_ID_MD5_MATCH_PATH}/${yesterday}/_SUCCESS"
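
# Attach today's raw log directory to the etl_fb_org_daily Hive table as a
# dt partition (mount_partition is likewise assumed to come from dmp_env.sh).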
mount_partition "etl_fb_org_daily" "dt='${dt_today}'" "$INPUT_PATH"
# Clear any previous output so the job starts from a clean slate
# (-f keeps the command from reporting an error when the path does not exist yet).
hadoop fs -rm -r -f "$OUTPUT_PATH/"
hadoop fs -rm -r -f "$UNMATCHED_OUTPUT_PATH/"
path_dir=$(pwd)
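
# Pull the Hive HCatalog serde jar from S3 into the working directory;
# it is presumably required by the Spark job to read/write Hive-backed tables.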
hdfs dfs -get s3://mob-emr-test/dataplatform/DataWareHouse/offline/myjar/hive-hcatalog-core-2.3.3.jar "${path_dir}/"
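
# Run the daily Facebook ETL on YARN. Note that -last_sunday is fed
# ${dt_yesterday} because the last-Sunday computation above is disabled.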
spark-submit --class mobvista.dmp.datasource.facebook.FaceBookDaily \
--conf spark.yarn.executor.memoryOverhead=2048 \
--conf spark.network.timeout=720s \
--conf spark.default.parallelism=500 \
--conf spark.sql.autoBroadcastJoinThreshold=31457280 \
--master yarn --deploy-mode cluster --name facebook_daily_kehan --executor-memory 4g --driver-memory 4g --executor-cores 3 --num-executors 50 \
../${JAR} -input ${INPUT_PATH} -output ${OUTPUT_PATH} -unmatched ${UNMATCHED_OUTPUT_PATH} -coalesce 200 \
-today ${dt_today} -yesterday ${dt_yesterday} -last_sunday ${dt_yesterday}
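
# Abort the whole pipeline if the Spark job did not exit cleanly.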
if [ $? -ne 0 ]; then
exit 255
fi
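
# Expose the freshly written matched and unmatched output as Hive partitions.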
mount_partition "etl_facebook_daily" "dt='${dt_today}'" "$OUTPUT_PATH"
mount_partition "etl_fb_unmatched_history" "dt='${dt_today}'" "$UNMATCHED_OUTPUT_PATH"
hadoop fs -touchz ${OUTPUT_PATH}/_SUCCESS