#!/bin/bash
# # # # # # # # # # # # # # # # # # # # # #
# @file :3s_tracking_event_daily.sh
# @author :wangjf
# @revision:2019-04-30 16:20:25
# # # # # # # # # # # # # # # # # # # # # #
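# Load the shared DMP environment; ${JAR} and helpers such as check_await,
# mount_partition and common_mount_partition are expected to be defined there.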
source ../../dmp_env.sh
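# Run date: prefer the scheduler-provided ScheduleTime, fall back to the first CLI argument.
# All derived dates and date paths below are computed relative to it.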
today=${ScheduleTime:-$1}
date=$(date +"%Y%m%d" -d "-1 day $today")
before_date=$(date +"%Y%m%d" -d "-2 day $today")
date_path=$(date +%Y/%m/%d -d "-1 day $today")
expire_date=$(date +%Y%m%d -d "-2 day $today")
expire_date_path=$(date +%Y/%m/%d -d "-2 day $today")
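# Source 3S tracking-event CSVs on S3, laid out by date path (YYYY/MM/DD).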
PATH_3S_EVENT="s3://trackingcsv-3s/trackingcsv/event"
INPUT_PATH_3S="${PATH_3S_EVENT}/${date_path}"
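# Wait until the last slot of the day (23/45) carries its _SUCCESS marker,
# i.e. the whole day's data has landed (check_await is assumed to poll for the path).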
check_await "${INPUT_PATH_3S}/23/45/_SUCCESS"
output_path="s3://mob-emr-test/dataplatform/DataWareHouse/data/dwh/etl_3s_event_daily/${date_path}"
info_output_path="s3://mob-emr-test/dataplatform/DataWareHouse/data/dev/event_info/${date_path}"
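# Run the daily TrackingEventDaily aggregation on YARN in cluster mode.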
spark-submit --class mobvista.dmp.datasource.datatory.TrackingEventDaily \
--conf spark.sql.shuffle.partitions=10 \
--conf spark.default.parallelism=10 \
--conf spark.sql.files.maxPartitionBytes=536870912 \
--conf spark.executor.extraJavaOptions="-XX:+UseG1GC" \
--master yarn --deploy-mode cluster --executor-memory 4g --driver-memory 4g --executor-cores 2 --num-executors 50 \
../../${JAR} -date ${date} -output ${output_path} -info_output ${info_output_path} -before_date ${before_date}
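# Fail fast so the scheduler marks this run as failed if the Spark job did not succeed.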
if [[ $? -ne 0 ]]; then
exit 255
fi
# Mount the output paths as Hive partitions
mount_partition "etl_3s_event_daily" "dt='${date}'" "${output_path}"
common_mount_partition "dev" "event_info" "dt='${date}'" "${info_output_path}"