#!/usr/bin/env bash
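# Daily mParticle load into the DMP: wait for the upstream ETL output, rebuild
# the user-info and install-list partitions for the run date, and expire the
# partition that has aged out of the retention window.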
source ../dmp_env.sh
source ../ga_rawdata_analysis/common/tools.sh
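# ScheduleTime is expected to be set by the scheduler environment. All dates
# are anchored one day behind it (the data date); expire_* points 6 days
# further back and drives partition cleanup below.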
today=$(date -d "$ScheduleTime 1 days ago" +"%Y/%m/%d")
dt_today=$(date -d "$ScheduleTime 1 days ago" +"%Y%m%d")
dt_today_dash=$(date -d "$ScheduleTime 1 days ago" +"%Y-%m-%d")
expire_path=$(date +%Y/%m/%d -d "-6 day $today")
expire_date=$(date +%Y%m%d -d "-6 day $today")
echo "${today}"
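# Upstream ETL input and per-day output locations; the *_PATH roots are
# assumed to be exported by dmp_env.sh.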
INPUT_PATH="${ETL_MPARTICLE_ORG_DAILY}/yyyymmdd=${dt_today}"
DMP_USER_INFO_OUTPUT_PATH="${ODS_DMP_USER_INFO}/${today}/mparticle"
DMP_USER_INFO_UNMOUNT_PATH="${ODS_DMP_USER_INFO}/${expire_path}/mparticle"
DMP_INSTALL_LIST_OUTPUT_PATH="${DM_INSTALL_LIST}_v2/${today}/mparticle"
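# Block until the upstream ETL has published its _SUCCESS marker.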
check_await "${INPUT_PATH}/_SUCCESS"
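# Attach today's partitions to their HDFS locations (mount_partition is a
# helper sourced from tools.sh).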
mount_partition "etl_mparticle_king_audience_org" "dt='${dt_today}'" "$INPUT_PATH"
mount_partition "dm_install_list_v2" "dt='${dt_today}', business='mparticle'" "${DMP_INSTALL_LIST_OUTPUT_PATH}"
mount_partition "ods_dmp_user_info" "dt='${dt_today}', business='mparticle'" "${DMP_USER_INFO_OUTPUT_PATH}"
# Clear any stale output from a previous run; -f keeps a missing path from
# being reported as an error on the first run.
hadoop fs -rm -r -f "${DMP_USER_INFO_OUTPUT_PATH}"
hadoop fs -rm -r -f "${DMP_INSTALL_LIST_OUTPUT_PATH}"
HIVE_CMD=$(hive_func)
# Abort on failure so the _SUCCESS markers below are never written for a bad run.
$HIVE_CMD -v -hivevar dt_today "${dt_today}" -hivevar update_date "${dt_today_dash}" -f mparticle_king_total.sql || exit 1
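# Detach the partition that has aged out of the 6-day retention window.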
unmount_partition "ods_dmp_user_info" "dt='${expire_date}', business='mparticle'" "${DMP_USER_INFO_UNMOUNT_PATH}"
# Make sure the output directories exist even if the job produced no rows
# (-p also creates missing parent date directories), then publish _SUCCESS
# markers for downstream consumers.
hadoop fs -test -e "${DMP_USER_INFO_OUTPUT_PATH}"
if [ $? -ne 0 ]; then
    hadoop fs -mkdir -p "${DMP_USER_INFO_OUTPUT_PATH}"
fi
hadoop fs -test -e "${DMP_INSTALL_LIST_OUTPUT_PATH}"
if [ $? -ne 0 ]; then
    hadoop fs -mkdir -p "${DMP_INSTALL_LIST_OUTPUT_PATH}"
fi
hadoop fs -touchz "${DMP_USER_INFO_OUTPUT_PATH}/_SUCCESS"
hadoop fs -touchz "${DMP_INSTALL_LIST_OUTPUT_PATH}/_SUCCESS"