#!/bin/bash
# # # # # # # # # # # # # # # # # # # # # #
# @file :dsp_app_package_name.sh
# @author :liushuai
# @revision:2017-03-27 20:09
# @desc :Read appid and app_name from the dsp app table
# # # # # # # # # # # # # # # # # # # # # #
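# prd_env.sh is expected to define hive_cmd, ${JAR} and ${ScheduleTime} used below.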
source ../prd_env.sh
# Mount (add) a Hive partition
# $1 table name
# $2 partition
# $3 hdfs path
mount_partition() {
    hive_cmd "
    use dev;
    ALTER TABLE $1 ADD IF NOT EXISTS PARTITION ($2)
    LOCATION '$3';
    " || exit 1
}
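# Work on the partition dated two days before ScheduleTime (yyyymmdd).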
LOG_TIME=$(date +%Y%m%d -d "-2 day $ScheduleTime")
year=${LOG_TIME:0:4}
month=${LOG_TIME:4:2}
day=${LOG_TIME:6:2}
FILE="s3://mob-emr-test/liushuai/prd/${year}/${month}/${day}/four/app_name/dspApp.xls"
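# Poll hourly until the upstream dspApp.xls appears on S3.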
while :
do
    hadoop fs -test -e ${FILE}
    if [ $? -ne 0 ]; then
        sleep 1h
        continue
    fi
    break
done
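# Fetch the xls into the working directory; remove any stale local copy first.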
rm -f ./dspApp.xls
hadoop fs -get ${FILE}
if [ $? -ne 0 ]; then
    exit 255
fi
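# ReadDspAppXls is expected to parse dspApp.xls and write dspApp.txt (appid/app_name) to the current directory.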
java -classpath ../${JAR} mobvista.prd.datasource.table.ReadDspAppXls
if [ $? -ne 0 ]; then
    exit 255
fi
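# Publish dspApp.txt to the output partition path on S3, replacing any previous copy.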
OUTPUT_PATH="s3://mob-emr-test/liushuai/prd/${year}/${month}/${day}/four/dsp_app_name"
hadoop fs -mkdir -p ${OUTPUT_PATH}
hadoop fs -rm -f "${OUTPUT_PATH}/dspApp.txt"
hadoop fs -put dspApp.txt "${OUTPUT_PATH}"
rm -f ./*.txt
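# Register the new day partition on dev.dsp_app_id_name so Hive can query the uploaded data.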
mount_partition "dsp_app_id_name" "year='$year', month='$month', day='$day'" "$OUTPUT_PATH"