#!/usr/bin/env bash

source ../../dmp_env.sh
source ../../ga_rawdata_analysis/common/tools.sh

echo "job  begin!!!"

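# All dates derive from $ScheduleTime (assumed to be exported by the sourced
# env scripts): dt_today is the day after $ScheduleTime, so $ScheduleTime
# itself is one day before dt_today (hence dt_oneday_ago below).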
dt_today=$(date -d "$ScheduleTime tomorrow" +"%Y%m%d")
# NOTE: three days before dt_today is an assumption based on the variable
# name; the original expression duplicated dt_today. This variable is only
# consumed by the disabled deduplication step below.
dt_three_days_ago=$(date -d "$ScheduleTime 2 days ago" +"%Y%m%d")
dt_slash_today=$(date -d "$ScheduleTime tomorrow" +"%Y/%m/%d")
update=$(date -d "$ScheduleTime tomorrow" +"%Y-%m-%d")

dt_oneday_ago=$(date -d "$ScheduleTime" +"%Y/%m/%d")
hour="01"

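# Inputs: the imeimd5 request data from one day before dt_today and the
# imeimd5 response data for dt_today, both under the fixed hourly partition
# ${hour}. Outputs: the activation and acquisition datasets for that hour.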
IMEIMD5_REQUEST_INPUT_PATH="${ALIPAY_LAHUO_DAILY_PATH}/${dt_oneday_ago}/imeimd5_request_data/${hour}"

IMEIMD5_RESPONSE_INPUT_PATH="${ALIPAY_LAHUO_DAILY_TMP_PATH}/${dt_slash_today}/imeimd5/${hour}"

OUTPUT01="${ALIPAY_ACTIVATION_DAILY_PATH}/${dt_slash_today}/${hour}/alipay_activation"

OUTPUT02="${ALIPAY_ACQUISITION_DAILY_PATH}/${dt_slash_today}/${hour}/alipay_acquisition"

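# Block until the upstream job has published its _SUCCESS marker.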
check_await "${IMEIMD5_RESPONSE_INPUT_PATH}/_SUCCESS"

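# Clear any output left over from a previous run (-f suppresses the error
# when the paths do not exist yet).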
hadoop fs -rm -r -f "${OUTPUT01}"
hadoop fs -rm -r -f "${OUTPUT02}"

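# Spark job: consumes the imeimd5 request/response inputs and writes the
# activation (OUTPUT01) and acquisition (OUTPUT02) results.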
spark-submit --class mobvista.dmp.datasource.taobao.AlipayTmpDataToDmp \
 --conf spark.network.timeout=720s \
 --conf spark.default.parallelism=2000 \
 --conf spark.sql.shuffle.partitions=2000 \
 --conf spark.sql.broadcastTimeout=1200 \
 --conf spark.yarn.executor.memoryOverhead=4096 \
 --conf spark.sql.autoBroadcastJoinThreshold=31457280 \
 --deploy-mode cluster --executor-memory 8g --driver-memory 4g --executor-cores 4 --num-executors 40 \
 ../../${JAR} -imeiRequestInput ${IMEIMD5_REQUEST_INPUT_PATH} -imeiResponseInput ${IMEIMD5_RESPONSE_INPUT_PATH} \
  -output01 ${OUTPUT01} -output02 ${OUTPUT02}


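# Abort if the Spark job failed.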
if [ $? -ne 0 ]; then
  exit 255
fi


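# Register each output directory as a Hive partition (mount_partition is
# assumed to come from the sourced helper scripts); check every call so an
# early failure is not masked by a later success.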
mount_partition "etl_alipay_activation_daily" "dt='${dt_today}', hh='${hour}', business='alipay_activation'" "${OUTPUT01}"

mount_partition "etl_alipay_acquisition_daily" "dt='${dt_today}', hh='${hour}', business='alipay_acquisition'" "${OUTPUT02}"


if [ $? -ne 0 ];then
  exit 255
fi

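# The four-day deduplication step below (including its Hive load) is
# currently disabled.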
#ACTIVATIONOUTPUT="${ALIPAY_ACTIVATION_DAILY_PATH}/${dt_slash_today}/alipay_activation_deduplication"
#ACQUISITIONOUTPUT="${ALIPAY_ACQUISITION_DAILY_PATH}/${dt_slash_today}/alipay_acquisition_deduplication"
#
#spark-submit --class mobvista.dmp.datasource.taobao.AlipayLaHuoFourDaysDataDeduplication \
# --conf spark.network.timeout=720s \
# --conf spark.default.parallelism=2000 \
# --conf spark.sql.shuffle.partitions=2000 \
# --conf spark.sql.broadcastTimeout=1200 \
# --conf spark.yarn.executor.memoryOverhead=4096 \
# --conf spark.sql.autoBroadcastJoinThreshold=31457280 \
# --deploy-mode cluster  --executor-memory 8g --driver-memory 4g  --executor-cores 4  --num-executors 40 \
# ../../${JAR}  -dt_today ${dt_today}  -dt_three_days_ago ${dt_three_days_ago} \
#  -ActivationOutput ${ACTIVATIONOUTPUT}  -AcquisitionOutput ${ACQUISITIONOUTPUT}
#
#if [ $? -ne 0 ];then
#  exit 255
#fi
#
#mount_partition "etl_alipay_activation_daily" "dt='${dt_today}', business='alipay_activation_deduplication'" "${ACTIVATIONOUTPUT}"
#mount_partition "etl_alipay_acquisition_daily" "dt='${dt_today}', business='alipay_acquisition_deduplication'" "${ACQUISITIONOUTPUT}"

#if [ $? -ne 0 ];then
#  exit 255
#fi
#
#HIVE_CMD=$(hive_func)
#
#$HIVE_CMD -v --hivevar dt_today=${dt_today} -f alipay_lahuo_data_to_dmp.sql


# This status check guards the disabled Hive step above; by this point $?
# no longer reflects a meaningful command, so it stays commented out too.
#if [ $? -ne 0 ]; then
#  exit 255
#fi

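# Publish the completion flag for downstream consumers.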
#hadoop fs -touchz "${ALIPAY_ACTIVATION_DAILY_PATH}/${dt_slash_today}/_SUCCESS"
hadoop fs -touchz "${ALIPAY_ACQUISITION_DAILY_PATH}/${dt_slash_today}/${hour}/_SUCCESS"

echo "good job"