#!/usr/bin/env bash

source ../../dmp_env.sh
source ../../ga_rawdata_analysis/common/tools.sh

echo "job  begin!!!"


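# Derive date strings from the scheduler-provided ScheduleTime:
# compact (yyyymmdd) for partition values, slash-separated for HDFS paths, dash-separated for the -update argument.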
dt_today=$(date -d "$ScheduleTime" +"%Y%m%d")
dt_slash_today=$(date -d "$ScheduleTime" +"%Y/%m/%d")
update=$(date -d "$ScheduleTime" +"%Y-%m-%d")

dt_oneday_ago=$(date -d "$ScheduleTime 1 days ago" +"%Y/%m/%d")

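# UC request data produced one day earlier, keyed by MD5-hashed IMEI / OAID.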
UC_IMEIMD5_REQUEST_INPUT_PATH="${UC_LAHUO_DAILY_PATH}/${dt_oneday_ago}/imeimd5_request_data"
UC_OAIDMD5_REQUEST_INPUT_PATH="${UC_LAHUO_DAILY_PATH}/${dt_oneday_ago}/oaidmd5_request_data"


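# UC response data written for today by the upstream step.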
IMEIMD5_RESPONSE_INPUT_PATH="${UC_LAHUO_DAILY_TMP_PATH}/${dt_slash_today}/imeimd5"
OAIDMD5_RESPONSE_INPUT_PATH="${UC_LAHUO_DAILY_TMP_PATH}/${dt_slash_today}/oaidmd5"

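# Outputs bound for S3, split into devices marked for activation and those not for activation.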
IMEIMD5_OUTPUT="${UC_LAHUO_TMP_DAILY_TO_S3}/${dt_slash_today}/imeimd5/foractivation"
OAIDMD5_OUTPUT="${UC_LAHUO_TMP_DAILY_TO_S3}/${dt_slash_today}/oaidmd5/foractivation"

IMEIMD5_NOT_ACTIVATION_OUTPUT="${UC_LAHUO_TMP_DAILY_TO_S3}/${dt_slash_today}/imeimd5/notforactivation"
OAIDMD5_NOT_ACTIVATION_OUTPUT="${UC_LAHUO_TMP_DAILY_TO_S3}/${dt_slash_today}/oaidmd5/notforactivation"

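# Block until the response data is complete; check_await (from the sourced scripts) is expected to wait for these _SUCCESS markers.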
check_await "${IMEIMD5_RESPONSE_INPUT_PATH}/_SUCCESS"
check_await "${OAIDMD5_RESPONSE_INPUT_PATH}/_SUCCESS"

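# Clear any previous outputs so a rerun starts clean (the -rm return code is not checked, so missing paths are harmless).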
hadoop fs -rm -r "${IMEIMD5_OUTPUT}"
hadoop fs -rm -r "${OAIDMD5_OUTPUT}"
hadoop fs -rm -r "${IMEIMD5_NOT_ACTIVATION_OUTPUT}"
hadoop fs -rm -r "${OAIDMD5_NOT_ACTIVATION_OUTPUT}"

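# Spark job that joins the request/response data and writes the four output paths declared above.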
spark-submit --class mobvista.dmp.datasource.taobao.UCTmpDataToDMP \
 --conf spark.network.timeout=720s \
 --conf spark.default.parallelism=2000 \
 --conf spark.sql.shuffle.partitions=2000 \
 --conf spark.sql.broadcastTimeout=1200 \
 --conf spark.yarn.executor.memoryOverhead=4096 \
 --conf spark.sql.autoBroadcastJoinThreshold=31457280 \
 --files ${HIVE_SITE_PATH} \
 --master yarn --deploy-mode cluster  --executor-memory 8g --driver-memory 4g  --executor-cores 4  --num-executors 40 \
 ../../${JAR}  -imeiRequestInput ${UC_IMEIMD5_REQUEST_INPUT_PATH}  -oaidRequestInput ${UC_OAIDMD5_REQUEST_INPUT_PATH} \
  -imeiResponseInput ${IMEIMD5_RESPONSE_INPUT_PATH}  -oaidResponseInput ${OAIDMD5_RESPONSE_INPUT_PATH}   \
  -imeiOutput ${IMEIMD5_OUTPUT}  -oaidOutput ${OAIDMD5_OUTPUT}   \
  -imeiNotActivationOutput ${IMEIMD5_NOT_ACTIVATION_OUTPUT}  -oaidNotActivationOutput ${OAIDMD5_NOT_ACTIVATION_OUTPUT}   \
  -update ${update}


if [ $? -ne 0 ];then
  exit 255
fi

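# Register today's partitions so Hive can see the new data; mount_partition (from the sourced env scripts) is expected to add each partition at the given HDFS location.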
DMP_INSTALL_LIST_OUTPUT_PATH="${DM_INSTALL_LIST}_v2/${dt_slash_today}/uc_activation"
mount_partition "dm_install_list_v2" "dt='${dt_today}', business='uc_activation'" "${DMP_INSTALL_LIST_OUTPUT_PATH}"
mount_partition "uc_lahuo_tmp_daily_to_s3" "dt='${dt_today}', business='uc_activation'" "${UC_LAHUO_TMP_DAILY_TO_S3}/${dt_slash_today}"

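# Run the companion SQL (presumably loading from uc_lahuo_tmp_daily_to_s3 into dm_install_list_v2), passing today's date as a hivevar.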
HIVE_CMD=$(hive_func)

$HIVE_CMD -v -hivevar dt_today ${dt_today} -f uc_lahuo_data_to_dmp.sql


if [ $? -ne 0 ];then
  exit 255
fi

#hadoop fs -touchz ${DMP_INSTALL_LIST_OUTPUT_PATH}/_SUCCESS
echo "good job"