Commit f6e0e5df by fan.jiang

id3876827262021090301 com.xunmeng.pinduoduo to rtdmp s3

parent 24ba3ada
@@ -3,12 +3,44 @@ source ../../dmp_env.sh
dt_today=$(date -d "$ScheduleTime 1 days ago" +"%Y%m%d")
dt_slash_today=$(date -d "$ScheduleTime 1 days ago" +"%Y/%m/%d")
dt_day=$(date -d "$ScheduleTime 1 days ago" +"%Y%m%d")
dt_slash_day=$(date -d "$ScheduleTime" +"%Y/%m/%d")
check_await "${DM_INSTALL_LIST}_v2/${dt_slash_today}/dsp_req/_SUCCESS"
check_await "${DM_INSTALL_LIST}_v2/${dt_slash_today}/btop/_SUCCESS"
check_await "${DM_INSTALL_LIST}_v2/${dt_slash_day}/TO/_SUCCESS"
OUTPUT_PATH1="${RTDMP_TMP_PACKAGE_NAME_PATH}/id1142110895/${dt_slash_today}/"
OUTPUT_PATH2="${RTDMP_TMP_PACKAGE_NAME_PATH}/id3876827262021090301/${dt_slash_today}/"
OUTPUT_PATH3="${RTDMP_TMP_PACKAGE_NAME_PATH}/id3332062892021090301/${dt_slash_today}/"
OUTPUT_PATH4="${RTDMP_TMP_PACKAGE_NAME_PATH}/id13403763232021090301/${dt_slash_today}/"
OUTPUT_PATH5="${RTDMP_TMP_PACKAGE_NAME_PATH}/id10442830592021090301/${dt_slash_today}/"
OUTPUT_PATH6="${RTDMP_TMP_PACKAGE_NAME_PATH}/com_xunmeng_pinduoduo_bes/${dt_slash_today}/"
OUTPUT_PATH7="${RTDMP_TMP_PACKAGE_NAME_PATH}/com_ss_android_ugc_aweme_iqiyi/${dt_slash_today}/"
OUTPUT_PATH8="${RTDMP_TMP_PACKAGE_NAME_PATH}/id11421108952021090302/${dt_slash_today}/"
OUTPUT_PATH9="${RTDMP_TMP_PACKAGE_NAME_PATH}/com_xunmeng_pinduoduo_oppoziyou/${dt_slash_today}/"
OUTPUT_PATH10="${RTDMP_TMP_PACKAGE_NAME_PATH}/com_xunmeng_pinduoduo_oppoziyou_notinstall/${dt_slash_today}/"
OUTPUT_PATH11="${RTDMP_TMP_PACKAGE_NAME_PATH}/com_xunmeng_pinduoduo_oppoziyou_hist_notinstall/${dt_slash_today}/"
OUTPUT_PATH12="${RTDMP_TMP_PACKAGE_NAME_PATH}/com_xunmeng_pinduoduo_oppolianmeng/${dt_slash_today}/"
OUTPUT_PATH13="${RTDMP_TMP_PACKAGE_NAME_PATH}/com_xunmeng_pinduoduo_oppolianmeng_hist1year_notinstall/${dt_slash_today}/"
OUTPUT_PATH14="${RTDMP_TMP_PACKAGE_NAME_PATH}/com_xunmeng_pinduoduo_oppolianmeng_histhalfyear_notinstall/${dt_slash_today}/"
OUTPUT_PATH15="${RTDMP_TMP_PACKAGE_NAME_PATH}/com_xunmeng_pinduoduo/${dt_slash_today}/"
hadoop fs -rm -r "${OUTPUT_PATH1}"
hadoop fs -rm -r "${OUTPUT_PATH2}"
hadoop fs -rm -r "${OUTPUT_PATH3}"
hadoop fs -rm -r "${OUTPUT_PATH4}"
hadoop fs -rm -r "${OUTPUT_PATH5}"
hadoop fs -rm -r "${OUTPUT_PATH6}"
hadoop fs -rm -r "${OUTPUT_PATH7}"
hadoop fs -rm -r "${OUTPUT_PATH8}"
hadoop fs -rm -r "${OUTPUT_PATH9}"
hadoop fs -rm -r "${OUTPUT_PATH10}"
hadoop fs -rm -r "${OUTPUT_PATH11}"
hadoop fs -rm -r "${OUTPUT_PATH12}"
hadoop fs -rm -r "${OUTPUT_PATH13}"
hadoop fs -rm -r "${OUTPUT_PATH14}"
hadoop fs -rm -r "${OUTPUT_PATH15}"
spark-submit --class mobvista.dmp.datasource.dm.RtdmpTmpId1142110895 \
@@ -18,9 +18,14 @@ spark-submit --class mobvista.dmp.datasource.dm.RtdmpTmpId1142110895 \
--conf spark.driver.maxResultSize=4g \
--conf spark.network.timeout=720s \
--files ${HIVE_SITE_PATH} \
--master yarn --deploy-mode cluster --executor-memory 8g --driver-memory 6g --executor-cores 6 --num-executors 70 \
--master yarn --deploy-mode cluster --executor-memory 8g --driver-memory 6g --executor-cores 6 --num-executors 130 \
../../${JAR} -dt_today ${dt_today} -output1 ${OUTPUT_PATH1} \
-coalesce 420
-dt_day ${dt_day} \
-output2 ${OUTPUT_PATH2} -output3 ${OUTPUT_PATH3} -output4 ${OUTPUT_PATH4} -output5 ${OUTPUT_PATH5} \
-output6 ${OUTPUT_PATH6} -output7 ${OUTPUT_PATH7} -output8 ${OUTPUT_PATH8} -output9 ${OUTPUT_PATH9} \
-output10 ${OUTPUT_PATH10} -output11 ${OUTPUT_PATH11} -output12 ${OUTPUT_PATH12} -output13 ${OUTPUT_PATH13} \
-output14 ${OUTPUT_PATH14} -output15 ${OUTPUT_PATH15} \
-coalesce 780
if [[ $? -ne 0 ]]; then
......
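
A note on the tuning values (an observation from the diff, not stated in the commit message): the -coalesce value appears to track the job's total executor cores, so the bump from 420 to 780 simply follows the executor increase from 70 to 130 at 6 cores each, keeping roughly one output partition per core:

echo $((70 * 6))    # 420, matches the old -coalesce value
echo $((130 * 6))   # 780, matches the new -coalesce value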