#!/usr/bin/env bash

source ../../dmp_env.sh

# Dates derived from the scheduler time: yesterday (compact and slash-separated) and a 90-day lookback.
dt_today=$(date -d "$ScheduleTime 1 days ago" +"%Y%m%d")
dt_dash_today=$(date -d "$ScheduleTime 1 days ago" +"%Y/%m/%d")
update=$(date -d "$ScheduleTime 90 days ago" +"%Y-%m-%d")

# Wait until the upstream eggplants output for the target day is ready.
check_await "${TMP_EGGPLANTS_OUTPUT_PATH}/${dt_dash_today}"

# Create the daily ODS directory on HDFS if it does not exist yet.
hadoop fs -test -e "${ODS_OTHER_DEVICE_DAILY}/${dt_dash_today}"
if [ $? -ne 0 ]; then
  hadoop fs -mkdir -p "${ODS_OTHER_DEVICE_DAILY}/${dt_dash_today}"
fi

OUTPUT_PATH01="${Shinny_Package_Names_Result}/${dt_dash_today}/01"
OUTPUT_PATH02="${Shinny_Package_Names_Result}/${dt_dash_today}/02"

# Remove output left over from a previous run so the Spark job can write cleanly.
hadoop fs -rm -r "${OUTPUT_PATH01}"
hadoop fs -rm -r "${OUTPUT_PATH02}"

spark-submit --class mobvista.dmp.datasource.dm.ShinnyPackageNames \
  --conf spark.yarn.executor.memoryOverhead=4096 \
  --conf spark.reducer.maxBlocksInFlightPerAddress=3 \
  --conf spark.default.parallelism=3000 \
  --conf spark.sql.shuffle.partitions=3000 \
  --conf spark.network.timeout=720s \
  --files "${HIVE_SITE_PATH}" \
  --master yarn --deploy-mode cluster \
  --executor-memory 6g --driver-memory 4g --executor-cores 2 --num-executors 220 \
  ../../"${JAR}" \
  -dt_today "${dt_today}" \
  -update "${update}" \
  -Shinny_Package_Names "${Shinny_Package_Names}" \
  -output01 "${OUTPUT_PATH01}" \
  -output02 "${OUTPUT_PATH02}" \
  -coalesce 200

# Abort the workflow if the Spark job failed.
if [[ $? -ne 0 ]]; then
  exit 255
fi

#hadoop distcp -m20 "${OUTPUT_PATH01}/*" "${TMP_EGGPLANTS_OUTPUT_PATH}/${dt_dash_today}/"
#hadoop distcp -m20 "${OUTPUT_PATH02}/*" "${ODS_OTHER_DEVICE_DAILY}/${dt_dash_today}/"