#! /bin/bash

# # # # # # # # # # # # # # # # # # # # # #
# @file    :merge_install_age.sh
# @author  :liushuai
# @revision:2017-01-18 16:12
# # # # # # # # # # # # # # # # # # # # # #

source ../dmp_env.sh
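# dmp_env.sh is expected to provide ScheduleTime, the HDFS path variables
# (DMP_INSTALL_LIST, AGE_GET_GA_PATH, AGE_GET_DSP_PATH, AGE_MERGE_INSTALL),
# the JAR variable and the check_await helper used below.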

LOG_TIME=$(date +%Y%m%d -d "-1 day $ScheduleTime")
year=${LOG_TIME:0:4}
month=${LOG_TIME:4:2}
day=${LOG_TIME:6:2}

date_path=$(date +'%Y/%m/%d' -d "-1 day $ScheduleTime")
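# LOG_TIME (yyyyMMdd) and date_path (yyyy/MM/dd) above both refer to the day before ScheduleTime.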

GA_AGE_PATH="${AGE_GET_GA_PATH}/${date_path}"
DSP_AGE_PATH="${AGE_GET_DSP_PATH}/${date_path}"

# INSTALL_3S_INPUT="${DMP_INSTALL_LIST}/${date_path}/3s"
# ADN_INSTALL_INPUT="${DMP_INSTALL_LIST}/${date_path}/adn_install"
# ADN_REQUEST_INPUT="${DMP_INSTALL_LIST}/${date_path}/adn_request_sdk"
# DSP_REQUEST_INPUT="${DMP_INSTALL_LIST}/${date_path}/dsp_req"
# GA_INSTALL_DATE=$(get_recently_date "${DMP_INSTALL_LIST}" "${LOG_TIME}" "ga")
# OTHER_DATE=$(get_recently_date "${DMP_INSTALL_LIST}" "${LOG_TIME}" "other")

INSTALL_LIST_INPUT="${DMP_INSTALL_LIST}/${date_path}/14days"
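# Single merged 14-day install list; it supersedes the per-source inputs commented out above.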

# check_await ${INSTALL_3S_INPUT}/_SUCCESS
# check_await ${ADN_INSTALL_INPUT}/_SUCCESS
# check_await ${ADN_REQUEST_INPUT}/_SUCCESS
# check_await ${DSP_REQUEST_INPUT}/_SUCCESS

check_await ${INSTALL_LIST_INPUT}/_SUCCESS

check_await ${GA_AGE_PATH}/_SUCCESS
check_await ${DSP_AGE_PATH}/_SUCCESS
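# check_await (assumed to come from dmp_env.sh) blocks until each _SUCCESS marker exists,
# so the Spark job is only submitted once all upstream outputs are complete.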

OUTPUT_PATH="${AGE_MERGE_INSTALL}/${year}/${month}/${day}/"
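# Daily output directory for the merged age data (written by the Spark job below).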

# --conf spark.kubernetes.executor.deleteOnTermination=false \
# --conf spark.kubernetes.container.image=818539432014.dkr.ecr.us-east-1.amazonaws.com/engineplus/spark:3.0.1-mobvista-v1.1.7 \
# --conf spark.hadoop.fs.s3.impl="com.amazon.ws.emr.hadoop.fs.EmrFileSystem" \
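# Submit the MergeInstallAge job on YARN (cluster mode). Per its arguments, it merges the
# GA and DSP age outputs for the day and writes the result to OUTPUT_PATH. Note that the
# memoryFraction settings below are legacy options ignored by the unified memory manager
# in recent Spark versions.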
spark-submit --class mobvista.dmp.datasource.age_gender.MergeInstallAge \
     --name "MergeInstallAge.${LOG_TIME}" \
     --conf spark.yarn.executor.memoryOverhead=2048 \
     --conf spark.sql.shuffle.partitions=5000 \
     --conf spark.default.parallelism=2000 \
     --conf spark.shuffle.memoryFraction=0.4 \
     --conf spark.storage.memoryFraction=0.4 \
     --conf spark.sql.files.maxPartitionBytes=536870912 \
     --conf spark.sql.adaptive.enabled=true \
     --conf spark.sql.adaptive.advisoryPartitionSizeInBytes=536870912 \
     --master yarn --deploy-mode cluster --executor-memory 10g --driver-memory 4g  --executor-cores 5  --num-executors 60 \
     ../${JAR} -date ${LOG_TIME} \
     -ga_age_path ${GA_AGE_PATH}  -dsp_age_path ${DSP_AGE_PATH} -age_output ${OUTPUT_PATH} -parallelism 2000

# Propagate failure so the scheduler marks this step as failed if the Spark job did not succeed.
if [[ $? -ne 0 ]]; then
  exit 255
fi