#!/bin/bash

# # # # # # # # # # # # # # # # # # # # # #
# @file    :app_get_id_name.sh
# @author  :liushuai
# @revision:2017-03-14 10:45
# @desc    :join the app_id<->pkg_name mapping extracted from the Settings logs with the app_id, app_name, platform and impression columns read from the table
# # # # # # # # # # # # # # # # # # # # # #


source ../prd_env.sh

# Reference date: ScheduleTime minus 2 days. year/month/day drive all paths.
LOG_TIME=$(date +%Y%m%d -d "-2 day $ScheduleTime")
year=${LOG_TIME:0:4}
month=${LOG_TIME:4:2}
day=${LOG_TIME:6:2}

# day2..day7: day-of-month for offsets -3 .. -8 days (a 7-day window together
# with $day). printf -v assigns into the dynamically named variable, replacing
# seven copy-pasted date computations.
# NOTE(review): the input path below reuses ${year}/${month} from the -2-day
# date for all seven days; when the window crosses a month boundary the older
# days resolve to non-existent paths — confirm this is intended.
for i in 2 3 4 5 6 7; do
    LOG_TIME=$(date +%Y%m%d -d "-$((i + 1)) day $ScheduleTime")
    printf -v "day${i}" '%s' "${LOG_TIME:6:2}"
done



#######################################
# Mount (register) a Hive partition.
# Arguments: $1 - table name
#            $2 - partition spec, e.g. "year='2017', month='03', day='14'"
#            $3 - HDFS/S3 location backing the partition
# Returns:   0 on success; exits the whole script with 1 if hive_cmd fails.
#######################################
mount_partition() {
# Note: the previous version declared an unused "local MOUNT_PARTITION";
# it has been removed. The HQL text itself is unchanged.
hive_cmd "
use dev;
ALTER TABLE $1 ADD IF NOT EXISTS PARTITION ($2)
    LOCATION '$3';
" || exit 1
}


# Last 7 days of GlobalSetting logs. The {a,b,...} group is NOT brace-expanded
# by bash (it comes from a variable); it is interpreted as a glob by the
# Hadoop FileSystem client, so quoting the expansion is safe.
INPUT_PATH1="s3://mob-emr-test/dataplatform/DataWareHouse/data/Nginx/GlobalSetting/${year}/${month}/{${day},${day2},${day3},${day4},${day5},${day6},${day7}}/"
OUTPUT_PATH="s3://mob-emr-test/liushuai/prd/${year}/${month}/${day}/four/app"
APP_NAME_PATH="s3://mob-emr-test/liushuai/prd/${year}/${month}/${day}/four/app_name"
FILE="${APP_NAME_PATH}/app.txt"

SETTING_FILE=./setting.txt

# Merge the distributed setting logs into one local file.
if ! hadoop fs -getmerge "${INPUT_PATH1}" "${SETTING_FILE}"; then
    exit 255
fi

# Best-effort removal of a previous upload; failure (file absent) is OK.
# Fix: the old code appended "./setting.txt" verbatim, producing the
# malformed remote path ".../app_name/./setting.txt".
hadoop fs -rm "${APP_NAME_PATH}/${SETTING_FILE##*/}"
if ! hadoop fs -put "${SETTING_FILE}" "${APP_NAME_PATH}"; then
    exit 255
fi

# Clear the MR output directory (-rmr is deprecated; -rm -r is the
# supported form on Hadoop 2+). Failure (dir absent) is tolerated.
hadoop fs -rm -r "${OUTPUT_PATH}"

# Join uploaded setting data with the app table; single reducer so the
# output is one file.
hadoop jar "../${JAR}" mobvista.prd.datasource.table.MergeAppIDMR \
    -Dmapreduce.fileoutputcommitter.algorithm.version=2 \
    -Dmapred.reduce.tasks=1 \
    "${APP_NAME_PATH}/setting.txt" "${OUTPUT_PATH}" "${FILE}"

if [ $? -ne 0 ]; then
    exit 255
fi

# Register today's partition of dev.app_id_name over the MR output.
mount_partition "app_id_name" "year='${year}', month='${month}', day='${day}'" "$OUTPUT_PATH"