package mobvista.dmp.datasource.taobao

import java.net.URI

import mobvista.dmp.common.CommonSparkJob
import org.apache.commons.cli.Options
import org.apache.hadoop.fs.{FileSystem, Path}
import org.apache.spark.sql.SparkSession


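/**
  * Pulls device_ids of type imei and imeimd5 for the com.ucmobile_oppo package (business = dsp_req)
  * from dwh.dm_install_list_v2 for the given partition date, and writes each set as plain text
  * files to the supplied S3 output paths.
  */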
class EtlH32FromDmInstallListV2 extends CommonSparkJob with Serializable {
  override protected def buildOptions(): Options = {
    val options = new Options
    options.addOption("dt_oneday_ago", true, "[must] dt_oneday_ago")
    options.addOption("h32_imei",true, "[must] h32_imei")
    options.addOption("h32_imeimd5",true, "[must] h32_imeimd5")
    options
  }

  override protected def run(args: Array[String]): Int = {
    val commandLine = commParser.parse(options, args)
    if (!checkMustOption(commandLine)) {
      printUsage(options)
      return -1
    } else printOptions(commandLine)

    val dt_oneday_ago = commandLine.getOptionValue("dt_oneday_ago")
    val h32_imei = commandLine.getOptionValue("h32_imei")
    val h32_imeimd5 = commandLine.getOptionValue("h32_imeimd5")

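    // Hive-enabled session writing to the shared S3 warehouse, with Kryo serialization and ORC filter pushdown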
    val spark = SparkSession.builder()
      .appName("EtlH18H32FromDmInstallListV2")
      .config("spark.rdd.compress", "true")
      .config("spark.io.compression.codec", "snappy")
      .config("spark.sql.orc.filterPushdown", "true")
      .config("spark.sql.warehouse.dir", "s3://mob-emr-test/spark-warehouse")
      .config("spark.serializer", "org.apache.spark.serializer.KryoSerializer")
      .enableHiveSupport()
      .getOrCreate()


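    // Clear any existing output so saveAsTextFile does not fail on pre-existing paths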
    FileSystem.get(new URI(s"s3://mob-emr-test"), spark.sparkContext.hadoopConfiguration).delete(new Path(h32_imei), true)
    FileSystem.get(new URI(s"s3://mob-emr-test"), spark.sparkContext.hadoopConfiguration).delete(new Path(h32_imeimd5), true)

    try {

      // imei device ids for com.ucmobile_oppo reported by the dsp_req business on dt_oneday_ago
      val imeiSql =
        s"""
           |select device_id
           |from dwh.dm_install_list_v2
           |where dt='${dt_oneday_ago}' and business='dsp_req' and package_name='com.ucmobile_oppo' and device_type='imei'
        """.stripMargin

      spark.sql(imeiSql).rdd.map(_.mkString).coalesce(600)
        .saveAsTextFile(h32_imei)

      // same extraction for md5-hashed imei device ids
      val imeiMd5Sql =
        s"""
           |select device_id
           |from dwh.dm_install_list_v2
           |where dt='${dt_oneday_ago}' and business='dsp_req' and package_name='com.ucmobile_oppo' and device_type='imeimd5'
        """.stripMargin

      spark.sql(imeiMd5Sql).rdd.map(_.mkString).coalesce(600)
        .saveAsTextFile(h32_imeimd5)

    } finally {
      spark.stop()
    }
    0
  }
}


object EtlH32FromDmInstallListV2 {
  def main(args: Array[String]): Unit = {
    new EtlH32FromDmInstallListV2().run(args)
  }
}