package mobvista.dmp.datasource.taobao

import java.net.URI

import mobvista.dmp.common.CommonSparkJob
import org.apache.commons.cli.Options
import org.apache.hadoop.fs.{FileSystem, Path}
import org.apache.spark.sql.SparkSession


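/**
 * UC audience export job: joins today's uc_activation devices against the
 * previous day's dsp_req devices in dwh.dm_install_list_v2 and writes four
 * tagged, tab-separated device lists (one per package/channel pair) to S3.
 */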
class UCOtherDataToDmp extends CommonSparkJob with Serializable {
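  // dt_today / dt_oneday_ago select the dm_install_list_v2 partitions to join;
  // update is appended verbatim to every output line; output01-04 are the
  // destination paths for the four audience exports.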
  override protected def buildOptions(): Options = {
    val options = new Options
    options.addOption("dt_today", true, "[must] dt_today")
    options.addOption("dt_oneday_ago", true, "[must] dt_oneday_ago")
    options.addOption("update", true, "[must] update")
    options.addOption("output01",true, "[must] output01")
    options.addOption("output02",true, "[must] output02")
    options.addOption("output03",true, "[must] output03")
    options.addOption("output04",true, "[must] output04")
    options
  }

  override protected def run(args: Array[String]): Int = {
    val commandLine = commParser.parse(options, args)
    if (!checkMustOption(commandLine)) {
      printUsage(options)
      return -1
    } else printOptions(commandLine)

    val dt_today = commandLine.getOptionValue("dt_today")
    val dt_oneday_ago = commandLine.getOptionValue("dt_oneday_ago")
    val update = commandLine.getOptionValue("update")
    val output01 = commandLine.getOptionValue("output01")
    val output02 = commandLine.getOptionValue("output02")
    val output03 = commandLine.getOptionValue("output03")
    val output04 = commandLine.getOptionValue("output04")

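    // Kryo serialization with snappy-compressed RDDs and ORC filter pushdown;
    // Hive support is required to read dwh.dm_install_list_v2.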
    val spark = SparkSession.builder()
      .appName("UCOtherDataToDmp")
      .config("spark.rdd.compress", "true")
      .config("spark.io.compression.codec", "snappy")
      .config("spark.sql.orc.filterPushdown", "true")
      .config("spark.sql.warehouse.dir", "s3://mob-emr-test/spark-warehouse")
      .config("spark.serializer", "org.apache.spark.serializer.KryoSerializer")
      .enableHiveSupport()
      .getOrCreate()


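    // Clear any previous output first: saveAsTextFile fails with
    // FileAlreadyExistsException if the target path already exists.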
    val fs = FileSystem.get(new URI("s3://mob-emr-test"), spark.sparkContext.hadoopConfiguration)
    Seq(output01, output02, output03, output04).foreach(output => fs.delete(new Path(output), true))

    try {

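      // Each audience below is the intersection of two dwh.dm_install_list_v2
      // slices: devices attributed to a UC activation package in today's
      // partition, joined with devices seen under dsp_req for a UC browser
      // package one day ago. Output lines are tab-separated:
      // device_id, device_type, platform, package tag, update value.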
      def exportAudience(activationPkg: String, dspPkg: String, tag: String, output: String): Unit = {
        val sql =
          s"""
             |select distinct t1.device_id from
             |(select device_id from dwh.dm_install_list_v2
             |where dt='${dt_today}' and business='uc_activation' and device_type='imeimd5'
             |and package_name in ('${activationPkg}')) t1
             |inner join
             |(select device_id from dwh.dm_install_list_v2
             |where dt='${dt_oneday_ago}' and business='dsp_req' and device_type='imeimd5'
             |and package_name in ('${dspPkg}')) t2
             |on t1.device_id=t2.device_id
          """.stripMargin

        spark.sql(sql).rdd.map(_.mkString)
          .map(row => Seq(row, "imeimd5", "android", tag, update).mkString("\t"))
          .coalesce(50)
          .saveAsTextFile(output)
      }

      exportAudience("com.uc.foractivation.4b5a58", "com.UCMobile_bes", "com.uc.foractivation.4b5a58_ucbes", output01)
      exportAudience("com.uc.foractivation.d3f521", "com.UCMobile_bes", "com.uc.foractivation.d3f521_ucbes", output02)
      exportAudience("com.uc.foractivation.4b5a58", "com.ucmobile_oppo", "com.uc.foractivation.4b5a58_ucoppo", output03)
      exportAudience("com.uc.foractivation.d3f521", "com.ucmobile_oppo", "com.uc.foractivation.d3f521_ucoppo", output04)

    } finally {
      spark.stop()
    }
    0
  }
}

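// Example submission (illustrative values; option syntax assumes the
// commons-cli parser configured in CommonSparkJob):
//
//   spark-submit --class mobvista.dmp.datasource.taobao.UCOtherDataToDmp dmp.jar \
//     -dt_today 2021-01-02 -dt_oneday_ago 2021-01-01 -update 2021-01-02 \
//     -output01 s3://.../4b5a58_ucbes -output02 s3://.../d3f521_ucbes \
//     -output03 s3://.../4b5a58_ucoppo -output04 s3://.../d3f521_ucoppo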
object UCOtherDataToDmp {
  def main(args: Array[String]): Unit = {
    new UCOtherDataToDmp().run(args)
  }
}