package mobvista.dmp.datasource.device

import mobvista.dmp.common.{CommonSparkJob, MobvistaConstant}
import mobvista.dmp.util.DateUtil
import org.apache.commons.cli.{BasicParser, Options}
import org.apache.hadoop.fs.{FileSystem, Path}

import java.net.URI

class DeviceIDMappingLower extends CommonSparkJob with Serializable {

  override protected def run(args: Array[String]): Int = {
    val parser = new BasicParser()
    val options = commandOptions()
    val commandLine = parser.parse(options, args)
    val date = commandLine.getOptionValue("date")
    val output = commandLine.getOptionValue("output")
    val coalesce = commandLine.getOptionValue("coalesce")

    val spark = MobvistaConstant.createSparkSession(s"DeviceIDMappingLower.${date}")

    try {
      val sc = spark.sparkContext
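      // Derive the update_date from the yyyyMMdd partition date (sdf1/sdf2 in MobvistaConstant are
      // presumably yyyy-MM-dd and yyyyMMdd formatters), plus the previous day in both formats,
      // which is used below to read yesterday's 'other' partition.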
      val updateDate = MobvistaConstant.sdf1.format(MobvistaConstant.sdf2.parse(date))
      val lastDate = DateUtil.getDayByString(date, "yyyyMMdd", -1)
      val lastUpdateDate = DateUtil.getDayByString(updateDate, "yyyy-MM-dd", -1)

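      // Collect Android OAID device ids from today's adn/reyun/dsp install partitions and
      // yesterday's 'other' partition, de-duplicate via GROUP BY, and pair each id with its
      // lowercase form for the id-mapping output.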
      val sql =
        s"""
           |SELECT device_id, LOWER(device_id) lower_device_id
           |  FROM (
           |    SELECT device_id
           |      FROM dwh.dmp_install_list WHERE dt = '${date}' AND update_date = '${updateDate}'
           |      AND business IN ('adn_request_sdk','reyun','dsp_req','adn_install') AND platform = 'android' AND device_type = 'oaid'
           |    UNION ALL
           |    SELECT device_id
           |      FROM dwh.dmp_install_list WHERE dt = '${lastDate}' AND update_date = '${lastUpdateDate}'
           |      AND business = 'other' AND platform = 'android' AND device_type = 'oaid'
           |    ) t
           |  GROUP BY device_id
        """.stripMargin

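      // Obtain the S3 filesystem for the mob-emr-test bucket and remove any existing output
      // directory before writing, so the ORC write below starts from a clean path.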
      FileSystem.get(new URI(s"s3://mob-emr-test"), sc.hadoopConfiguration).delete(new Path(output), true)
      spark.sql(sql)
        .repartition(coalesce.toInt)
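        // The coalesce argument controls the partition count and hence the number of
        // zlib-compressed ORC files written.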
        .write
        .option("orc.compress", "zlib")
        .orc(output)
    } finally {
      if (spark != null) {
        spark.stop()
      }
    }
    0
  }

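  // Expected CLI options (each takes a value): -date <yyyyMMdd>, -output <path>, -coalesce <numPartitions>.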
  def commandOptions(): Options = {
    val options = new Options()
    options.addOption("date", true, "date")
    options.addOption("output", true, "output")
    options.addOption("coalesce", true, "coalesce")
    options
  }

}

object DeviceIDMappingLower {
  def main(args: Array[String]): Unit = {
    new DeviceIDMappingLower().run(args)
  }
}