AliDaily.scala
package mobvista.dmp.datasource.baichuan

import java.net.URI
import java.text.SimpleDateFormat

import mobvista.dmp.util.{DateUtil, PropertyUtil}
import mobvista.dmp.utils.clickhouse.ClickHouseConnectionFactory
import mobvista.dmp.utils.clickhouse.ClickHouseSparkExt._
import org.apache.commons.cli.{BasicParser, Options}
import org.apache.hadoop.fs.{FileSystem, Path}
import org.apache.spark.sql.{Row, SaveMode, SparkSession}
import org.apache.spark.storage.StorageLevel
import ru.yandex.clickhouse.ClickHouseDataSource

/**
  * @package: mobvista.dmp.datasource.baichuan
  * @author: wangjf
  * @date: 2019-08-28
  * @time: 17:50
  * @email: jinfeng.wang@mobvista.com
  * @phone: 152-1062-7698
  */
class AliDaily extends Serializable {
  def commandOptions(): Options = {
    val options = new Options()
    options.addOption("date", true, "date")
    options.addOption("partNum", true, "partNum")
    options.addOption("output", true, "output")
    options.addOption("host", true, "host")
    options.addOption("cluster", true, "cluster")
    options.addOption("database", true, "database")
    options.addOption("table", true, "table")
    options
  }

  protected def run(args: Array[String]): Unit = {
    val parser = new BasicParser()
    val options = commandOptions()
    val commandLine = parser.parse(options, args)
    val date = commandLine.getOptionValue("date")
    val partNum = commandLine.getOptionValue("partNum")
    val output = commandLine.getOptionValue("output")
    val cluster = commandLine.getOptionValue("cluster")
    val host = commandLine.getOptionValue("host")
    val database = commandLine.getOptionValue("database")
    val table = commandLine.getOptionValue("table")

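    // Build the Spark session; the ClickHouse driver, URL and connector tuning are read from config.properties.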
    val spark = SparkSession
      .builder()
      .appName("BaiChuanDaily")
      .config("spark.rdd.compress", "true")
      .config("spark.io.compression.codec", "lz4")
      .config("spark.sql.warehouse.dir", "s3://mob-emr-test/spark-warehouse")
      .config("spark.serializer", "org.apache.spark.serializer.KryoSerializer")
      .config("spark.clickhouse.driver", "ru.yandex.clickhouse.ClickHouseDriver")
      .config("spark.clickhouse.url", PropertyUtil.getProperty("config.properties", "spark.clickhouse.url"))
      .config("spark.clickhouse.connection.per.executor.max", "5")
      .config("spark.clickhouse.metrics.enable", "true")
      .config("spark.clickhouse.socket.timeout.ms", "300000")
      .config("spark.clickhouse.cluster.auto-discovery", "true")
      .enableHiveSupport()
      .getOrCreate()
    try {
      val sdf1 = new SimpleDateFormat("yyyy-MM-dd")
      val sdf2 = new SimpleDateFormat("yyyyMMdd")

      val sc = spark.sparkContext
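      // The ClickHouse cluster name is hard-coded here; the -cluster command-line option is parsed but not used.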
      val clusterName = "cluster_1st"
      import io.clickhouse.spark.connector._
      //  import spark.implicits._

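      // Convert the yyyyMMdd input date to yyyy-MM-dd and substitute it into the Baichuan query.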
      val dt = sdf1.format(sdf2.parse(date))
      val query = Constant.baichuan_sql.replace("@date", dt)

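      // Read the day's Baichuan devices from ClickHouse with custom partitioning; the RDD is cached because it is reused for old_df below.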
      val rdd = sc.clickhouseTable(query, clusterName)
        .withCustomPartitioning(Constant.buildPart(Integer.parseInt(partNum)))
        .persist(StorageLevel.MEMORY_AND_DISK_SER)
      val df = rdd.map(r => {
        ((r.getAs("device_id").toString, r.getAs("device_type").toString, r.getAs("platform").toString), r.getAs("package_name").toString)
      })

      import spark.implicits._

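      // Aggregate package names per (device_id, device_type, platform) key and deduplicate them into a comma-separated list.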
      val dff = df.combineByKey(
        (v: String) => Iterable(v),
        (c: Iterable[String], v: String) => c ++ Seq(v),
        (c1: Iterable[String], c2: Iterable[String]) => c1 ++ c2
      ).map(r => {
        BaiChuanEntity(r._1._1, r._1._2, r._1._3, r._2.toSet.mkString(","), "CN")
      }).toDF

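      // Previous day's install data, read via Spark SQL from the @dt partition referenced in install_sql.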
      val beforeDate = DateUtil.getDay(sdf2.parse(date), "yyyyMMdd", -1)
      val install_sql = Constant.install_sql.replace("@dt", beforeDate)
      val install_df = spark.sql(install_sql)
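      // Devices reported as new for the day (baichuan_new_sql), read from ClickHouse with the same custom partitioning.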
      val new_rdd = sc.clickhouseTable(Constant.baichuan_new_sql.replace("@date", dt), clusterName)
        .withCustomPartitioning(Constant.buildPart(Integer.parseInt(partNum)))
        .map(r => {
          Row(r.getAs("device_id").toString, r.getAs("device_type").toString)
        })

      val new_df = spark.createDataFrame(new_rdd, Constant.schema)

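      // Devices from the cached main query, projected to (device_id, device_type).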
      val old_df = spark.createDataFrame(rdd.map(r => {
        Row(r.getAs("device_id").toString, r.getAs("device_type").toString)
      }), Constant.schema)
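      // Daily new devices: those in the new query that appear neither in yesterday's install data nor in the main query.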
      val daily_new_df = new_df.except(install_df.unionAll(old_df)).rdd
        .persist(StorageLevel.MEMORY_AND_DISK_SER)

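      // Map each new device to a BaiChuanEntity, choosing platform and a placeholder package name by device type.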
      val s3_df = daily_new_df.map(r => {
        if ("idfa".equals(r.getAs("device_type").toString)) {
          BaiChuanEntity(r.getAs("device_id").toString, "idfa", "ios", "0000000000", "CN")
        } else if ("imei".equals(r.getAs("device_type").toString)) {
          BaiChuanEntity(r.getAs("device_id").toString, "imei", "android", "com.nonetaobao.nonetaobao", "CN")
        } else {
          BaiChuanEntity(r.getAs("device_id").toString, "imeimd5", "android", "com.nonetaobao.nonetaobao", "CN")
        }
      }).toDF

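      // Clear the output path, then write the merged daily and new-device records to S3 as zlib-compressed ORC.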
      FileSystem.get(new URI("s3://mob-emr-test"), sc.hadoopConfiguration).delete(new Path(output), true)
      s3_df.unionAll(dff).coalesce(10)
        .write
        .mode(SaveMode.Overwrite)
        .option("orc.compress", "zlib")
        .orc(output)

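      // Implicit data source used by the saveToClickHouse extension below.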
      implicit val clickhouseDataSource: ClickHouseDataSource = ClickHouseConnectionFactory.get(host)

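      // Build ClickHouse rows per device type; update_date (derived from the input date) becomes the dt partition value.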
      val update_date = DateUtil.getDay(sdf2.parse(date), "yyyy-MM-dd", 6)
      val ck_df = daily_new_df.map(r => {
        if ("idfa".equals(r.getAs("device_type").toString)) {
          BaiChuanCK(r.getAs("device_id"), 2, 2, 0, update_date)
          //  BaiChuanInstallEntity(r.getAs("device_id").toString, "idfa", "ios", "0")
        } else if ("imei".equals(r.getAs("device_type").toString)) {
          BaiChuanCK(r.getAs("device_id"), 2, 1, 0, update_date)
          //  BaiChuanInstallEntity(r.getAs("device_id").toString, "imei", "android", "0")
        } else {
          BaiChuanCK(r.getAs("device_id"), 2, 3, 0, update_date)
        }
      }).toDF

      //  ck_df.createClickHouseDb(database, Some(clusterName))
      //  daily_new_df.createClickHouseTable(database, table, Seq("dt"), Constant.indexColumn, Constant.orderColumn, Some(clusterName))
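      // Write the rows into the dt partition of the target ClickHouse table across the cluster.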
      ck_df.saveToClickHouse(database, table, Seq(update_date), Seq("dt"), Some(clusterName), batchSize = 1000000)
    } finally {
      if (spark != null) {
        spark.stop()
      }
    }
  }
}

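/**
  * Example spark-submit invocation (a sketch only: the jar name, output path and
  * option values below are illustrative assumptions, not taken from this repository):
  *
  *   spark-submit --class mobvista.dmp.datasource.baichuan.AliDaily \
  *     dmp.jar -date 20190828 -partNum 100 \
  *     -output s3://mob-emr-test/baichuan/daily/20190828 \
  *     -host <clickhouse-host> -cluster cluster_1st -database <database> -table <table>
  */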
object AliDaily {
  def main(args: Array[String]): Unit = {
    new AliDaily().run(args)
  }
}