package mobvista.dmp.datasource.taobao

import java.net.URI

import mobvista.dmp.common.{CommonSparkJob, MobvistaConstant}
import mobvista.dmp.util.PropertyUtil
import org.apache.commons.cli.{BasicParser, Options}
import org.apache.hadoop.fs.{FileSystem, Path}
import org.apache.hadoop.io.compress.GzipCodec
import org.apache.spark.sql.SparkSession

class YOUKULaxinDF extends CommonSparkJob with Serializable {

  def commandOptions(): Options = {
    val options = new Options()
    options.addOption("date", true, "date")
    options.addOption("dt_three_days_ago", true, "dt_three_days_ago")
    options.addOption("partNum", true, "partNum")
    options.addOption("imeiOutput", true, "imeiOutput")
    options.addOption("oaidOutput", true, "oaidOutput")
    options.addOption("cluster", true, "cluster")
    options
  }

  override protected def run(args: Array[String]): Int = {
    val parser = new BasicParser()
    val options = commandOptions()
    val commandLine = parser.parse(options, args)

    val date = commandLine.getOptionValue("date")
    val dt_three_days_ago = commandLine.getOptionValue("dt_three_days_ago")
    val partNum = commandLine.getOptionValue("partNum")
    val imeiOutput = commandLine.getOptionValue("imeiOutput")
    val oaidOutput = commandLine.getOptionValue("oaidOutput")
    val cluster = commandLine.getOptionValue("cluster")

    val spark = SparkSession
      .builder()
      .appName(s"YOUKULaxinDF.$date")
      .config("spark.rdd.compress", "true")
      .config("spark.io.compression.codec", "lz4")
      .config("spark.sql.warehouse.dir", "s3://mob-emr-test/spark-warehouse")
      .config("spark.serializer", "org.apache.spark.serializer.KryoSerializer")
      .config("spark.clickhouse.driver", "ru.yandex.clickhouse.ClickHouseDriver")
      .config("spark.clickhouse.url", PropertyUtil.getProperty("config.properties", "spark.clickhouse.url"))
      .config("spark.clickhouse.connection.per.executor.max", "5")
      .config("spark.clickhouse.metrics.enable", "true")
      .config("spark.clickhouse.socket.timeout.ms", "300000")
      .config("spark.clickhouse.cluster.auto-discovery", "true")
      .enableHiveSupport()
      .getOrCreate()

    try {
      val sc = spark.sparkContext
      import io.clickhouse.spark.connector._

      // Normalize the input date into the format expected by the query template.
      val dt = MobvistaConstant.sdf1.format(MobvistaConstant.sdf2.parse(date))

      // Reads device ids of the given type from ClickHouse and writes them,
      // deduplicated and gzip-compressed, to the given S3 path (overwriting
      // any existing output). Shared by the IMEI and OAID exports below.
      def exportDevices(deviceType: String, output: String): Unit = {
        val query = Constant.youku_laxin_sql
          .replace("@end_date", dt)
          .replace("@begin_date", dt_three_days_ago)
          .replace("@device_type", deviceType)
        val rdd = sc.clickhouseTable(query, cluster)
          .withCustomPartitioning(Constant.buildPartUCLaHuo(partNum.toInt))
          .map(r => r.getAs[String]("device_ids"))
        FileSystem.get(new URI("s3://mob-emr-test"), sc.hadoopConfiguration).delete(new Path(output), true)
        rdd.distinct()
          .coalesce(10)
          .saveAsTextFile(output, classOf[GzipCodec])
      }

      // Persist IMEI MD5 devices to S3.
      exportDevices("IMEI_MD5", imeiOutput)
      // Persist OAID MD5 devices to S3.
      exportDevices("OAID_MD5", oaidOutput)
    } finally {
      if (spark != null) {
        spark.stop()
      }
    }
    0
  }
}

object YOUKULaxinDF {
  def main(args: Array[String]): Unit = {
    new YOUKULaxinDF().run(args)
  }
}