package mobvista.dmp.datasource.iqiyi

import java.net.URI

import mobvista.dmp.common.CommonSparkJob
import mobvista.dmp.util.PropertyUtil
import org.apache.commons.cli.{BasicParser, Options}
import org.apache.hadoop.fs.{FileSystem, Path}
import org.apache.spark.sql.{SaveMode, SparkSession}

/**
 * @package: mobvista.dmp.datasource.iqiyi
 * @author: wangjf
 * @date: 2020/4/29
 * @time: 11:52 AM
 * @email: jinfeng.wang@mobvista.com
 * @phone: 152-1062-7698
 */
class IQiYiDaily extends CommonSparkJob with Serializable {

  def commandOptions(): Options = {
    val options = new Options()
    options.addOption("date", true, "date")
    options.addOption("partNum", true, "partNum")
    options.addOption("output", true, "output")
    options.addOption("cluster", true, "cluster")
    options
  }

  override protected def run(args: Array[String]): Int = {
    val parser = new BasicParser()
    val options = commandOptions()
    val commandLine = parser.parse(options, args)

    val date = commandLine.getOptionValue("date")
    val partNum = commandLine.getOptionValue("partNum")
    val output = commandLine.getOptionValue("output")
    val cluster = commandLine.getOptionValue("cluster")

    val spark = SparkSession
      .builder()
      .appName("IQiYiDaily")
      .config("spark.rdd.compress", "true")
      .config("spark.io.compression.codec", "lz4")
      .config("spark.sql.warehouse.dir", "s3://mob-emr-test/spark-warehouse")
      .config("spark.serializer", "org.apache.spark.serializer.KryoSerializer")
      .config("spark.clickhouse.driver", "ru.yandex.clickhouse.ClickHouseDriver")
      .config("spark.clickhouse.url", PropertyUtil.getProperty("config.properties", "spark.clickhouse.url"))
      .config("spark.clickhouse.connection.per.executor.max", "5")
      .config("spark.clickhouse.metrics.enable", "true")
      .config("spark.clickhouse.socket.timeout.ms", "300000")
      .config("spark.clickhouse.cluster.auto-discovery", "true")
      .enableHiveSupport()
      .getOrCreate()

    try {
      val sc = spark.sparkContext
      import io.clickhouse.spark.connector._

      //  Convert the input date into the partition format expected by the ClickHouse query
      val dt = mobvista.dmp.common.MobvistaConstant.sdf1.format(mobvista.dmp.common.MobvistaConstant.sdf2.parse(date))
      val query = Constant.iqiyi_sql.replace("@date", dt)

      import spark.implicits._

      //  Read from the ClickHouse cluster with custom partitioning and map each row to an IQiYiEntity
      val df = sc.clickhouseTable(query, cluster)
        .withCustomPartitioning(Constant.buildPart(Integer.parseInt(partNum)))
        .map(r => {
          Constant.IQiYiEntity(r.getAs("device_id").toString, r.getAs("device_type").toString,
            r.getAs("platform").toString, r.getAs("package_name").toString, "CN")
        }).toDF

      //  Remove any previous output before writing the new partition
      FileSystem.get(new URI(s"s3://mob-emr-test"), sc.hadoopConfiguration).delete(new Path(output), true)

      df.distinct().coalesce(10)
        .write
        .mode(SaveMode.Overwrite)
        .option("orc.compress", "zlib")
        .orc(output)
    } finally {
      if (spark != null) {
        spark.stop()
      }
    }
    0
  }
}

object IQiYiDaily {
  def main(args: Array[String]): Unit = {
    new IQiYiDaily().run(args)
  }
}
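
// Invocation sketch (assumption, not taken from this repository): the jar name and the
// option values below are hypothetical; only the class name and the four CLI options
// ("date", "partNum", "output", "cluster") come from commandOptions() above.
//
//   spark-submit --class mobvista.dmp.datasource.iqiyi.IQiYiDaily \
//     dmp.jar \
//     -date 2020-04-29 \
//     -partNum 10 \
//     -output s3://mob-emr-test/dmp/iqiyi_daily \
//     -cluster cluster_name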