package mobvista.dmp.datasource.ga

import java.net.URI
import java.util.regex.Pattern

import mobvista.dmp.common.CommonSparkJob
import org.apache.commons.cli.Options
import org.apache.hadoop.fs.{FileSystem, Path}
import org.apache.spark.sql.{SaveMode, SparkSession}

/**
  * @package: mobvista.dmp.datasource.ga
  * @author: wangjf
  * @date: 2018/11/4
  * @time: 9:50 AM
  * @email: jinfeng.wang@mobvista.com
  * @phone: 152-1062-7698
  */
class GaParser extends CommonSparkJob with Serializable {

  override protected def run(args: Array[String]): Int = {
    val commandLine = commParser.parse(options, args)
    if (!checkMustOption(commandLine)) {
      printUsage(options)
      return -1
    } else {
      printOptions(commandLine)
    }

    val input = commandLine.getOptionValue("input")
    val date_str_midline = commandLine.getOptionValue("date_str_midline")
    val coalesce = commandLine.getOptionValue("coalesce")
    val output = commandLine.getOptionValue("output")

    val spark = SparkSession.builder()
      .appName("GaParser")
      .config("spark.rdd.compress", "true")
      .config("spark.io.compression.codec", "snappy")
      .config("spark.sql.orc.filterPushdown", "true")
      .config("spark.sql.warehouse.dir", "s3://mob-emr-test/spark-warehouse")
      .config("spark.serializer", "org.apache.spark.serializer.KryoSerializer")
      .enableHiveSupport()
      .getOrCreate()

    val sc = spark.sparkContext
    val fs = FileSystem.get(new URI("s3://mob-emr-test"), sc.hadoopConfiguration)

    // Remove any previous output so the ORC overwrite starts from a clean directory.
    fs.delete(new Path(output), true)

    try {
      // Optional filter list of game ids; if the CSV is absent, no filtering is applied.
      val ga_filter = "s3://mob-emr-test/ga-filter-lists/games.csv"
      val filter_rdd = if (fs.exists(new Path(ga_filter))) {
        sc.textFile(ga_filter)
          .map(_.split(","))
          .filter(r => Pattern.compile("^\\d+$").matcher(r(0)).matches())
          .map(r => r(0).toInt)
      } else {
        sc.emptyRDD[Int]
      }
      val filter_gameId = filter_rdd.collect().toSet[Int]
      println("filter_gameId -->> " + filter_gameId)

      // Broadcast the filter set once instead of shipping it inside every task closure.
      val filterBroadcast = sc.broadcast(filter_gameId)

      val rdd = sc.textFile(path = input).coalesce(coalesce.toInt)
      val parsed = rdd.mapPartitions(Constant.parserGa(_, date_str_midline, filterBroadcast.value))

      import spark.implicits._
      parsed.filter(_ != null).toDF
        .write.mode(SaveMode.Overwrite)
        .option("orc.compress", "zlib")
        .orc(output)
    } finally {
      sc.stop()
      spark.stop()
    }
    0
  }

  override protected def buildOptions(): Options = {
    val options = new Options
    options.addOption("input", true, "[must] input")
    options.addOption("date_str_midline", true, "[must] date_str_midline")
    options.addOption("coalesce", true, "[must] coalesce")
    options.addOption("output", true, "[must] output")
    options
  }
}

object GaParser {
  def main(args: Array[String]): Unit = {
    new GaParser().run(args)
  }
}
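// Illustrative invocation (a sketch only: the jar name, cluster settings, and S3 paths
// below are assumptions, not taken from this repository's build or deployment scripts;
// only the option names match buildOptions() above):
//
//   spark-submit \
//     --class mobvista.dmp.datasource.ga.GaParser \
//     --master yarn --deploy-mode cluster \
//     dmp-ga.jar \
//     -input s3://mob-emr-test/ga/raw/2018-11-04 \
//     -date_str_midline 2018-11-04 \
//     -coalesce 200 \
//     -output s3://mob-emr-test/ga/parsed/2018-11-04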