package mobvista.dmp.datasource.retargeting

import com.datastax.spark.connector._
import com.datastax.spark.connector.writer.{RowWriterFactory, SqlRowWriter}
import mobvista.dmp.util.PropertyUtil
import mobvista.prd.datasource.util.GsonUtil
import org.apache.commons.cli.{BasicParser, Options}
import org.apache.commons.lang3.StringUtils
import org.apache.spark.sql.{Row, SparkSession}

import scala.collection.JavaConverters._
import scala.collection.mutable

class UserFeatureCassandra extends Serializable {

  def commandOptions(): Options = {
    val options = new Options()
    options.addOption("input", true, "input")
    options.addOption("region", true, "region")
    options
  }

  private def run(args: Array[String]): Unit = {
    val parser = new BasicParser()
    val options = commandOptions()
    val commandLine = parser.parse(options, args)
    val input = commandLine.getOptionValue("input")
    val region = commandLine.getOptionValue("region")

    // Region host lists, now resolved from ip.properties instead:
    // val hk_host = "159.138.142.33,159.138.54.116,159.138.45.146,159.138.133.96,159.138.40.115,159.138.134.226,159.138.52.12"
    // val vg_host = "172.20.104.221,172.20.104.217,172.20.104.216,172.20.104.219,172.20.104.220,172.20.104.218"
    // val cn_host = "172.17.117.236,172.17.117.237,172.17.117.238"
    // val hosts = Array(hk_host, vg_host, cn_host)

    val spark = SparkSession
      .builder()
      .appName(s"UserFeatureCassandra.${region.toUpperCase}")
      .config("spark.rdd.compress", "true")
      .config("spark.io.compression.codec", "lz4")
      .config("spark.sql.orc.filterPushdown", "true")
      .config("spark.serializer", "org.apache.spark.serializer.KryoSerializer")
      .config("spark.sql.warehouse.dir", "s3://mob-emr-test/spark-warehouse")
      // Region-specific contact points, looked up via the "<region>_host" key
      .config("spark.cassandra.connection.host", PropertyUtil.getProperty("ip.properties", region + "_host"))
      .config("spark.cassandra.connection.port", "9042")
      .config("spark.cassandra.connection.factory", s"mobvista.dmp.utils.cassandra.${region.toUpperCase}Factory")
      .config("spark.cassandra.connection.connections_per_executor_max", "16")
      .config("spark.cassandra.output.concurrent.writes", "2048")
      .config("spark.cassandra.concurrent.reads", "2048")
      .config("spark.cassandra.output.batch.grouping.buffer.size", "2048")
      .config("spark.cassandra.connection.keep_alive_ms", "600000")
      .enableHiveSupport()
      .getOrCreate()

    try {
      // tag_code -> tag_id mapping, broadcast so every executor can remap tags
      val bMap = spark.sparkContext.broadcast(spark.sql(Constant.id_old2new_sql).rdd.map(r => {
        (r.getAs("tag_code").toString, r.getAs("tag_id").toString)
      }).collectAsMap())

      val rdd = spark.read.orc(input)

      val keyspace = "dmp_realtime_service"
      val table = "dmp_user_features"
      // ".overwrite" is the connector's collection behavior: replace the
      // existing "frequency" collection instead of appending to it
      val columns = SomeColumns("device_id", "age", "gender", "install_apps", "interest", "frequency".overwrite)

      // Spark SQL Rows carry no ColumnMapper, so saveToCassandra needs the
      // connector's SqlRowWriter as an explicit RowWriterFactory
      implicit val rowWriter: RowWriterFactory[Row] = SqlRowWriter.Factory

      rdd.rdd.map(r => {
        val device_id = r.getAs("device_id").toString
        val age = r.getAs("age").asInstanceOf[Int]
        val gender = r.getAs("gender").asInstanceOf[Int]
        val install_apps = r.getAs("install_apps").toString

        // Translate interest tag codes into numeric tag ids
        val interest = r.getAs("interest").asInstanceOf[mutable.WrappedArray[String]]
        val interest_set = new mutable.HashSet[Int]()
        interest.iterator.foreach(tag => {
          if (bMap.value.keySet.contains(tag) && StringUtils.isNotBlank(bMap.value(tag))) {
            interest_set.add(bMap.value(tag).toInt)
          }
        })

        // frequency is a JSON object of tag_code -> count; remap its keys to
        // tag ids and fold those ids into the interest set as well
        val frequencySet = new mutable.HashSet[(String, Int)]()
        val frequency = r.getAs("frequency").toString
        val json = GsonUtil.String2JsonObject(frequency)
        json.entrySet().asScala.foreach(j => {
          if (StringUtils.isNotBlank(j.getKey) && bMap.value.keySet.contains(j.getKey)) {
            frequencySet.add((bMap.value(j.getKey), j.getValue.getAsInt))
            interest_set.add(bMap.value(j.getKey).toInt)
          }
        })

        Row(device_id, age, gender, install_apps, interest_set.mkString(","), mutable.WrappedArray.make(frequencySet.toArray))
      }).saveToCassandra(keyspace, table, columns)
    } finally {
      if (spark != null) {
        spark.stop()
      }
    }
  }
}
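/**
 * Driver entry point. A typical submission looks roughly like the sketch
 * below; the jar name and input path are hypothetical and depend on the
 * deployment, while the region must match a "&lt;region&gt;_host" key in
 * ip.properties (hk / vg / cn, per the host lists above):
 *
 * {{{
 * spark-submit \
 *   --class mobvista.dmp.datasource.retargeting.UserFeatureCassandra \
 *   dmp.jar \
 *   -input s3://path/to/user_features_orc \
 *   -region hk
 * }}}
 */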
object UserFeatureCassandra {
  def main(args: Array[String]): Unit = {
    new UserFeatureCassandra().run(args)
    /*
    val threadPool: ExecutorService = Executors.newFixedThreadPool(1)
    try {
      // submit 5 threads
      for (i <- 0 to 2) {
        threadPool.submit(new UserFeatureCassandra(args, i))
      }
    } finally {
      threadPool.shutdown()
    }
    */
  }
}
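// The target table is not defined in this module. Judging from the columns
// written above, its CQL layout is assumed to be roughly the following
// (a hypothetical sketch, not the authoritative schema):
//
//   CREATE TABLE dmp_realtime_service.dmp_user_features (
//     device_id    text PRIMARY KEY,
//     age          int,
//     gender       int,
//     install_apps text,
//     interest     text,                            -- comma-joined tag ids
//     frequency    list<frozen<tuple<text, int>>>   -- overwritten on each write
//   );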