package mobvista.dmp.datasource.retargeting

//  import com.datastax.spark.connector._

import com.alibaba.fastjson.{JSON, JSONObject}
import mobvista.dmp.common.CommonSparkJob
import mobvista.dmp.util.DateUtil
import mobvista.prd.datasource.util.GsonUtil
import org.apache.commons.cli.{BasicParser, Options}
import org.apache.commons.lang3.StringUtils
import org.apache.hadoop.fs.{FileSystem, Path}
import org.apache.spark.broadcast.Broadcast
import org.apache.spark.sql.{SaveMode, SparkSession}

import java.net.URI
import java.util
import scala.collection.JavaConverters._
import scala.collection.mutable

/**
 * @package: mobvista.dmp.datasource.retargeting
 * @author: wangjf
 * @date: 2019/5/23
 * @time: 4:41 PM
 * @email: jinfeng.wang@mobvista.com
 * @phone: 152-1062-7698
 */
class DeviceInfoJob extends CommonSparkJob with Serializable {
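  //  CLI options: -date (run date, yyyyMMdd), -output (ORC output path), -coalesce (output partition count)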
  def commandOptions(): Options = {
    val options = new Options()
    options.addOption("date", true, "date")
    options.addOption("output", true, "output")
    options.addOption("coalesce", true, "coalesce")
    options
  }

  import java.text.SimpleDateFormat

  //  driver-side date formatters (SimpleDateFormat is not thread-safe; only used in run())
  val sdf1 = new SimpleDateFormat("yyyy-MM-dd")
  val sdf2 = new SimpleDateFormat("yyyyMMdd")

  //  tag_code -> new_second_id, broadcast to executors
  var bMap: Broadcast[scala.collection.Map[String, String]] = _
  //  lower-cased package_name -> package id, broadcast to executors
  var packageMap: Broadcast[scala.collection.Map[String, Int]] = _

  override protected def run(args: Array[String]): Int = {
    val parser = new BasicParser()
    val options = commandOptions()
    val commandLine = parser.parse(options, args)
    val date = commandLine.getOptionValue("date")
    val output = commandLine.getOptionValue("output")
    val coalesce = commandLine.getOptionValue("coalesce")
    val spark = SparkSession
      .builder()
      .appName("DeviceInfoJob")
      .config("spark.rdd.compress", "true")
      .config("spark.shuffle.compress", "true")
      .config("spark.sql.orc.filterPushdown", "true")
      .config("spark.io.compression.codec", "lz4")
      .config("spark.io.compression.lz4.blockSize", "64k")
      //  367001600 bytes = 350 MB broadcast-join threshold
      .config("spark.sql.autoBroadcastJoinThreshold", "367001600")
      .config("spark.sql.warehouse.dir", "s3://mob-emr-test/spark-warehouse")
      .config("spark.serializer", "org.apache.spark.serializer.KryoSerializer")
      .enableHiveSupport()
      .getOrCreate()
    try {

      //  remove any previous output before writing
      FileSystem.get(new URI("s3://mob-emr-test"), spark.sparkContext.hadoopConfiguration).delete(new Path(output), true)

      //  last_req_day: @date minus 13 days (yyyyMMdd); update_date: the same day as yyyy-MM-dd
      val last_req_day = DateUtil.getDayByString(date, "yyyyMMdd", -13)
      val update_date = sdf1.format(sdf2.parse(last_req_day))

      val sc = spark.sparkContext
      val code_sql = Constant.old2new_sql
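      //  broadcast the tag_code -> new_second_id mapping (from Constant.old2new_sql)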
      bMap = sc.broadcast(spark.sql(code_sql).rdd.map(r => {
        (r.getAs("tag_code").toString, r.getAs("new_second_id").toString)
      }).collectAsMap())

      println("bMap.size ===>>> " + bMap.value.size)

      //  broadcast the new_second_id -> new_first_id mapping (from Constant.second2first_sql)
      val map = sc.broadcast(spark.sql(Constant.second2first_sql).rdd.map(r => {
        (r.getAs("new_second_id").toString, r.getAs("new_first_id").toString)
      }).collectAsMap())

      println("map.size ===>>> " + map.value.size)

      //  resolve the most recent dt partition of dwh.package_mapping
      var package_sql =
        """
          |SHOW PARTITIONS dwh.package_mapping
        """.stripMargin
      var partDF = spark.sql(package_sql)
      val package_dt = partDF.orderBy(partDF("partition").desc).first.getString(0).split("=")(1)
      package_sql =
        s"""
           |SELECT id, package_name FROM dwh.package_mapping WHERE dt = '${package_dt}'
        """.stripMargin
      /*
      val packageMap = spark.sql(package_sql).rdd.map(r => {
        (r.getAs("package_name").toString.toLowerCase, Integer.parseInt(r.getAs("id").toString))
      }).cache().collectAsMap()

      packageMap
      */
      //  broadcast lower-cased package_name -> id from the latest package_mapping partition
      packageMap = spark.sparkContext.broadcast(spark.sql(package_sql).rdd.map(r => {
        (r.getAs("package_name").toString.toLowerCase, Integer.parseInt(r.getAs("id").toString))
      }).collectAsMap())

      /*
      packageMap = sc.broadcast(Constant.jdbcConnection(spark, "mob_adn", "dmp_app_map").rdd.map(r => {
        (r.getAs("app_package_name").toString, Integer.parseInt(r.getAs("id").toString))
      }).collectAsMap())
      */

      //  register the UDFs referenced by the SQL templates in Constant; the registered
      //  name "getFrenquency" keeps its original spelling so existing SQL still resolves
      spark.udf.register("getId", getId _)
      spark.udf.register("getInstallList", getInstallList _)
      spark.udf.register("getInterestList", getInterestList _)
      spark.udf.register("getFrenquency", getFrenquency _)
      spark.udf.register("checkDevice", Constant.checkDevice _)
      val statistics_sql =
        """
          |SHOW PARTITIONS dwh.dmp_device_tag_statistics
        """.stripMargin
      partDF = spark.sql(statistics_sql)
      //  latest partition date of dwh.dmp_device_tag_statistics
      val freqDate = partDF.orderBy(partDF("partition").desc).first.getString(0).split("=")(1)
      val freqSql = Constant.statistics_sql.replace("@date", freqDate)
      spark.sql(freqSql).createOrReplaceTempView("dm_active")

      val active_sql =
        """
          |SHOW PARTITIONS dwh.dm_active_tag
        """.stripMargin
      partDF = spark.sql(active_sql)
      //  use the second-newest partition of dwh.dm_active_tag (assumes at least two exist);
      //  partition strings have the form "<date_key>=yyyyMMdd/<type_key>=<value>"
      val secondNewest = partDF.orderBy(partDF("partition").desc).take(2)(1).getString(0)
      var activeDate = secondNewest.split("/")(0).split("=")(1)
      val part = secondNewest.split("/")(1).split("=")(1)
      //  non-monthly partitions are shifted back one day
      if (!part.equals("month")) {
        activeDate = DateUtil.getDayByString(activeDate, "yyyyMMdd", -1)
      }

      //  .replace("@last_req_day", last_req_day)
      val sql = Constant.ods_dmp_user_info_all_sql_distinct
        .replaceAll("@date", date)
        .replaceAll("@activeDate", activeDate)
        .replaceAll("@update_date", update_date)

      import spark.implicits._
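      //  one DeviceInfoEntity per row: interest ids are accumulated from the
      //  frequency JSON keys, the interest array, and the weekly/monthly tag stats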
      spark.sql(sql)
        .rdd.map(r => {
        import scala.collection.JavaConversions._

        val interest: mutable.HashSet[String] = new mutable.HashSet[String]()
        val freObject = if (r.getAs("frequency") != null && StringUtils.isNotBlank(r.getAs("frequency").toString)) {
          JSON.parseObject(r.getAs("frequency").toString)
        } else {
          new JSONObject()
        }
        freObject.keySet().foreach(key => {
          interest.add(map.value(key))
          interest.add(key)
        })
        /*
        val frequencyEntity = FrequencyEntity(r.getAs("frequency"))

        if (frequencyEntity.frequency != null && frequencyEntity.frequency.nonEmpty) {
          for (i <- frequencyEntity.frequency.indices) {
            val tag = frequencyEntity.frequency.get(i).asInstanceOf[GenericRowWithSchema].getAs("tag").toString
            val cnt = Integer.parseInt(frequencyEntity.frequency.get(i).asInstanceOf[GenericRowWithSchema].getAs("cnt").toString)
            jsonObject.put(tag, cnt)
            interest.add(map.value(tag))
            interest.add(tag)
          }
        }
        */
        val interestArr = r.getAs("interest").asInstanceOf[mutable.WrappedArray[String]]
        interestArr.foreach(i => {
          interest.add(map.value(i))
          interest.add(i)
        })

        //  tag_week: array of {tag_id, cnt, count} objects, re-keyed by tag_id
        val tag_week_jsonObject = new JSONObject()
        if (r.getAs("tag_week") != null && StringUtils.isNotBlank(r.getAs("tag_week"))) {
          val jsonArray = GsonUtil.String2JsonArray(r.getAs("tag_week"))
          for (json <- jsonArray) {
            val j = json.getAsJsonObject
            val tag_id = j.get("tag_id").getAsString
            val cntJson = new JSONObject()
            val cnt = j.get("cnt").getAsInt
            cntJson.put("cnt", cnt)
            val count = j.get("count").getAsInt
            cntJson.put("count", count)
            tag_week_jsonObject.put(tag_id, cntJson)
            interest.add(map.value(tag_id))
            interest.add(tag_id)
          }
        }
        //  tag_month: same shape and parsing as tag_week, on the monthly stats
        val tag_month_jsonObject = new JSONObject()
        if (r.getAs("tag_month") != null && StringUtils.isNotBlank(r.getAs("tag_month"))) {
          val jsonArray = GsonUtil.String2JsonArray(r.getAs("tag_month"))
          for (json <- jsonArray) {
            val j = json.getAsJsonObject
            val tag_id = j.get("tag_id").getAsString
            val cntJson = new JSONObject()
            val cnt = j.get("cnt").getAsInt
            cntJson.put("cnt", cnt)
            val count = j.get("count").getAsInt
            cntJson.put("count", count)
            tag_month_jsonObject.put(tag_id, cntJson)
            interest.add(map.value(tag_id))
            interest.add(tag_id)
          }
        }

        DeviceInfoEntity(r.getAs("device_id").toString.toUpperCase, r.getAs("platform"), r.getAs("model"), r.getAs("os_version"),
          r.getAs("country"), r.getAs("age"), r.getAs("gender"), r.getAs("install"), mutable.WrappedArray.make(interest.toArray[String]),
          r.getAs("behavior"), freObject.toJSONString, tag_week_jsonObject.toJSONString, tag_month_jsonObject.toJSONString, r.getAs("region"),
          r.getAs("update_date"), r.getAs("publish_date"))
      }).toDF
        .repartition(coalesce.toInt)
        .write
        .mode(SaveMode.Overwrite)
        .option("orc.compress", "snappy")
        .orc(output)

    } finally {
      if (spark != null) {
        spark.stop()
      }
    }
    0
  }

  //  generate Set[inter_id]
  def getInterestList(interest: String): Array[String] = {
    val set = new util.HashSet[String]()
    interest.split("#").foreach(inters => {
      val ins = inters.toUpperCase.split(",")
      if (ins.length >= 3) {
        val key = ins(0) + "-" + ins(1) + "-" + ins(2)
        val vals = if (bMap.value.keySet.contains(key)) {
          bMap.value(key)
        } else {
          bMap.value.getOrElse(key + "OTHER", "")
        }
        if (StringUtils.isNotBlank(vals)) {
          set.add(vals)
        }
      }
    })
    set.asScala.toArray
  }
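
  //  illustrative call (hypothetical tag codes): getInterestList("a,b,c#x,y,z") builds the
  //  keys "A-B-C" and "X-Y-Z" and returns their bMap ids, falling back to "<key>OTHER"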

  def getId(tag_code: String): String = {
    val id = if (bMap.value.keySet.contains(tag_code.toUpperCase)) {
      bMap.value(tag_code.toUpperCase)
    } else {
      bMap.value.getOrElse(tag_code.toUpperCase + "OTHER", "")
    }
    id
  }

  //  generate Set[package id] (looked up from package_name)
  def getInstallList(install: String): Array[Int] = {
    val set = new util.HashSet[Int]()
    if (StringUtils.isNotBlank(install)) {
      install.split(",").foreach(pkgs => {
        val pkd = pkgs.split("\\|")
        if (pkd.nonEmpty && StringUtils.isNotBlank(pkd(0)) && packageMap.value.contains(pkd(0).toLowerCase)) {
          set.add(packageMap.value(pkd(0).toLowerCase))
        }
        }
      })
    }
    set.asScala.toArray
  }
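
  //  illustrative call (hypothetical package names): getInstallList("com.foo|x,com.bar|y")
  //  returns the packageMap ids of "com.foo" and "com.bar"; unmapped packages are skipped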

  def getFrenquency(frenquency: String): Array[(String, Int)] = {
    val set = new util.HashSet[(String, Int)]()
    frenquency.split(",").foreach(fn => {
      val fns = fn.split(":")
      //  expect "tag:count" pairs; skip malformed entries instead of throwing
      if (fns.length >= 2 && StringUtils.isNotBlank(fns(0))) {
        set.add((fns(0), fns(1).toInt))
      }
    })
    set.asScala.toArray
  }
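
  //  illustrative call: getFrenquency("101:3,102:5") -> Array(("101", 3), ("102", 5)),
  //  in no guaranteed order (backed by a HashSet)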
}

object DeviceInfoJob {
  def main(args: Array[String]): Unit = {
    new DeviceInfoJob().run(args)
  }
}
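
//  Hedged usage sketch; the jar name and output path below are assumptions, not from this repo:
//    spark-submit --class mobvista.dmp.datasource.retargeting.DeviceInfoJob \
//      dmp.jar -date 20190523 -output s3://mob-emr-test/dmp/device_info -coalesce 200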