package mobvista.dmp.datasource.taobao

import java.net.URI

import mobvista.dmp.common.{CommonSparkJob, MobvistaConstant}
import mobvista.dmp.util.PropertyUtil
import org.apache.commons.cli.{BasicParser, Options}
import org.apache.hadoop.fs.{FileSystem, Path}
import org.apache.hadoop.io.compress.GzipCodec
import org.apache.spark.sql.SparkSession
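
/**
  * Extracts Alipay lahuo (拉活, re-activation) device IDs from ClickHouse for a
  * given date/hour window and writes the distinct IMEI_MD5 values to S3 as
  * gzipped text.
  *
  * Illustrative invocation (jar name, paths, and option values are assumptions,
  * not taken from the source):
  *
  *   spark-submit --class mobvista.dmp.datasource.taobao.AlipayLaHuoDF dmp.jar \
  *     -date 20240101 -dt_end_days 2024-01-08 -hour 12 -partNum 10 \
  *     -cluster ck_cluster -imeiOutput s3://mob-emr-test/dmp/alipay_lahuo/imei
  */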
class AlipayLaHuoDF extends CommonSparkJob with Serializable {

  def commandOptions(): Options = {
    val options = new Options()
    options.addOption("date", true, "begin date of the query window")
    options.addOption("partNum", true, "number of ClickHouse read partitions")
    options.addOption("imeiOutput", true, "output path for the IMEI_MD5 list")
    options.addOption("cluster", true, "ClickHouse cluster name")
    options.addOption("dt_end_days", true, "end date of the query window")
    options.addOption("hour", true, "hour of day to query")
    options
  }
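
  /**
    * Parses CLI options, reads device IDs from ClickHouse with custom
    * partitioning, and writes the distinct set to S3. Returns 0 on success.
    */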
  override protected def run(args: Array[String]): Int = {
    val parser = new BasicParser()
    val options = commandOptions()
    val commandLine = parser.parse(options, args)

    val date = commandLine.getOptionValue("date")
    val partNum = commandLine.getOptionValue("partNum")
    val imeiOutput = commandLine.getOptionValue("imeiOutput")
    val cluster = commandLine.getOptionValue("cluster")
    val dt_end_days = commandLine.getOptionValue("dt_end_days")
    val hour = commandLine.getOptionValue("hour")
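
    // Single SparkSession with Kryo serialization and the ClickHouse connector
    // settings: driver class, URL from config.properties, per-executor
    // connection cap, socket timeout, and cluster auto-discovery.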
    val spark = SparkSession
      .builder()
      .appName(s"AlipayLaHuoDF.$date")
      .config("spark.rdd.compress", "true")
      .config("spark.io.compression.codec", "lz4")
      .config("spark.sql.warehouse.dir", "s3://mob-emr-test/spark-warehouse")
      .config("spark.serializer", "org.apache.spark.serializer.KryoSerializer")
      .config("spark.clickhouse.driver", "ru.yandex.clickhouse.ClickHouseDriver")
      .config("spark.clickhouse.url", PropertyUtil.getProperty("config.properties", "spark.clickhouse.url"))
      .config("spark.clickhouse.connection.per.executor.max", "5")
      .config("spark.clickhouse.metrics.enable", "true")
      .config("spark.clickhouse.socket.timeout.ms", "300000")
      .config("spark.clickhouse.cluster.auto-discovery", "true")
      .enableHiveSupport()
      .getOrCreate()

    try {
      val sc = spark.sparkContext
      // Brings sc.clickhouseTable and withCustomPartitioning into scope.
      import io.clickhouse.spark.connector._
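
      // Normalize the begin date with the shared date formatters, then fill the
      // @begin_date/@end_date/@device_type/@hour placeholders in the lahuo SQL
      // template.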
      val dt = MobvistaConstant.sdf1.format(MobvistaConstant.sdf2.parse(date))
      val imeiQuery = Constant.alipay_lahuo_sql
        .replace("@end_date", dt_end_days)
        .replace("@begin_date", dt)
        .replace("@device_type", "IMEI_MD5")
        .replace("@hour", hour)
      val imeiRDD = sc.clickhouseTable(imeiQuery, cluster)
        .withCustomPartitioning(Constant.buildPartUCLaHuo(partNum.toInt))
        .map(r => r.getAs[String]("device_ids"))
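
      // Delete any previous output so re-runs overwrite cleanly.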
      FileSystem.get(new URI("s3://mob-emr-test"), sc.hadoopConfiguration)
        .delete(new Path(imeiOutput), true)
      imeiRDD.distinct()
        .coalesce(60)
        .saveAsTextFile(imeiOutput, classOf[GzipCodec])
    } finally {
      if (spark != null) {
        spark.stop()
      }
    }
    0
  }
}
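
/** CLI entry point; the companion object can invoke the class's protected run. */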
object AlipayLaHuoDF {
  def main(args: Array[String]): Unit = {
    new AlipayLaHuoDF().run(args)
  }
}