Commit 3397f569 by WangJinfeng

update mysql url

parent c5e8abd9
@@ -151,12 +151,12 @@ object AdnSdkDaily extends Serializable {
 val Requestid_Campaignid_df = spark.sql(sql01).select($"requestid".cast("string"), $"campaignid".cast("string"))
 Requestid_Campaignid_df.createOrReplaceTempView("Requestid_Campaignid")
-// mysql -h adn-data-foronlinetest.c5yzcdreb1xr.us-east-1.rds.amazonaws.com -P 3306 -u adnro -pYcM123glh
+// mysql -h adn-mysql-external.mobvista.com -P 3306 -u adnro -pYcM123glh
 // Originally this job read the MySQL table campaign_list with Spark, but the last task kept running without finishing, most likely data skew. The Hive table dwh.ods_adn_campaign_list tracks the MySQL table's data, so the job uses the Hive table instead.
 // val properties = new Properties()
 // properties.put("user", "adnro")
 // properties.put("password", "YcM123glh")
-// val url = "jdbc:mysql://adn-data-foronlinetest.c5yzcdreb1xr.us-east-1.rds.amazonaws.com:3306/mob_adn"
+// val url = "jdbc:mysql://adn-mysql-external.mobvista.com:3306/mob_adn"
 // val campaign_list_df = spark.read.jdbc(url, "campaign_list", "id", 1, 200000000, 500, properties)
 //   .select("id", "trace_app_id")
 //   .toDF("campaignid", "packagename")
...
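For reference, a minimal sketch of the two read paths the comment above describes, assuming a SparkSession with Hive support and that dwh.ods_adn_campaign_list exposes the same id and trace_app_id columns (both assumptions, not confirmed by the diff):

import java.util.Properties
import org.apache.spark.sql.SparkSession

val spark = SparkSession.builder().appName("CampaignListSketch").enableHiveSupport().getOrCreate()

// Path 1 (abandoned): partitioned JDBC read. Spark splits the id range
// [1, 200000000] into 500 slices and issues one range query per task;
// a skewed id distribution piles most rows into a few tasks, which is
// why the last task appeared to hang.
val properties = new Properties()
properties.put("user", "adnro")
properties.put("password", "YcM123glh")
val url = "jdbc:mysql://adn-mysql-external.mobvista.com:3306/mob_adn"
val viaJdbc = spark.read.jdbc(url, "campaign_list", "id", 1L, 200000000L, 500, properties)
  .select("id", "trace_app_id")
  .toDF("campaignid", "packagename")

// Path 2 (adopted): read the Hive table that mirrors the MySQL data.
val viaHive = spark.table("dwh.ods_adn_campaign_list")
  .select("id", "trace_app_id")
  .toDF("campaignid", "packagename")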
@@ -7,7 +7,7 @@ import org.apache.spark.sql.{SparkSession, _}
 import org.apache.spark.sql.types._
 /**
  *
- * mysql -h adn-data-foronlinetest.c5yzcdreb1xr.us-east-1.rds.amazonaws.com -P 3306 -u adnro -pYcM123glh
+ * mysql -h adn-mysql-external.mobvista.com -P 3306 -u adnro -pYcM123glh
  *
  */
@@ -128,7 +128,7 @@ object Offer_adr {
 val properties = new Properties()
 properties.put("user", "adnro")
 properties.put("password", "YcM123glh")
-val url = "jdbc:mysql://adn-data-foronlinetest.c5yzcdreb1xr.us-east-1.rds.amazonaws.com:3306/mob_adn"
+val url = "jdbc:mysql://adn-mysql-external.mobvista.com:3306/mob_adn"
 //val campaign_list_DF = sqlContext.read.jdbc(url, "campaign_list", properties).select("id","app_name", "platform", "preview_url").filter("platform='1'")
 val campaign_list_DF = sqlContext.read.jdbc(url, "campaign_list", "id", 1, 200000000, 500, properties).select("id", "app_name", "platform", "preview_url").filter("platform='1' and status=1")
   .toDF("id", "app_name", "platform", "track_view_url").coalesce(500)
...
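The seven-argument read.jdbc call above partitions the scan on the numeric id column: lowerBound and upperBound only control how the range is sliced into numPartitions per-task queries, not which rows are read. A sketch of the equivalent options-based form (standard Spark JDBC options; values copied from the diff):

// Equivalent options-based form of the partitioned JDBC read.
// Rows with id outside [1, 200000000] are still read; the bounds only
// shape the 500 generated "WHERE id >= x AND id < y" predicates.
val campaignListDF = sqlContext.read
  .format("jdbc")
  .option("url", "jdbc:mysql://adn-mysql-external.mobvista.com:3306/mob_adn")
  .option("dbtable", "campaign_list")
  .option("user", "adnro")
  .option("password", "YcM123glh")
  .option("partitionColumn", "id")
  .option("lowerBound", "1")
  .option("upperBound", "200000000")
  .option("numPartitions", "500")
  .load()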
@@ -7,7 +7,7 @@ import org.apache.spark.sql.{SparkSession, _}
 import org.apache.spark.sql.types._
 /**
  *
- * mysql -h adn-data-foronlinetest.c5yzcdreb1xr.us-east-1.rds.amazonaws.com -P 3306 -u adnro -pYcM123glh
+ * mysql -h adn-mysql-external.mobvista.com -P 3306 -u adnro -pYcM123glh
  *
  */
@@ -315,7 +315,7 @@ object Offer_ios {
 val properties = new Properties()
 properties.put("user", "adnro")
 properties.put("password", "YcM123glh")
-val url = "jdbc:mysql://adn-data-foronlinetest.c5yzcdreb1xr.us-east-1.rds.amazonaws.com:3306/mob_adn"
+val url = "jdbc:mysql://adn-mysql-external.mobvista.com:3306/mob_adn"
 val campaign_list_DF = sqlContext.read.jdbc(url, "campaign_list", "id", 1, 200000000, 500, properties).select("id", "app_name", "platform", "preview_url").filter("platform='2' and status=1")
   .toDF("id", "app_name", "platform", "track_view_url").coalesce(500)
...
@@ -7,7 +7,7 @@ import org.apache.spark.sql.{SparkSession, _}
 import org.apache.spark.sql.types._
 /**
  *
- * mysql -h adn-data-foronlinetest.c5yzcdreb1xr.us-east-1.rds.amazonaws.com -P 3306 -u adnro -pYcM123glh
+ * mysql -h adn-mysql-external.mobvista.com -P 3306 -u adnro -pYcM123glh
  *
  */
@@ -151,7 +151,7 @@ object Publish_adr {
 val properties = new Properties()
 properties.put("user", "adnro")
 properties.put("password", "YcM123glh")
-val url = "jdbc:mysql://adn-data-foronlinetest.c5yzcdreb1xr.us-east-1.rds.amazonaws.com:3306/mob_adn"
+val url = "jdbc:mysql://adn-mysql-external.mobvista.com:3306/mob_adn"
 val table_DF = sqlContext.read.jdbc(url, "publisher_channel", properties).select("channel_name", "platform", "confirmed_url", "sub_category_v2").filter("platform='1' and sub_category_v2!=''")
   .toDF("app_name", "platform", "confirmed_url", "category_list")
 val result_rdd = table_DF.rdd.map { p =>
...
@@ -261,7 +261,7 @@ object Publish_ios {
 val properties = new Properties()
 properties.put("user", "adnro")
 properties.put("password", "YcM123glh")
-val url = "jdbc:mysql://adn-data-foronlinetest.c5yzcdreb1xr.us-east-1.rds.amazonaws.com:3306/mob_adn"
+val url = "jdbc:mysql://adn-mysql-external.mobvista.com:3306/mob_adn"
 val table_DF = sqlContext.read.jdbc(url, "publisher_channel", properties).select("channel_name", "platform", "confirmed_url", "sub_category_v2").filter("platform='2' and sub_category_v2!=''")
   .toDF("app_name", "platform", "confirmed_url", "category_list")
 val result_df = table_DF.rdd.map { p =>
...
@@ -56,7 +56,7 @@ class AppsFlyerTotal extends CommonSparkJob {
 val properties = new Properties()
 properties.put("user", "adnro")
 properties.put("password", "YcM123glh")
-val url = "jdbc:mysql://adn-data-foronlinetest.c5yzcdreb1xr.us-east-1.rds.amazonaws.com:3306/mob_adn"
+val url = "jdbc:mysql://adn-mysql-external.mobvista.com:3306/mob_adn"
 sqlContext.read.jdbc(url, "appsflyer_audience", properties).select("id", "platform", "dmp_package").toDF("id", "platform", "dmp_package").createOrReplaceTempView("appsflyer_audience")
 val sqlAfOrgDaily=
...
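This hunk shows the pattern most of these jobs share: load a MySQL table over JDBC, register it as a temp view, and query it from Spark SQL. A minimal sketch; the query below is illustrative only, not from the commit:

// Load appsflyer_audience over JDBC and expose it to Spark SQL.
sqlContext.read.jdbc(url, "appsflyer_audience", properties)
  .select("id", "platform", "dmp_package")
  .createOrReplaceTempView("appsflyer_audience")

// Hypothetical query against the registered view (illustration only).
val byPlatform = sqlContext.sql(
  "SELECT platform, COUNT(*) AS cnt FROM appsflyer_audience GROUP BY platform")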
 package mobvista.dmp.datasource.newtag
-import java.net.URI
-import java.util.Properties
 import mobvista.dmp.common.CommonSparkJob
 import mobvista.prd.datasource.util.GsonUtil
 import org.apache.commons.cli.Options
 import org.apache.hadoop.conf.Configuration
-import org.apache.hadoop.fs.{FileSystem, Path}
-import org.apache.spark.sql.{Row, SaveMode, SparkSession}
+import org.apache.spark.sql.{Row, SparkSession}
+import java.util.Properties
 import scala.collection.JavaConversions._
 import scala.collection.mutable.ArrayBuffer
@@ -109,7 +106,7 @@ class CalInterestTag extends CommonSparkJob with Serializable {
 val properties = new Properties()
 properties.put("user", "adnro")
 properties.put("password", "YcM123glh")
-val url = "jdbc:mysql://adn-data-foronlinetest.c5yzcdreb1xr.us-east-1.rds.amazonaws.com:3306/mob_adn"
+val url = "jdbc:mysql://adn-mysql-external.mobvista.com:3306/mob_adn"
 sqlContext.read.jdbc(url, "adv_events", properties).select("event_name").filter("adv_event_category_id in('6','47') ") // .filter("adv_event_category_id in ( '6','47')")
   .toDF("event_name").createOrReplaceTempView("adv_event_name_tab_tmp")
...
@@ -51,7 +51,7 @@ class SettingTotal extends CommonSparkJob {
 val properties = new Properties()
 properties.put("user", "adnro")
 properties.put("password", "YcM123glh")
-val url = "jdbc:mysql://adn-data-foronlinetest.c5yzcdreb1xr.us-east-1.rds.amazonaws.com:3306/mob_adn"
+val url = "jdbc:mysql://adn-mysql-external.mobvista.com:3306/mob_adn"
 sqlContext.read.jdbc(url, "publisher_channel", properties).select("id", "package_name", "platform").toDF("id", "package_name", "platform").createOrReplaceTempView("publisher_channel")
 val sqlDaily=
...
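The same host, database, and credentials are hardcoded in every file this commit touches, which is why one URL change fans out across eight hunks. A hypothetical helper, name and all, is the editor's assumption rather than anything in the repo, but it shows how the next change could be confined to one place:

import java.util.Properties

// Hypothetical shared connection config; not part of this commit.
object AdnMysql {
  val url = "jdbc:mysql://adn-mysql-external.mobvista.com:3306/mob_adn"

  def properties: Properties = {
    val p = new Properties()
    p.put("user", "adnro")
    p.put("password", "YcM123glh") // better loaded from env or a secrets store
    p
  }
}

// Usage in a job:
// sqlContext.read.jdbc(AdnMysql.url, "publisher_channel", AdnMysql.properties)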