diff --git a/src/main/scala/mobvista/dmp/datasource/adn_sdk/AdnSdkDaily.scala b/src/main/scala/mobvista/dmp/datasource/adn_sdk/AdnSdkDaily.scala
index 32c9bac..5c0bc4e 100644
--- a/src/main/scala/mobvista/dmp/datasource/adn_sdk/AdnSdkDaily.scala
+++ b/src/main/scala/mobvista/dmp/datasource/adn_sdk/AdnSdkDaily.scala
@@ -151,12 +151,12 @@ object AdnSdkDaily extends Serializable {
       val Requestid_Campaignid_df = spark.sql(sql01).select($"requestid".cast("string"), $"campaignid".cast("string"))
       Requestid_Campaignid_df.createOrReplaceTempView("Requestid_Campaignid")
 
-//      mysql -h adn-data-foronlinetest.c5yzcdreb1xr.us-east-1.rds.amazonaws.com -P 3306 -u adnro  -pYcM123glh
+//      mysql -h adn-mysql-external.mobvista.com -P 3306 -u adnro  -pYcM123glh
 //      Originally Spark was meant to read the MySQL table campaign_list directly, but the last task kept running without finishing, likely due to data skew. Since the Hive table dwh.ods_adn_campaign_list holds the same data as the MySQL table, the Hive table was adopted instead.
 //      val properties = new Properties()
 //      properties.put("user", "adnro")
 //      properties.put("password", "YcM123glh")
-//      val url = "jdbc:mysql://adn-data-foronlinetest.c5yzcdreb1xr.us-east-1.rds.amazonaws.com:3306/mob_adn"
+//      val url = "jdbc:mysql://adn-mysql-external.mobvista.com:3306/mob_adn"
 //      val campaign_list_df = spark.read.jdbc(url, "campaign_list", "id", 1, 200000000, 500, properties)
 //        .select("id", "trace_app_id")
 //        .toDF("campaignid", "packagename")
diff --git a/src/main/scala/mobvista/dmp/datasource/app_info_tag/Offer_adr.scala b/src/main/scala/mobvista/dmp/datasource/app_info_tag/Offer_adr.scala
index 862f5a9..aec4658 100644
--- a/src/main/scala/mobvista/dmp/datasource/app_info_tag/Offer_adr.scala
+++ b/src/main/scala/mobvista/dmp/datasource/app_info_tag/Offer_adr.scala
@@ -7,7 +7,7 @@ import org.apache.spark.sql.{SparkSession, _}
 import org.apache.spark.sql.types._
 /**
  *
- * mysql -h adn-data-foronlinetest.c5yzcdreb1xr.us-east-1.rds.amazonaws.com -P 3306 -u adnro  -pYcM123glh
+ * mysql -h adn-mysql-external.mobvista.com -P 3306 -u adnro  -pYcM123glh
  *
  */
 
@@ -128,7 +128,7 @@ object Offer_adr {
       val properties = new Properties()
       properties.put("user", "adnro")
       properties.put("password", "YcM123glh")
-      val url = "jdbc:mysql://adn-data-foronlinetest.c5yzcdreb1xr.us-east-1.rds.amazonaws.com:3306/mob_adn"
+      val url = "jdbc:mysql://adn-mysql-external.mobvista.com:3306/mob_adn"
       //val campaign_list_DF = sqlContext.read.jdbc(url, "campaign_list", properties).select("id","app_name", "platform", "preview_url").filter("platform='1'")
       val campaign_list_DF = sqlContext.read.jdbc(url, "campaign_list", "id", 1, 200000000, 500, properties).select("id", "app_name", "platform", "preview_url").filter("platform='1' and status=1")
         .toDF("id", "app_name", "platform", "track_view_url").coalesce(500)
diff --git a/src/main/scala/mobvista/dmp/datasource/app_info_tag/Offer_ios.scala b/src/main/scala/mobvista/dmp/datasource/app_info_tag/Offer_ios.scala
index 21fe4fd..7ff641d 100644
--- a/src/main/scala/mobvista/dmp/datasource/app_info_tag/Offer_ios.scala
+++ b/src/main/scala/mobvista/dmp/datasource/app_info_tag/Offer_ios.scala
@@ -7,7 +7,7 @@ import org.apache.spark.sql.{SparkSession, _}
 import org.apache.spark.sql.types._
 /**
  *
- * mysql -h adn-data-foronlinetest.c5yzcdreb1xr.us-east-1.rds.amazonaws.com -P 3306 -u adnro  -pYcM123glh
+ * mysql -h adn-mysql-external.mobvista.com -P 3306 -u adnro  -pYcM123glh
  *
  */
 
@@ -315,7 +315,7 @@ object Offer_ios {
       val properties = new Properties()
       properties.put("user", "adnro")
       properties.put("password", "YcM123glh")
-      val url = "jdbc:mysql://adn-data-foronlinetest.c5yzcdreb1xr.us-east-1.rds.amazonaws.com:3306/mob_adn"
+      val url = "jdbc:mysql://adn-mysql-external.mobvista.com:3306/mob_adn"
 
       val campaign_list_DF = sqlContext.read.jdbc(url, "campaign_list", "id", 1, 200000000, 500, properties).select("id", "app_name", "platform", "preview_url").filter("platform='2' and status=1")
         .toDF("id", "app_name", "platform", "track_view_url").coalesce(500)
diff --git a/src/main/scala/mobvista/dmp/datasource/app_info_tag/Publish_adr.scala b/src/main/scala/mobvista/dmp/datasource/app_info_tag/Publish_adr.scala
index 552e7f1..938415b 100644
--- a/src/main/scala/mobvista/dmp/datasource/app_info_tag/Publish_adr.scala
+++ b/src/main/scala/mobvista/dmp/datasource/app_info_tag/Publish_adr.scala
@@ -7,7 +7,7 @@ import org.apache.spark.sql.{SparkSession, _}
 import org.apache.spark.sql.types._
 /**
  *
- * mysql -h adn-data-foronlinetest.c5yzcdreb1xr.us-east-1.rds.amazonaws.com -P 3306 -u adnro  -pYcM123glh
+ * mysql -h adn-mysql-external.mobvista.com -P 3306 -u adnro  -pYcM123glh
  *
  */
 
@@ -151,7 +151,7 @@ object Publish_adr {
       val properties = new Properties()
       properties.put("user", "adnro")
       properties.put("password", "YcM123glh")
-      val url = "jdbc:mysql://adn-data-foronlinetest.c5yzcdreb1xr.us-east-1.rds.amazonaws.com:3306/mob_adn"
+      val url = "jdbc:mysql://adn-mysql-external.mobvista.com:3306/mob_adn"
       val table_DF = sqlContext.read.jdbc(url, "publisher_channel", properties).select("channel_name", "platform", "confirmed_url", "sub_category_v2").filter("platform='1' and sub_category_v2!=''")
         .toDF("app_name", "platform", "confirmed_url", "category_list")
       val result_rdd = table_DF.rdd.map { p =>
diff --git a/src/main/scala/mobvista/dmp/datasource/app_info_tag/Publish_ios.scala b/src/main/scala/mobvista/dmp/datasource/app_info_tag/Publish_ios.scala
index 7c08859..41060aa 100644
--- a/src/main/scala/mobvista/dmp/datasource/app_info_tag/Publish_ios.scala
+++ b/src/main/scala/mobvista/dmp/datasource/app_info_tag/Publish_ios.scala
@@ -261,7 +261,7 @@ object Publish_ios {
       val properties = new Properties()
       properties.put("user", "adnro")
       properties.put("password", "YcM123glh")
-      val url = "jdbc:mysql://adn-data-foronlinetest.c5yzcdreb1xr.us-east-1.rds.amazonaws.com:3306/mob_adn"
+      val url = "jdbc:mysql://adn-mysql-external.mobvista.com:3306/mob_adn"
       val table_DF = sqlContext.read.jdbc(url, "publisher_channel", properties).select("channel_name", "platform", "confirmed_url", "sub_category_v2").filter("platform='2' and sub_category_v2!=''")
         .toDF("app_name", "platform", "confirmed_url", "category_list")
       val result_df = table_DF.rdd.map { p =>
diff --git a/src/main/scala/mobvista/dmp/datasource/appsflyer/AppsFlyerTotal.scala b/src/main/scala/mobvista/dmp/datasource/appsflyer/AppsFlyerTotal.scala
index 2682d8a..70f97e8 100644
--- a/src/main/scala/mobvista/dmp/datasource/appsflyer/AppsFlyerTotal.scala
+++ b/src/main/scala/mobvista/dmp/datasource/appsflyer/AppsFlyerTotal.scala
@@ -56,7 +56,7 @@ class AppsFlyerTotal extends CommonSparkJob {
       val properties = new Properties()
       properties.put("user", "adnro")
       properties.put("password", "YcM123glh")
-      val url = "jdbc:mysql://adn-data-foronlinetest.c5yzcdreb1xr.us-east-1.rds.amazonaws.com:3306/mob_adn"
+      val url = "jdbc:mysql://adn-mysql-external.mobvista.com:3306/mob_adn"
       sqlContext.read.jdbc(url, "appsflyer_audience", properties).select("id", "platform", "dmp_package").toDF("id", "platform", "dmp_package").createOrReplaceTempView("appsflyer_audience")
 
       val sqlAfOrgDaily=
diff --git a/src/main/scala/mobvista/dmp/datasource/newtag/CalInterestTag.scala b/src/main/scala/mobvista/dmp/datasource/newtag/CalInterestTag.scala
index 540b8cd..6ae0b65 100644
--- a/src/main/scala/mobvista/dmp/datasource/newtag/CalInterestTag.scala
+++ b/src/main/scala/mobvista/dmp/datasource/newtag/CalInterestTag.scala
@@ -1,16 +1,13 @@
 package mobvista.dmp.datasource.newtag
 
 
-import java.net.URI
-import java.util.Properties
-
 import mobvista.dmp.common.CommonSparkJob
 import mobvista.prd.datasource.util.GsonUtil
 import org.apache.commons.cli.Options
 import org.apache.hadoop.conf.Configuration
-import org.apache.hadoop.fs.{FileSystem, Path}
-import org.apache.spark.sql.{Row, SaveMode, SparkSession}
+import org.apache.spark.sql.{Row, SparkSession}
 
+import java.util.Properties
 import scala.collection.JavaConversions._
 import scala.collection.mutable.ArrayBuffer
 
@@ -109,7 +106,7 @@ class CalInterestTag extends CommonSparkJob with Serializable {
       val properties = new Properties()
       properties.put("user", "adnro")
       properties.put("password", "YcM123glh")
-      val url = "jdbc:mysql://adn-data-foronlinetest.c5yzcdreb1xr.us-east-1.rds.amazonaws.com:3306/mob_adn"
+      val url = "jdbc:mysql://adn-mysql-external.mobvista.com:3306/mob_adn"
       sqlContext.read.jdbc(url, "adv_events", properties).select("event_name").filter("adv_event_category_id in('6','47') ")  // .filter("adv_event_category_id in ( '6','47')")
         .toDF("event_name").createOrReplaceTempView("adv_event_name_tab_tmp")
 
diff --git a/src/main/scala/mobvista/dmp/datasource/setting/SettingTotal.scala b/src/main/scala/mobvista/dmp/datasource/setting/SettingTotal.scala
index e94d130..aeeb07f 100644
--- a/src/main/scala/mobvista/dmp/datasource/setting/SettingTotal.scala
+++ b/src/main/scala/mobvista/dmp/datasource/setting/SettingTotal.scala
@@ -51,7 +51,7 @@ class SettingTotal extends CommonSparkJob {
       val properties = new Properties()
       properties.put("user", "adnro")
       properties.put("password", "YcM123glh")
-      val url = "jdbc:mysql://adn-data-foronlinetest.c5yzcdreb1xr.us-east-1.rds.amazonaws.com:3306/mob_adn"
+      val url = "jdbc:mysql://adn-mysql-external.mobvista.com:3306/mob_adn"
       sqlContext.read.jdbc(url, "publisher_channel", properties).select("id", "package_name", "platform").toDF("id", "package_name", "platform").createOrReplaceTempView("publisher_channel")
 
       val sqlDaily=