lining / sparkproject · Commits

Commit 705f3251 ("submit project"), authored Jun 28, 2022 by lining
Showing 32 changed files with 2473 additions and 0 deletions.
pom.xml  +72 -0
sparkproject.iml  +3 -0
src/main/java/com/lining/code/constant/ESIndexConstant.java  +68 -0
src/main/java/com/lining/code/constant/SQLConstant.java  +418 -0
src/main/java/com/lining/code/constant/TableConstant.java  +10 -0
src/main/java/com/lining/code/enums/DataFlowEnum.java  +39 -0
src/main/java/com/lining/code/enums/DataTypeEnum.java  +36 -0
src/main/java/com/lining/code/enums/DayEnum.java  +27 -0
src/main/java/com/lining/code/spark/ad/model/AdListModel03Action.java  +99 -0
src/main/java/com/lining/code/spark/ad/model/AdListModel07Action.java  +98 -0
src/main/java/com/lining/code/spark/ad/model/AdListModel30Action.java  +98 -0
src/main/java/com/lining/code/spark/company/model/CompanyListModel03Action.java  +99 -0
src/main/java/com/lining/code/spark/company/model/CompanyListModel07Action.java  +99 -0
src/main/java/com/lining/code/spark/company/model/CompanyListModel30Action.java  +101 -0
src/main/java/com/lining/code/spark/company/model/CompanyModelAction.java  +57 -0
src/main/java/com/lining/code/spark/country/model/CountryListModel03Action.java  +97 -0
src/main/java/com/lining/code/spark/country/model/CountryListModel07Action.java  +97 -0
src/main/java/com/lining/code/spark/country/model/CountryListModel30Action.java  +98 -0
src/main/java/com/lining/code/spark/document/model/DocumentModelAction.java  +58 -0
src/main/java/com/lining/code/spark/material/model/MaterialModelAction.java  +59 -0
src/main/java/com/lining/code/spark/material/model/PreplayMaterialModelAction.java  +60 -0
src/main/java/com/lining/code/spark/product/model/ProductListModel03Action.java  +101 -0
src/main/java/com/lining/code/spark/product/model/ProductListModel07Action.java  +100 -0
...ng/code/spark/product/model/ProductListModel30Action.java  +101 -0
...m/lining/code/spark/product/model/ProductModelAction.java  +58 -0
...ning/code/spark/trend/model/CreativeTrendModelAction.java  +55 -0
...ning/code/spark/trend/model/MaterialTrendModelAction.java  +57 -0
src/main/java/com/lining/code/spark/udf/DateUdf.java  +22 -0
src/main/java/com/lining/code/util/DateUtil.java  +52 -0
src/main/java/com/lining/code/util/PropertiesUtil.java  +65 -0
src/main/java/com/lining/code/util/SparkSessionUtil.java  +44 -0
src/main/resources/config.properties  +25 -0
pom.xml (new file, mode 100755)

<?xml version="1.0" encoding="UTF-8"?>
<project xmlns="http://maven.apache.org/POM/4.0.0"
         xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
         xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
    <modelVersion>4.0.0</modelVersion>

    <groupId>com.lining.code</groupId>
    <artifactId>spark-project</artifactId>
    <version>1.0-SNAPSHOT</version>

    <build>
        <plugins>
            <plugin>
                <groupId>org.apache.maven.plugins</groupId>
                <artifactId>maven-compiler-plugin</artifactId>
                <configuration>
                    <source>8</source>
                    <target>8</target>
                </configuration>
            </plugin>
        </plugins>
    </build>

    <properties>
        <scala.version>2.11.12</scala.version>
        <spark.version>2.1.1</spark.version>
    </properties>

    <dependencies>
        <dependency>
            <groupId>org.apache.spark</groupId>
            <artifactId>spark-core_2.11</artifactId>
            <version>${spark.version}</version>
            <scope>provided</scope>
        </dependency>
        <dependency>
            <groupId>org.apache.spark</groupId>
            <artifactId>spark-sql_2.11</artifactId>
            <version>${spark.version}</version>
            <scope>provided</scope>
        </dependency>
        <!-- Spark Streaming -->
        <dependency>
            <groupId>org.apache.spark</groupId>
            <artifactId>spark-streaming_2.11</artifactId>
            <version>${spark.version}</version>
            <scope>provided</scope>
        </dependency>
        <dependency>
            <groupId>org.apache.spark</groupId>
            <artifactId>spark-hive_2.11</artifactId>
            <version>2.1.1</version>
            <scope>provided</scope>
        </dependency>
        <dependency>
            <groupId>org.elasticsearch</groupId>
            <artifactId>elasticsearch-spark-20_2.11</artifactId>
            <version>7.14.2</version>
            <scope>provided</scope>
        </dependency>
    </dependencies>
</project>
sparkproject.iml (new file, mode 100755)

<?xml version="1.0" encoding="UTF-8"?>
<module type="JAVA_MODULE" version="4" />
src/main/java/com/lining/code/constant/ESIndexConstant.java (new file, mode 100755)

package com.lining.code.constant;

/**
 * @author lining
 * @date 2022/5/10 21:25
 * Description: Elasticsearch index name templates.
 */
public class ESIndexConstant {

    /** Material detail index */
    public static String MATERIAL_INDEX = "material_model_{0}/_doc";

    /** Playable ad index */
    public static String PREPLAY_MATERIAL_INDEX = "preplay_material_model_{0}/_doc";

    /** Ad copy (document) index */
    public static String DOCUMENT_INDEX = "document_model_{0}/_doc";

    /** Product list index */
    public static String PRODUCT_LIST_INDEX = "product_list_model_{0}_{1}/_doc";
    public static String PRODUCT_LIST_DATE_INDEX = "product_list_model_{0}_{1}_{2}/_doc";

    /** Product detail index */
    public static String PRODUCT_INDEX = "product_model_{0}/_doc";

    /** Company list index */
    public static String COMPANY_LIST_INDEX = "company_list_model_{0}_{1}/_doc";
    public static String COMPANY_LIST_DATE_INDEX = "company_list_model_{0}_{1}_{2}/_doc";

    /** Company detail index */
    public static String COMPANY_INDEX = "company_model_{0}/_doc";

    /** Ad monetization list index */
    public static String AD_LIST_INDEX = "ad_monetization_list_model_{0}_{1}/_doc";
    public static String AD_LIST_DATE_INDEX = "ad_monetization_list_model_{0}_{1}_{2}/_doc";

    /** Country list index */
    public static String COUNTRY_LIST_INDEX = "country_list_model_{0}_{1}/_doc";
    public static String COUNTRY_LIST_DATE_INDEX = "country_list_model_{0}_{1}_{2}/_doc";

    /** Material trend index */
    public static String MATERIAL_TREND_INDEX = "material_trend_model_{0}/_doc";

    /** Creative trend index */
    public static String CREATIVE_TREND_INDEX = "creative_trend_model_{0}/_doc";
}
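The {0}/{1}/{2} placeholders above are filled with java.text.MessageFormat by the Action classes later in this commit (data-type name, day window, and, for the dated variants, a date string). A minimal sketch of how an index name resolves, using values that occur in this commit; the demo class and the concrete date are illustrative only, not part of the commit:

import com.lining.code.constant.ESIndexConstant;

import java.text.MessageFormat;

// Illustrative helper, not part of the commit.
public class IndexNameDemo {
    public static void main(String[] args) {
        // -> "product_list_model_game_03/_doc"
        String index = MessageFormat.format(ESIndexConstant.PRODUCT_LIST_INDEX, "game", "03");
        // -> "product_list_model_game_03_20220628/_doc" (date chosen for illustration)
        String dated = MessageFormat.format(ESIndexConstant.PRODUCT_LIST_DATE_INDEX, "game", "03", "20220628");
        System.out.println(index);
        System.out.println(dated);
    }
}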
src/main/java/com/lining/code/constant/SQLConstant.java (new file, mode 100755)

package com.lining.code.constant;

/**
 * @author lining
 * @date 2022/5/10 21:06
 * Description: Hive SQL templates for each model sync job.
 */
public class SQLConstant {

    /** Material detail SQL */
    public static String MATERIAL_MODEL_SQL =
            "select id, cat_ids1, cat_ids2, cat_ids3, cat_ids4, class_ids, tag_ids, style_ids, media_ids, "
          + "languages, country_codes, device_list, appeal_type, like_cnt, comment_cnt, share_cnt, find_cnt, "
          + "creative_cnt, product_cnt, material_type, material_format, material_shape, video_time_span, "
          + "width, height, conver_size, has_conver, conver_url, image_url, video_url, size, first_time, "
          + "last_time, update_time, hot_index, interaction_index_list, document_default, document_ik_cn, "
          + "document_ik_en, document_ik_kr, document_ik_jp, document_ik_other, product_id, "
          + "product_name_default, product_name_cn, product_name_en, product_name_kr, product_name_jp, "
          + "product_name_other, distribute_media_creative_list, distribute_ua_creative_list, "
          + "distribute_product_creative_list, distribute_country_creative_list, "
          + "distribute_material_admedia_list, top_lasttime_document_ids, top_cnt_document_ids, company_info "
          + "from adinsights.material_model_{1} where type_id = {0} and material_type not in (10,11);";

    /** Playable ad SQL */
    public static String PREPLAY_MATERIAL_MODEL_SQL =
            "select id, cat_ids1, cat_ids2, cat_ids3, cat_ids4, class_ids, tag_ids, style_ids, media_ids, "
          + "country_codes, device_list, creative_cnt, find_cnt, first_time, last_time, update_time, "
          + "company_info, document_ik_cn, document_ik_en, document_ik_kr, document_ik_jp, document_ik_other, "
          + "document_default, hot_index, product_id, distribute_media_creative_list, "
          + "distribute_ua_creative_list, distribute_product_creative_list, distribute_country_creative_list, "
          + "distribute_material_admedia_list, top_lasttime_document_ids, top_cnt_document_ids, "
          + "product_name_default, product_name_cn, product_name_en, product_name_kr, product_name_jp, "
          + "product_name_other, image_url as play_html_url "
          + "from adinsights.material_model_{1} where type_id = {0} and material_type = 10";

    /** Ad copy (document) SQL */
    public static String DOCUMENT_MODEL_SQL =
            "select id, cat_ids1, cat_ids2, cat_ids3, cat_ids4, class_ids, tag_ids, style_ids, media_ids, "
          + "country_codes, languages, selling, material_cnt, creative_cnt, find_cnt, first_time, last_time, "
          + "update_time, product_name_default, product_name_cn, product_name_en, product_name_kr, "
          + "product_name_jp, product_name_other, company_info, subject_cn, subject_en, subject_jp, "
          + "subject_kr, subject_other, subject, caption_cn, caption_en, caption_jp, caption_kr, "
          + "caption_other, caption, product_id, distribute_media_creative_list "
          + "from adinsights.document_model_{1} where type_id = {0}";

    /** Product list model SQL */
    public static String PRODUCT_LIST_MODEL_SQL =
            "select id, product_id, product_first_time, cat_ids1, cat_ids2, cat_ids3, cat_ids4, class_ids, "
          + "tag_ids, style_ids, media_id, country_code, device, monetization, selling, seaput_status, "
          + "material_cnt, preplay_material_cnt, creative_cnt, find_cnt, first_time, last_time, "
          + "product_name_cn, product_name_en, product_name_kr, product_name_jp, product_name_other, "
          + "product_name_default, product_name_list, company_id, company_name, pkg, "
          + "distribute_country_creative_list, distribute_country_material_list, "
          + "distribute_media_material_list, distribute_media_creative_list, "
          + "distribute_material_type_material_list, distribute_image_size_material_list, "
          + "distribute_video_size_material_list, distribute_language_material_list, "
          + "distribute_device_material_list, distribute_device_creative_list "
          + "from adinsights.product_list_model_{0} where type_id = {1}";

    /** Product detail SQL */
    public static String PRODUCT_MODEL_SQL =
            "select id, cat_ids1, cat_ids2, cat_ids3, cat_ids4, material_cnt, preplay_material_cnt, "
          + "creative_cnt, find_cnt, first_time, last_time, product_name_default, class_ids, tag_ids, "
          + "media_ids, country_codes, device_list, product_name_list, product_name_cn, product_name_en, "
          + "product_name_kr, product_name_jp, product_name_other, product_logo, bundle_id, "
          + "ios_download_url, android_download_url, company_id, company_name, pkg, "
          + "distribute_country_creative_list, distribute_country_material_list, "
          + "distribute_media_material_list, distribute_media_creative_list, "
          + "distribute_material_type_material_list, distribute_image_size_material_list, "
          + "distribute_video_size_material_list, distribute_language_material_list, "
          + "distribute_device_material_list, distribute_device_creative_list "
          + "from adinsights.product_model_{1} where type_id = {0}";

    /** Company list SQL */
    public static String COMPANY_LIST_MODEL_SQL =
            "select id, company_id, company_first_time, cat_ids1, cat_ids2, cat_ids3, cat_ids4, class_ids, "
          + "tag_ids, style_ids, media_id, country_code, device, product_name_cn, product_name_en, "
          + "product_name_kr, product_name_jp, product_name_other, product_cnt, material_cnt, creative_cnt, "
          + "find_cnt, first_time, last_time, update_time, company_name, distribute_country_creative_list, "
          + "distribute_country_material_list, distribute_product_material_list, "
          + "distribute_product_creative_list, distribute_device_material_list, distribute_device_creative_list "
          + "from adinsights.company_list_model_{0} where type_id = {1}";

    /** Company detail SQL */
    public static String COMPANY_MODEL_SQL =
            "select id, cat_ids1, cat_ids2, cat_ids3, cat_ids4, class_ids, tag_ids, media_ids, country_codes, "
          + "device_list, product_cnt, material_cnt, creative_cnt, find_cnt, first_time, last_time, "
          + "update_time, company_name, distribute_country_creative_list, distribute_country_material_list, "
          + "distribute_product_material_list, distribute_product_creative_list, "
          + "distribute_device_material_list, distribute_device_creative_list "
          + "from adinsights.company_model_{1} where type_id = {0}";

    /** Ad monetization list SQL */
    public static String AD_LIST_MODEL_SQL =
            "select id, admedia_id, cat_ids1, cat_ids2, cat_ids3, cat_ids4, class_ids, tag_ids, style_ids, "
          + "creative_cnt, first_time, last_time, distribute_media_material_list, "
          + "distribute_media_creative_list, top_product_list "
          + "from adinsights.ad_monetization_list_model_{0} where type_id = {1}";

    /** Country list SQL */
    public static String COUNTRY_LIST_MODEL_SQL =
            "select id, geo, cat_ids1, cat_ids2, cat_ids3, cat_ids4, class_ids, tag_ids, style_ids, device, "
          + "product_cnt, material_cnt, creative_cnt, first_time, last_time, top_class_material_list, "
          + "top_class_creative_list, top_product_list "
          + "from adinsights.country_list_model_{0} where type_id = {1}";

    /** Material trend SQL */
    public static String MATERIAL_TREND_MODEL_SQL =
            "select id, day, product_id, company_id, media_id, geo, device, ad_style, material_type, cnt "
          + "from adinsights.material_trend_model where type_id = {0}";

    /** Creative trend SQL */
    public static String CREATIVE_TREND_MODEL_SQL =
            "select id, day, product_id, company_id, media_id, geo, device, ad_style, material_type, cnt "
          + "from adinsights.creative_trend_model where type_id = {0}";
}
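Note the two placeholder conventions used above: the *_LIST_MODEL_SQL templates put the day-window suffix in {0} and the type_id in {1}, while the detail templates (MATERIAL_MODEL_SQL, DOCUMENT_MODEL_SQL, PRODUCT_MODEL_SQL, COMPANY_MODEL_SQL) put the type_id in {0} and the history/real table suffix in {1}. A minimal sketch of how the Action classes below fill them; the demo class itself is illustrative, not part of the commit:

import com.lining.code.constant.SQLConstant;

import java.text.MessageFormat;

// Illustrative helper, not part of the commit.
public class SqlTemplateDemo {
    public static void main(String[] args) {
        // List template: {0} = day window, {1} = type_id
        // -> "... from adinsights.ad_monetization_list_model_03 where type_id = 1"
        String listSql = MessageFormat.format(SQLConstant.AD_LIST_MODEL_SQL, "03", 1);

        // Detail template: {0} = type_id, {1} = data-flow suffix ("history" or "real")
        // -> "... from adinsights.material_model_history where type_id = 1 ..."
        String detailSql = MessageFormat.format(SQLConstant.MATERIAL_MODEL_SQL, 1, "history");

        System.out.println(listSql);
        System.out.println(detailSql);
    }
}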
src/main/java/com/lining/code/constant/TableConstant.java (new file, mode 100755)

package com.lining.code.constant;

/**
 * @author lining
 * @date 2022/5/16 9:49
 * Description: Table name constants.
 */
public class TableConstant {

    public static final String PRODUCT_LIST_TABLE = "";
}
src/main/java/com/lining/code/enums/DataFlowEnum.java (new file, mode 100644)

package com.lining.code.enums;

/**
 * @author lining
 * @date 2022/6/25 08:52
 * Description: Data-flow type: "real" = real-time (incremental) data, "history" = full historical data.
 */
public enum DataFlowEnum {

    HISTORY("history"),
    REAL("real");

    private String type;

    DataFlowEnum(String type) {
        this.type = type;
    }

    public static DataFlowEnum getDataFlow(String type) {
        DataFlowEnum[] values = DataFlowEnum.values();
        for (DataFlowEnum dataFlow : values) {
            if (dataFlow.type.equals(type)) {
                return dataFlow;
            }
        }
        return DataFlowEnum.HISTORY;
    }

    public String getType() {
        return type;
    }

    public void setType(String type) {
        this.type = type;
    }
}
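getDataFlow falls back to HISTORY for any unrecognized value, so the detail sync jobs below read the *_model_history tables unless "real" is passed explicitly. A small usage sketch; the demo class is illustrative, not part of the commit:

import com.lining.code.enums.DataFlowEnum;

// Illustrative helper, not part of the commit.
public class DataFlowDemo {
    public static void main(String[] args) {
        System.out.println(DataFlowEnum.getDataFlow("real").getType());    // real
        System.out.println(DataFlowEnum.getDataFlow("unknown").getType()); // history (fallback)
    }
}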
src/main/java/com/lining/code/enums/DataTypeEnum.java (new file, mode 100755)

package com.lining.code.enums;

/**
 * @author lining
 * @date 2022/5/10 21:06
 * Description: Data category (game vs. app).
 */
public enum DataTypeEnum {

    GAME(1, "game"),
    APP(2, "app");

    private int type;
    private String name;

    DataTypeEnum(int type, String name) {
        this.type = type;
        this.name = name;
    }

    public int getType() {
        return type;
    }

    public void setType(int type) {
        this.type = type;
    }

    public String getName() {
        return name;
    }

    public void setName(String name) {
        this.name = name;
    }
}
src/main/java/com/lining/code/enums/DayEnum.java (new file, mode 100755)

package com.lining.code.enums;

/**
 * @author lining
 * @date 2022/5/10 21:06
 * Description: Day-window categories (3, 7 and 30 days).
 */
public enum DayEnum {

    DAY_03("03"),
    DAY_07("07"),
    DAY_30("30");

    private String name;

    DayEnum(String name) {
        this.name = name;
    }

    public String getName() {
        return name;
    }

    public void setName(String name) {
        this.name = name;
    }
}
src/main/java/com/lining/code/spark/ad/model/AdListModel03Action.java (new file, mode 100755)

package com.lining.code.spark.ad.model;

import com.lining.code.constant.ESIndexConstant;
import com.lining.code.constant.SQLConstant;
import com.lining.code.enums.DataTypeEnum;
import com.lining.code.enums.DayEnum;
import com.lining.code.util.DateUtil;
import com.lining.code.util.SparkSessionUtil;
import org.apache.spark.sql.Dataset;
import org.apache.spark.sql.Row;
import org.apache.spark.sql.SaveMode;
import org.apache.spark.sql.SparkSession;

import java.text.MessageFormat;

/**
 * @author lining
 * @date 2022/5/11 18:12
 * Description: Ad (monetization) list model data sync, 3-day window.
 */
public class AdListModel03Action {

    public static void main(String[] args) {
        SparkSession spark = SparkSessionUtil.getSparkSession("AdListModel03Action");
        String date = DateUtil.getCurrentDate();
        if (args != null && args.length > 0) {
            date = args[0];
            syncDataByDate(spark, date);
        } else {
            syncData(spark);
        }
        spark.stop();
    }

    private static void syncDataByDate(SparkSession spark, String date) {
        String gameSQL = MessageFormat.format(SQLConstant.AD_LIST_MODEL_SQL, DayEnum.DAY_03.getName(), DataTypeEnum.GAME.getType());
        String gameIndex = MessageFormat.format(ESIndexConstant.AD_LIST_DATE_INDEX, DataTypeEnum.GAME.getName(), DayEnum.DAY_03.getName(), date);
        Dataset<Row> gameSet = spark.sql(gameSQL);
        gameSet.write().format("org.elasticsearch.spark.sql").mode(SaveMode.Overwrite).option("es.mapping.id", "id").save(gameIndex);

        String appSQL = MessageFormat.format(SQLConstant.AD_LIST_MODEL_SQL, DayEnum.DAY_03.getName(), DataTypeEnum.APP.getType());
        String appIndex = MessageFormat.format(ESIndexConstant.AD_LIST_DATE_INDEX, DataTypeEnum.APP.getName(), DayEnum.DAY_03.getName(), date);
        Dataset<Row> appSet = spark.sql(appSQL);
        appSet.write().format("org.elasticsearch.spark.sql").mode(SaveMode.Overwrite).option("es.mapping.id", "id").save(appIndex);
    }

    private static void syncData(SparkSession spark) {
        String gameSQL = MessageFormat.format(SQLConstant.AD_LIST_MODEL_SQL, DayEnum.DAY_03.getName(), DataTypeEnum.GAME.getType());
        String gameIndex = MessageFormat.format(ESIndexConstant.AD_LIST_INDEX, DataTypeEnum.GAME.getName(), DayEnum.DAY_03.getName());
        Dataset<Row> gameSet = spark.sql(gameSQL);
        gameSet.write().format("org.elasticsearch.spark.sql").mode(SaveMode.Overwrite).option("es.mapping.id", "id").save(gameIndex);

        String appSQL = MessageFormat.format(SQLConstant.AD_LIST_MODEL_SQL, DayEnum.DAY_03.getName(), DataTypeEnum.APP.getType());
        String appIndex = MessageFormat.format(ESIndexConstant.AD_LIST_INDEX, DataTypeEnum.APP.getName(), DayEnum.DAY_03.getName());
        Dataset<Row> appSet = spark.sql(appSQL);
        appSet.write().format("org.elasticsearch.spark.sql").mode(SaveMode.Overwrite).option("es.mapping.id", "id").save(appIndex);
    }
}
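SparkSessionUtil.java, DateUtil.java, PropertiesUtil.java and config.properties are part of this commit but their contents are not shown in this excerpt. A minimal sketch of the kind of session helper these jobs depend on, assuming it enables Hive support (the model SQL reads adinsights.* Hive tables) and passes Elasticsearch connection settings to the session; the class name and property values here are assumptions, not the committed code:

package com.lining.code.util;

import org.apache.spark.sql.SparkSession;

// Hypothetical sketch only; the real SparkSessionUtil in this commit is not shown above.
public class SparkSessionUtilSketch {

    public static SparkSession getSparkSession(String appName) {
        return SparkSession.builder()
                .appName(appName)
                .enableHiveSupport()                     // the model SQL reads Hive tables (adinsights.*)
                .config("es.nodes", "127.0.0.1")         // assumption: likely read from config.properties
                .config("es.port", "9200")               // assumption
                .config("es.index.auto.create", "true")  // assumption
                .getOrCreate();
    }
}

With a session built this way, each Action's write().format("org.elasticsearch.spark.sql") call picks up the es.* settings from the Spark configuration, and es.mapping.id = "id" makes the Elasticsearch document _id follow the row's id column, so reruns update the same documents.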
src/main/java/com/lining/code/spark/ad/model/AdListModel07Action.java (new file, mode 100755)

package com.lining.code.spark.ad.model;

import com.lining.code.constant.ESIndexConstant;
import com.lining.code.constant.SQLConstant;
import com.lining.code.enums.DataTypeEnum;
import com.lining.code.enums.DayEnum;
import com.lining.code.util.DateUtil;
import com.lining.code.util.SparkSessionUtil;
import org.apache.spark.sql.Dataset;
import org.apache.spark.sql.Row;
import org.apache.spark.sql.SaveMode;
import org.apache.spark.sql.SparkSession;

import java.text.MessageFormat;

/**
 * @author lining
 * @date 2022/5/11 18:12
 * Description: Ad (monetization) list model data sync, 7-day window.
 */
public class AdListModel07Action {

    public static void main(String[] args) {
        SparkSession spark = SparkSessionUtil.getSparkSession("AdListModel07Action");
        String date = DateUtil.getCurrentDate();
        if (args != null && args.length > 0) {
            date = args[0];
            syncDataByDate(spark, date);
        } else {
            syncData(spark);
        }
        spark.stop();
    }

    private static void syncDataByDate(SparkSession spark, String date) {
        String gameSQL = MessageFormat.format(SQLConstant.AD_LIST_MODEL_SQL, DayEnum.DAY_07.getName(), DataTypeEnum.GAME.getType());
        String gameIndex = MessageFormat.format(ESIndexConstant.AD_LIST_DATE_INDEX, DataTypeEnum.GAME.getName(), DayEnum.DAY_07.getName(), date);
        Dataset<Row> gameSet = spark.sql(gameSQL);
        gameSet.write().format("org.elasticsearch.spark.sql").mode(SaveMode.Overwrite).option("es.mapping.id", "id").save(gameIndex);

        String appSQL = MessageFormat.format(SQLConstant.AD_LIST_MODEL_SQL, DayEnum.DAY_07.getName(), DataTypeEnum.APP.getType());
        String appIndex = MessageFormat.format(ESIndexConstant.AD_LIST_DATE_INDEX, DataTypeEnum.APP.getName(), DayEnum.DAY_07.getName(), date);
        Dataset<Row> appSet = spark.sql(appSQL);
        appSet.write().format("org.elasticsearch.spark.sql").mode(SaveMode.Overwrite).option("es.mapping.id", "id").save(appIndex);
    }

    private static void syncData(SparkSession spark) {
        String gameSQL = MessageFormat.format(SQLConstant.AD_LIST_MODEL_SQL, DayEnum.DAY_07.getName(), DataTypeEnum.GAME.getType());
        String gameIndex = MessageFormat.format(ESIndexConstant.AD_LIST_INDEX, DataTypeEnum.GAME.getName(), DayEnum.DAY_07.getName());
        Dataset<Row> gameSet = spark.sql(gameSQL);
        gameSet.write().format("org.elasticsearch.spark.sql").mode(SaveMode.Overwrite).option("es.mapping.id", "id").save(gameIndex);

        String appSQL = MessageFormat.format(SQLConstant.AD_LIST_MODEL_SQL, DayEnum.DAY_07.getName(), DataTypeEnum.APP.getType());
        String appIndex = MessageFormat.format(ESIndexConstant.AD_LIST_INDEX, DataTypeEnum.APP.getName(), DayEnum.DAY_07.getName());
        Dataset<Row> appSet = spark.sql(appSQL);
        appSet.write().format("org.elasticsearch.spark.sql").mode(SaveMode.Overwrite).option("es.mapping.id", "id").save(appIndex);
    }
}
src/main/java/com/lining/code/spark/ad/model/AdListModel30Action.java (new file, mode 100755)

package com.lining.code.spark.ad.model;

import com.lining.code.constant.ESIndexConstant;
import com.lining.code.constant.SQLConstant;
import com.lining.code.enums.DataTypeEnum;
import com.lining.code.enums.DayEnum;
import com.lining.code.util.DateUtil;
import com.lining.code.util.SparkSessionUtil;
import org.apache.spark.sql.Dataset;
import org.apache.spark.sql.Row;
import org.apache.spark.sql.SaveMode;
import org.apache.spark.sql.SparkSession;

import java.text.MessageFormat;

/**
 * @author lining
 * @date 2022/5/11 18:12
 * Description: Ad (monetization) list model data sync, 30-day window.
 */
public class AdListModel30Action {

    public static void main(String[] args) {
        SparkSession spark = SparkSessionUtil.getSparkSession("AdListModel30Action");
        String date = DateUtil.getCurrentDate();
        if (args != null && args.length > 0) {
            date = args[0];
            syncDataByDate(spark, date);
        } else {
            syncData(spark);
        }
        spark.stop();
    }

    private static void syncDataByDate(SparkSession spark, String date) {
        String gameSQL = MessageFormat.format(SQLConstant.AD_LIST_MODEL_SQL, DayEnum.DAY_30.getName(), DataTypeEnum.GAME.getType());
        String gameIndex = MessageFormat.format(ESIndexConstant.AD_LIST_DATE_INDEX, DataTypeEnum.GAME.getName(), DayEnum.DAY_30.getName(), date);
        Dataset<Row> gameSet = spark.sql(gameSQL);
        gameSet.write().format("org.elasticsearch.spark.sql").mode(SaveMode.Overwrite).option("es.mapping.id", "id").save(gameIndex);

        String appSQL = MessageFormat.format(SQLConstant.AD_LIST_MODEL_SQL, DayEnum.DAY_30.getName(), DataTypeEnum.APP.getType());
        String appIndex = MessageFormat.format(ESIndexConstant.AD_LIST_DATE_INDEX, DataTypeEnum.APP.getName(), DayEnum.DAY_30.getName(), date);
        Dataset<Row> appSet = spark.sql(appSQL);
        appSet.write().format("org.elasticsearch.spark.sql").mode(SaveMode.Overwrite).option("es.mapping.id", "id").save(appIndex);
    }

    private static void syncData(SparkSession spark) {
        String gameSQL = MessageFormat.format(SQLConstant.AD_LIST_MODEL_SQL, DayEnum.DAY_30.getName(), DataTypeEnum.GAME.getType());
        String gameIndex = MessageFormat.format(ESIndexConstant.AD_LIST_INDEX, DataTypeEnum.GAME.getName(), DayEnum.DAY_30.getName());
        Dataset<Row> gameSet = spark.sql(gameSQL);
        gameSet.write().format("org.elasticsearch.spark.sql").mode(SaveMode.Overwrite).option("es.mapping.id", "id").save(gameIndex);

        String appSQL = MessageFormat.format(SQLConstant.AD_LIST_MODEL_SQL, DayEnum.DAY_30.getName(), DataTypeEnum.APP.getType());
        String appIndex = MessageFormat.format(ESIndexConstant.AD_LIST_INDEX, DataTypeEnum.APP.getName(), DayEnum.DAY_30.getName());
        Dataset<Row> appSet = spark.sql(appSQL);
        appSet.write().format("org.elasticsearch.spark.sql").mode(SaveMode.Overwrite).option("es.mapping.id", "id").save(appIndex);
    }
}
src/main/java/com/lining/code/spark/company/model/CompanyListModel03Action.java (new file, mode 100755)

package com.lining.code.spark.company.model;

import com.lining.code.constant.ESIndexConstant;
import com.lining.code.constant.SQLConstant;
import com.lining.code.enums.DataTypeEnum;
import com.lining.code.enums.DayEnum;
import com.lining.code.util.DateUtil;
import com.lining.code.util.PropertiesUtil;
import com.lining.code.util.SparkSessionUtil;
import org.apache.spark.sql.Dataset;
import org.apache.spark.sql.Row;
import org.apache.spark.sql.SaveMode;
import org.apache.spark.sql.SparkSession;

import java.text.MessageFormat;

/**
 * @author lining
 * @date 2022/5/9 22:30
 * Description: Company list data sync, 3-day window.
 */
public class CompanyListModel03Action {

    public static void main(String[] args) {
        SparkSession spark = SparkSessionUtil.getSparkSession("CompanyListModel03Action");
        String date = DateUtil.getCurrentDate();
        if (args != null && args.length > 0) {
            date = args[0];
            syncDataByDate(spark, date);
        } else {
            syncData(spark);
        }
        spark.stop();
    }

    private static void syncDataByDate(SparkSession spark, String date) {
        String gameSQL = MessageFormat.format(SQLConstant.COMPANY_LIST_MODEL_SQL, DayEnum.DAY_03.getName(), DataTypeEnum.GAME.getType());
        String gameIndex = MessageFormat.format(ESIndexConstant.COMPANY_LIST_DATE_INDEX, DataTypeEnum.GAME.getName(), DayEnum.DAY_03.getName(), date);
        Dataset<Row> gameSet = spark.sql(gameSQL);
        gameSet.write().format("org.elasticsearch.spark.sql").mode(SaveMode.Overwrite).option("es.mapping.id", "id").save(gameIndex);

        String appSQL = MessageFormat.format(SQLConstant.COMPANY_LIST_MODEL_SQL, DayEnum.DAY_03.getName(), DataTypeEnum.APP.getType());
        String appIndex = MessageFormat.format(ESIndexConstant.COMPANY_LIST_DATE_INDEX, DataTypeEnum.APP.getName(), DayEnum.DAY_03.getName(), date);
        Dataset<Row> appSet = spark.sql(appSQL);
        appSet.write().format("org.elasticsearch.spark.sql").mode(SaveMode.Overwrite).option("es.mapping.id", "id").save(appIndex);
    }

    private static void syncData(SparkSession spark) {
        String gameSQL = MessageFormat.format(SQLConstant.COMPANY_LIST_MODEL_SQL, DayEnum.DAY_03.getName(), DataTypeEnum.GAME.getType());
        String gameIndex = MessageFormat.format(ESIndexConstant.COMPANY_LIST_INDEX, DataTypeEnum.GAME.getName(), DayEnum.DAY_03.getName());
        Dataset<Row> gameSet = spark.sql(gameSQL);
        gameSet.write().format("org.elasticsearch.spark.sql").mode(SaveMode.Overwrite).option("es.mapping.id", "id").save(gameIndex);

        String appSQL = MessageFormat.format(SQLConstant.COMPANY_LIST_MODEL_SQL, DayEnum.DAY_03.getName(), DataTypeEnum.APP.getType());
        String appIndex = MessageFormat.format(ESIndexConstant.COMPANY_LIST_INDEX, DataTypeEnum.APP.getName(), DayEnum.DAY_03.getName());
        Dataset<Row> appSet = spark.sql(appSQL);
        appSet.write().format("org.elasticsearch.spark.sql").mode(SaveMode.Overwrite).option("es.mapping.id", "id").save(appIndex);
    }
}
src/main/java/com/lining/code/spark/company/model/CompanyListModel07Action.java (new file, mode 100755)

package com.lining.code.spark.company.model;

import com.lining.code.constant.ESIndexConstant;
import com.lining.code.constant.SQLConstant;
import com.lining.code.enums.DataTypeEnum;
import com.lining.code.enums.DayEnum;
import com.lining.code.util.DateUtil;
import com.lining.code.util.SparkSessionUtil;
import org.apache.spark.sql.Dataset;
import org.apache.spark.sql.Row;
import org.apache.spark.sql.SaveMode;
import org.apache.spark.sql.SparkSession;

import java.text.MessageFormat;

/**
 * @author lining
 * @date 2022/5/9 22:30
 * Description: Company list data sync, 7-day window.
 */
public class CompanyListModel07Action {

    public static void main(String[] args) {
        SparkSession spark = SparkSessionUtil.getSparkSession("CompanyListModel07Action");
        String date = DateUtil.getCurrentDate();
        if (args != null && args.length > 0) {
            date = args[0];
            syncDataByDate(spark, date);
        } else {
            syncData(spark);
        }
        spark.stop();
    }

    private static void syncDataByDate(SparkSession spark, String date) {
        String gameSQL = MessageFormat.format(SQLConstant.COMPANY_LIST_MODEL_SQL, DayEnum.DAY_07.getName(), DataTypeEnum.GAME.getType());
        String gameIndex = MessageFormat.format(ESIndexConstant.COMPANY_LIST_DATE_INDEX, DataTypeEnum.GAME.getName(), DayEnum.DAY_07.getName(), date);
        Dataset<Row> gameSet = spark.sql(gameSQL);
        gameSet.write().format("org.elasticsearch.spark.sql").mode(SaveMode.Overwrite).option("es.mapping.id", "id").save(gameIndex);

        String appSQL = MessageFormat.format(SQLConstant.COMPANY_LIST_MODEL_SQL, DayEnum.DAY_07.getName(), DataTypeEnum.APP.getType());
        String appIndex = MessageFormat.format(ESIndexConstant.COMPANY_LIST_DATE_INDEX, DataTypeEnum.APP.getName(), DayEnum.DAY_07.getName(), date);
        Dataset<Row> appSet = spark.sql(appSQL);
        appSet.write().format("org.elasticsearch.spark.sql").mode(SaveMode.Overwrite).option("es.mapping.id", "id").save(appIndex);
    }

    private static void syncData(SparkSession spark) {
        String gameSQL = MessageFormat.format(SQLConstant.COMPANY_LIST_MODEL_SQL, DayEnum.DAY_07.getName(), DataTypeEnum.GAME.getType());
        String gameIndex = MessageFormat.format(ESIndexConstant.COMPANY_LIST_INDEX, DataTypeEnum.GAME.getName(), DayEnum.DAY_07.getName());
        Dataset<Row> gameSet = spark.sql(gameSQL);
        gameSet.write().format("org.elasticsearch.spark.sql").mode(SaveMode.Overwrite).option("es.mapping.id", "id").save(gameIndex);

        String appSQL = MessageFormat.format(SQLConstant.COMPANY_LIST_MODEL_SQL, DayEnum.DAY_07.getName(), DataTypeEnum.APP.getType());
        String appIndex = MessageFormat.format(ESIndexConstant.COMPANY_LIST_INDEX, DataTypeEnum.APP.getName(), DayEnum.DAY_07.getName());
        Dataset<Row> appSet = spark.sql(appSQL);
        appSet.write().format("org.elasticsearch.spark.sql").mode(SaveMode.Overwrite).option("es.mapping.id", "id").save(appIndex);
    }
}
src/main/java/com/lining/code/spark/company/model/CompanyListModel30Action.java (new file, mode 100755)

package com.lining.code.spark.company.model;

import com.lining.code.constant.ESIndexConstant;
import com.lining.code.constant.SQLConstant;
import com.lining.code.enums.DataTypeEnum;
import com.lining.code.enums.DayEnum;
import com.lining.code.util.DateUtil;
import com.lining.code.util.SparkSessionUtil;
import org.apache.spark.sql.Dataset;
import org.apache.spark.sql.Row;
import org.apache.spark.sql.SaveMode;
import org.apache.spark.sql.SparkSession;

import java.text.MessageFormat;

/**
 * @author lining
 * @date 2022/5/9 22:30
 * Description: Company list data sync, 30-day window.
 */
public class CompanyListModel30Action {

    public static void main(String[] args) {
        SparkSession spark = SparkSessionUtil.getSparkSession("CompanyListModel30Action");
        String date = DateUtil.getCurrentDate();
        if (args != null && args.length > 0) {
            date = args[0];
            syncDataByDate(spark, date);
        } else {
            syncData(spark);
        }
        spark.stop();
    }

    private static void syncDataByDate(SparkSession spark, String date) {
        String gameSQL = MessageFormat.format(SQLConstant.COMPANY_LIST_MODEL_SQL, DayEnum.DAY_30.getName(), DataTypeEnum.GAME.getType());
        String gameIndex = MessageFormat.format(ESIndexConstant.COMPANY_LIST_DATE_INDEX, DataTypeEnum.GAME.getName(), DayEnum.DAY_30.getName(), date);
        Dataset<Row> gameSet = spark.sql(gameSQL);
        gameSet.write().format("org.elasticsearch.spark.sql").mode(SaveMode.Overwrite).option("es.mapping.id", "id").save(gameIndex);

        String appSQL = MessageFormat.format(SQLConstant.COMPANY_LIST_MODEL_SQL, DayEnum.DAY_30.getName(), DataTypeEnum.APP.getType());
        String appIndex = MessageFormat.format(ESIndexConstant.COMPANY_LIST_DATE_INDEX, DataTypeEnum.APP.getName(), DayEnum.DAY_30.getName(), date);
        Dataset<Row> appSet = spark.sql(appSQL);
        appSet.write().format("org.elasticsearch.spark.sql").mode(SaveMode.Overwrite).option("es.mapping.id", "id").save(appIndex);
    }

    private static void syncData(SparkSession spark) {
        String gameSQL = MessageFormat.format(SQLConstant.COMPANY_LIST_MODEL_SQL, DayEnum.DAY_30.getName(), DataTypeEnum.GAME.getType());
        String gameIndex = MessageFormat.format(ESIndexConstant.COMPANY_LIST_INDEX, DataTypeEnum.GAME.getName(), DayEnum.DAY_30.getName());
        Dataset<Row> gameSet = spark.sql(gameSQL);
        gameSet.write().format("org.elasticsearch.spark.sql").mode(SaveMode.Overwrite).option("es.mapping.id", "id").save(gameIndex);

        String appSQL = MessageFormat.format(SQLConstant.COMPANY_LIST_MODEL_SQL, DayEnum.DAY_30.getName(), DataTypeEnum.APP.getType());
        String appIndex = MessageFormat.format(ESIndexConstant.COMPANY_LIST_INDEX, DataTypeEnum.APP.getName(), DayEnum.DAY_30.getName());
        Dataset<Row> appSet = spark.sql(appSQL);
        appSet.write().format("org.elasticsearch.spark.sql").mode(SaveMode.Overwrite).option("es.mapping.id", "id").save(appIndex);
    }
}
src/main/java/com/lining/code/spark/company/model/CompanyModelAction.java (new file, mode 100755)

package com.lining.code.spark.company.model;

import com.lining.code.constant.ESIndexConstant;
import com.lining.code.constant.SQLConstant;
import com.lining.code.enums.DataFlowEnum;
import com.lining.code.enums.DataTypeEnum;
import com.lining.code.util.SparkSessionUtil;
import org.apache.spark.sql.Dataset;
import org.apache.spark.sql.Row;
import org.apache.spark.sql.SaveMode;
import org.apache.spark.sql.SparkSession;

import java.text.MessageFormat;

/**
 * @author lining
 * @date 2022/5/11 18:03
 * Description: Company detail data sync.
 */
public class CompanyModelAction {

    public static void main(String[] args) {
        SparkSession spark = SparkSessionUtil.getSparkSession("CompanyModelAction");
        String type = args[0];
        DataFlowEnum dataFlow = DataFlowEnum.getDataFlow(type);

        String gameSQL = MessageFormat.format(SQLConstant.COMPANY_MODEL_SQL, DataTypeEnum.GAME.getType(), dataFlow.getType());
        String gameIndex = MessageFormat.format(ESIndexConstant.COMPANY_INDEX, DataTypeEnum.GAME.getName());
        Dataset<Row> game = spark.sql(gameSQL);
        game.write().format("org.elasticsearch.spark.sql").mode(SaveMode.Append).option("es.mapping.id", "id").save(gameIndex);

        String appSQL = MessageFormat.format(SQLConstant.COMPANY_MODEL_SQL, DataTypeEnum.APP.getType(), dataFlow.getType());
        String appIndex = MessageFormat.format(ESIndexConstant.COMPANY_INDEX, DataTypeEnum.APP.getName());
        Dataset<Row> app = spark.sql(appSQL);
        app.write().format("org.elasticsearch.spark.sql").mode(SaveMode.Append).option("es.mapping.id", "id").save(appIndex);

        spark.stop();
    }
}
src/main/java/com/lining/code/spark/country/model/CountryListModel03Action.java (new file, mode 100755)

package com.lining.code.spark.country.model;

import com.lining.code.constant.ESIndexConstant;
import com.lining.code.constant.SQLConstant;
import com.lining.code.enums.DataTypeEnum;
import com.lining.code.enums.DayEnum;
import com.lining.code.util.DateUtil;
import com.lining.code.util.SparkSessionUtil;
import org.apache.spark.sql.Dataset;
import org.apache.spark.sql.Row;
import org.apache.spark.sql.SaveMode;
import org.apache.spark.sql.SparkSession;

import java.text.MessageFormat;

/**
 * @author lining
 * @date 2022/5/9 22:30
 * Description: Country list data sync, 3-day window.
 */
public class CountryListModel03Action {

    public static void main(String[] args) {
        SparkSession spark = SparkSessionUtil.getSparkSession("CountryListModel03Action");
        String date = DateUtil.getCurrentDate();
        if (args != null && args.length > 0) {
            date = args[0];
            syncDataByDate(spark, date);
        } else {
            syncData(spark);
        }
        spark.stop();
    }

    private static void syncDataByDate(SparkSession spark, String date) {
        String gameSQL = MessageFormat.format(SQLConstant.COUNTRY_LIST_MODEL_SQL, DayEnum.DAY_03.getName(), DataTypeEnum.GAME.getType());
        String gameIndex = MessageFormat.format(ESIndexConstant.COUNTRY_LIST_DATE_INDEX, DataTypeEnum.GAME.getName(), DayEnum.DAY_03.getName(), date);
        Dataset<Row> gameSet = spark.sql(gameSQL);
        gameSet.write().format("org.elasticsearch.spark.sql").mode(SaveMode.Overwrite).option("es.mapping.id", "id").save(gameIndex);

        String appSQL = MessageFormat.format(SQLConstant.COUNTRY_LIST_MODEL_SQL, DayEnum.DAY_03.getName(), DataTypeEnum.APP.getType());
        String appIndex = MessageFormat.format(ESIndexConstant.COUNTRY_LIST_DATE_INDEX, DataTypeEnum.APP.getName(), DayEnum.DAY_03.getName(), date);
        Dataset<Row> appSet = spark.sql(appSQL);
        appSet.write().format("org.elasticsearch.spark.sql").mode(SaveMode.Overwrite).option("es.mapping.id", "id").save(appIndex);
    }

    private static void syncData(SparkSession spark) {
        String gameSQL = MessageFormat.format(SQLConstant.COUNTRY_LIST_MODEL_SQL, DayEnum.DAY_03.getName(), DataTypeEnum.GAME.getType());
        String gameIndex = MessageFormat.format(ESIndexConstant.COUNTRY_LIST_INDEX, DataTypeEnum.GAME.getName(), DayEnum.DAY_03.getName());
        Dataset<Row> gameSet = spark.sql(gameSQL);
        gameSet.write().format("org.elasticsearch.spark.sql").mode(SaveMode.Overwrite).option("es.mapping.id", "id").save(gameIndex);

        String appSQL = MessageFormat.format(SQLConstant.COUNTRY_LIST_MODEL_SQL, DayEnum.DAY_03.getName(), DataTypeEnum.APP.getType());
        String appIndex = MessageFormat.format(ESIndexConstant.COUNTRY_LIST_INDEX, DataTypeEnum.APP.getName(), DayEnum.DAY_03.getName());
        Dataset<Row> appSet = spark.sql(appSQL);
        appSet.write().format("org.elasticsearch.spark.sql").mode(SaveMode.Overwrite).option("es.mapping.id", "id").save(appIndex);
    }
}
src/main/java/com/lining/code/spark/country/model/CountryListModel07Action.java (new file, mode 100755)

package com.lining.code.spark.country.model;

import com.lining.code.constant.ESIndexConstant;
import com.lining.code.constant.SQLConstant;
import com.lining.code.enums.DataTypeEnum;
import com.lining.code.enums.DayEnum;
import com.lining.code.util.DateUtil;
import com.lining.code.util.SparkSessionUtil;
import org.apache.spark.sql.Dataset;
import org.apache.spark.sql.Row;
import org.apache.spark.sql.SaveMode;
import org.apache.spark.sql.SparkSession;

import java.text.MessageFormat;

/**
 * @author lining
 * @date 2022/5/9 22:30
 * Description: Country list data sync, 7-day window.
 */
public class CountryListModel07Action {

    public static void main(String[] args) {
        SparkSession spark = SparkSessionUtil.getSparkSession("CountryListModel07Action");
        String date = DateUtil.getCurrentDate();
        if (args != null && args.length > 0) {
            date = args[0];
            syncDataByDate(spark, date);
        } else {
            syncData(spark);
        }
        spark.stop();
    }

    private static void syncDataByDate(SparkSession spark, String date) {
        String gameSQL = MessageFormat.format(SQLConstant.COUNTRY_LIST_MODEL_SQL, DayEnum.DAY_07.getName(), DataTypeEnum.GAME.getType());
        String gameIndex = MessageFormat.format(ESIndexConstant.COUNTRY_LIST_DATE_INDEX, DataTypeEnum.GAME.getName(), DayEnum.DAY_07.getName(), date);
        Dataset<Row> gameSet = spark.sql(gameSQL);
        gameSet.write().format("org.elasticsearch.spark.sql").mode(SaveMode.Overwrite).option("es.mapping.id", "id").save(gameIndex);

        String appSQL = MessageFormat.format(SQLConstant.COUNTRY_LIST_MODEL_SQL, DayEnum.DAY_07.getName(), DataTypeEnum.APP.getType());
        String appIndex = MessageFormat.format(ESIndexConstant.COUNTRY_LIST_DATE_INDEX, DataTypeEnum.APP.getName(), DayEnum.DAY_07.getName(), date);
        Dataset<Row> appSet = spark.sql(appSQL);
        appSet.write().format("org.elasticsearch.spark.sql").mode(SaveMode.Overwrite).option("es.mapping.id", "id").save(appIndex);
    }

    private static void syncData(SparkSession spark) {
        String gameSQL = MessageFormat.format(SQLConstant.COUNTRY_LIST_MODEL_SQL, DayEnum.DAY_07.getName(), DataTypeEnum.GAME.getType());
        String gameIndex = MessageFormat.format(ESIndexConstant.COUNTRY_LIST_INDEX, DataTypeEnum.GAME.getName(), DayEnum.DAY_07.getName());
        Dataset<Row> gameSet = spark.sql(gameSQL);
        gameSet.write().format("org.elasticsearch.spark.sql").mode(SaveMode.Overwrite).option("es.mapping.id", "id").save(gameIndex);

        String appSQL = MessageFormat.format(SQLConstant.COUNTRY_LIST_MODEL_SQL, DayEnum.DAY_07.getName(), DataTypeEnum.APP.getType());
        String appIndex = MessageFormat.format(ESIndexConstant.COUNTRY_LIST_INDEX, DataTypeEnum.APP.getName(), DayEnum.DAY_07.getName());
        Dataset<Row> appSet = spark.sql(appSQL);
        appSet.write().format("org.elasticsearch.spark.sql").mode(SaveMode.Overwrite).option("es.mapping.id", "id").save(appIndex);
    }
}
src/main/java/com/lining/code/spark/country/model/CountryListModel30Action.java (new file, mode 100755)

package com.lining.code.spark.country.model;

import com.lining.code.constant.ESIndexConstant;
import com.lining.code.constant.SQLConstant;
import com.lining.code.enums.DataTypeEnum;
import com.lining.code.enums.DayEnum;
import com.lining.code.util.DateUtil;
import com.lining.code.util.SparkSessionUtil;
import org.apache.spark.sql.Dataset;
import org.apache.spark.sql.Row;
import org.apache.spark.sql.SaveMode;
import org.apache.spark.sql.SparkSession;

import java.text.MessageFormat;

/**
 * @author lining
 * @date 2022/5/9 22:30
 * Description: Country list data sync, 30-day window.
 */
public class CountryListModel30Action {

    public static void main(String[] args) {
        SparkSession spark = SparkSessionUtil.getSparkSession("CountryListModel30Action");
        String date = DateUtil.getCurrentDate();
        if (args != null && args.length > 0) {
            date = args[0];
            syncDataByDate(spark, date);
        } else {
            syncData(spark);
        }
        spark.stop();
    }

    private static void syncDataByDate(SparkSession spark, String date) {
        String gameSQL = MessageFormat.format(SQLConstant.COUNTRY_LIST_MODEL_SQL, DayEnum.DAY_30.getName(), DataTypeEnum.GAME.getType());
        String gameIndex = MessageFormat.format(ESIndexConstant.COUNTRY_LIST_DATE_INDEX, DataTypeEnum.GAME.getName(), DayEnum.DAY_30.getName(), date);
        Dataset<Row> gameSet = spark.sql(gameSQL);
        gameSet.write().format("org.elasticsearch.spark.sql").mode(SaveMode.Overwrite).option("es.mapping.id", "id").save(gameIndex);

        String appSQL = MessageFormat.format(SQLConstant.COUNTRY_LIST_MODEL_SQL, DayEnum.DAY_30.getName(), DataTypeEnum.APP.getType());
        String appIndex = MessageFormat.format(ESIndexConstant.COUNTRY_LIST_DATE_INDEX, DataTypeEnum.APP.getName(), DayEnum.DAY_30.getName(), date);
        Dataset<Row> appSet = spark.sql(appSQL);
        appSet.write().format("org.elasticsearch.spark.sql").mode(SaveMode.Overwrite).option("es.mapping.id", "id").save(appIndex);
    }

    private static void syncData(SparkSession spark) {
        String gameSQL = MessageFormat.format(SQLConstant.COUNTRY_LIST_MODEL_SQL, DayEnum.DAY_30.getName(), DataTypeEnum.GAME.getType());
        String gameIndex = MessageFormat.format(ESIndexConstant.COUNTRY_LIST_INDEX, DataTypeEnum.GAME.getName(), DayEnum.DAY_30.getName());
        Dataset<Row> gameSet = spark.sql(gameSQL);
        gameSet.write().format("org.elasticsearch.spark.sql").mode(SaveMode.Overwrite).option("es.mapping.id", "id").save(gameIndex);

        String appSQL = MessageFormat.format(SQLConstant.COUNTRY_LIST_MODEL_SQL, DayEnum.DAY_30.getName(), DataTypeEnum.APP.getType());
        String appIndex = MessageFormat.format(ESIndexConstant.COUNTRY_LIST_INDEX, DataTypeEnum.APP.getName(), DayEnum.DAY_30.getName());
        Dataset<Row> appSet = spark.sql(appSQL);
        appSet.write().format("org.elasticsearch.spark.sql").mode(SaveMode.Overwrite).option("es.mapping.id", "id").save(appIndex);
    }
}
src/main/java/com/lining/code/spark/document/model/DocumentModelAction.java (new file, mode 100755)

package com.lining.code.spark.document.model;

import com.lining.code.constant.ESIndexConstant;
import com.lining.code.constant.SQLConstant;
import com.lining.code.enums.DataFlowEnum;
import com.lining.code.enums.DataTypeEnum;
import com.lining.code.util.SparkSessionUtil;
import org.apache.spark.sql.Dataset;
import org.apache.spark.sql.Row;
import org.apache.spark.sql.SaveMode;
import org.apache.spark.sql.SparkSession;

import java.text.MessageFormat;

/**
 * @author lining
 * @date 2022/5/8 16:31
 * Description: Ad copy (document) model data sync.
 */
public class DocumentModelAction {

    public static void main(String[] args) {
        SparkSession sparkSession = SparkSessionUtil.getSparkSession("DocumentModelAction");
        String type = args[0];
        DataFlowEnum dataFlow = DataFlowEnum.getDataFlow(type);

        String gameSQL = MessageFormat.format(SQLConstant.DOCUMENT_MODEL_SQL, DataTypeEnum.GAME.getType(), dataFlow.getType());
        Dataset<Row> gameSet = sparkSession.sql(gameSQL);
        String gameIndex = MessageFormat.format(ESIndexConstant.DOCUMENT_INDEX, DataTypeEnum.GAME.getName());
        gameSet.write().format("org.elasticsearch.spark.sql").mode(SaveMode.Append).option("es.mapping.id", "id").save(gameIndex);

        String appSQL = MessageFormat.format(SQLConstant.DOCUMENT_MODEL_SQL, DataTypeEnum.APP.getType(), dataFlow.getType());
        Dataset<Row> appSet = sparkSession.sql(appSQL);
        String appIndex = MessageFormat.format(ESIndexConstant.DOCUMENT_INDEX, DataTypeEnum.APP.getName());
        appSet.write().format("org.elasticsearch.spark.sql").mode(SaveMode.Append).option("es.mapping.id", "id").save(appIndex);

        sparkSession.stop();
    }
}
src/main/java/com/lining/code/spark/material/model/MaterialModelAction.java (new file, mode 100755)

package com.lining.code.spark.material.model;

import com.lining.code.constant.ESIndexConstant;
import com.lining.code.constant.SQLConstant;
import com.lining.code.enums.DataFlowEnum;
import com.lining.code.enums.DataTypeEnum;
import com.lining.code.util.SparkSessionUtil;
import org.apache.spark.sql.Dataset;
import org.apache.spark.sql.Row;
import org.apache.spark.sql.SaveMode;
import org.apache.spark.sql.SparkSession;

import java.text.MessageFormat;

/**
 * @author lining
 * @date 2022/5/9 22:30
 * Description: Material model data sync.
 */
public class MaterialModelAction {

    public static void main(String[] args) {
        SparkSession sparkSession = SparkSessionUtil.getSparkSession("MaterialModelAction");
        String type = args[0];
        DataFlowEnum dataFlow = DataFlowEnum.getDataFlow(type);

        String gameSQL = MessageFormat.format(SQLConstant.MATERIAL_MODEL_SQL, DataTypeEnum.GAME.getType(), dataFlow.getType());
        Dataset<Row> gameSet = sparkSession.sql(gameSQL);
        String gameIndex = MessageFormat.format(ESIndexConstant.MATERIAL_INDEX, DataTypeEnum.GAME.getName());
        gameSet.write().format("org.elasticsearch.spark.sql").mode(SaveMode.Append).option("es.mapping.id", "id").save(gameIndex);

        String appSQL = MessageFormat.format(SQLConstant.MATERIAL_MODEL_SQL, DataTypeEnum.APP.getType(), dataFlow.getType());
        Dataset<Row> appSet = sparkSession.sql(appSQL);
        String appIndex = MessageFormat.format(ESIndexConstant.MATERIAL_INDEX, DataTypeEnum.APP.getName());
        appSet.write().format("org.elasticsearch.spark.sql").mode(SaveMode.Append).option("es.mapping.id", "id").save(appIndex);

        sparkSession.stop();
    }
}
src/main/java/com/lining/code/spark/material/model/PreplayMaterialModelAction.java (new file, mode 100755)

package com.lining.code.spark.material.model;

import com.lining.code.constant.ESIndexConstant;
import com.lining.code.constant.SQLConstant;
import com.lining.code.enums.DataFlowEnum;
import com.lining.code.enums.DataTypeEnum;
import com.lining.code.util.SparkSessionUtil;
import org.apache.spark.sql.Dataset;
import org.apache.spark.sql.Row;
import org.apache.spark.sql.SaveMode;
import org.apache.spark.sql.SparkSession;

import java.text.MessageFormat;

/**
 * @author lining
 * @date 2022/5/11 18:07
 * Description: Playable ad model data sync.
 */
public class PreplayMaterialModelAction {

    public static void main(String[] args) {
        SparkSession sparkSession = SparkSessionUtil.getSparkSession("PreplayMaterialModelAction");
        String type = args[0];
        DataFlowEnum dataFlow = DataFlowEnum.getDataFlow(type);

        String gameSQL = MessageFormat.format(SQLConstant.PREPLAY_MATERIAL_MODEL_SQL, DataTypeEnum.GAME.getType(), dataFlow.getType());
        Dataset<Row> gameSet = sparkSession.sql(gameSQL);
        String gameIndex = MessageFormat.format(ESIndexConstant.PREPLAY_MATERIAL_INDEX, DataTypeEnum.GAME.getName());
        gameSet.write().format("org.elasticsearch.spark.sql").mode(SaveMode.Append).option("es.mapping.id", "id").save(gameIndex);

        /*String appSQL = MessageFormat.format(SQLConstant.PREPLAY_MATERIAL_MODEL_SQL,
                DataTypeEnum.APP.getType(), dataFlow.getType());
        Dataset<Row> appSet = sparkSession.sql(appSQL);
        String appIndex = MessageFormat.format(ESIndexConstant.PREPLAY_MATERIAL_INDEX,
                DataTypeEnum.APP.getName());
        appSet.write()
                .format("org.elasticsearch.spark.sql")
                .mode(SaveMode.Append)
                .option("es.mapping.id", "id")
                .save(appIndex);*/

        sparkSession.stop();
    }
}
src/main/java/com/lining/code/spark/product/model/ProductListModel03Action.java
0 → 100755
View file @
705f3251
package com.lining.code.spark.product.model;

import com.lining.code.constant.ESIndexConstant;
import com.lining.code.constant.SQLConstant;
import com.lining.code.enums.DataTypeEnum;
import com.lining.code.enums.DayEnum;
import com.lining.code.util.DateUtil;
import com.lining.code.util.SparkSessionUtil;
import org.apache.spark.sql.Dataset;
import org.apache.spark.sql.Row;
import org.apache.spark.sql.SaveMode;
import org.apache.spark.sql.SparkSession;

import java.text.MessageFormat;

/**
 * @ Author :lining.
 * @ Date :Created in 22:30 2022/5/9
 * @ Description: Product list data sync (DAY_03 window)
 */
public class ProductListModel03Action {

    public static void main(String[] args) {
        SparkSession spark = SparkSessionUtil.getSparkSession("ProductListModel03Action");
        String date = DateUtil.getCurrentDate();
        // spark.udf().register("DateFormatUdf", new MyUdf(), DataTypes.StringType);
        if (args != null && args.length > 0) {
            date = args[0];
            syncDataByDate(spark, date);
        } else {
            syncData(spark);
        }
        spark.stop();
    }

    public static void syncData(SparkSession spark) {
        // game products: overwrite the rolling DAY_03 index
        String gameSQL = MessageFormat.format(SQLConstant.PRODUCT_LIST_MODEL_SQL,
                DayEnum.DAY_03.getName(), DataTypeEnum.GAME.getType());
        String gameIndex = MessageFormat.format(ESIndexConstant.PRODUCT_LIST_INDEX,
                DataTypeEnum.GAME.getName(), DayEnum.DAY_03.getName());
        Dataset<Row> gameSet = spark.sql(gameSQL);
        gameSet.write()
                .format("org.elasticsearch.spark.sql")
                .mode(SaveMode.Overwrite)
                .option("es.mapping.id", "id")
                .save(gameIndex);

        // app products: same query parameterized for the APP data type
        String appSQL = MessageFormat.format(SQLConstant.PRODUCT_LIST_MODEL_SQL,
                DayEnum.DAY_03.getName(), DataTypeEnum.APP.getType());
        String appIndex = MessageFormat.format(ESIndexConstant.PRODUCT_LIST_INDEX,
                DataTypeEnum.APP.getName(), DayEnum.DAY_03.getName());
        Dataset<Row> appSet = spark.sql(appSQL);
        appSet.write()
                .format("org.elasticsearch.spark.sql")
                .mode(SaveMode.Overwrite)
                .option("es.mapping.id", "id")
                .save(appIndex);
    }

    public static void syncDataByDate(SparkSession spark, String date) {
        // game products: overwrite a date-suffixed index
        String gameSQL = MessageFormat.format(SQLConstant.PRODUCT_LIST_MODEL_SQL,
                DayEnum.DAY_03.getName(), DataTypeEnum.GAME.getType());
        String gameIndex = MessageFormat.format(ESIndexConstant.PRODUCT_LIST_DATE_INDEX,
                DataTypeEnum.GAME.getName(), DayEnum.DAY_03.getName(), date);
        Dataset<Row> gameSet = spark.sql(gameSQL);
        gameSet.write()
                .format("org.elasticsearch.spark.sql")
                .mode(SaveMode.Overwrite)
                .option("es.mapping.id", "id")
                .save(gameIndex);

        // app products: same query parameterized for the APP data type
        String appSQL = MessageFormat.format(SQLConstant.PRODUCT_LIST_MODEL_SQL,
                DayEnum.DAY_03.getName(), DataTypeEnum.APP.getType());
        String appIndex = MessageFormat.format(ESIndexConstant.PRODUCT_LIST_DATE_INDEX,
                DataTypeEnum.APP.getName(), DayEnum.DAY_03.getName(), date);
        Dataset<Row> appSet = spark.sql(appSQL);
        appSet.write()
                .format("org.elasticsearch.spark.sql")
                .mode(SaveMode.Overwrite)
                .option("es.mapping.id", "id")
                .save(appIndex);
    }
}
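A note on the date-suffixed index names used by syncDataByDate: the actual patterns live in ESIndexConstant and are not shown in this diff, so the pattern below is purely illustrative. It only demonstrates how MessageFormat fills the data-type name, the day window, and the date into the index name. A minimal sketch, assuming a hypothetical pattern and example getName() values:

import java.text.MessageFormat;

public class IndexNameExample {
    public static void main(String[] args) {
        // Hypothetical pattern; the real constants are defined in ESIndexConstant.
        String pattern = "product_list_{0}_{1}_{2}";
        // With illustrative values "game", "03" and a date of "20220509" this prints
        // "product_list_game_03_20220509".
        System.out.println(MessageFormat.format(pattern, "game", "03", "20220509"));
    }
}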
src/main/java/com/lining/code/spark/product/model/ProductListModel07Action.java
0 → 100755
View file @
705f3251
package com.lining.code.spark.product.model;

import com.lining.code.constant.ESIndexConstant;
import com.lining.code.constant.SQLConstant;
import com.lining.code.enums.DataTypeEnum;
import com.lining.code.enums.DayEnum;
import com.lining.code.util.DateUtil;
import com.lining.code.util.SparkSessionUtil;
import org.apache.spark.sql.Dataset;
import org.apache.spark.sql.Row;
import org.apache.spark.sql.SaveMode;
import org.apache.spark.sql.SparkSession;

import java.text.MessageFormat;

/**
 * @ Author :lining.
 * @ Date :Created in 22:30 2022/5/9
 * @ Description: Product list data sync (DAY_07 window)
 */
public class ProductListModel07Action {

    public static void main(String[] args) {
        SparkSession spark = SparkSessionUtil.getSparkSession("ProductListModel07Action");
        String date = DateUtil.getCurrentDate();
        if (args != null && args.length > 0) {
            date = args[0];
            syncDataByDate(spark, date);
        } else {
            syncData(spark);
        }
        spark.stop();
    }

    private static void syncData(SparkSession spark) {
        // game products: overwrite the rolling DAY_07 index
        String gameSQL = MessageFormat.format(SQLConstant.PRODUCT_LIST_MODEL_SQL,
                DayEnum.DAY_07.getName(), DataTypeEnum.GAME.getType());
        String gameIndex = MessageFormat.format(ESIndexConstant.PRODUCT_LIST_INDEX,
                DataTypeEnum.GAME.getName(), DayEnum.DAY_07.getName());
        Dataset<Row> gameSet = spark.sql(gameSQL);
        gameSet.write()
                .format("org.elasticsearch.spark.sql")
                .mode(SaveMode.Overwrite)
                .option("es.mapping.id", "id")
                .save(gameIndex);

        // app products: same query parameterized for the APP data type
        String appSQL = MessageFormat.format(SQLConstant.PRODUCT_LIST_MODEL_SQL,
                DayEnum.DAY_07.getName(), DataTypeEnum.APP.getType());
        String appIndex = MessageFormat.format(ESIndexConstant.PRODUCT_LIST_INDEX,
                DataTypeEnum.APP.getName(), DayEnum.DAY_07.getName());
        Dataset<Row> appSet = spark.sql(appSQL);
        appSet.write()
                .format("org.elasticsearch.spark.sql")
                .mode(SaveMode.Overwrite)
                .option("es.mapping.id", "id")
                .save(appIndex);
    }

    private static void syncDataByDate(SparkSession spark, String date) {
        // game products: overwrite a date-suffixed index
        String gameSQL = MessageFormat.format(SQLConstant.PRODUCT_LIST_MODEL_SQL,
                DayEnum.DAY_07.getName(), DataTypeEnum.GAME.getType());
        String gameIndex = MessageFormat.format(ESIndexConstant.PRODUCT_LIST_DATE_INDEX,
                DataTypeEnum.GAME.getName(), DayEnum.DAY_07.getName(), date);
        Dataset<Row> gameSet = spark.sql(gameSQL);
        gameSet.write()
                .format("org.elasticsearch.spark.sql")
                .mode(SaveMode.Overwrite)
                .option("es.mapping.id", "id")
                .save(gameIndex);

        // app products: same query parameterized for the APP data type
        String appSQL = MessageFormat.format(SQLConstant.PRODUCT_LIST_MODEL_SQL,
                DayEnum.DAY_07.getName(), DataTypeEnum.APP.getType());
        String appIndex = MessageFormat.format(ESIndexConstant.PRODUCT_LIST_DATE_INDEX,
                DataTypeEnum.APP.getName(), DayEnum.DAY_07.getName(), date);
        Dataset<Row> appSet = spark.sql(appSQL);
        appSet.write()
                .format("org.elasticsearch.spark.sql")
                .mode(SaveMode.Overwrite)
                .option("es.mapping.id", "id")
                .save(appIndex);
    }
}
src/main/java/com/lining/code/spark/product/model/ProductListModel30Action.java
0 → 100755
View file @
705f3251
package com.lining.code.spark.product.model;

import com.lining.code.constant.ESIndexConstant;
import com.lining.code.constant.SQLConstant;
import com.lining.code.enums.DataTypeEnum;
import com.lining.code.enums.DayEnum;
import com.lining.code.util.DateUtil;
import com.lining.code.util.SparkSessionUtil;
import org.apache.spark.sql.Dataset;
import org.apache.spark.sql.Row;
import org.apache.spark.sql.SaveMode;
import org.apache.spark.sql.SparkSession;

import java.text.MessageFormat;

/**
 * @ Author :lining.
 * @ Date :Created in 22:30 2022/5/9
 * @ Description: Product list data sync (DAY_30 window)
 */
public class ProductListModel30Action {

    public static void main(String[] args) {
        SparkSession spark = SparkSessionUtil.getSparkSession("ProductListModel30Action");
        String date = DateUtil.getCurrentDate();
        if (args != null && args.length > 0) {
            date = args[0];
            syncDataByDate(spark, date);
        } else {
            syncData(spark);
        }
        spark.stop();
    }

    private static void syncDataByDate(SparkSession spark, String date) {
        // game products: overwrite a date-suffixed index
        String gameSQL = MessageFormat.format(SQLConstant.PRODUCT_LIST_MODEL_SQL,
                DayEnum.DAY_30.getName(), DataTypeEnum.GAME.getType());
        String gameIndex = MessageFormat.format(ESIndexConstant.PRODUCT_LIST_DATE_INDEX,
                DataTypeEnum.GAME.getName(), DayEnum.DAY_30.getName(), date);
        Dataset<Row> gameSet = spark.sql(gameSQL);
        gameSet.write()
                .format("org.elasticsearch.spark.sql")
                .mode(SaveMode.Overwrite)
                .option("es.mapping.id", "id")
                .save(gameIndex);

        // app products: same query parameterized for the APP data type
        String appSQL = MessageFormat.format(SQLConstant.PRODUCT_LIST_MODEL_SQL,
                DayEnum.DAY_30.getName(), DataTypeEnum.APP.getType());
        String appIndex = MessageFormat.format(ESIndexConstant.PRODUCT_LIST_DATE_INDEX,
                DataTypeEnum.APP.getName(), DayEnum.DAY_30.getName(), date);
        Dataset<Row> appSet = spark.sql(appSQL);
        appSet.write()
                .format("org.elasticsearch.spark.sql")
                .mode(SaveMode.Overwrite)
                .option("es.mapping.id", "id")
                .save(appIndex);
    }

    private static void syncData(SparkSession spark) {
        // game products: overwrite the rolling DAY_30 index
        String gameSQL = MessageFormat.format(SQLConstant.PRODUCT_LIST_MODEL_SQL,
                DayEnum.DAY_30.getName(), DataTypeEnum.GAME.getType());
        String gameIndex = MessageFormat.format(ESIndexConstant.PRODUCT_LIST_INDEX,
                DataTypeEnum.GAME.getName(), DayEnum.DAY_30.getName());
        Dataset<Row> gameSet = spark.sql(gameSQL);
        gameSet.write()
                .format("org.elasticsearch.spark.sql")
                .mode(SaveMode.Overwrite)
                .option("es.mapping.id", "id")
                .save(gameIndex);

        // app products: same query parameterized for the APP data type
        String appSQL = MessageFormat.format(SQLConstant.PRODUCT_LIST_MODEL_SQL,
                DayEnum.DAY_30.getName(), DataTypeEnum.APP.getType());
        String appIndex = MessageFormat.format(ESIndexConstant.PRODUCT_LIST_INDEX,
                DataTypeEnum.APP.getName(), DayEnum.DAY_30.getName());
        Dataset<Row> appSet = spark.sql(appSQL);
        appSet.write()
                .format("org.elasticsearch.spark.sql")
                .mode(SaveMode.Overwrite)
                .option("es.mapping.id", "id")
                .save(appIndex);
    }
}
src/main/java/com/lining/code/spark/product/model/ProductModelAction.java
0 → 100755
View file @
705f3251
package com.lining.code.spark.product.model;

import com.lining.code.constant.ESIndexConstant;
import com.lining.code.constant.SQLConstant;
import com.lining.code.enums.DataFlowEnum;
import com.lining.code.enums.DataTypeEnum;
import com.lining.code.util.SparkSessionUtil;
import org.apache.spark.sql.Dataset;
import org.apache.spark.sql.Row;
import org.apache.spark.sql.SaveMode;
import org.apache.spark.sql.SparkSession;

import java.text.MessageFormat;

/**
 * @ Author :lining.
 * @ Date :Created in 16:31 2022/5/8
 * @ Description: Product data sync
 */
public class ProductModelAction {

    public static void main(String[] args) {
        SparkSession spark = SparkSessionUtil.getSparkSession("ProductModelAction");
        String type = args[0];
        DataFlowEnum dataFlow = DataFlowEnum.getDataFlow(type);

        String gameSQL = MessageFormat.format(SQLConstant.PRODUCT_MODEL_SQL,
                DataTypeEnum.GAME.getType(), dataFlow.getType());
        String gameIndex = MessageFormat.format(ESIndexConstant.PRODUCT_INDEX,
                DataTypeEnum.GAME.getName());
        Dataset<Row> game = spark.sql(gameSQL);
        game.write()
                .format("org.elasticsearch.spark.sql")
                .mode(SaveMode.Append)
                .option("es.mapping.id", "id")
                .save(gameIndex);

        String appSQL = MessageFormat.format(SQLConstant.PRODUCT_MODEL_SQL,
                DataTypeEnum.APP.getType(), dataFlow.getType());
        String appIndex = MessageFormat.format(ESIndexConstant.PRODUCT_INDEX,
                DataTypeEnum.APP.getName());
        Dataset<Row> app = spark.sql(appSQL);
        app.write()
                .format("org.elasticsearch.spark.sql")
                .mode(SaveMode.Append)
                .option("es.mapping.id", "id")
                .save(appIndex);

        spark.stop();
    }
}
src/main/java/com/lining/code/spark/trend/model/CreativeTrendModelAction.java
0 → 100755
View file @
705f3251
package com.lining.code.spark.trend.model;

import com.lining.code.constant.ESIndexConstant;
import com.lining.code.constant.SQLConstant;
import com.lining.code.enums.DataFlowEnum;
import com.lining.code.enums.DataTypeEnum;
import com.lining.code.util.SparkSessionUtil;
import org.apache.spark.sql.Dataset;
import org.apache.spark.sql.Row;
import org.apache.spark.sql.SaveMode;
import org.apache.spark.sql.SparkSession;

import java.text.MessageFormat;

/**
 * @ Author :lining.
 * @ Date :Created in 16:31 2022/5/8
 * @ Description: Creative group trend sync
 */
public class CreativeTrendModelAction {

    public static void main(String[] args) {
        SparkSession sparkSession = SparkSessionUtil.getSparkSession("CreativeTrendModelAction");

        String gameSQL = MessageFormat.format(SQLConstant.CREATIVE_TREND_MODEL_SQL,
                DataTypeEnum.GAME.getType());
        Dataset<Row> gameSet = sparkSession.sql(gameSQL);
        String gameIndex = MessageFormat.format(ESIndexConstant.CREATIVE_TREND_INDEX,
                DataTypeEnum.GAME.getName());
        gameSet.write()
                .format("org.elasticsearch.spark.sql")
                .mode(SaveMode.Append)
                .option("es.mapping.id", "id")
                .save(gameIndex);

        String appSQL = MessageFormat.format(SQLConstant.CREATIVE_TREND_MODEL_SQL,
                DataTypeEnum.APP.getType());
        Dataset<Row> appSet = sparkSession.sql(appSQL);
        String appIndex = MessageFormat.format(ESIndexConstant.CREATIVE_TREND_INDEX,
                DataTypeEnum.APP.getName());
        appSet.write()
                .format("org.elasticsearch.spark.sql")
                .mode(SaveMode.Append)
                .option("es.mapping.id", "id")
                .save(appIndex);

        sparkSession.stop();
    }
}
src/main/java/com/lining/code/spark/trend/model/MaterialTrendModelAction.java
0 → 100755
View file @
705f3251
package com.lining.code.spark.trend.model;

import com.lining.code.constant.ESIndexConstant;
import com.lining.code.constant.SQLConstant;
import com.lining.code.enums.DataFlowEnum;
import com.lining.code.enums.DataTypeEnum;
import com.lining.code.util.PropertiesUtil;
import com.lining.code.util.SparkSessionUtil;
import org.apache.spark.api.java.function.ForeachFunction;
import org.apache.spark.sql.Dataset;
import org.apache.spark.sql.Row;
import org.apache.spark.sql.SaveMode;
import org.apache.spark.sql.SparkSession;

import java.text.MessageFormat;

/**
 * @ Author :lining.
 * @ Date :Created in 16:31 2022/5/8
 * @ Description: Material trend model sync
 */
public class MaterialTrendModelAction {

    public static void main(String[] args) {
        SparkSession sparkSession = SparkSessionUtil.getSparkSession("MaterialTrendModelAction");

        String gameSQL = MessageFormat.format(SQLConstant.MATERIAL_TREND_MODEL_SQL,
                DataTypeEnum.GAME.getType());
        Dataset<Row> gameSet = sparkSession.sql(gameSQL);
        String gameIndex = MessageFormat.format(ESIndexConstant.MATERIAL_TREND_INDEX,
                DataTypeEnum.GAME.getName());
        gameSet.write()
                .format("org.elasticsearch.spark.sql")
                .mode(SaveMode.Append)
                .option("es.mapping.id", "id")
                .save(gameIndex);

        String appSQL = MessageFormat.format(SQLConstant.MATERIAL_TREND_MODEL_SQL,
                DataTypeEnum.APP.getType());
        Dataset<Row> appSet = sparkSession.sql(appSQL);
        String appIndex = MessageFormat.format(ESIndexConstant.MATERIAL_TREND_INDEX,
                DataTypeEnum.APP.getName());
        appSet.write()
                .format("org.elasticsearch.spark.sql")
                .mode(SaveMode.Append)
                .option("es.mapping.id", "id")
                .save(appIndex);

        sparkSession.stop();
    }
}
src/main/java/com/lining/code/spark/udf/DateUdf.java
0 → 100755
View file @
705f3251
package com.lining.code.spark.udf;

import com.lining.code.util.DateUtil;
import org.apache.spark.sql.api.java.UDF1;

import java.sql.Timestamp;
import java.util.Date;

/**
 * @ Author :lining.
 * @ Date :Created in 14:51 2022/5/16
 * @ Description: Date conversion UDF
 */
public class DateUdf implements UDF1<Timestamp, String> {

    @Override
    public String call(Timestamp timestamp) throws Exception {
        // Convert the SQL timestamp to "yyyy-MM-dd HH:mm:ss"
        Date date = new Date(timestamp.getTime());
        return DateUtil.format(date, DateUtil.YYYY_MM_DD_HH_mm_ss);
    }
}
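For reference, a minimal sketch (not part of the commit) of how this UDF could be registered and used. The registration name follows the commented-out line in ProductListModel03Action; "some_table" and "create_time" are placeholders, not objects from this project.

import com.lining.code.spark.udf.DateUdf;
import com.lining.code.util.SparkSessionUtil;
import org.apache.spark.sql.Dataset;
import org.apache.spark.sql.Row;
import org.apache.spark.sql.SparkSession;
import org.apache.spark.sql.types.DataTypes;

public class DateUdfUsageExample {
    public static void main(String[] args) {
        SparkSession spark = SparkSessionUtil.getSparkSession("DateUdfUsageExample");
        // Register the UDF so it can be referenced from Spark SQL
        spark.udf().register("DateFormatUdf", new DateUdf(), DataTypes.StringType);
        // Placeholder query: formats a timestamp column as "yyyy-MM-dd HH:mm:ss"
        Dataset<Row> rows = spark.sql("SELECT DateFormatUdf(create_time) AS create_time_str FROM some_table");
        rows.show();
        spark.stop();
    }
}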
src/main/java/com/lining/code/util/DateUtil.java
0 → 100755
View file @
705f3251
package com.lining.code.util;

import java.text.SimpleDateFormat;
import java.time.LocalDate;
import java.time.LocalDateTime;
import java.time.format.DateTimeFormatter;
import java.util.Date;

/**
 * @ Author :lining.
 * @ Date :Created in 10:48 2022/3/15
 * @ Description: Date utility class
 */
public class DateUtil {

    public static final String YYYYMMDD = "yyyyMMdd";
    public static final String YYYY_MM_DD_HH_mm_ss = "yyyy-MM-dd HH:mm:ss";

    /**
     * Returns the current date in yyyyMMdd format.
     * @return current date string
     */
    public static String getCurrentDate() {
        return LocalDate.now().format(DateTimeFormatter.ofPattern(YYYYMMDD));
    }

    /**
     * Formats the given date with the given pattern, e.g. yyyy-MM-dd HH:mm:ss.
     * @param date   date to format
     * @param format pattern
     * @return formatted date string
     */
    public static String format(Date date, String format) {
        SimpleDateFormat dateFormat = new SimpleDateFormat();
        dateFormat.applyPattern(format);
        return dateFormat.format(date);
    }

    /**
     * Returns the current date/time in a custom format.
     * @param format pattern
     * @return formatted current date/time
     */
    public static String currentTimeFormat(String format) {
        return LocalDateTime.now().format(DateTimeFormatter.ofPattern(format));
    }

    public static void main(String[] args) {
        String format = format(new Date(), YYYY_MM_DD_HH_mm_ss);
        System.out.println(format);
    }
}
src/main/java/com/lining/code/util/PropertiesUtil.java
0 → 100755
View file @
705f3251
package com.lining.code.util;

import org.apache.commons.lang.StringUtils;

import java.io.InputStream;
import java.util.Map;
import java.util.Properties;

/**
 * @ Author :lining.
 * @ Date :Created in 16:32 2022/5/8
 * @ Description: Configuration property reader
 */
public class PropertiesUtil {

    private static Properties prop = null;

    static {
        // Load the configuration from the classpath
        try {
            prop = new Properties();
            ClassLoader loader = Thread.currentThread().getContextClassLoader();
            InputStream inputStream = loader.getResourceAsStream("config.properties");
            prop.load(inputStream);
        } catch (Exception e) {
            e.printStackTrace();
        }
    }

    public static String getProperty(String property) {
        return prop.getProperty(property);
    }

    public static String getProperty(String property, String defaultValue) {
        String value = prop.getProperty(property);
        if (value == null || "".equals(value)) {
            value = defaultValue;
        }
        return value;
    }

    public static String getStrValue(Map map, String key) {
        if (map == null || map.isEmpty() || StringUtils.isBlank(key)) {
            return "";
        }
        Object t = map.get(key);
        if (t != null) {
            if (t instanceof Integer) {
                return t + "";
            }
            return t.toString();
        } else {
            // Fall back to a case-insensitive lookup of the key
            for (Object o : map.keySet()) {
                String name = (String) o;
                if (name.toLowerCase().equals(key.toLowerCase())) {
                    Object value = map.get(o);
                    if (value == null) {
                        return "";
                    }
                    return value.toString();
                }
            }
        }
        return "";
    }
}
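A minimal usage sketch (not part of the commit), showing the default-value fallback and the case-insensitive map lookup; the keys come from config.properties, while the map contents below are invented for illustration.

import com.lining.code.util.PropertiesUtil;
import java.util.HashMap;
import java.util.Map;

public class PropertiesUtilUsageExample {
    public static void main(String[] args) {
        // Reads es.nodes from config.properties
        String nodes = PropertiesUtil.getProperty("es.nodes");
        // Falls back to "gzip" if the key is missing or empty
        String codec = PropertiesUtil.getProperty("spark.sql.parquet.compression.codec", "gzip");

        // getStrValue resolves keys case-insensitively and returns "" for null/missing values
        Map<String, Object> row = new HashMap<>();
        row.put("Country", "US");
        String country = PropertiesUtil.getStrValue(row, "country"); // "US"

        System.out.println(nodes + " / " + codec + " / " + country);
    }
}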
src/main/java/com/lining/code/util/SparkSessionUtil.java
0 → 100755
View file @
705f3251
package com.lining.code.util;

import org.apache.spark.sql.SparkSession;

/**
 * @ Author :lining.
 * @ Date :Created in 20:57 2022/5/10
 * @ Description: SparkSession utility class
 */
public class SparkSessionUtil {

    public static SparkSession getSparkSession(String appName) {
        SparkSession sparkSession = SparkSession.builder().appName(appName)
                // .master("local[*]")
                .config("spark.files.ignoreMissingFiles", true) // works around corrupted partition files
                .config("spark.hadoop.yarn.timeline-service.enabled", false) // avoids java.lang.NoClassDefFoundError: com/sun/jersey/api/client/config/ClientConfig
                .config("spark.sql.broadcastTimeout", PropertiesUtil.getProperty("spark.sql.broadcastTimeout", "3600"))
                .config("spark.network.timeout", PropertiesUtil.getProperty("spark.network.timeout"))
                .config("spark.sql.parquet.compression.codec", PropertiesUtil.getProperty("spark.sql.parquet.compression.codec", "gzip"))
                .config("es.index.auto.create", "true") // indexes are expected to be created up front rather than auto-created
                .config("es.nodes", PropertiesUtil.getProperty("es.nodes"))
                .config("es.port", PropertiesUtil.getProperty("es.port"))
                .config("es.net.http.auth.user", PropertiesUtil.getProperty("es.net.http.auth.user"))
                .config("es.net.http.auth.pass", PropertiesUtil.getProperty("es.net.http.auth.pass"))
                .config("es.batch.size.bytes", PropertiesUtil.getProperty("es.batch.size.bytes"))
                .config("es.batch.size.entries", PropertiesUtil.getProperty("es.batch.size.entries", "100")) // 100: works around "Could not write all entries"
                .config("es.batch.write.refresh", PropertiesUtil.getProperty("es.batch.write.refresh", "false"))
                .config("es.batch.write.retry.count", PropertiesUtil.getProperty("es.batch.write.retry.count", "30")) // 3: works around "Could not write all entries"
                .config("es.batch.write.retry.wait", PropertiesUtil.getProperty("es.batch.write.retry.wait", "100")) // 10s: works around "Could not write all entries"
                .config("es.http.timeout", PropertiesUtil.getProperty("es.http.timeout"))
                .config("es.http.retries", PropertiesUtil.getProperty("es.http.retries"))
                .config("es.action.heart.beat.lead", PropertiesUtil.getProperty("es.action.heart.beat.lead"))
                .config("es.nodes.wan.only", PropertiesUtil.getProperty("es.nodes.wan.only", "true"))
                .config("es.nodes.data.only", PropertiesUtil.getProperty("es.nodes.data.only", "true"))
                .config("es.nodes.discovery", PropertiesUtil.getProperty("es.nodes.discovery", "true"))
                .config("es.input.use.sliced.partitions", PropertiesUtil.getProperty("es.input.use.sliced.partitions", "50000"))
                .config("es.input.max.docs.per.partition", PropertiesUtil.getProperty("es.input.max.docs.per.partition", "100000"))
                .config("es.net.http.header.Accept-Languag", PropertiesUtil.getProperty("es.net.http.header.Accept-Languag", "gzip"))
                .enableHiveSupport()
                .getOrCreate();
        return sparkSession;
    }
}
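A minimal job skeleton (not part of the commit) showing how the action classes use this helper: obtain a Hive-enabled session, run a query, and write the result to Elasticsearch keyed by "id". The table name "example_table" and index name "example_index" are placeholders; the real jobs build their SQL from SQLConstant and their index names from ESIndexConstant.

import com.lining.code.util.SparkSessionUtil;
import org.apache.spark.sql.Dataset;
import org.apache.spark.sql.Row;
import org.apache.spark.sql.SaveMode;
import org.apache.spark.sql.SparkSession;

public class ExampleSyncAction {
    public static void main(String[] args) {
        SparkSession spark = SparkSessionUtil.getSparkSession("ExampleSyncAction");
        // Placeholder query against Hive
        Dataset<Row> rows = spark.sql("SELECT id, name FROM example_table");
        // Write to a placeholder ES index, using the "id" column as the document id
        rows.write()
                .format("org.elasticsearch.spark.sql")
                .mode(SaveMode.Overwrite)
                .option("es.mapping.id", "id")
                .save("example_index");
        spark.stop();
    }
}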
src/main/resources/config.properties
0 → 100755
View file @
705f3251
#spark configuration
spark.sql.broadcastTimeout=3600
spark.network.timeout=1400s
spark.sql.parquet.compression.codec=snappy
################################################################
#es configuration
es.nodes=10.1.0.59
es.port=9200
es.net.http.auth.user=elastic
es.net.http.auth.pass=Reyun@123
es.batch.size.bytes=20000000
es.batch.size.entries=5000
es.batch.write.refresh=true
es.batch.write.retry.count=50
es.batch.write.retry.wait=500
es.http.timeout=5m
es.http.retries=50
es.action.heart.beat.lead=50
es.nodes.wan.only=true
es.nodes.data.only=false
es.nodes.discovery=false
es.input.use.sliced.partitions=50000
es.input.max.docs.per.partition=100000
es.net.http.header.Accept-Languag=gzip
\ No newline at end of file