Skip to content
Projects
Groups
Snippets
Help
This project
Loading...
Sign in / Register
Toggle navigation
A
atlas
Project
Overview
Details
Activity
Cycle Analytics
Repository
Repository
Files
Commits
Branches
Tags
Contributors
Graph
Compare
Charts
Issues
0
Issues
0
List
Board
Labels
Milestones
Merge Requests
0
Merge Requests
0
CI / CD
CI / CD
Pipelines
Jobs
Schedules
Charts
Wiki
Wiki
Snippets
Snippets
Members
Members
Collapse sidebar
Close sidebar
Activity
Graph
Charts
Create a new issue
Jobs
Commits
Issue Boards
Open sidebar
dataplatform
atlas
Commits
06ade6c4
Commit
06ade6c4
authored
Jan 29, 2015
by
Harish Butani
Browse files
Options
Browse Files
Download
Email Patches
Plain Diff
Expressions initial checkin
parent
657b74a4
Expand all
Hide whitespace changes
Inline
Side-by-side
Showing
6 changed files
with
672 additions
and
0 deletions
+672
-0
QueryDSL.org
docs/QueryDSL.org
+250
-0
Expressions.scala
.../scala/org/apache/hadoop/metadata/query/Expressions.scala
+0
-0
QueryProcessor.scala
...ala/org/apache/hadoop/metadata/query/QueryProcessor.scala
+41
-0
Resolver.scala
...ain/scala/org/apache/hadoop/metadata/query/Resolver.scala
+77
-0
TypeUtils.scala
...in/scala/org/apache/hadoop/metadata/query/TypeUtils.scala
+141
-0
ExpressionTest.scala
...ala/org/apache/hadoop/metadata/query/ExpressionTest.scala
+163
-0
No files found.
docs/QueryDSL.org
0 → 100644
View file @
06ade6c4
#+TITLE: Query DSL
#+AUTHOR: Harish Butani
#+EMAIL: hbutani@apache.org
#+LANGUAGE: en
#+INFOJS_OPT: view:showall toc:t ltoc:t mouse:underline path:http://orgmode.org/org-info.js
#+LINK_HOME: http://home.fnal.gov/~neilsen
#+LINK_UP: http://home.fnal.gov/~neilsen/notebook
#+HTML_HEAD: <link rel="stylesheet" type="text/css" href="http://orgmode.org/org-manual.css" />
#+LaTeX_CLASS: smarticle
#+LaTeX_HEADER: \pdfmapfile{/home/neilsen/texmf/fonts/map/dvips/libertine/libertine.map}
#+LaTeX_HEADER: \usepackage[ttscale=.875]{libertine}
#+LaTeX_HEADER: \usepackage{sectsty}
#+LaTeX_HEADER: \sectionfont{\normalfont\scshape}
#+LaTeX_HEADER: \subsectionfont{\normalfont\itshape}
#+EXPORT_SELECT_TAGS: export
#+EXPORT_EXCLUDE_TAGS: noexport
#+OPTIONS: H:2 num:nil toc:nil \n:nil @:t ::t |:t ^:{} _:{} *:t TeX:t LaTeX:t
#+STARTUP: showall
#+OPTIONS: html-postamble:nil
** Example Type Definitions
#+begin_src plantuml :file class_diagram.png
scale 1300 width
note left of Trait : traits are classifications/tags attached to Instances
class Trait
Trait <|-- JDbcAccess
Trait <|-- PII
Trait <|-- Dimension
Trait <|-- Metric
Trait <|-- ETL
class Object
Object --* Trait : traits >
Object <|-- DB
Object <|-- Table
Object <|-- Column
class DB {
name : String
owner : String
}
class StorageDescriptor {
inputFormat : String
outputFormat : String
}
class Column {
name : String
dataType : String
}
class Table {
name: String
db: DB
}
Table -> StorageDescriptor : storageDesc >
Table -> DB : db >
Column *-> StorageDescriptor : storageDesc >
class LoadProcess {
name : String
}
LoadProcess -* Table : inputTables >
LoadProcess -> Table : outputTable >
class View {
name : String
}
View -* Table : inputTables >
#+end_src
#+CAPTION: ETL and Reporting Scenario Types
#+LABEL: fig:sampleTypeDefs
#+results:
[[file:class_diagram.png]]
** Example Instance Graph
#+begin_src dot :file instanceGraph.png :cmdline -Kdot -Tpng
digraph G {
//size ="6 6";
nodesep=.2;
//rankdir=LR;
ranksep=.25;
node [shape=record fontsize=9];
compound=true;
subgraph cluster0 {
style=bold;
label = "Sales Database"; fontsize=18;
salesDB[label="DB(sales)"]
salesFact[label="Table(sales_fact)" style=filled; color="khaki"]
salesStorage[label="Storage(text,text)"]
sales_time_id[label="time_id" shape="circle" style=filled color="peachpuff"]
sales_product_id[label="product_id" shape="circle" style=filled color="peachpuff"]
sales_customer_id[label="customer_id" shape="circle" style=filled color="peachpuff"]
sales_sales[label="sales" shape="circle" style=filled color="peachpuff"]
sales_sales_metric[label="Metric" style=filled; shape="ellipse" color="turquoise"]
salesFact -> salesDB;
salesFact -> salesStorage;
sales_time_id -> salesStorage;
sales_product_id -> salesStorage;
sales_customer_id -> salesStorage;
sales_sales -> salesStorage;
sales_sales -> sales_sales_metric;
productDim[label="Table(product_dim)" style=filled; color="khaki"]
productStorage[label="Storage(text,text)"]
product_product_id[label="product_id" shape="circle" style=filled color="peachpuff"]
product_product_name[label="product_name" shape="circle" style=filled color="peachpuff"]
product_brand_name[label="brand_name" shape="circle" style=filled color="peachpuff"]
product_dimension[label="Dimension" style=filled; shape="ellipse" color="turquoise"]
productDim -> salesDB;
productDim -> productStorage;
product_product_id -> productStorage;
product_product_name -> productStorage;
product_brand_name -> productStorage;
productDim -> product_dimension;
productDim -> salesFact [style=invis];
timeDim[label="Table(time_dim)" style=filled; color="khaki"]
timeStorage[label="Storage(text,text)"]
time_time_id[label="time_id" shape="circle" style=filled color="peachpuff"]
time_dayOfYear[label="day_of_year" shape="circle" style=filled color="peachpuff"]
time_weekDay[label="week_day" shape="circle" style=filled color="peachpuff"]
time_dimension[label="Dimension" style=filled; shape="ellipse" color="turquoise"]
timeDim -> salesDB;
timeDim -> timeStorage;
time_time_id -> timeStorage;
time_dayOfYear -> timeStorage;
time_weekDay -> timeStorage;
timeDim -> time_dimension;
timeDim -> productDim [style=invis];
customerDim[label="Table(customer_dim)" style=filled; color="khaki"]
customerStorage[label="Storage(text,text)"]
customer_customer_id[label="customer_id" shape="circle" style=filled color="peachpuff"]
customer_name[label="name" shape="circle" style=filled color="peachpuff"]
customer_address[label="address" shape="circle" style=filled color="peachpuff"]
customer_dimension[label="Dimension" style=filled; shape="ellipse" color="turquoise"]
address_pii[label="PII" style=filled; shape="ellipse" color="turquoise"]
customerDim -> salesDB;
customerDim -> customerStorage;
customer_customer_id -> customerStorage;
customer_name -> customerStorage;
customer_address -> customerStorage;
customerDim -> customer_dimension;
customer_address -> address_pii;
customerDim -> timeDim [style=invis];
//{rank=min; salesDB};
{rank=min; salesDB};
};
subgraph cluster1 {
style=bold;
label = "Reporting Database"; fontsize=18;
reportingDB[label="DB(reporting)"]
salesFactDaily[label="Table(sales_daily_mv)" style=filled; color="khaki"]
salesDailyStorage[label="Storage(orc,orc)"]
salesD_time_id[label="time_id" shape="circle" style=filled color="peachpuff"]
salesD_product_id[label="product_id" shape="circle" style=filled color="peachpuff"]
salesD_customer_id[label="customer_id" shape="circle" style=filled color="peachpuff"]
salesD_sales[label="sales" shape="circle" style=filled color="peachpuff"]
salesD_sales_metric[label="Metric" style=filled; shape="ellipse" color="turquoise"]
salesFactDaily -> reportingDB;
salesFactDaily -> salesDailyStorage;
salesD_time_id -> salesDailyStorage;
salesD_product_id -> salesDailyStorage;
salesD_customer_id -> salesDailyStorage;
salesD_sales -> salesDailyStorage;
salesD_sales -> salesD_sales_metric;
salesFactDaily -> reportingDB [style=invis];
productDimView[label="View(product_dim_v)" style=filled; color="khaki"]
productDim -> productDimView [style=dotted];
productDimView_dim[label="Dimension" style=filled; shape="ellipse" color="turquoise"]
productDimView_jdbc[label="JdbcAccess" style=filled; shape="ellipse" color="turquoise"]
productDimView -> productDimView_dim;
productDimView -> productDimView_jdbc;
productDimView -> salesFactDaily [style=invis];
customerDimView[label="View(customer_dim_v)" style=filled; color="khaki"]
customerDim -> customerDimView [style=dotted];
customerDimView_dim[label="Dimension" style=filled; shape="ellipse" color="turquoise"]
customerDimView_jdbc[label="JdbcAccess" style=filled; shape="ellipse" color="turquoise"]
customerDimView -> customerDimView_dim;
customerDimView -> customerDimView_jdbc;
customerDimView -> salesFactDaily [style=invis];
salesMonthly[label="Table(sales_monthly_mv)" style=filled; color="khaki"]
salesMonthlyStorage[label="Storage(orc,orc)"]
salesM_time_id[label="time_id" shape="circle" style=filled color="peachpuff"]
salesM_product_id[label="product_id" shape="circle" style=filled color="peachpuff"]
salesM_customer_id[label="customer_id" shape="circle" style=filled color="peachpuff"]
salesM_sales[label="sales" shape="circle" style=filled color="peachpuff"]
salesM_sales_metric[label="Metric" style=filled; shape="ellipse" color="turquoise"]
salesMonthly -> reportingDB;
salesMonthly -> salesMonthlyStorage;
salesM_time_id -> salesMonthlyStorage;
salesM_product_id -> salesMonthlyStorage;
salesM_customer_id -> salesMonthlyStorage;
salesM_sales -> salesMonthlyStorage;
salesM_sales -> salesM_sales_metric;
salesMonthly -> customerDimView [style=invis];
{rank=min; reportingDB};
};
loadSalesDaily[label="LoadProcess(loadSalesDaily)" style=filled; color="seagreen"; shape="octagon"]
loadSalesDaily_etl[label="ETL" style=filled; shape="ellipse" color="turquoise"]
salesFact -> loadSalesDaily [style=dotted];
timeDim -> loadSalesDaily [style=dotted];
loadSalesDaily -> salesFactDaily [style=dotted];
loadSalesDaily -> loadSalesDaily_etl;
loadSalesMonthly[label="LoadProcess(loadSalesMonthly)" style=filled; color="seagreen"; shape="octagon"]
loadSalesMonthly_etl[label="ETL" style=filled; shape="ellipse" color="turquoise"]
salesFactDaily -> loadSalesMonthly [style=dotted];
timeDim -> loadSalesMonthly [style=dotted];
loadSalesMonthly -> salesMonthly [style=dotted];
loadSalesMonthly -> loadSalesMonthly_etl;
}
#+end_src
#+CAPTION: ETL and Reporting Scenario
#+LABEL: fig:sampleInstanceGraph
#+results:
[[file:instanceGraph.png]]
typesystem/src/main/scala/org/apache/hadoop/metadata/query/Expressions.scala
0 → 100644
View file @
06ade6c4
This diff is collapsed.
Click to expand it.
typesystem/src/main/scala/org/apache/hadoop/metadata/query/QueryProcessor.scala
0 → 100644
View file @
06ade6c4
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package
org.apache.hadoop.metadata.query
import
Expressions._
/**
 * Entry point for turning a raw query expression tree into a fully
 * resolved one.
 */
object QueryProcessor {

  /**
   * Resolves all identifiers in `e` bottom-up via [[Resolver]], fails fast
   * if any sub-expression is still unresolved afterwards, and forces
   * computation of the tree's `dataType` before returning it.
   *
   * @param e the raw (possibly unresolved) expression tree
   * @return the resolved expression tree
   * @throws ExpressionException if any sub-expression cannot be resolved
   */
  def validate(e: Expression): Expression = {
    val e1 = e.transformUp(new Resolver())

    e1.traverseUp {
      case x: Expression if !x.resolved =>
        // fixed message: was "Failed to resolved expression"
        throw new ExpressionException(x, s"Failed to resolve expression $x")
    }

    /*
     * trigger computation of dataType of expression tree
     */
    e1.dataType
    e1
  }
}
typesystem/src/main/scala/org/apache/hadoop/metadata/query/Resolver.scala
0 → 100644
View file @
06ade6c4
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package
org.apache.hadoop.metadata.query
import
Expressions._
/**
 * A partial function applied via `Expression.transformUp` that rewrites
 * unresolved nodes into resolved ones:
 *
 *  - an `IdExpression` becomes a `ClassExpression`, `TraitExpression`,
 *    `FieldExpression` (resolved against `srcExpr`'s dataType), or a
 *    `BackReference` (resolved against `aliases`), in that order of preference;
 *  - an `UnresolvedFieldExpression` on a resolved child becomes a
 *    `FieldExpression`;
 *  - `isTrait` / `hasField` leaves with no class expression are re-anchored
 *    on `srcExpr`;
 *  - `FilterExpression` / `SelectExpression` recurse into their condition /
 *    select list with a child `Resolver` scoped to the input expression.
 *
 * Anything that cannot be resolved is returned unchanged.
 *
 * @param srcExpr the expression providing the type context for field lookups, if any
 * @param aliases named expressions visible for back-references
 */
class Resolver(srcExpr: Option[Expression] = None,
               aliases: Map[String, Expression] = Map())
  extends PartialFunction[Expression, Expression] {

  import TypeUtils._

  // Applied to every node; non-matching nodes fall through to `case x => x`.
  def isDefinedAt(x: Expression) = true

  def apply(e: Expression): Expression = e match {
    case idE@IdExpression(name) => {
      // 1. a known class type?
      val classTyp = resolveAsClassType(name)
      if (classTyp.isDefined) {
        return new ClassExpression(name)
      }
      // 2. a known trait type?
      val traitTyp = resolveAsTraitType(name)
      if (traitTyp.isDefined) {
        return new TraitExpression(name)
      }
      // 3. a field of the source expression's type?
      if (srcExpr.isDefined) {
        val fieldInfo = resolveReference(srcExpr.get.dataType, name)
        if (fieldInfo.isDefined) {
          return new FieldExpression(name, fieldInfo.get, None)
        }
      }
      // 4. an alias introduced earlier in the query?
      val aliasTarget = aliases.get(name)
      if (aliasTarget.isDefined) {
        return new BackReference(name, aliasTarget.get, None)
      }
      // unresolved; leave as-is
      idE
    }

    case f@UnresolvedFieldExpression(child, fieldName) if child.resolved => {
      val fieldInfo = resolveReference(child.dataType, fieldName)
      if (fieldInfo.isDefined) {
        return new FieldExpression(fieldName, fieldInfo.get, Some(child))
      }
      f
    }

    case isTraitLeafExpression(traitName, classExpression)
      if srcExpr.isDefined && !classExpression.isDefined =>
      isTraitLeafExpression(traitName, srcExpr)

    case hasFieldLeafExpression(traitName, classExpression)
      if srcExpr.isDefined && !classExpression.isDefined =>
      hasFieldLeafExpression(traitName, srcExpr)

    case f@FilterExpression(inputExpr, condExpr) if inputExpr.resolved => {
      // resolve the condition in the scope of the filter's input
      val childResolver = new Resolver(Some(inputExpr), inputExpr.namedExpressions)
      new FilterExpression(inputExpr, condExpr.transformUp(childResolver))
    }

    case SelectExpression(child, selectList) if child.resolved => {
      // resolve each select item in the scope of the select's input
      val childResolver = new Resolver(Some(child), child.namedExpressions)
      new SelectExpression(child, selectList.map(_.transformUp(childResolver)))
    }

    case x => x
  }
}
typesystem/src/main/scala/org/apache/hadoop/metadata/query/TypeUtils.scala
0 → 100644
View file @
06ade6c4
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package
org.apache.hadoop.metadata.query
import
java.util.concurrent.atomic.AtomicInteger
import
org.apache.hadoop.metadata.MetadataException
import
org.apache.hadoop.metadata.types.DataTypes.PrimitiveType
import
org.apache.hadoop.metadata.types._
/**
 * Helpers for resolving names and combining types against the global
 * [[TypeSystem]] instance.
 */
object TypeUtils {

  val typSystem = TypeSystem.getInstance()

  // Numeric types ordered by promotion rank: combining two numeric types
  // yields the one with the larger index (plus a Float/Long special case).
  def numericTypes: Seq[PrimitiveType[_]] = Seq(
    DataTypes.BYTE_TYPE,
    DataTypes.SHORT_TYPE,
    DataTypes.INT_TYPE,
    DataTypes.FLOAT_TYPE,
    DataTypes.LONG_TYPE,
    DataTypes.DOUBLE_TYPE,
    DataTypes.BIGINTEGER_TYPE,
    DataTypes.BIGDECIMAL_TYPE)

  /**
   * The common numeric type for a binary operation over `typ1` and `typ2`:
   * the higher-ranked of the two per [[numericTypes]], except that
   * Float combined with Long widens to Double.
   *
   * @throws MetadataException if either operand is not a numeric type
   */
  def combinedType(typ1: IDataType[_], typ2: IDataType[_]): PrimitiveType[_] = {
    val rank1 = if (numericTypes.contains(typ1)) Some(numericTypes.indexOf(typ1)) else None
    val rank2 = if (numericTypes.contains(typ2)) Some(numericTypes.indexOf(typ2)) else None

    if (rank1.isDefined && rank2.isDefined) {
      val resultRank = math.max(rank1.get, rank2.get)

      // Float + Long loses precision either way; widen to Double.
      if ((typ1 == DataTypes.FLOAT_TYPE && typ2 == DataTypes.LONG_TYPE) ||
        (typ1 == DataTypes.LONG_TYPE && typ2 == DataTypes.FLOAT_TYPE)) {
        return DataTypes.DOUBLE_TYPE
      }
      return numericTypes(resultRank)
    }
    throw new MetadataException(s"Cannot combine types: ${typ1.getName} and ${typ2.getName}")
  }

  // Counter used to generate unique names for temporary query-result structs.
  // NOTE(review): could be a `val` (the AtomicInteger itself is mutated, the
  // reference never is), but kept `var` to preserve the existing interface.
  var tempStructCounter: AtomicInteger = new AtomicInteger(0)
  val TEMP_STRUCT_NAME_PREFIX = "__tempQueryResultStruct"

  /**
   * Defines (and returns) a fresh StructType whose attributes mirror the
   * given select-list aliases; each attribute is optional and typed after
   * the corresponding expression's dataType.
   */
  def createStructType(selectExprs: List[Expressions.AliasExpression]): StructType = {
    val attrDefs = new Array[AttributeDefinition](selectExprs.size)
    selectExprs.zipWithIndex.foreach {
      case (expr, idx) =>
        attrDefs(idx) = new AttributeDefinition(
          expr.alias, expr.dataType.getName, Multiplicity.OPTIONAL, false, null)
    }
    typSystem.defineQueryResultType(
      s"${TEMP_STRUCT_NAME_PREFIX}${tempStructCounter.getAndIncrement}",
      attrDefs: _*)
  }

  // Field mapping for composed types (class/trait/struct); None otherwise.
  def fieldMapping(iDataType: IDataType[_]): Option[FieldMapping] = iDataType match {
    case c: ClassType => Some(c.fieldMapping())
    case t: TraitType => Some(t.fieldMapping())
    case s: StructType => Some(s.fieldMapping())
    case _ => None
  }

  import scala.language.existentials

  /**
   * A resolved field: the type it was resolved against, the attribute, and —
   * for reverse (one-sided) relationships — the type that declares the field.
   */
  case class FieldInfo(dataType: IDataType[_],
                       attrInfo: AttributeInfo,
                       reverseDataType: IDataType[_] = null) {
    def isReverse = reverseDataType != null
  }

  /**
   * Given a ComposedType `t` and a name resolve using the following rules:
   * - if `id` is a field in `t` resolve to the field
   * - if `id` is the name of a Struct|Class|Trait Type and it has a field that is of type `t` then return that type
   *
   * For e.g.
   * 1. if we have types Table(name : String, cols : List[Column]), Column(name : String) then
   * `resolveReference(Table, "cols")` resolves to type Column. So a query can be "Table.cols"
   * 2. But if we have Table(name : String), Column(name : String, tbl : Table) then "Table.Column" will resolve
   * to type Column
   *
   * This way the language will support navigation even if the relationship is one-sided.
   *
   * @param typ
   * @param id
   * @return
   */
  def resolveReference(typ: IDataType[_], id: String): Option[FieldInfo] = {
    val mapping = fieldMapping(typ)
    if (mapping.isDefined) {
      // direct field of `typ`
      if (mapping.get.fields.containsKey(id)) {
        return Some(FieldInfo(typ, mapping.get.fields.get(id)))
      }
      // otherwise: is `id` a type with exactly one field pointing back at `typ`?
      try {
        val idTyp = typSystem.getDataType(classOf[IDataType[_]], id)
        val idTypMapping = fieldMapping(idTyp)
        if (idTypMapping.isDefined) {
          import scala.collection.JavaConversions._
          val backFields: Seq[AttributeInfo] =
            idTypMapping.get.fields.values().filter { aInfo =>
              aInfo.dataType() == typ
            }.toSeq
          if (backFields.size == 1) {
            return Some(FieldInfo(typ, backFields(0), idTyp))
          }
        }
      } catch {
        // `id` is not a known type name; fall through to None
        case _: MetadataException => None
      }
    }
    None
  }

  // Looks up `id` as a ClassType; None if it is not one.
  def resolveAsClassType(id: String): Option[ClassType] = {
    try {
      Some(typSystem.getDataType(classOf[ClassType], id))
    } catch {
      case _: MetadataException => None
    }
  }

  // Looks up `id` as a TraitType; None if it is not one.
  def resolveAsTraitType(id: String): Option[TraitType] = {
    try {
      Some(typSystem.getDataType(classOf[TraitType], id))
    } catch {
      case _: MetadataException => None
    }
  }
}
typesystem/src/test/scala/org/apache/hadoop/metadata/query/ExpressionTest.scala
0 → 100644
View file @
06ade6c4
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package
org.apache.hadoop.metadata.query
import
com.google.common.collect.ImmutableList
import
org.apache.hadoop.metadata.BaseTest
import
org.apache.hadoop.metadata.types._
import
org.junit.
{
Before
,
Test
}
import
Expressions._
class ExpressionTest extends BaseTest {

  /** Registers the sample DB/Table/Column model plus tag traits used by all tests. */
  @Before
  override def setup {
    super.setup

    // An AttributeDefinition defaulting to an optional, non-composite attribute.
    def attrDef(name: String, dT: IDataType[_],
                m: Multiplicity = Multiplicity.OPTIONAL,
                isComposite: Boolean = false,
                reverseAttributeName: String = null) = {
      require(name != null)
      require(dT != null)
      new AttributeDefinition(name, dT.getName, m, isComposite, reverseAttributeName)
    }

    // Shorthand: a class type with no supertype and the given attributes.
    def clsDef(typeName: String, attrs: AttributeDefinition*) =
      new HierarchicalTypeDefinition[ClassType](classOf[ClassType], typeName, null, attrs.toArray)

    // Shorthand: an attribute-less trait (tag) type with no supertype.
    def traitDef(typeName: String) =
      new HierarchicalTypeDefinition[TraitType](classOf[TraitType], typeName, null,
        Array[AttributeDefinition]())

    def dbClsDef = clsDef("DB",
      attrDef("name", DataTypes.STRING_TYPE),
      attrDef("owner", DataTypes.STRING_TYPE))

    def storageDescClsDef = clsDef("StorageDesc",
      attrDef("inputFormat", DataTypes.STRING_TYPE),
      attrDef("outputFormat", DataTypes.STRING_TYPE))

    def columnClsDef = clsDef("Column",
      attrDef("name", DataTypes.STRING_TYPE),
      attrDef("dataType", DataTypes.STRING_TYPE),
      new AttributeDefinition("sd", "StorageDesc", Multiplicity.REQUIRED, false, null))

    def tblClsDef = clsDef("Table",
      attrDef("name", DataTypes.STRING_TYPE),
      new AttributeDefinition("db", "DB", Multiplicity.REQUIRED, false, null),
      new AttributeDefinition("sd", "StorageDesc", Multiplicity.REQUIRED, false, null))

    def loadProcessClsDef = clsDef("LoadProcess",
      attrDef("name", DataTypes.STRING_TYPE),
      new AttributeDefinition("inputTables", "Table", Multiplicity.COLLECTION, false, null),
      new AttributeDefinition("outputTable", "Table", Multiplicity.REQUIRED, false, null))

    def viewClsDef = clsDef("View",
      attrDef("name", DataTypes.STRING_TYPE),
      new AttributeDefinition("inputTables", "Table", Multiplicity.COLLECTION, false, null))

    getTypeSystem.defineTypes(
      ImmutableList.of[StructTypeDefinition],
      ImmutableList.of[HierarchicalTypeDefinition[TraitType]](
        traitDef("Dimension"),
        traitDef("PII"),
        traitDef("Metric"),
        traitDef("ETL"),
        traitDef("Jdbc")),
      ImmutableList.of[HierarchicalTypeDefinition[ClassType]](
        dbClsDef,
        storageDescClsDef,
        columnClsDef,
        tblClsDef,
        loadProcessClsDef,
        viewClsDef))
  }

  /** A bare class reference resolves. */
  @Test def testClass: Unit = {
    val e = QueryProcessor.validate(_class("DB"))
    println(e)
  }

  /** A where clause over a class field resolves. */
  @Test def testFilter: Unit = {
    val e = QueryProcessor.validate(
      _class("DB").where(id("name").`=`(string("Reporting"))))
    println(e)
  }

  /** A select over a filtered class resolves. */
  @Test def testSelect: Unit = {
    val e = QueryProcessor.validate(
      _class("DB")
        .where(id("name").`=`(string("Reporting")))
        .select(id("name"), id("owner")))
    println(e)
  }

  /** A non-boolean where condition is rejected during validation. */
  @Test def testNegTypeTest: Unit = {
    try {
      val e = QueryProcessor.validate(_class("DB").where(id("name")))
      println(e)
    } catch {
      case e: ExpressionException
        if e.getMessage.endsWith("expression: DB where name") => ()
    }
  }

  /** An isTrait check against a defined trait resolves. */
  @Test def testIsTrait: Unit = {
    val e = QueryProcessor.validate(_class("DB").where(isTrait("Jdbc")))
    println(e)
  }

  /** An isTrait check against an unknown trait name is rejected. */
  @Test def testIsTraitNegative: Unit = {
    try {
      val e = QueryProcessor.validate(_class("DB").where(isTrait("Jdb")))
      println(e)
    } catch {
      case e: ExpressionException
        if e.getMessage.endsWith("not a TraitType, expression: is Jdb") => ()
    }
  }

  /** A hasField check against an existing field resolves. */
  @Test def testhasField: Unit = {
    val e = QueryProcessor.validate(_class("DB").where(hasField("name")))
    println(e)
  }

  /** A hasField check against a non-existent field is rejected. */
  @Test def testHasFieldNegative: Unit = {
    try {
      val e = QueryProcessor.validate(_class("DB").where(hasField("nam")))
      println(e)
    } catch {
      // NOTE(review): this expected suffix is identical to testIsTraitNegative's
      // and looks copy-pasted; a hasField failure presumably mentions "nam",
      // not "is Jdb" — confirm the actual message and tighten this guard.
      case e: ExpressionException
        if e.getMessage.endsWith("not a TraitType, expression: is Jdb") => ()
    }
  }

  /** Navigating from DB to Table via a reverse (one-sided) relationship resolves. */
  @Test def testFieldReference: Unit = {
    val e = QueryProcessor.validate(_class("DB").field("Table"))
    println(e)
  }

  /** An alias introduced by `as` can be referenced from a where clause. */
  @Test def testBackReference: Unit = {
    // NOTE(review): `where` is applied to the result of validate, so the where
    // clause itself never goes through validation — confirm this is intentional.
    val e = QueryProcessor.validate(
      _class("DB").as("db").field("Table")).where(
      id("db").field("name").`=`(string("Reporting")))
    println(e)
  }
}
Write
Preview
Markdown
is supported
0%
Try again
or
attach a new file
Attach a file
Cancel
You are about to add
0
people
to the discussion. Proceed with caution.
Finish editing this message first!
Cancel
Please
register
or
sign in
to comment