dataplatform / atlas / Commits

Commit d7c74678, authored Feb 03, 2015 by Harish Butani

introduce field validator
parent 537cc23c
Showing 5 changed files, with 73 additions and 2 deletions:

  +9  -1   typesystem/src/main/scala/org/apache/hadoop/metadata/query/QueryProcessor.scala
  +42 -0   typesystem/src/main/scala/org/apache/hadoop/metadata/query/Resolver.scala
  +15 -0   typesystem/src/test/scala/org/apache/hadoop/metadata/query/ExpressionTest.scala
  +6  -0   typesystem/src/test/scala/org/apache/hadoop/metadata/query/GremlinTest.scala
  +1  -1   typesystem/src/test/scala/org/apache/hadoop/metadata/query/QueryTestsUtils.scala
typesystem/src/main/scala/org/apache/hadoop/metadata/query/QueryProcessor.scala

@@ -48,7 +48,15 @@ object QueryProcessor {
      */
     e1.dataType

-    e1
+    /*
+     * ensure fieldReferences match the input expression's dataType
+     */
+    val e2 = e1.transformUp(FieldValidator)
+    val e3 = e2.transformUp(new Resolver())
+
+    e3.dataType
+
+    e3
   }
 }
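Taken together, validate() now resolves the tree, applies FieldValidator bottom-up, re-resolves, and re-computes dataTypes. A minimal usage sketch, mirroring the ExpressionTest cases added further down (it assumes the test type definitions from QueryTestsUtils are registered, as the test setup does):

    import org.apache.hadoop.metadata.query.Expressions._

    // "DB where DB has name": the explicit reference to the input is redundant;
    // FieldValidator rewrites it to the implicit form and validation succeeds.
    val ok = QueryProcessor.validate(_class("DB").where(_class("DB").hasField("name")))

    // "DB where LoadProcess has name": the field's source type does not match the
    // input's dataType, so validation fails with an ExpressionException.
    try {
      QueryProcessor.validate(_class("DB").where(_class("LoadProcess").hasField("name")))
    } catch {
      case e: ExpressionException => println(e.getMessage)
    }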
typesystem/src/main/scala/org/apache/hadoop/metadata/query/Resolver.scala

@@ -19,6 +19,7 @@
 package org.apache.hadoop.metadata.query

 import Expressions._
+import org.apache.hadoop.metadata.types.IDataType

 class Resolver(srcExpr: Option[Expression] = None, aliases: Map[String, Expression] = Map())
   extends PartialFunction[Expression, Expression] {

@@ -75,3 +76,43 @@ extends PartialFunction[Expression, Expression] {
     case x => x
   }
 }
+
+/**
+ * - any FieldReferences that explicitly reference the input, can be converted to implicit references
+ * - any FieldReferences that explicitly reference a
+ */
+object FieldValidator extends PartialFunction[Expression, Expression] {
+
+  def isDefinedAt(x: Expression) = true
+
+  def validateQualifiedField(srcDataType: IDataType[_]): PartialFunction[Expression, Expression] = {
+    case FieldExpression(fNm, fInfo, Some(child))
+      if child.children == Nil && child.dataType == srcDataType => FieldExpression(fNm, fInfo, None)
+    case fe@FieldExpression(fNm, fInfo, Some(child)) if child.children == Nil =>
+      throw new ExpressionException(fe, s"srcType of field doesn't match input type")
+    case hasFieldUnaryExpression(fNm, child) if child.dataType == srcDataType =>
+      hasFieldLeafExpression(fNm)
+    case hF@hasFieldUnaryExpression(fNm, child) =>
+      throw new ExpressionException(hF, s"srcType of field doesn't match input type")
+    case isTraitUnaryExpression(fNm, child) if child.dataType == srcDataType =>
+      isTraitLeafExpression(fNm)
+    case iT@isTraitUnaryExpression(fNm, child) =>
+      throw new ExpressionException(iT, s"srcType of field doesn't match input type")
+  }
+
+  def apply(e: Expression): Expression = e match {
+    case f@FilterExpression(inputExpr, condExpr) => {
+      val validatedCE = condExpr.transformUp(validateQualifiedField(inputExpr.dataType))
+      if (validatedCE.fastEquals(condExpr)) {
+        f
+      } else {
+        new FilterExpression(inputExpr, validatedCE)
+      }
+    }
+    case SelectExpression(child, selectList) if child.resolved => {
+      val v = validateQualifiedField(child.dataType)
+      return new SelectExpression(child, selectList.map { _.transformUp(v) })
+    }
+    case x => x
+  }
+}
\ No newline at end of file
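The apply method above wires the per-field checks into two places: filter conditions are validated against the filter input's dataType, and select-list items against the already-resolved child's dataType. As a hedged sketch of the select path, a query of the shape used by the existing GremlinTest "testSelect" passes validation unchanged, since name and owner are both attributes of DB:

    import org.apache.hadoop.metadata.query.Expressions._

    // Select-list items are transformed with validateQualifiedField(child.dataType);
    // unqualified references such as id("name") are left as-is, and any explicit
    // reference to the DB input would be converted to an implicit one.
    val e = QueryProcessor.validate(
      _class("DB").where(id("name").`=`(string("Reporting")))
        .select(id("name"), id("owner")))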
typesystem/src/test/scala/org/apache/hadoop/metadata/query/ExpressionTest.scala

@@ -92,6 +92,21 @@ class ExpressionTest extends BaseTest {
     println(e)
   }

+  @Test def testNegFieldReference: Unit = {
+    try {
+      val e = QueryProcessor.validate(_class("DB").where(_class("LoadProcess").hasField("name")))
+      println(e)
+    } catch {
+      case e: ExpressionException if e.getMessage.endsWith(
+        "srcType of field doesn't match input type, expression: LoadProcess has name") => ()
+    }
+  }
+
+  @Test def testFieldReferenceRedundant: Unit = {
+    val e = QueryProcessor.validate(_class("DB").where(_class("DB").hasField("name")))
+    println(e)
+  }
+
   @Test def testBackReference: Unit = {
     val e = QueryProcessor.validate(
       _class("DB").as("db").field("Table").where(id("db").field("name").`=`(string("Reporting"))))
typesystem/src/test/scala/org/apache/hadoop/metadata/query/GremlinTest.scala

@@ -51,6 +51,12 @@ class GremlinTest extends FunSuite with BeforeAndAfterAll {
     validateJson(r, "{\n \"query\":\"DB where (name = \\\"Reporting\\\")\",\n \"dataType\":{\n \"superTypes\":[\n \n ],\n \"hierarchicalMetaTypeName\":\"org.apache.hadoop.metadata.types.ClassType\",\n \"typeName\":\"DB\",\n \"attributeDefinitions\":[\n {\n \"name\":\"name\",\n \"dataTypeName\":\"string\",\n \"multiplicity\":{\n \"lower\":0,\n \"upper\":1,\n \"isUnique\":false\n },\n \"isComposite\":false,\n \"isUnique\":false,\n \"isIndexable\":true,\n \"reverseAttributeName\":null\n },\n {\n \"name\":\"owner\",\n \"dataTypeName\":\"string\",\n \"multiplicity\":{\n \"lower\":0,\n \"upper\":1,\n \"isUnique\":false\n },\n \"isComposite\":false,\n \"isUnique\":false,\n \"isIndexable\":true,\n \"reverseAttributeName\":null\n },\n {\n \"name\":\"createTime\",\n \"dataTypeName\":\"int\",\n \"multiplicity\":{\n \"lower\":0,\n \"upper\":1,\n \"isUnique\":false\n },\n \"isComposite\":false,\n \"isUnique\":false,\n \"isIndexable\":true,\n \"reverseAttributeName\":null\n }\n ]\n },\n \"rows\":[\n {\n \"$typeName$\":\"DB\",\n \"$id$\":{\n \"id\":\"7168\",\n \"$typeName$\":\"DB\",\n \"version\":0\n },\n \"owner\":\"Jane BI\",\n \"name\":\"Reporting\",\n \"createTime\":1500\n }\n ]\n}")
   }

+  test("testFilter2") {
+    var r = QueryProcessor.evaluate(
+      _class("DB").where(id("DB").field("name").`=`(string("Reporting"))), g)
+    validateJson(r, "{\n \"query\":\"DB where (name = \\\"Reporting\\\")\",\n \"dataType\":{\n \"superTypes\":[\n \n ],\n \"hierarchicalMetaTypeName\":\"org.apache.hadoop.metadata.types.ClassType\",\n \"typeName\":\"DB\",\n \"attributeDefinitions\":[\n {\n \"name\":\"name\",\n \"dataTypeName\":\"string\",\n \"multiplicity\":{\n \"lower\":0,\n \"upper\":1,\n \"isUnique\":false\n },\n \"isComposite\":false,\n \"isUnique\":false,\n \"isIndexable\":true,\n \"reverseAttributeName\":null\n },\n {\n \"name\":\"owner\",\n \"dataTypeName\":\"string\",\n \"multiplicity\":{\n \"lower\":0,\n \"upper\":1,\n \"isUnique\":false\n },\n \"isComposite\":false,\n \"isUnique\":false,\n \"isIndexable\":true,\n \"reverseAttributeName\":null\n },\n {\n \"name\":\"createTime\",\n \"dataTypeName\":\"int\",\n \"multiplicity\":{\n \"lower\":0,\n \"upper\":1,\n \"isUnique\":false\n },\n \"isComposite\":false,\n \"isUnique\":false,\n \"isIndexable\":true,\n \"reverseAttributeName\":null\n }\n ]\n },\n \"rows\":[\n {\n \"$typeName$\":\"DB\",\n \"$id$\":{\n \"id\":\"7168\",\n \"$typeName$\":\"DB\",\n \"version\":0\n },\n \"owner\":\"Jane BI\",\n \"name\":\"Reporting\",\n \"createTime\":1500\n }\n ]\n}")
+  }
+
   test("testSelect") {
     val r = QueryProcessor.evaluate(
       _class("DB").where(id("name").`=`(string("Reporting"))).
         select(id("name"), id("owner")), g)
typesystem/src/test/scala/org/apache/hadoop/metadata/query/QueryTestsUtils.scala

@@ -92,7 +92,7 @@ object QueryTestsUtils extends GraphUtils {
   def loadProcessClsDef = new HierarchicalTypeDefinition[ClassType](classOf[ClassType], "LoadProcess", null,
     Array(
       attrDef("name", DataTypes.STRING_TYPE),
-      new AttributeDefinition("inputTables", "Table", Multiplicity.COLLECTION, false, null),
+      new AttributeDefinition("inputTables", DataTypes.arrayTypeName("Table"), Multiplicity.COLLECTION, false, null),
       new AttributeDefinition("outputTable", "Table", Multiplicity.REQUIRED, false, null)
     ))