Commit d7c74678 by Harish Butani

introduce field validator

parent 537cc23c
......@@ -48,7 +48,15 @@ object QueryProcessor {
*/
e1.dataType
e1
/*
* ensure fieldReferences match the input expression's dataType
*/
val e2 = e1.transformUp(FieldValidator)
val e3 = e2.transformUp(new Resolver())
e3.dataType
e3
}
}
......@@ -19,6 +19,7 @@
package org.apache.hadoop.metadata.query
import Expressions._
import org.apache.hadoop.metadata.types.IDataType
class Resolver(srcExpr : Option[Expression] = None, aliases : Map[String, Expression] = Map())
extends PartialFunction[Expression, Expression] {
......@@ -75,3 +76,43 @@ extends PartialFunction[Expression, Expression] {
case x => x
}
}
/**
 * Validates qualified field references against the input expression's dataType.
 *
 * - any FieldReference that explicitly references the input (its qualifying child's
 *   dataType equals the input's dataType) is converted to an implicit reference
 *   (the explicit child is dropped).
 * - any FieldReference that explicitly references a type OTHER than the input's
 *   dataType is rejected with an ExpressionException.
 *
 * The same rule is applied to `hasField` and `isTrait` unary expressions: a match
 * against the input type collapses them to their leaf forms.
 */
object FieldValidator extends PartialFunction[Expression, Expression] {

  // Applied via transformUp to every node; non-matching nodes fall through unchanged.
  def isDefinedAt(x: Expression): Boolean = true

  /**
   * Builds the per-node validation rule for a given source dataType.
   * Only nodes whose qualifying child is a leaf (child.children == Nil) are checked;
   * a mismatching source type raises ExpressionException.
   */
  def validateQualifiedField(srcDataType: IDataType[_]): PartialFunction[Expression, Expression] = {
    case FieldExpression(fNm, fInfo, Some(child)) if child.children == Nil && child.dataType == srcDataType =>
      // Explicit reference to the input: make it implicit.
      FieldExpression(fNm, fInfo, None)
    case fe @ FieldExpression(fNm, fInfo, Some(child)) if child.children == Nil =>
      throw new ExpressionException(fe, s"srcType of field doesn't match input type")
    case hasFieldUnaryExpression(fNm, child) if child.dataType == srcDataType =>
      hasFieldLeafExpression(fNm)
    case hF @ hasFieldUnaryExpression(fNm, child) =>
      throw new ExpressionException(hF, s"srcType of field doesn't match input type")
    case isTraitUnaryExpression(fNm, child) if child.dataType == srcDataType =>
      isTraitLeafExpression(fNm)
    case iT @ isTraitUnaryExpression(fNm, child) =>
      throw new ExpressionException(iT, s"srcType of field doesn't match input type")
  }

  /**
   * Validates Filter/Select expressions; any other expression passes through unchanged.
   * The Filter case preserves the original instance when validation changed nothing.
   */
  def apply(e: Expression): Expression = e match {
    case f @ FilterExpression(inputExpr, condExpr) =>
      val validatedCE = condExpr.transformUp(validateQualifiedField(inputExpr.dataType))
      // Avoid allocating a new node when validation was a no-op.
      if (validatedCE.fastEquals(condExpr)) f else new FilterExpression(inputExpr, validatedCE)
    case SelectExpression(child, selectList) if child.resolved =>
      // Hoist the rule so it is built once for the whole select list.
      val v = validateQualifiedField(child.dataType)
      new SelectExpression(child, selectList.map { _.transformUp(v) })
    case x => x
  }
}
\ No newline at end of file
......@@ -92,6 +92,21 @@ class ExpressionTest extends BaseTest {
println(e)
}
@Test def testNegFieldReference: Unit = {
  // Negative test: a qualified field whose source class ("LoadProcess") does not
  // match the input class ("DB") must be rejected by the validator.
  try {
    val e = QueryProcessor.validate(_class("DB").where(_class("LoadProcess").hasField("name")))
    // BUG FIX: previously the test passed silently when no exception was thrown.
    throw new AssertionError(s"Expected ExpressionException but validation succeeded: $e")
  } catch {
    // Only the expected validation error (with the exact message) is swallowed;
    // anything else — including the AssertionError above — propagates and fails the test.
    case e: ExpressionException
      if e.getMessage.endsWith("srcType of field doesn't match input type, expression: LoadProcess has name") => ()
  }
}
@Test def testFieldReferenceRedundant: Unit = {
  // A qualified reference to the same class as the input ("DB") is redundant
  // but valid: the validator converts it into an implicit reference.
  val validated = QueryProcessor.validate(_class("DB").where(_class("DB").hasField("name")))
  println(validated)
}
@Test def testBackReference: Unit = {
val e = QueryProcessor.validate(
_class("DB").as("db").field("Table").where(id("db").field("name").`=`(string("Reporting"))))
......
......@@ -51,6 +51,12 @@ class GremlinTest extends FunSuite with BeforeAndAfterAll {
validateJson(r, "{\n \"query\":\"DB where (name = \\\"Reporting\\\")\",\n \"dataType\":{\n \"superTypes\":[\n \n ],\n \"hierarchicalMetaTypeName\":\"org.apache.hadoop.metadata.types.ClassType\",\n \"typeName\":\"DB\",\n \"attributeDefinitions\":[\n {\n \"name\":\"name\",\n \"dataTypeName\":\"string\",\n \"multiplicity\":{\n \"lower\":0,\n \"upper\":1,\n \"isUnique\":false\n },\n \"isComposite\":false,\n \"isUnique\":false,\n \"isIndexable\":true,\n \"reverseAttributeName\":null\n },\n {\n \"name\":\"owner\",\n \"dataTypeName\":\"string\",\n \"multiplicity\":{\n \"lower\":0,\n \"upper\":1,\n \"isUnique\":false\n },\n \"isComposite\":false,\n \"isUnique\":false,\n \"isIndexable\":true,\n \"reverseAttributeName\":null\n },\n {\n \"name\":\"createTime\",\n \"dataTypeName\":\"int\",\n \"multiplicity\":{\n \"lower\":0,\n \"upper\":1,\n \"isUnique\":false\n },\n \"isComposite\":false,\n \"isUnique\":false,\n \"isIndexable\":true,\n \"reverseAttributeName\":null\n }\n ]\n },\n \"rows\":[\n {\n \"$typeName$\":\"DB\",\n \"$id$\":{\n \"id\":\"7168\",\n \"$typeName$\":\"DB\",\n \"version\":0\n },\n \"owner\":\"Jane BI\",\n \"name\":\"Reporting\",\n \"createTime\":1500\n }\n ]\n}")
}
test("testFilter2") {
  // Same filter as testFilter but with an explicit qualified reference (id("DB"))
  // that the validator should collapse to an implicit one; expected JSON is identical.
  // FIX: `r` is never reassigned — use val instead of var.
  val r = QueryProcessor.evaluate(_class("DB").where(id("DB").field("name").`=`(string("Reporting"))), g)
  validateJson(r, "{\n \"query\":\"DB where (name = \\\"Reporting\\\")\",\n \"dataType\":{\n \"superTypes\":[\n \n ],\n \"hierarchicalMetaTypeName\":\"org.apache.hadoop.metadata.types.ClassType\",\n \"typeName\":\"DB\",\n \"attributeDefinitions\":[\n {\n \"name\":\"name\",\n \"dataTypeName\":\"string\",\n \"multiplicity\":{\n \"lower\":0,\n \"upper\":1,\n \"isUnique\":false\n },\n \"isComposite\":false,\n \"isUnique\":false,\n \"isIndexable\":true,\n \"reverseAttributeName\":null\n },\n {\n \"name\":\"owner\",\n \"dataTypeName\":\"string\",\n \"multiplicity\":{\n \"lower\":0,\n \"upper\":1,\n \"isUnique\":false\n },\n \"isComposite\":false,\n \"isUnique\":false,\n \"isIndexable\":true,\n \"reverseAttributeName\":null\n },\n {\n \"name\":\"createTime\",\n \"dataTypeName\":\"int\",\n \"multiplicity\":{\n \"lower\":0,\n \"upper\":1,\n \"isUnique\":false\n },\n \"isComposite\":false,\n \"isUnique\":false,\n \"isIndexable\":true,\n \"reverseAttributeName\":null\n }\n ]\n },\n \"rows\":[\n {\n \"$typeName$\":\"DB\",\n \"$id$\":{\n \"id\":\"7168\",\n \"$typeName$\":\"DB\",\n \"version\":0\n },\n \"owner\":\"Jane BI\",\n \"name\":\"Reporting\",\n \"createTime\":1500\n }\n ]\n}")
}
test("testSelect") {
val r = QueryProcessor.evaluate(_class("DB").where(id("name").`=`(string("Reporting"))).
select(id("name"), id("owner")), g)
......
......@@ -92,7 +92,7 @@ object QueryTestsUtils extends GraphUtils {
// Class definition for "LoadProcess". NOTE: the scraped diff contained BOTH the old
// and new "inputTables" lines; only the new one is kept — a COLLECTION attribute's
// dataTypeName must be the array type name, not the element type name.
def loadProcessClsDef = new HierarchicalTypeDefinition[ClassType](classOf[ClassType], "LoadProcess", null,
  Array(
    attrDef("name", DataTypes.STRING_TYPE),
    new AttributeDefinition("inputTables", DataTypes.arrayTypeName("Table"), Multiplicity.COLLECTION, false, null),
    new AttributeDefinition("outputTable", "Table", Multiplicity.REQUIRED, false, null)
  ))
......
Markdown is supported
0% or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment