Commit 8db8b5c7 authored by apoorvnaik, committed by Madhan Neethiraj

ATLAS-2229: DSL implementation using ANTLR - #4 (multiple fixes)

parent 5384a742
......@@ -102,7 +102,8 @@ public enum AtlasErrorCode {
INVALID_ENTITY_FOR_CLASSIFICATION (400, "ATLAS-400-00-055", "Entity (guid='{0}',typename='{1}') cannot be classified by Classification '{2}', because '{1}' is not in the ClassificationDef's restrictions."),
SAVED_SEARCH_CHANGE_USER(400, "ATLAS-400-00-056", "saved-search {0} can not be moved from user {1} to {2}"),
INVALID_QUERY_PARAM_LENGTH(400, "ATLAS-400-00-057" , "Length of query param {0} exceeds the limit"),
INVALID_QUERY_LENGTH(400, "ATLAS-400-00-057" , "Invalid query length, update {0} to change the limit" ),
INVALID_QUERY_LENGTH(400, "ATLAS-400-00-058" , "Invalid query length, update {0} to change the limit" ),
INVALID_DSL_QUERY(400, "ATLAS-400-00-059" , "Invalid DSL query: {0} Reason: {1}. Please refer to Atlas DSL grammar for more information" ),
// All Not found enums go here
TYPE_NAME_NOT_FOUND(404, "ATLAS-404-00-001", "Given typename {0} was invalid"),
......
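Editor's note: the hunk above fixes INVALID_QUERY_LENGTH, which had been registered under the same code as INVALID_QUERY_PARAM_LENGTH (ATLAS-400-00-057), and adds INVALID_DSL_QUERY for the new ANTLR parser. A minimal sketch of a uniqueness guard that would have caught the duplicate — a hypothetical check, not part of this commit, assuming AtlasErrorCode exposes a getErrorCode() accessor:

```java
import java.util.HashSet;
import java.util.Set;

import org.apache.atlas.AtlasErrorCode;

public class AtlasErrorCodeUniquenessCheck {
    public static void main(String[] args) {
        // Every enum constant should map to a distinct "ATLAS-xxx-xx-xxx" string;
        // the duplicated ATLAS-400-00-057 fixed above would trip this check.
        Set<String> seen = new HashSet<>();
        for (AtlasErrorCode errorCode : AtlasErrorCode.values()) {
            if (!seen.add(errorCode.getErrorCode())) {
                throw new IllegalStateException("duplicate error code: " + errorCode.getErrorCode());
            }
        }
        System.out.println("all " + seen.size() + " error codes are unique");
    }
}
```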
......@@ -33,12 +33,9 @@ import org.apache.atlas.model.discovery.SearchParameters;
import org.apache.atlas.model.instance.AtlasEntityHeader;
import org.apache.atlas.model.instance.AtlasObjectId;
import org.apache.atlas.model.profile.AtlasUserSavedSearch;
import org.apache.atlas.query.Expressions.Expression;
import org.apache.atlas.query.AtlasDSL;
import org.apache.atlas.query.GremlinQuery;
import org.apache.atlas.query.GremlinTranslator;
import org.apache.atlas.query.QueryParams;
import org.apache.atlas.query.QueryParser;
import org.apache.atlas.query.QueryProcessor;
import org.apache.atlas.repository.Constants;
import org.apache.atlas.repository.graph.GraphBackedSearchIndexer;
import org.apache.atlas.repository.graph.GraphHelper;
......@@ -679,15 +676,7 @@ public class EntityDiscoveryService implements AtlasDiscoveryService {
private GremlinQuery toGremlinQuery(String query, int limit, int offset) throws AtlasBaseException {
QueryParams params = validateSearchParams(limit, offset);
Expression expression = QueryParser.apply(query, params);
if (expression == null) {
throw new AtlasBaseException(DISCOVERY_QUERY_FAILED, query);
}
QueryProcessor queryProcessor = new QueryProcessor(typeRegistry, limit, offset);
Expression validExpression = queryProcessor.validate(expression);
GremlinQuery gremlinQuery = new GremlinTranslator(queryProcessor, validExpression).translate();
GremlinQuery gremlinQuery = new AtlasDSL.Translator(AtlasDSL.Parser.parse(query), typeRegistry, params.offset(), params.limit()).translate();
if (LOG.isDebugEnabled()) {
LOG.debug("Translated Gremlin Query: {}", gremlinQuery.queryStr());
......
......@@ -17,12 +17,17 @@
*/
package org.apache.atlas.query;
import org.antlr.v4.runtime.BaseErrorListener;
import org.antlr.v4.runtime.CharStreams;
import org.antlr.v4.runtime.CommonTokenStream;
import org.antlr.v4.runtime.RecognitionException;
import org.antlr.v4.runtime.Recognizer;
import org.antlr.v4.runtime.TokenStream;
import org.apache.atlas.query.Expressions.Expression;
import org.apache.atlas.AtlasErrorCode;
import org.apache.atlas.exception.AtlasBaseException;
import org.apache.atlas.query.antlr4.AtlasDSLLexer;
import org.apache.atlas.query.antlr4.AtlasDSLParser;
import org.apache.atlas.type.AtlasTypeRegistry;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
......@@ -33,8 +38,10 @@ import java.util.Arrays;
import java.util.HashSet;
import java.util.Set;
public class QueryParser {
private static final Logger LOG = LoggerFactory.getLogger(QueryParser.class);
public class AtlasDSL {
public static class Parser {
private static final Logger LOG = LoggerFactory.getLogger(Parser.class);
private static final Set<String> RESERVED_KEYWORDS =
new HashSet<>(Arrays.asList("[", "]", "(", ")", "=", "<", ">", "!=", "<=", ">=", ",", "and", "or", "+", "-",
......@@ -46,21 +53,83 @@ public class QueryParser {
return RESERVED_KEYWORDS.contains(word);
}
public static Expression apply(String queryStr, QueryParams params) {
Expression ret = null;
public static AtlasDSLParser.QueryContext parse(String queryStr) throws AtlasBaseException {
AtlasDSLParser.QueryContext ret;
try {
InputStream stream = new ByteArrayInputStream(queryStr.getBytes());
AtlasDSLLexer lexer = new AtlasDSLLexer(CharStreams.fromStream(stream));
Validator validator = new Validator();
TokenStream inputTokenStream = new CommonTokenStream(lexer);
AtlasDSLParser parser = new AtlasDSLParser(inputTokenStream);
ret = new Expression(parser.query());
parser.removeErrorListeners();
parser.addErrorListener(validator);
// Validate the syntax of the query here
ret = parser.query();
if (!validator.isValid()) {
LOG.error("Invalid DSL: {} Reason: {}", queryStr, validator.getErrorMsg());
throw new AtlasBaseException(AtlasErrorCode.INVALID_DSL_QUERY, queryStr, validator.getErrorMsg());
}
} catch (IOException e) {
ret = null;
LOG.error(e.getMessage(), e);
throw new AtlasBaseException(e);
}
return ret;
}
}
static class Validator extends BaseErrorListener {
private boolean isValid = true;
private String errorMsg = "";
@Override
public void syntaxError(final Recognizer<?, ?> recognizer, final Object offendingSymbol, final int line, final int charPositionInLine, final String msg, final RecognitionException e) {
// TODO: Capture multiple datapoints
isValid = false;
errorMsg = msg;
}
public boolean isValid() {
return isValid;
}
public String getErrorMsg() {
return errorMsg;
}
}
public static class Translator {
private final AtlasDSLParser.QueryContext queryContext;
private final AtlasTypeRegistry typeRegistry;
private final int offset;
private final int limit;
public Translator(final AtlasDSLParser.QueryContext queryContext, AtlasTypeRegistry typeRegistry, int offset, int limit) {
this.queryContext = queryContext;
this.typeRegistry = typeRegistry;
this.offset = offset;
this.limit = limit;
}
public GremlinQuery translate() {
GremlinQueryComposer gremlinQueryComposer = new GremlinQueryComposer(typeRegistry);
if (offset >= 0) {
if (!gremlinQueryComposer.hasLimitOffset()) {
gremlinQueryComposer.addLimit(Integer.toString(limit), Integer.toString(offset));
}
}
DSLVisitor dslVisitor = new DSLVisitor(gremlinQueryComposer);
// Now process the query and collect the translation in the composer
queryContext.accept(dslVisitor);
return new GremlinQuery(gremlinQueryComposer.get(), gremlinQueryComposer.hasSelect());
}
}
}
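Editor's note: taken together, the hunks above replace the QueryParser / QueryProcessor / GremlinTranslator pipeline with a single AtlasDSL facade — Parser drives the generated ANTLR lexer/parser and surfaces syntax errors through the Validator listener as INVALID_DSL_QUERY, while Translator walks the resulting parse tree with DSLVisitor to compose the Gremlin string. A minimal end-to-end usage sketch, assuming a populated AtlasTypeRegistry; the offset/limit values are illustrative:

```java
import org.apache.atlas.exception.AtlasBaseException;
import org.apache.atlas.query.AtlasDSL;
import org.apache.atlas.query.GremlinQuery;
import org.apache.atlas.query.antlr4.AtlasDSLParser;
import org.apache.atlas.type.AtlasTypeRegistry;

public class DslTranslationExample {
    public static GremlinQuery toGremlin(AtlasTypeRegistry typeRegistry, String dsl) throws AtlasBaseException {
        // Parse and syntax-check; throws AtlasBaseException(INVALID_DSL_QUERY) on bad input
        AtlasDSLParser.QueryContext queryContext = AtlasDSL.Parser.parse(dsl);

        // Compose the Gremlin query; with offset >= 0 a default limit/offset is
        // appended unless the DSL itself carries a "limit ... offset ..." clause
        return new AtlasDSL.Translator(queryContext, typeRegistry, 0, 25).translate();
    }
}
```

That default is why the expected Gremlin strings in the composer tests later in this diff end in .limit(25).toList() even when the DSL query names no limit.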
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.atlas.query;
import org.apache.atlas.query.antlr4.AtlasDSLParser.QueryContext;
public class Expressions {
public static class Expression {
private final QueryContext parsedQuery;
public Expression(QueryContext q) {
parsedQuery = q;
}
public Expression isReady() {
return (parsedQuery != null ? this : null);
}
public void accept(DSLVisitor qv) {
qv.visit(parsedQuery);
}
}
}
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.atlas.query;
import org.apache.atlas.query.Expressions.Expression;
public class GremlinTranslator {
private final QueryProcessor queryProcessor;
private Expression expression;
public GremlinTranslator(QueryProcessor queryProcessor, Expression expression) {
this.expression = expression;
this.queryProcessor = queryProcessor;
}
public GremlinQuery translate() {
DSLVisitor qv = new DSLVisitor(queryProcessor);
expression.accept(qv);
queryProcessor.close();
GremlinQuery ret = new GremlinQuery(queryProcessor.getText(), queryProcessor.hasSelect());
return ret;
}
}
......@@ -25,17 +25,27 @@ import java.util.regex.Pattern;
public class IdentifierHelper {
public static String stripQuotes(String quotedIdentifier) {
String ret = quotedIdentifier;
if (isQuoted(quotedIdentifier)) {
ret = quotedIdentifier.substring(1, quotedIdentifier.length() - 1);
private static final Pattern SINGLE_QUOTED_IDENTIFIER = Pattern.compile("'(\\w[\\w\\d\\.\\s]*)'");
private static final Pattern DOUBLE_QUOTED_IDENTIFIER = Pattern.compile("\"(\\w[\\w\\d\\.\\s]*)\"");
private static final Pattern BACKTICK_QUOTED_IDENTIFIER = Pattern.compile("`(\\w[\\w\\d\\.\\s]*)`");
public static String get(String quotedIdentifier) {
String ret;
if (quotedIdentifier.charAt(0) == '`') {
ret = extract(BACKTICK_QUOTED_IDENTIFIER, quotedIdentifier);
} else if (quotedIdentifier.charAt(0) == '\'') {
ret = extract(SINGLE_QUOTED_IDENTIFIER, quotedIdentifier);
} else if (quotedIdentifier.charAt(0) == '"') {
ret = extract(DOUBLE_QUOTED_IDENTIFIER, quotedIdentifier);
} else {
ret = quotedIdentifier;
}
return ret;
}
public static Advice create(QueryProcessor.Context context,
public static Advice create(GremlinQueryComposer.Context context,
org.apache.atlas.query.Lookup lookup,
String identifier) {
Advice ia = new Advice(identifier);
......@@ -49,7 +59,7 @@ public class IdentifierHelper {
}
public static String getQualifiedName(org.apache.atlas.query.Lookup lookup,
QueryProcessor.Context context,
GremlinQueryComposer.Context context,
String name) {
return lookup.getQualifiedName(context, name);
}
......@@ -70,7 +80,9 @@ public class IdentifierHelper {
}
public static String removeQuotes(String rhs) {
return rhs.replace("\"", "").replace("'", "");
return rhs.replace("\"", "")
.replace("'", "")
.replace("`", "");
}
public static String getQuoted(String s) {
......@@ -97,10 +109,10 @@ public class IdentifierHelper {
public Advice(String s) {
this.raw = removeQuotes(s);
this.actual = IdentifierHelper.stripQuotes(raw);
this.actual = IdentifierHelper.get(raw);
}
private void update(org.apache.atlas.query.Lookup lookup, QueryProcessor.Context context) {
private void update(org.apache.atlas.query.Lookup lookup, GremlinQueryComposer.Context context) {
newContext = context.isEmpty();
if(!newContext) {
if(context.aliasMap.containsKey(this.raw)) {
......@@ -116,7 +128,7 @@ public class IdentifierHelper {
}
}
private void updateSubTypes(org.apache.atlas.query.Lookup lookup, QueryProcessor.Context context) {
private void updateSubTypes(org.apache.atlas.query.Lookup lookup, GremlinQueryComposer.Context context) {
if(isTrait) {
return;
}
......@@ -127,7 +139,7 @@ public class IdentifierHelper {
}
}
private void updateEdgeInfo(org.apache.atlas.query.Lookup lookup, QueryProcessor.Context context) {
private void updateEdgeInfo(org.apache.atlas.query.Lookup lookup, GremlinQueryComposer.Context context) {
if(isPrimitive == false && isTrait == false) {
edgeLabel = lookup.getRelationshipEdgeLabel(context, attributeName);
edgeDirection = "OUT";
......@@ -135,7 +147,7 @@ public class IdentifierHelper {
}
}
private void updateTypeInfo(org.apache.atlas.query.Lookup lookup, QueryProcessor.Context context) {
private void updateTypeInfo(org.apache.atlas.query.Lookup lookup, GremlinQueryComposer.Context context) {
if(parts.length == 1) {
typeName = context.getActiveTypeName();
attributeName = parts[0];
......@@ -171,7 +183,7 @@ public class IdentifierHelper {
}
}
private void setIsDate(Lookup lookup, QueryProcessor.Context context) {
private void setIsDate(Lookup lookup, GremlinQueryComposer.Context context) {
if(isPrimitive) {
isDate = lookup.isDate(context, attributeName);
}
......
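Editor's note: IdentifierHelper.stripQuotes is replaced by IdentifierHelper.get, which dispatches on the leading character and recognizes backtick-quoted identifiers alongside single and double quotes via anchored patterns; removeQuotes likewise strips backticks now. The private extract(...) helper is elided from this diff, so the Matcher-based shape below is an assumption about its implementation:

```java
import java.util.regex.Matcher;
import java.util.regex.Pattern;

public class QuotedIdentifierDemo {
    private static final Pattern BACKTICK_QUOTED_IDENTIFIER = Pattern.compile("`(\\w[\\w\\d\\.\\s]*)`");

    // Presumed shape of IdentifierHelper.extract: return the captured group on a
    // full match, otherwise fall back to the input unchanged.
    private static String extract(Pattern p, String s) {
        Matcher m = p.matcher(s);
        return m.matches() ? m.group(1) : s;
    }

    public static void main(String[] args) {
        System.out.println(extract(BACKTICK_QUOTED_IDENTIFIER, "`hive_db.name`")); // hive_db.name
        System.out.println(extract(BACKTICK_QUOTED_IDENTIFIER, "plainName"));      // plainName
    }
}
```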
......@@ -23,21 +23,21 @@ import org.apache.atlas.type.AtlasType;
public interface Lookup {
AtlasType getType(String typeName);
String getQualifiedName(QueryProcessor.Context context, String name);
String getQualifiedName(GremlinQueryComposer.Context context, String name);
boolean isPrimitive(QueryProcessor.Context context, String attributeName);
boolean isPrimitive(GremlinQueryComposer.Context context, String attributeName);
String getRelationshipEdgeLabel(QueryProcessor.Context context, String attributeName);
String getRelationshipEdgeLabel(GremlinQueryComposer.Context context, String attributeName);
boolean hasAttribute(QueryProcessor.Context context, String typeName);
boolean hasAttribute(GremlinQueryComposer.Context context, String typeName);
boolean doesTypeHaveSubTypes(QueryProcessor.Context context);
boolean doesTypeHaveSubTypes(GremlinQueryComposer.Context context);
String getTypeAndSubTypes(QueryProcessor.Context context);
String getTypeAndSubTypes(GremlinQueryComposer.Context context);
boolean isTraitType(QueryProcessor.Context context);
boolean isTraitType(GremlinQueryComposer.Context context);
String getTypeFromEdge(QueryProcessor.Context context, String item);
String getTypeFromEdge(GremlinQueryComposer.Context context, String item);
boolean isDate(QueryProcessor.Context context, String attributeName);
boolean isDate(GremlinQueryComposer.Context context, String attributeName);
}
// Generated from repository/src/main/java/org/apache/atlas/query/antlr4/AtlasDSLLexer.g4 by ANTLR 4.7
package org.apache.atlas.query.antlr4;
import org.antlr.v4.runtime.Lexer;
import org.antlr.v4.runtime.CharStream;
import org.antlr.v4.runtime.Token;
import org.antlr.v4.runtime.TokenStream;
import org.antlr.v4.runtime.*;
import org.antlr.v4.runtime.atn.*;
import org.antlr.v4.runtime.Lexer;
import org.antlr.v4.runtime.RuntimeMetaData;
import org.antlr.v4.runtime.Vocabulary;
import org.antlr.v4.runtime.VocabularyImpl;
import org.antlr.v4.runtime.atn.ATN;
import org.antlr.v4.runtime.atn.ATNDeserializer;
import org.antlr.v4.runtime.atn.LexerATNSimulator;
import org.antlr.v4.runtime.atn.PredictionContextCache;
import org.antlr.v4.runtime.dfa.DFA;
import org.antlr.v4.runtime.misc.*;
@SuppressWarnings({"all", "warnings", "unchecked", "unused", "cast"})
public class AtlasDSLLexer extends Lexer {
......
SINGLE_LINE_COMMENT=1
MULTILINE_COMMENT=2
WS=3
NUMBER=4
FLOATING_NUMBER=5
BOOL=6
K_COMMA=7
K_PLUS=8
K_MINUS=9
K_STAR=10
K_DIV=11
K_DOT=12
K_LIKE=13
K_AND=14
K_OR=15
K_LPAREN=16
K_LBRACKET=17
K_RPAREN=18
K_RBRACKET=19
K_LT=20
K_LTE=21
K_EQ=22
K_NEQ=23
K_GT=24
K_GTE=25
K_FROM=26
K_WHERE=27
K_ORDERBY=28
K_GROUPBY=29
K_LIMIT=30
K_SELECT=31
K_MAX=32
K_MIN=33
K_SUM=34
K_COUNT=35
K_LOOP=36
K_OFFSET=37
K_AS=38
K_ISA=39
K_IS=40
K_HAS=41
K_ASC=42
K_DESC=43
K_WITHPATH=44
K_TRUE=45
K_FALSE=46
KEYWORD=47
ID=48
STRING=49
','=7
'+'=8
'-'=9
'*'=10
'/'=11
'.'=12
'('=16
'['=17
')'=18
']'=19
SINGLE_LINE_COMMENT=1
MULTILINE_COMMENT=2
WS=3
NUMBER=4
FLOATING_NUMBER=5
BOOL=6
K_COMMA=7
K_PLUS=8
K_MINUS=9
K_STAR=10
K_DIV=11
K_DOT=12
K_LIKE=13
K_AND=14
K_OR=15
K_LPAREN=16
K_LBRACKET=17
K_RPAREN=18
K_RBRACKET=19
K_LT=20
K_LTE=21
K_EQ=22
K_NEQ=23
K_GT=24
K_GTE=25
K_FROM=26
K_WHERE=27
K_ORDERBY=28
K_GROUPBY=29
K_LIMIT=30
K_SELECT=31
K_MAX=32
K_MIN=33
K_SUM=34
K_COUNT=35
K_LOOP=36
K_OFFSET=37
K_AS=38
K_ISA=39
K_IS=40
K_HAS=41
K_ASC=42
K_DESC=43
K_WITHPATH=44
K_TRUE=45
K_FALSE=46
KEYWORD=47
ID=48
STRING=49
','=7
'+'=8
'-'=9
'*'=10
'/'=11
'.'=12
'('=16
'['=17
')'=18
']'=19
......@@ -23,7 +23,7 @@ import org.apache.atlas.AtlasException;
import org.apache.atlas.exception.AtlasBaseException;
import org.apache.atlas.model.typedef.AtlasBaseTypeDef;
import org.apache.atlas.model.typedef.AtlasStructDef;
import org.apache.atlas.query.QueryParser;
import org.apache.atlas.query.AtlasDSL;
import org.apache.atlas.repository.graphdb.AtlasVertex;
import org.apache.atlas.repository.store.graph.AtlasDefStore;
import org.apache.atlas.type.AtlasTypeRegistry;
......@@ -64,7 +64,7 @@ import java.util.regex.Pattern;
if (!allowReservedKeywords && typeDef instanceof AtlasStructDef) {
final List<AtlasStructDef.AtlasAttributeDef> attributeDefs = ((AtlasStructDef) typeDef).getAttributeDefs();
for (AtlasStructDef.AtlasAttributeDef attrDef : attributeDefs) {
if (QueryParser.isKeyword(attrDef.getName())) {
if (AtlasDSL.Parser.isKeyword(attrDef.getName())) {
throw new AtlasBaseException(AtlasErrorCode.ATTRIBUTE_NAME_INVALID, attrDef.getName(), typeDef.getCategory().name());
}
}
......
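Editor's note: the def stores keep their reserved-word validation but now call through the relocated AtlasDSL.Parser.isKeyword, which consults the RESERVED_KEYWORDS set shown earlier. A minimal sketch of the pattern; the sample attribute names are illustrative:

```java
import org.apache.atlas.query.AtlasDSL;

public class ReservedKeywordDemo {
    public static void main(String[] args) {
        // "owner" is a plain identifier; "select" and "where" collide with DSL
        // keywords, so using them as attribute names raises ATTRIBUTE_NAME_INVALID
        for (String name : new String[] {"owner", "select", "where"}) {
            System.out.println(name + " -> reserved: " + AtlasDSL.Parser.isKeyword(name));
        }
    }
}
```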
......@@ -25,7 +25,7 @@ import org.apache.atlas.model.typedef.AtlasRelationshipDef;
import org.apache.atlas.model.typedef.AtlasRelationshipDef.RelationshipCategory;
import org.apache.atlas.model.typedef.AtlasRelationshipDef.PropagateTags;
import org.apache.atlas.model.typedef.AtlasRelationshipEndDef;
import org.apache.atlas.query.QueryParser;
import org.apache.atlas.query.AtlasDSL;
import org.apache.atlas.repository.Constants;
import org.apache.atlas.repository.graphdb.AtlasEdge;
import org.apache.atlas.repository.graphdb.AtlasVertex;
......@@ -354,11 +354,11 @@ public class AtlasRelationshipDefStoreV1 extends AtlasAbstractDefStoreV1<AtlasRe
}
if (!allowReservedKeywords) {
if (QueryParser.isKeyword(end1.getName())) {
if (AtlasDSL.Parser.isKeyword(end1.getName())) {
throw new AtlasBaseException(AtlasErrorCode.RELATIONSHIPDEF_END1_NAME_INVALID, end1.getName());
}
if (QueryParser.isKeyword(end2.getName())) {
if (AtlasDSL.Parser.isKeyword(end2.getName())) {
throw new AtlasBaseException(AtlasErrorCode.RELATIONSHIPDEF_END2_NAME_INVALID, end2.getName());
}
}
......
......@@ -127,8 +127,8 @@ public class DSLQueriesTest extends BasicTestSetup {
{"hive_db where hive_db is JdbcAccess", 0},
{"hive_db where hive_db has name", 3},
{"hive_db as db1 hive_table where (db1.name = \"Reporting\")", 0},
{"hive_table where (name = \"sales_fact\" and createTime > \"2014-01-01\" ) select name as _col_0, createTime as _col_1 ", 1},
{"hive_table where (name = \"sales_fact\" and createTime >= \"2014-12-11T02:35:58.440Z\" ) select name as _col_0, createTime as _col_1 ", 1},
{"hive_table where (name = \"sales_fact\" and createTime > \"2014-01-01\" ) select name as _col_0, createTime as _col_1", 1},
{"hive_table where (name = \"sales_fact\" and createTime >= \"2014-12-11T02:35:58.440Z\" ) select name as _col_0, createTime as _col_1", 1},
{"Dimension", 5},
{"JdbcAccess", 2},
{"ETL", 5},
......@@ -240,12 +240,12 @@ public class DSLQueriesTest extends BasicTestSetup {
{"hive_db as db1 hive_table where (db1.name = \"Reporting\")", 0},
{"hive_table where (name = \"sales_fact\" and createTime > \"2014-01-01\" ) select name as _col_0, createTime as _col_1 ", 1},
{"hive_table where (name = \"sales_fact\" and createTime > \"2014-01-01\" ) select name as _col_0, createTime as _col_1 limit 10 ", 1},
{"hive_table where (name = \"sales_fact\" and createTime > \"2014-01-01\" ) select name as _col_0, createTime as _col_1", 1},
{"hive_table where (name = \"sales_fact\" and createTime > \"2014-01-01\" ) select name as _col_0, createTime as _col_1 limit 10", 1},
{"hive_table where (name = \"sales_fact\" and createTime > \"2014-01-01\" ) select name as _col_0, createTime as _col_1 limit 10 offset 0", 1},
{"hive_table where (name = \"sales_fact\" and createTime > \"2014-01-01\" ) select name as _col_0, createTime as _col_1 limit 10 offset 5", 0},
{"hive_table where (name = \"sales_fact\" and createTime >= \"2014-12-11T02:35:58.440Z\" ) select name as _col_0, createTime as _col_1 ", 1},
{"hive_table where (name = \"sales_fact\" and createTime >= \"2014-12-11T02:35:58.440Z\" ) select name as _col_0, createTime as _col_1", 1},
{"hive_table where (name = \"sales_fact\" and createTime >= \"2014-12-11T02:35:58.440Z\" ) select name as _col_0, createTime as _col_1 limit 10 offset 0", 1},
{"hive_table where (name = \"sales_fact\" and createTime >= \"2014-12-11T02:35:58.440Z\" ) select name as _col_0, createTime as _col_1 limit 10 offset 1", 0},
{"hive_table where (name = \"sales_fact\" and createTime >= \"2014-12-11T02:35:58.440Z\" ) select name as _col_0, createTime as _col_1 limit 10", 1},
......@@ -285,7 +285,7 @@ public class DSLQueriesTest extends BasicTestSetup {
{"hive_column select hive_column.qualifiedName orderby qualifiedName desc limit 5", 5, "hive_column.qualifiedName", false},
{"from hive_db orderby hive_db.owner limit 3", 3, "owner", true},
{"hive_db where hive_db.name=\"Reporting\" orderby 'owner'", 1, "owner", true},
{"hive_db where hive_db.name=\"Reporting\" orderby owner", 1, "owner", true},
{"hive_db where hive_db.name=\"Reporting\" orderby hive_db.owner limit 10 ", 1, "owner", true},
{"hive_db where hive_db.name=\"Reporting\" select name, owner orderby hive_db.name ", 1, "name", true},
......@@ -327,16 +327,16 @@ public class DSLQueriesTest extends BasicTestSetup {
{"hive_db where hive_db has name orderby hive_db.owner limit 2 offset 0", 2, "owner", true},
{"hive_db where hive_db has name orderby hive_db.owner limit 2 offset 1", 2, "owner", true},
{"hive_table where (name = \"sales_fact\" and createTime > \"2014-01-01\" ) select name as _col_0, createTime as _col_1 orderby '_col_1' ", 1, "_col_1", true},
{"hive_table where (name = \"sales_fact\" and createTime > \"2014-01-01\" ) select name as _col_0, createTime as _col_1 orderby '_col_1' limit 10 ", 1, "_col_1", true},
{"hive_table where (name = \"sales_fact\" and createTime > \"2014-01-01\" ) select name as _col_0, createTime as _col_1 orderby '_col_1' limit 10 offset 0", 1, "_col_1", true},
{"hive_table where (name = \"sales_fact\" and createTime > \"2014-01-01\" ) select name as _col_0, createTime as _col_1 orderby '_col_1' limit 10 offset 5", 0, "_col_1", true},
{"hive_table where (name = \"sales_fact\" and createTime > \"2014-01-01\" ) select name as _col_0, createTime as _col_1 orderby createTime ", 1, "_col_1", true},
{"hive_table where (name = \"sales_fact\" and createTime > \"2014-01-01\" ) select name as _col_0, createTime as _col_1 orderby createTime limit 10 ", 1, "_col_1", true},
{"hive_table where (name = \"sales_fact\" and createTime > \"2014-01-01\" ) select name as _col_0, createTime as _col_1 orderby createTime limit 10 offset 0", 1, "_col_1", true},
{"hive_table where (name = \"sales_fact\" and createTime > \"2014-01-01\" ) select name as _col_0, createTime as _col_1 orderby createTime limit 10 offset 5", 0, "_col_1", true},
{"hive_table where (name = \"sales_fact\" and createTime >= \"2014-12-11T02:35:58.440Z\" ) select name as _col_0, createTime as _col_1 orderby '_col_0' ", 1, "_col_0", true},
{"hive_table where (name = \"sales_fact\" and createTime >= \"2014-12-11T02:35:58.440Z\" ) select name as _col_0, createTime as _col_1 orderby '_col_0' limit 10 offset 0", 1, "_col_0", true},
{"hive_table where (name = \"sales_fact\" and createTime >= \"2014-12-11T02:35:58.440Z\" ) select name as _col_0, createTime as _col_1 orderby '_col_0' limit 10 offset 1", 0, "_col_0", true},
{"hive_table where (name = \"sales_fact\" and createTime >= \"2014-12-11T02:35:58.440Z\" ) select name as _col_0, createTime as _col_1 orderby '_col_0' limit 10", 1, "_col_0", true},
{"hive_table where (name = \"sales_fact\" and createTime >= \"2014-12-11T02:35:58.440Z\" ) select name as _col_0, createTime as _col_1 orderby '_col_0' limit 0 offset 1", 0, "_col_0", true},
{"hive_table where (name = \"sales_fact\" and createTime >= \"2014-12-11T02:35:58.440Z\" ) select name as _col_0, createTime as _col_1 orderby name ", 1, "_col_0", true},
{"hive_table where (name = \"sales_fact\" and createTime >= \"2014-12-11T02:35:58.440Z\" ) select name as _col_0, createTime as _col_1 orderby name limit 10 offset 0", 1, "_col_0", true},
{"hive_table where (name = \"sales_fact\" and createTime >= \"2014-12-11T02:35:58.440Z\" ) select name as _col_0, createTime as _col_1 orderby name limit 10 offset 1", 0, "_col_0", true},
{"hive_table where (name = \"sales_fact\" and createTime >= \"2014-12-11T02:35:58.440Z\" ) select name as _col_0, createTime as _col_1 orderby name limit 10", 1, "_col_0", true},
{"hive_table where (name = \"sales_fact\" and createTime >= \"2014-12-11T02:35:58.440Z\" ) select name as _col_0, createTime as _col_1 orderby name limit 0 offset 1", 0, "_col_0", true},
};
}
......
......@@ -41,7 +41,7 @@ import static org.testng.Assert.assertNotNull;
import static org.testng.Assert.assertNull;
import static org.testng.Assert.assertTrue;
public class QueryProcessorTest {
public class GremlinQueryComposerTest {
private List<String> errorList = new ArrayList<>();
@Test
......@@ -56,8 +56,9 @@ public class QueryProcessorTest {
verify("Table isa Dimension", expected);
verify("Table is Dimension", expected);
verify("Table where Table is Dimension", expected);
verify("Table isa Dimension where name = 'sales'",
"g.V().has('__typeName', 'Table').has('__traitNames', within('Dimension')).has('Table.name', eq('sales')).limit(25).toList()");
// Not supported since it requires two singleSrcQuery, one for the isa clause and the other for the where clause
// verify("Table isa Dimension where name = 'sales'",
// "g.V().has('__typeName', 'Table').has('__traitNames', within('Dimension')).has('Table.name', eq('sales')).limit(25).toList()");
}
@Test
......@@ -90,10 +91,15 @@ public class QueryProcessorTest {
public void tableSelectColumns() {
String exMain = "g.V().has('__typeName', 'Table').out('__Table.columns').limit(10).toList()";
String exSel = "def f(r){ r }";
String exSel1 = "def f(r){ return [['db.name']].plus(r.collect({[it.value('DB.name')]})).unique(); }";
verify("Table select columns limit 10", getExpected(exSel, exMain));
String exMain2 = "g.V().has('__typeName', 'Table').out('__Table.db').limit(25).toList()";
verify("Table select db.name", getExpected(exSel, exMain2));
verify("Table select db", getExpected(exSel, exMain2));
String exMain3 = "g.V().has('__typeName', 'Table').out('__Table.db').limit(25).toList()";
verify("Table select db.name", getExpected(exSel1, exMain3));
}
@Test(enabled = false)
......@@ -117,7 +123,7 @@ public class QueryProcessorTest {
String exSel = "def f(r){ return [['d.name','d.owner']].plus(r.collect({[it.value('DB.name'),it.value('DB.owner')]})).unique(); }";
String exMain = "g.V().has('__typeName', 'DB').as('d').order().by('DB.owner)').limit(25).toList()";
String exMain = "g.V().has('__typeName', 'DB').as('d').order().by('DB.owner').limit(25).toList()";
verify("DB as d select d.name, d.owner orderby (d.owner) limit 25", getExpected(exSel, exMain));
String exMain2 = "g.V().has('__typeName', 'Table').and(__.has('Table.name', eq(\"sales_fact\")),__.has('Table.createTime', gt('1388563200000'))).order().by('Table.createTime').limit(25).toList()";
......@@ -151,7 +157,7 @@ public class QueryProcessorTest {
verify("Table where Asset.name like \"Tab*\"",
"g.V().has('__typeName', 'Table').has('Table.name', org.janusgraph.core.attribute.Text.textRegex(\"Tab.*\")).limit(25).toList()");
verify("from Table where (db.name = \"Reporting\")",
"g.V().has('__typeName', 'Table').out('__Table.db').has('DB.name', eq(\"Reporting\")).in('__Table.db').limit(25).toList()");
"g.V().has('__typeName', 'Table').out('__Table.db').has('DB.name', eq(\"Reporting\")).dedup().in('__Table.db').limit(25).toList()");
}
@Test
......@@ -169,14 +175,6 @@ public class QueryProcessorTest {
}
@Test
public void multipleWhereClauses() {
String exSel = "def f(r){ return [['c.owner','c.name','c.dataType']].plus(r.collect({[it.value('Column.owner'),it.value('Column.name'),it.value('Column.dataType')]})).unique(); }";
String exMain = "g.V().has('__typeName', 'Table').has('Table.name', eq(\"sales_fact\")).out('__Table.columns').as('c').limit(25).toList()";
verify("Table where name=\"sales_fact\", columns as c select c.owner, c.name, c.dataType", getExpected(exSel, exMain));
;
}
@Test
public void subType() {
String exMain = "g.V().has('__typeName', within('Asset','Table')).limit(25).toList()";
String exSel = "def f(r){ return [['name','owner']].plus(r.collect({[it.value('Asset.name'),it.value('Asset.owner')]})).unique(); }";
......@@ -253,7 +251,7 @@ public class QueryProcessorTest {
verify("hive_db where hive_db.name='Reporting' and hive_db.createTime < '2017-12-12T02:35:58.440Z'",
"g.V().has('__typeName', 'hive_db').and(__.has('hive_db.name', eq('Reporting')),__.has('hive_db.createTime', lt('1513046158440'))).limit(25).toList()");
verify("Table where db.name='Sales' and db.clusterName='cl1'",
"g.V().has('__typeName', 'Table').and(__.out('__Table.db').has('DB.name', eq('Sales')).in('__Table.db'),__.out('__Table.db').has('DB.clusterName', eq('cl1')).in('__Table.db')).limit(25).toList()");
"g.V().has('__typeName', 'Table').and(__.out('__Table.db').has('DB.name', eq('Sales')).dedup().in('__Table.db'),__.out('__Table.db').has('DB.clusterName', eq('cl1')).dedup().in('__Table.db')).limit(25).toList()");
}
private void verify(String dsl, String expectedGremlin) {
......@@ -290,14 +288,13 @@ public class QueryProcessorTest {
private String getGremlinQuery(AtlasDSLParser.QueryContext queryContext) {
AtlasTypeRegistry registry = mock(AtlasTypeRegistry.class);
org.apache.atlas.query.Lookup lookup = new TestLookup(errorList, registry);
QueryProcessor.Context context = new QueryProcessor.Context(errorList, lookup);
GremlinQueryComposer.Context context = new GremlinQueryComposer.Context(errorList, lookup);
QueryProcessor queryProcessor = new QueryProcessor(lookup, context);
DSLVisitor qv = new DSLVisitor(queryProcessor);
GremlinQueryComposer gremlinQueryComposer = new GremlinQueryComposer(lookup, context);
DSLVisitor qv = new DSLVisitor(gremlinQueryComposer);
qv.visit(queryContext);
queryProcessor.close();
String s = queryProcessor.getText();
String s = gremlinQueryComposer.get();
assertTrue(StringUtils.isNotEmpty(s));
return s;
}
......@@ -326,7 +323,7 @@ public class QueryProcessorTest {
}
@Override
public String getQualifiedName(QueryProcessor.Context context, String name) {
public String getQualifiedName(GremlinQueryComposer.Context context, String name) {
if(name.contains("."))
return name;
......@@ -334,7 +331,7 @@ public class QueryProcessorTest {
}
@Override
public boolean isPrimitive(QueryProcessor.Context context, String attributeName) {
public boolean isPrimitive(GremlinQueryComposer.Context context, String attributeName) {
return attributeName.equals("name") ||
attributeName.equals("owner") ||
attributeName.equals("createdTime") ||
......@@ -343,7 +340,7 @@ public class QueryProcessorTest {
}
@Override
public String getRelationshipEdgeLabel(QueryProcessor.Context context, String attributeName) {
public String getRelationshipEdgeLabel(GremlinQueryComposer.Context context, String attributeName) {
if (attributeName.equalsIgnoreCase("columns"))
return "__Table.columns";
if (attributeName.equalsIgnoreCase("db"))
......@@ -353,29 +350,29 @@ public class QueryProcessorTest {
}
@Override
public boolean hasAttribute(QueryProcessor.Context context, String typeName) {
public boolean hasAttribute(GremlinQueryComposer.Context context, String typeName) {
return (context.getActiveTypeName().equals("Table") && typeName.equals("db")) ||
(context.getActiveTypeName().equals("Table") && typeName.equals("columns"));
}
@Override
public boolean doesTypeHaveSubTypes(QueryProcessor.Context context) {
public boolean doesTypeHaveSubTypes(GremlinQueryComposer.Context context) {
return context.getActiveTypeName().equalsIgnoreCase("Asset");
}
@Override
public String getTypeAndSubTypes(QueryProcessor.Context context) {
public String getTypeAndSubTypes(GremlinQueryComposer.Context context) {
String[] str = new String[]{"'Asset'", "'Table'"};
return StringUtils.join(str, ",");
}
@Override
public boolean isTraitType(QueryProcessor.Context context) {
public boolean isTraitType(GremlinQueryComposer.Context context) {
return context.getActiveTypeName().equals("PII") || context.getActiveTypeName().equals("Dimension");
}
@Override
public String getTypeFromEdge(QueryProcessor.Context context, String item) {
public String getTypeFromEdge(GremlinQueryComposer.Context context, String item) {
if(context.getActiveTypeName().equals("DB") && item.equals("Table")) {
return "Table";
} else if(context.getActiveTypeName().equals("Table") && item.equals("Column")) {
......@@ -389,7 +386,7 @@ public class QueryProcessorTest {
}
@Override
public boolean isDate(QueryProcessor.Context context, String attributeName) {
public boolean isDate(GremlinQueryComposer.Context context, String attributeName) {
return attributeName.equals("createdTime") ||
attributeName.equals("createTime");
}
......