Commit 8db8b5c7 by apoorvnaik, committed by Madhan Neethiraj

ATLAS-2229: DSL implementation using ANTLR - #4 (multiple fixes)

parent 5384a742
@@ -102,7 +102,8 @@ public enum AtlasErrorCode {
     INVALID_ENTITY_FOR_CLASSIFICATION(400, "ATLAS-400-00-055", "Entity (guid='{0}',typename='{1}') cannot be classified by Classification '{2}', because '{1}' is not in the ClassificationDef's restrictions."),
     SAVED_SEARCH_CHANGE_USER(400, "ATLAS-400-00-056", "saved-search {0} can not be moved from user {1} to {2}"),
     INVALID_QUERY_PARAM_LENGTH(400, "ATLAS-400-00-057" , "Length of query param {0} exceeds the limit"),
-    INVALID_QUERY_LENGTH(400, "ATLAS-400-00-057" , "Invalid query length, update {0} to change the limit" ),
+    INVALID_QUERY_LENGTH(400, "ATLAS-400-00-058" , "Invalid query length, update {0} to change the limit" ),
+    INVALID_DSL_QUERY(400, "ATLAS-400-00-059" , "Invalid DSL query: {0} Reason: {1}. Please refer to Atlas DSL grammar for more information" ),

     // All Not found enums go here
     TYPE_NAME_NOT_FOUND(404, "ATLAS-404-00-001", "Given typename {0} was invalid"),
......
@@ -33,12 +33,9 @@ import org.apache.atlas.model.discovery.SearchParameters;
 import org.apache.atlas.model.instance.AtlasEntityHeader;
 import org.apache.atlas.model.instance.AtlasObjectId;
 import org.apache.atlas.model.profile.AtlasUserSavedSearch;
-import org.apache.atlas.query.Expressions.Expression;
+import org.apache.atlas.query.AtlasDSL;
 import org.apache.atlas.query.GremlinQuery;
-import org.apache.atlas.query.GremlinTranslator;
 import org.apache.atlas.query.QueryParams;
-import org.apache.atlas.query.QueryParser;
-import org.apache.atlas.query.QueryProcessor;
 import org.apache.atlas.repository.Constants;
 import org.apache.atlas.repository.graph.GraphBackedSearchIndexer;
 import org.apache.atlas.repository.graph.GraphHelper;
@@ -678,16 +675,8 @@ public class EntityDiscoveryService implements AtlasDiscoveryService {
     }

     private GremlinQuery toGremlinQuery(String query, int limit, int offset) throws AtlasBaseException {
         QueryParams params = validateSearchParams(limit, offset);
-        Expression expression = QueryParser.apply(query, params);
-
-        if (expression == null) {
-            throw new AtlasBaseException(DISCOVERY_QUERY_FAILED, query);
-        }
-
-        QueryProcessor queryProcessor = new QueryProcessor(typeRegistry, limit, offset);
-        Expression validExpression = queryProcessor.validate(expression);
-        GremlinQuery gremlinQuery = new GremlinTranslator(queryProcessor, validExpression).translate();
+        GremlinQuery gremlinQuery = new AtlasDSL.Translator(AtlasDSL.Parser.parse(query), typeRegistry, params.offset(), params.limit()).translate();

         if (LOG.isDebugEnabled()) {
             LOG.debug("Translated Gremlin Query: {}", gremlinQuery.queryStr());
......
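The change above collapses the old parse/validate/translate pipeline into a single expression. Unrolled for readability, the new body reads roughly as follows (a sketch using only names visible in this diff; the surrounding field and method wiring is assumed):

    // Sketch: the new toGremlinQuery() flow, step by step
    QueryParams params = validateSearchParams(limit, offset);

    // Parse: throws AtlasBaseException (INVALID_DSL_QUERY) instead of returning null
    AtlasDSLParser.QueryContext queryContext = AtlasDSL.Parser.parse(query);

    // Translate: walks the parse tree and composes the Gremlin query string
    GremlinQuery gremlinQuery = new AtlasDSL.Translator(queryContext, typeRegistry,
                                                        params.offset(), params.limit()).translate();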
@@ -17,12 +17,17 @@
  */
 package org.apache.atlas.query;

+import org.antlr.v4.runtime.BaseErrorListener;
 import org.antlr.v4.runtime.CharStreams;
 import org.antlr.v4.runtime.CommonTokenStream;
+import org.antlr.v4.runtime.RecognitionException;
+import org.antlr.v4.runtime.Recognizer;
 import org.antlr.v4.runtime.TokenStream;
-import org.apache.atlas.query.Expressions.Expression;
+import org.apache.atlas.AtlasErrorCode;
+import org.apache.atlas.exception.AtlasBaseException;
 import org.apache.atlas.query.antlr4.AtlasDSLLexer;
 import org.apache.atlas.query.antlr4.AtlasDSLParser;
+import org.apache.atlas.type.AtlasTypeRegistry;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
@@ -33,34 +38,98 @@ import java.util.Arrays;
 import java.util.HashSet;
 import java.util.Set;

-public class QueryParser {
-    private static final Logger LOG = LoggerFactory.getLogger(QueryParser.class);
-
-    private static final Set<String> RESERVED_KEYWORDS =
-            new HashSet<>(Arrays.asList("[", "]", "(", ")", "=", "<", ">", "!=", "<=", ">=", ",", "and", "or", "+", "-",
-                                        "*", "/", ".", "select", "from", "where", "groupby", "loop", "isa", "is", "has",
-                                        "as", "times", "withPath", "limit", "offset", "orderby", "count", "max", "min",
-                                        "sum", "by", "order", "like"));
-
-    public static boolean isKeyword(String word) {
-        return RESERVED_KEYWORDS.contains(word);
-    }
-
-    public static Expression apply(String queryStr, QueryParams params) {
-        Expression ret = null;
-
-        try {
-            InputStream stream = new ByteArrayInputStream(queryStr.getBytes());
-            AtlasDSLLexer lexer = new AtlasDSLLexer(CharStreams.fromStream(stream));
-            TokenStream inputTokenStream = new CommonTokenStream(lexer);
-            AtlasDSLParser parser = new AtlasDSLParser(inputTokenStream);
-            ret = new Expression(parser.query());
-        } catch (IOException e) {
-            ret = null;
-            LOG.error(e.getMessage(), e);
-        }
-
-        return ret;
-    }
-}
+public class AtlasDSL {
+
+    public static class Parser {
+        private static final Logger LOG = LoggerFactory.getLogger(Parser.class);
+
+        private static final Set<String> RESERVED_KEYWORDS =
+                new HashSet<>(Arrays.asList("[", "]", "(", ")", "=", "<", ">", "!=", "<=", ">=", ",", "and", "or", "+", "-",
+                                            "*", "/", ".", "select", "from", "where", "groupby", "loop", "isa", "is", "has",
+                                            "as", "times", "withPath", "limit", "offset", "orderby", "count", "max", "min",
+                                            "sum", "by", "order", "like"));
+
+        public static boolean isKeyword(String word) {
+            return RESERVED_KEYWORDS.contains(word);
+        }
+
+        public static AtlasDSLParser.QueryContext parse(String queryStr) throws AtlasBaseException {
+            AtlasDSLParser.QueryContext ret;
+            try {
+                InputStream stream = new ByteArrayInputStream(queryStr.getBytes());
+                AtlasDSLLexer lexer = new AtlasDSLLexer(CharStreams.fromStream(stream));
+                Validator validator = new Validator();
+                TokenStream inputTokenStream = new CommonTokenStream(lexer);
+                AtlasDSLParser parser = new AtlasDSLParser(inputTokenStream);
+
+                parser.removeErrorListeners();
+                parser.addErrorListener(validator);
+
+                // Validate the syntax of the query here
+                ret = parser.query();
+
+                if (!validator.isValid()) {
+                    LOG.error("Invalid DSL: {} Reason: {}", queryStr, validator.getErrorMsg());
+                    throw new AtlasBaseException(AtlasErrorCode.INVALID_DSL_QUERY, queryStr, validator.getErrorMsg());
+                }
+            } catch (IOException e) {
+                throw new AtlasBaseException(e);
+            }
+
+            return ret;
+        }
+    }
+
+    static class Validator extends BaseErrorListener {
+        private boolean isValid  = true;
+        private String  errorMsg = "";
+
+        @Override
+        public void syntaxError(final Recognizer<?, ?> recognizer, final Object offendingSymbol, final int line, final int charPositionInLine, final String msg, final RecognitionException e) {
+            // TODO: Capture multiple datapoints
+            isValid  = false;
+            errorMsg = msg;
+        }
+
+        public boolean isValid() {
+            return isValid;
+        }
+
+        public String getErrorMsg() {
+            return errorMsg;
+        }
+    }
+
+    public static class Translator {
+        private final AtlasDSLParser.QueryContext queryContext;
+        private final AtlasTypeRegistry           typeRegistry;
+        private final int                         offset;
+        private final int                         limit;
+
+        public Translator(final AtlasDSLParser.QueryContext queryContext, AtlasTypeRegistry typeRegistry, int offset, int limit) {
+            this.queryContext = queryContext;
+            this.typeRegistry = typeRegistry;
+            this.offset       = offset;
+            this.limit        = limit;
+        }
+
+        public GremlinQuery translate() {
+            GremlinQueryComposer gremlinQueryComposer = new GremlinQueryComposer(typeRegistry);
+
+            if (offset >= 0) {
+                if (!gremlinQueryComposer.hasLimitOffset()) {
+                    gremlinQueryComposer.addLimit(Integer.toString(limit), Integer.toString(offset));
+                }
+            }
+
+            DSLVisitor dslVisitor = new DSLVisitor(gremlinQueryComposer);
+
+            // Now process the query and collect the translation in the composer
+            queryContext.accept(dslVisitor);
+
+            return new GremlinQuery(gremlinQueryComposer.get(), gremlinQueryComposer.hasSelect());
+        }
+    }
+}
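The Validator above is what changes the failure mode: the old QueryParser.apply() swallowed errors and returned null, while AtlasDSL.Parser.parse() registers the Validator as the only ANTLR error listener and fails fast. A minimal sketch of the resulting behavior (the query string is illustrative):

    // Sketch: a syntactically invalid query now surfaces as a structured exception
    try {
        AtlasDSL.Parser.parse("Table where");   // incomplete where-clause, assumed invalid
    } catch (AtlasBaseException e) {
        // Carries AtlasErrorCode.INVALID_DSL_QUERY (ATLAS-400-00-059), whose message embeds
        // the offending query and the ANTLR error message collected by the Validator
    }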
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.atlas.query;
import org.apache.atlas.query.antlr4.AtlasDSLParser.QueryContext;
public class Expressions {
public static class Expression {
private final QueryContext parsedQuery;
public Expression(QueryContext q) {
parsedQuery = q;
}
public Expression isReady() {
return (parsedQuery != null ? this : null);
}
public void accept(DSLVisitor qv) {
qv.visit(parsedQuery);
}
}
}
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.atlas.query;
import org.apache.atlas.query.Expressions.Expression;
public class GremlinTranslator {
private final QueryProcessor queryProcessor;
private Expression expression;
public GremlinTranslator(QueryProcessor queryProcessor, Expression expression) {
this.expression = expression;
this.queryProcessor = queryProcessor;
}
public GremlinQuery translate() {
DSLVisitor qv = new DSLVisitor(queryProcessor);
expression.accept(qv);
queryProcessor.close();
GremlinQuery ret = new GremlinQuery(queryProcessor.getText(), queryProcessor.hasSelect());
return ret;
}
}
@@ -25,17 +25,27 @@ import java.util.regex.Pattern;

 public class IdentifierHelper {
-    public static String stripQuotes(String quotedIdentifier) {
-        String ret = quotedIdentifier;
-
-        if (isQuoted(quotedIdentifier)) {
-            ret = quotedIdentifier.substring(1, quotedIdentifier.length() - 1);
+    private static final Pattern SINGLE_QUOTED_IDENTIFIER   = Pattern.compile("'(\\w[\\w\\d\\.\\s]*)'");
+    private static final Pattern DOUBLE_QUOTED_IDENTIFIER   = Pattern.compile("\"(\\w[\\w\\d\\.\\s]*)\"");
+    private static final Pattern BACKTICK_QUOTED_IDENTIFIER = Pattern.compile("`(\\w[\\w\\d\\.\\s]*)`");
+
+    public static String get(String quotedIdentifier) {
+        String ret;
+
+        if (quotedIdentifier.charAt(0) == '`') {
+            ret = extract(BACKTICK_QUOTED_IDENTIFIER, quotedIdentifier);
+        } else if (quotedIdentifier.charAt(0) == '\'') {
+            ret = extract(SINGLE_QUOTED_IDENTIFIER, quotedIdentifier);
+        } else if (quotedIdentifier.charAt(0) == '"') {
+            ret = extract(DOUBLE_QUOTED_IDENTIFIER, quotedIdentifier);
+        } else {
+            ret = quotedIdentifier;
         }

         return ret;
     }

-    public static Advice create(QueryProcessor.Context context,
+    public static Advice create(GremlinQueryComposer.Context context,
                                 org.apache.atlas.query.Lookup lookup,
                                 String identifier) {
         Advice ia = new Advice(identifier);
@@ -49,7 +59,7 @@ public class IdentifierHelper {
     }

     public static String getQualifiedName(org.apache.atlas.query.Lookup lookup,
-                                          QueryProcessor.Context context,
+                                          GremlinQueryComposer.Context context,
                                           String name) {
         return lookup.getQualifiedName(context, name);
     }
@@ -70,7 +80,9 @@ public class IdentifierHelper {
     }

     public static String removeQuotes(String rhs) {
-        return rhs.replace("\"", "").replace("'", "");
+        return rhs.replace("\"", "")
+                  .replace("'", "")
+                  .replace("`", "");
     }

     public static String getQuoted(String s) {
@@ -97,10 +109,10 @@ public class IdentifierHelper {
         public Advice(String s) {
             this.raw = removeQuotes(s);
-            this.actual = IdentifierHelper.stripQuotes(raw);
+            this.actual = IdentifierHelper.get(raw);
         }
-        private void update(org.apache.atlas.query.Lookup lookup, QueryProcessor.Context context) {
+        private void update(org.apache.atlas.query.Lookup lookup, GremlinQueryComposer.Context context) {
             newContext = context.isEmpty();
             if(!newContext) {
                 if(context.aliasMap.containsKey(this.raw)) {
@@ -116,7 +128,7 @@ public class IdentifierHelper {
             }
         }

-        private void updateSubTypes(org.apache.atlas.query.Lookup lookup, QueryProcessor.Context context) {
+        private void updateSubTypes(org.apache.atlas.query.Lookup lookup, GremlinQueryComposer.Context context) {
             if(isTrait) {
                 return;
             }
@@ -127,7 +139,7 @@ public class IdentifierHelper {
             }
         }

-        private void updateEdgeInfo(org.apache.atlas.query.Lookup lookup, QueryProcessor.Context context) {
+        private void updateEdgeInfo(org.apache.atlas.query.Lookup lookup, GremlinQueryComposer.Context context) {
             if(isPrimitive == false && isTrait == false) {
                 edgeLabel = lookup.getRelationshipEdgeLabel(context, attributeName);
                 edgeDirection = "OUT";
@@ -135,7 +147,7 @@ public class IdentifierHelper {
             }
         }

-        private void updateTypeInfo(org.apache.atlas.query.Lookup lookup, QueryProcessor.Context context) {
+        private void updateTypeInfo(org.apache.atlas.query.Lookup lookup, GremlinQueryComposer.Context context) {
             if(parts.length == 1) {
                 typeName = context.getActiveTypeName();
                 attributeName = parts[0];
@@ -171,7 +183,7 @@ public class IdentifierHelper {
             }
         }

-        private void setIsDate(Lookup lookup, QueryProcessor.Context context) {
+        private void setIsDate(Lookup lookup, GremlinQueryComposer.Context context) {
             if(isPrimitive) {
                 isDate = lookup.isDate(context, attributeName);
             }
......
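To make the new quote handling concrete, a few illustrative calls to IdentifierHelper.get(), deduced from the three patterns above (behavior inferred from the regexes, not taken from the test suite; extract() is assumed to return the first capture group):

    IdentifierHelper.get("`create time`");   // -> "create time"  (backticks permit embedded spaces)
    IdentifierHelper.get("'db.name'");       // -> "db.name"      (dots are allowed inside quotes)
    IdentifierHelper.get("\"owner\"");       // -> "owner"
    IdentifierHelper.get("owner");           // -> "owner"        (unquoted input passes through unchanged)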
@@ -23,21 +23,21 @@ import org.apache.atlas.type.AtlasType;
 public interface Lookup {
     AtlasType getType(String typeName);

-    String getQualifiedName(QueryProcessor.Context context, String name);
+    String getQualifiedName(GremlinQueryComposer.Context context, String name);

-    boolean isPrimitive(QueryProcessor.Context context, String attributeName);
+    boolean isPrimitive(GremlinQueryComposer.Context context, String attributeName);

-    String getRelationshipEdgeLabel(QueryProcessor.Context context, String attributeName);
+    String getRelationshipEdgeLabel(GremlinQueryComposer.Context context, String attributeName);

-    boolean hasAttribute(QueryProcessor.Context context, String typeName);
+    boolean hasAttribute(GremlinQueryComposer.Context context, String typeName);

-    boolean doesTypeHaveSubTypes(QueryProcessor.Context context);
+    boolean doesTypeHaveSubTypes(GremlinQueryComposer.Context context);

-    String getTypeAndSubTypes(QueryProcessor.Context context);
+    String getTypeAndSubTypes(GremlinQueryComposer.Context context);

-    boolean isTraitType(QueryProcessor.Context context);
+    boolean isTraitType(GremlinQueryComposer.Context context);

-    String getTypeFromEdge(QueryProcessor.Context context, String item);
+    String getTypeFromEdge(GremlinQueryComposer.Context context, String item);

-    boolean isDate(QueryProcessor.Context context, String attributeName);
+    boolean isDate(GremlinQueryComposer.Context context, String attributeName);
 }
// Generated from repository/src/main/java/org/apache/atlas/query/antlr4/AtlasDSLLexer.g4 by ANTLR 4.7
 package org.apache.atlas.query.antlr4;

-import org.antlr.v4.runtime.Lexer;
 import org.antlr.v4.runtime.CharStream;
-import org.antlr.v4.runtime.Token;
-import org.antlr.v4.runtime.TokenStream;
-import org.antlr.v4.runtime.*;
-import org.antlr.v4.runtime.atn.*;
+import org.antlr.v4.runtime.Lexer;
+import org.antlr.v4.runtime.RuntimeMetaData;
+import org.antlr.v4.runtime.Vocabulary;
+import org.antlr.v4.runtime.VocabularyImpl;
+import org.antlr.v4.runtime.atn.ATN;
+import org.antlr.v4.runtime.atn.ATNDeserializer;
+import org.antlr.v4.runtime.atn.LexerATNSimulator;
+import org.antlr.v4.runtime.atn.PredictionContextCache;
 import org.antlr.v4.runtime.dfa.DFA;
-import org.antlr.v4.runtime.misc.*;

 @SuppressWarnings({"all", "warnings", "unchecked", "unused", "cast"})
 public class AtlasDSLLexer extends Lexer {
......
SINGLE_LINE_COMMENT=1
MULTILINE_COMMENT=2
WS=3
NUMBER=4
FLOATING_NUMBER=5
BOOL=6
K_COMMA=7
K_PLUS=8
K_MINUS=9
K_STAR=10
K_DIV=11
K_DOT=12
K_LIKE=13
K_AND=14
K_OR=15
K_LPAREN=16
K_LBRACKET=17
K_RPAREN=18
K_RBRACKET=19
K_LT=20
K_LTE=21
K_EQ=22
K_NEQ=23
K_GT=24
K_GTE=25
K_FROM=26
K_WHERE=27
K_ORDERBY=28
K_GROUPBY=29
K_LIMIT=30
K_SELECT=31
K_MAX=32
K_MIN=33
K_SUM=34
K_COUNT=35
K_LOOP=36
K_OFFSET=37
K_AS=38
K_ISA=39
K_IS=40
K_HAS=41
K_ASC=42
K_DESC=43
K_WITHPATH=44
K_TRUE=45
K_FALSE=46
KEYWORD=47
ID=48
STRING=49
','=7
'+'=8
'-'=9
'*'=10
'/'=11
'.'=12
'('=16
'['=17
')'=18
']'=19
@@ -107,4 +107,4 @@ querySrc: commaDelimitedQueries | spaceDelimitedQueries ;

 query: querySrc groupByExpression?
        selectClause?
        orderByExpr?
        limitOffset? ;
\ No newline at end of file
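For reference, the top-level rule accepts its optional clauses in this fixed order after the query source. A few illustrative DSL queries that should parse under this rule (examples are ours, not taken from the grammar file):

    hive_table                                                   // querySrc only
    hive_table groupby (owner) select owner, count()             // with groupByExpression and selectClause
    hive_table select name orderby name asc limit 10 offset 5    // with selectClause, orderByExpr and limitOffset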
SINGLE_LINE_COMMENT=1
MULTILINE_COMMENT=2
WS=3
NUMBER=4
FLOATING_NUMBER=5
BOOL=6
K_COMMA=7
K_PLUS=8
K_MINUS=9
K_STAR=10
K_DIV=11
K_DOT=12
K_LIKE=13
K_AND=14
K_OR=15
K_LPAREN=16
K_LBRACKET=17
K_RPAREN=18
K_RBRACKET=19
K_LT=20
K_LTE=21
K_EQ=22
K_NEQ=23
K_GT=24
K_GTE=25
K_FROM=26
K_WHERE=27
K_ORDERBY=28
K_GROUPBY=29
K_LIMIT=30
K_SELECT=31
K_MAX=32
K_MIN=33
K_SUM=34
K_COUNT=35
K_LOOP=36
K_OFFSET=37
K_AS=38
K_ISA=39
K_IS=40
K_HAS=41
K_ASC=42
K_DESC=43
K_WITHPATH=44
K_TRUE=45
K_FALSE=46
KEYWORD=47
ID=48
STRING=49
','=7
'+'=8
'-'=9
'*'=10
'/'=11
'.'=12
'('=16
'['=17
')'=18
']'=19
@@ -23,7 +23,7 @@ import org.apache.atlas.AtlasException;
 import org.apache.atlas.exception.AtlasBaseException;
 import org.apache.atlas.model.typedef.AtlasBaseTypeDef;
 import org.apache.atlas.model.typedef.AtlasStructDef;
-import org.apache.atlas.query.QueryParser;
+import org.apache.atlas.query.AtlasDSL;
 import org.apache.atlas.repository.graphdb.AtlasVertex;
 import org.apache.atlas.repository.store.graph.AtlasDefStore;
 import org.apache.atlas.type.AtlasTypeRegistry;
@@ -64,7 +64,7 @@ import java.util.regex.Pattern;
         if (!allowReservedKeywords && typeDef instanceof AtlasStructDef) {
             final List<AtlasStructDef.AtlasAttributeDef> attributeDefs = ((AtlasStructDef) typeDef).getAttributeDefs();
             for (AtlasStructDef.AtlasAttributeDef attrDef : attributeDefs) {
-                if (QueryParser.isKeyword(attrDef.getName())) {
+                if (AtlasDSL.Parser.isKeyword(attrDef.getName())) {
                     throw new AtlasBaseException(AtlasErrorCode.ATTRIBUTE_NAME_INVALID, attrDef.getName(), typeDef.getCategory().name());
                 }
             }
......
@@ -25,7 +25,7 @@ import org.apache.atlas.model.typedef.AtlasRelationshipDef;
 import org.apache.atlas.model.typedef.AtlasRelationshipDef.RelationshipCategory;
 import org.apache.atlas.model.typedef.AtlasRelationshipDef.PropagateTags;
 import org.apache.atlas.model.typedef.AtlasRelationshipEndDef;
-import org.apache.atlas.query.QueryParser;
+import org.apache.atlas.query.AtlasDSL;
 import org.apache.atlas.repository.Constants;
 import org.apache.atlas.repository.graphdb.AtlasEdge;
 import org.apache.atlas.repository.graphdb.AtlasVertex;
@@ -354,11 +354,11 @@ public class AtlasRelationshipDefStoreV1 extends AtlasAbstractDefStoreV1<AtlasRelationshipDef> {
         }

         if (!allowReservedKeywords) {
-            if (QueryParser.isKeyword(end1.getName())) {
+            if (AtlasDSL.Parser.isKeyword(end1.getName())) {
                 throw new AtlasBaseException(AtlasErrorCode.RELATIONSHIPDEF_END1_NAME_INVALID, end1.getName());
             }

-            if (QueryParser.isKeyword(end2.getName())) {
+            if (AtlasDSL.Parser.isKeyword(end2.getName())) {
                 throw new AtlasBaseException(AtlasErrorCode.RELATIONSHIPDEF_END2_NAME_INVALID, end2.getName());
             }
         }
......
@@ -127,8 +127,8 @@ public class DSLQueriesTest extends BasicTestSetup {
                 {"hive_db where hive_db is JdbcAccess", 0},
                 {"hive_db where hive_db has name", 3},
                 {"hive_db as db1 hive_table where (db1.name = \"Reporting\")", 0},
-                {"hive_table where (name = \"sales_fact\" and createTime > \"2014-01-01\" ) select name as _col_0, createTime as _col_1 ", 1},
-                {"hive_table where (name = \"sales_fact\" and createTime >= \"2014-12-11T02:35:58.440Z\" ) select name as _col_0, createTime as _col_1 ", 1},
+                {"hive_table where (name = \"sales_fact\" and createTime > \"2014-01-01\" ) select name as _col_0, createTime as _col_1", 1},
+                {"hive_table where (name = \"sales_fact\" and createTime >= \"2014-12-11T02:35:58.440Z\" ) select name as _col_0, createTime as _col_1", 1},
                 {"Dimension", 5},
                 {"JdbcAccess", 2},
                 {"ETL", 5},
@@ -240,12 +240,12 @@ public class DSLQueriesTest extends BasicTestSetup {
                 {"hive_db as db1 hive_table where (db1.name = \"Reporting\")", 0},
-                {"hive_table where (name = \"sales_fact\" and createTime > \"2014-01-01\" ) select name as _col_0, createTime as _col_1 ", 1},
-                {"hive_table where (name = \"sales_fact\" and createTime > \"2014-01-01\" ) select name as _col_0, createTime as _col_1 limit 10 ", 1},
+                {"hive_table where (name = \"sales_fact\" and createTime > \"2014-01-01\" ) select name as _col_0, createTime as _col_1", 1},
+                {"hive_table where (name = \"sales_fact\" and createTime > \"2014-01-01\" ) select name as _col_0, createTime as _col_1 limit 10", 1},
                 {"hive_table where (name = \"sales_fact\" and createTime > \"2014-01-01\" ) select name as _col_0, createTime as _col_1 limit 10 offset 0", 1},
                 {"hive_table where (name = \"sales_fact\" and createTime > \"2014-01-01\" ) select name as _col_0, createTime as _col_1 limit 10 offset 5", 0},
-                {"hive_table where (name = \"sales_fact\" and createTime >= \"2014-12-11T02:35:58.440Z\" ) select name as _col_0, createTime as _col_1 ", 1},
+                {"hive_table where (name = \"sales_fact\" and createTime >= \"2014-12-11T02:35:58.440Z\" ) select name as _col_0, createTime as _col_1", 1},
                 {"hive_table where (name = \"sales_fact\" and createTime >= \"2014-12-11T02:35:58.440Z\" ) select name as _col_0, createTime as _col_1 limit 10 offset 0", 1},
                 {"hive_table where (name = \"sales_fact\" and createTime >= \"2014-12-11T02:35:58.440Z\" ) select name as _col_0, createTime as _col_1 limit 10 offset 1", 0},
                 {"hive_table where (name = \"sales_fact\" and createTime >= \"2014-12-11T02:35:58.440Z\" ) select name as _col_0, createTime as _col_1 limit 10", 1},
@@ -285,7 +285,7 @@ public class DSLQueriesTest extends BasicTestSetup {
                 {"hive_column select hive_column.qualifiedName orderby qualifiedName desc limit 5", 5, "hive_column.qualifiedName", false},
                 {"from hive_db orderby hive_db.owner limit 3", 3, "owner", true},
-                {"hive_db where hive_db.name=\"Reporting\" orderby 'owner'", 1, "owner", true},
+                {"hive_db where hive_db.name=\"Reporting\" orderby owner", 1, "owner", true},
                 {"hive_db where hive_db.name=\"Reporting\" orderby hive_db.owner limit 10 ", 1, "owner", true},
                 {"hive_db where hive_db.name=\"Reporting\" select name, owner orderby hive_db.name ", 1, "name", true},
@@ -327,16 +327,16 @@ public class DSLQueriesTest extends BasicTestSetup {
                 {"hive_db where hive_db has name orderby hive_db.owner limit 2 offset 0", 2, "owner", true},
                 {"hive_db where hive_db has name orderby hive_db.owner limit 2 offset 1", 2, "owner", true},
-                {"hive_table where (name = \"sales_fact\" and createTime > \"2014-01-01\" ) select name as _col_0, createTime as _col_1 orderby '_col_1' ", 1, "_col_1", true},
-                {"hive_table where (name = \"sales_fact\" and createTime > \"2014-01-01\" ) select name as _col_0, createTime as _col_1 orderby '_col_1' limit 10 ", 1, "_col_1", true},
-                {"hive_table where (name = \"sales_fact\" and createTime > \"2014-01-01\" ) select name as _col_0, createTime as _col_1 orderby '_col_1' limit 10 offset 0", 1, "_col_1", true},
-                {"hive_table where (name = \"sales_fact\" and createTime > \"2014-01-01\" ) select name as _col_0, createTime as _col_1 orderby '_col_1' limit 10 offset 5", 0, "_col_1", true},
-                {"hive_table where (name = \"sales_fact\" and createTime >= \"2014-12-11T02:35:58.440Z\" ) select name as _col_0, createTime as _col_1 orderby '_col_0' ", 1, "_col_0", true},
-                {"hive_table where (name = \"sales_fact\" and createTime >= \"2014-12-11T02:35:58.440Z\" ) select name as _col_0, createTime as _col_1 orderby '_col_0' limit 10 offset 0", 1, "_col_0", true},
-                {"hive_table where (name = \"sales_fact\" and createTime >= \"2014-12-11T02:35:58.440Z\" ) select name as _col_0, createTime as _col_1 orderby '_col_0' limit 10 offset 1", 0, "_col_0", true},
-                {"hive_table where (name = \"sales_fact\" and createTime >= \"2014-12-11T02:35:58.440Z\" ) select name as _col_0, createTime as _col_1 orderby '_col_0' limit 10", 1, "_col_0", true},
-                {"hive_table where (name = \"sales_fact\" and createTime >= \"2014-12-11T02:35:58.440Z\" ) select name as _col_0, createTime as _col_1 orderby '_col_0' limit 0 offset 1", 0, "_col_0", true},
+                {"hive_table where (name = \"sales_fact\" and createTime > \"2014-01-01\" ) select name as _col_0, createTime as _col_1 orderby createTime ", 1, "_col_1", true},
+                {"hive_table where (name = \"sales_fact\" and createTime > \"2014-01-01\" ) select name as _col_0, createTime as _col_1 orderby createTime limit 10 ", 1, "_col_1", true},
+                {"hive_table where (name = \"sales_fact\" and createTime > \"2014-01-01\" ) select name as _col_0, createTime as _col_1 orderby createTime limit 10 offset 0", 1, "_col_1", true},
+                {"hive_table where (name = \"sales_fact\" and createTime > \"2014-01-01\" ) select name as _col_0, createTime as _col_1 orderby createTime limit 10 offset 5", 0, "_col_1", true},
+                {"hive_table where (name = \"sales_fact\" and createTime >= \"2014-12-11T02:35:58.440Z\" ) select name as _col_0, createTime as _col_1 orderby name ", 1, "_col_0", true},
+                {"hive_table where (name = \"sales_fact\" and createTime >= \"2014-12-11T02:35:58.440Z\" ) select name as _col_0, createTime as _col_1 orderby name limit 10 offset 0", 1, "_col_0", true},
+                {"hive_table where (name = \"sales_fact\" and createTime >= \"2014-12-11T02:35:58.440Z\" ) select name as _col_0, createTime as _col_1 orderby name limit 10 offset 1", 0, "_col_0", true},
+                {"hive_table where (name = \"sales_fact\" and createTime >= \"2014-12-11T02:35:58.440Z\" ) select name as _col_0, createTime as _col_1 orderby name limit 10", 1, "_col_0", true},
+                {"hive_table where (name = \"sales_fact\" and createTime >= \"2014-12-11T02:35:58.440Z\" ) select name as _col_0, createTime as _col_1 orderby name limit 0 offset 1", 0, "_col_0", true},
         };
     }
......
@@ -41,7 +41,7 @@ import static org.testng.Assert.assertNotNull;
 import static org.testng.Assert.assertNull;
 import static org.testng.Assert.assertTrue;

-public class QueryProcessorTest {
+public class GremlinQueryComposerTest {
     private List<String> errorList = new ArrayList<>();

     @Test
@@ -56,8 +56,9 @@ public class QueryProcessorTest {
         verify("Table isa Dimension", expected);
         verify("Table is Dimension", expected);
         verify("Table where Table is Dimension", expected);
-        verify("Table isa Dimension where name = 'sales'",
-               "g.V().has('__typeName', 'Table').has('__traitNames', within('Dimension')).has('Table.name', eq('sales')).limit(25).toList()");
+        // Not supported since it requires two singleSrcQuery, one for isa clause other for where clause
+        // verify("Table isa Dimension where name = 'sales'",
+        //        "g.V().has('__typeName', 'Table').has('__traitNames', within('Dimension')).has('Table.name', eq('sales')).limit(25).toList()");
     }

     @Test
@@ -90,10 +91,15 @@ public class QueryProcessorTest {
     public void tableSelectColumns() {
         String exMain = "g.V().has('__typeName', 'Table').out('__Table.columns').limit(10).toList()";
         String exSel = "def f(r){ r }";
+        String exSel1 = "def f(r){ return [['db.name']].plus(r.collect({[it.value('DB.name')]})).unique(); }";
         verify("Table select columns limit 10", getExpected(exSel, exMain));

         String exMain2 = "g.V().has('__typeName', 'Table').out('__Table.db').limit(25).toList()";
-        verify("Table select db.name", getExpected(exSel, exMain2));
+        verify("Table select db", getExpected(exSel, exMain2));
+
+        String exMain3 = "g.V().has('__typeName', 'Table').out('__Table.db').limit(25).toList()";
+        verify("Table select db.name", getExpected(exSel1, exMain3));
     }

     @Test(enabled = false)
@@ -117,7 +123,7 @@ public class QueryProcessorTest {
         String exSel = "def f(r){ return [['d.name','d.owner']].plus(r.collect({[it.value('DB.name'),it.value('DB.owner')]})).unique(); }";
-        String exMain = "g.V().has('__typeName', 'DB').as('d').order().by('DB.owner)').limit(25).toList()";
+        String exMain = "g.V().has('__typeName', 'DB').as('d').order().by('DB.owner').limit(25).toList()";
         verify("DB as d select d.name, d.owner orderby (d.owner) limit 25", getExpected(exSel, exMain));

         String exMain2 = "g.V().has('__typeName', 'Table').and(__.has('Table.name', eq(\"sales_fact\")),__.has('Table.createTime', gt('1388563200000'))).order().by('Table.createTime').limit(25).toList()";
@@ -151,7 +157,7 @@ public class QueryProcessorTest {
         verify("Table where Asset.name like \"Tab*\"",
                "g.V().has('__typeName', 'Table').has('Table.name', org.janusgraph.core.attribute.Text.textRegex(\"Tab.*\")).limit(25).toList()");
         verify("from Table where (db.name = \"Reporting\")",
-               "g.V().has('__typeName', 'Table').out('__Table.db').has('DB.name', eq(\"Reporting\")).in('__Table.db').limit(25).toList()");
+               "g.V().has('__typeName', 'Table').out('__Table.db').has('DB.name', eq(\"Reporting\")).dedup().in('__Table.db').limit(25).toList()");
     }

     @Test
@@ -169,14 +175,6 @@ public class QueryProcessorTest {
     }

     @Test
-    public void multipleWhereClauses() {
-        String exSel = "def f(r){ return [['c.owner','c.name','c.dataType']].plus(r.collect({[it.value('Column.owner'),it.value('Column.name'),it.value('Column.dataType')]})).unique(); }";
-        String exMain = "g.V().has('__typeName', 'Table').has('Table.name', eq(\"sales_fact\")).out('__Table.columns').as('c').limit(25).toList()";
-        verify("Table where name=\"sales_fact\", columns as c select c.owner, c.name, c.dataType", getExpected(exSel, exMain));
-        ;
-    }
-
-    @Test
     public void subType() {
         String exMain = "g.V().has('__typeName', within('Asset','Table')).limit(25).toList()";
         String exSel = "def f(r){ return [['name','owner']].plus(r.collect({[it.value('Asset.name'),it.value('Asset.owner')]})).unique(); }";
@@ -253,7 +251,7 @@ public class QueryProcessorTest {
         verify("hive_db where hive_db.name='Reporting' and hive_db.createTime < '2017-12-12T02:35:58.440Z'",
                "g.V().has('__typeName', 'hive_db').and(__.has('hive_db.name', eq('Reporting')),__.has('hive_db.createTime', lt('1513046158440'))).limit(25).toList()");
         verify("Table where db.name='Sales' and db.clusterName='cl1'",
-               "g.V().has('__typeName', 'Table').and(__.out('__Table.db').has('DB.name', eq('Sales')).in('__Table.db'),__.out('__Table.db').has('DB.clusterName', eq('cl1')).in('__Table.db')).limit(25).toList()");
+               "g.V().has('__typeName', 'Table').and(__.out('__Table.db').has('DB.name', eq('Sales')).dedup().in('__Table.db'),__.out('__Table.db').has('DB.clusterName', eq('cl1')).dedup().in('__Table.db')).limit(25).toList()");
     }

     private void verify(String dsl, String expectedGremlin) {
@@ -288,16 +286,15 @@ public class QueryProcessorTest {
     }

     private String getGremlinQuery(AtlasDSLParser.QueryContext queryContext) {
         AtlasTypeRegistry registry = mock(AtlasTypeRegistry.class);
         org.apache.atlas.query.Lookup lookup = new TestLookup(errorList, registry);
-        QueryProcessor.Context context = new QueryProcessor.Context(errorList, lookup);
+        GremlinQueryComposer.Context context = new GremlinQueryComposer.Context(errorList, lookup);

-        QueryProcessor queryProcessor = new QueryProcessor(lookup, context);
-        DSLVisitor qv = new DSLVisitor(queryProcessor);
+        GremlinQueryComposer gremlinQueryComposer = new GremlinQueryComposer(lookup, context);
+        DSLVisitor qv = new DSLVisitor(gremlinQueryComposer);
         qv.visit(queryContext);
-        queryProcessor.close();

-        String s = queryProcessor.getText();
+        String s = gremlinQueryComposer.get();
         assertTrue(StringUtils.isNotEmpty(s));

         return s;
     }
@@ -326,7 +323,7 @@ public class QueryProcessorTest {
         }

         @Override
-        public String getQualifiedName(QueryProcessor.Context context, String name) {
+        public String getQualifiedName(GremlinQueryComposer.Context context, String name) {
             if(name.contains("."))
                 return name;
@@ -334,7 +331,7 @@ public class QueryProcessorTest {
         }

         @Override
-        public boolean isPrimitive(QueryProcessor.Context context, String attributeName) {
+        public boolean isPrimitive(GremlinQueryComposer.Context context, String attributeName) {
             return attributeName.equals("name") ||
                    attributeName.equals("owner") ||
                    attributeName.equals("createdTime") ||
@@ -343,7 +340,7 @@ public class QueryProcessorTest {
         }

         @Override
-        public String getRelationshipEdgeLabel(QueryProcessor.Context context, String attributeName) {
+        public String getRelationshipEdgeLabel(GremlinQueryComposer.Context context, String attributeName) {
             if (attributeName.equalsIgnoreCase("columns"))
                 return "__Table.columns";
             if (attributeName.equalsIgnoreCase("db"))
@@ -353,29 +350,29 @@ public class QueryProcessorTest {
         }

         @Override
-        public boolean hasAttribute(QueryProcessor.Context context, String typeName) {
+        public boolean hasAttribute(GremlinQueryComposer.Context context, String typeName) {
             return (context.getActiveTypeName().equals("Table") && typeName.equals("db")) ||
                    (context.getActiveTypeName().equals("Table") && typeName.equals("columns"));
         }

         @Override
-        public boolean doesTypeHaveSubTypes(QueryProcessor.Context context) {
+        public boolean doesTypeHaveSubTypes(GremlinQueryComposer.Context context) {
             return context.getActiveTypeName().equalsIgnoreCase("Asset");
         }

         @Override
-        public String getTypeAndSubTypes(QueryProcessor.Context context) {
+        public String getTypeAndSubTypes(GremlinQueryComposer.Context context) {
             String[] str = new String[]{"'Asset'", "'Table'"};
             return StringUtils.join(str, ",");
         }

         @Override
-        public boolean isTraitType(QueryProcessor.Context context) {
+        public boolean isTraitType(GremlinQueryComposer.Context context) {
             return context.getActiveTypeName().equals("PII") || context.getActiveTypeName().equals("Dimension");
         }

         @Override
-        public String getTypeFromEdge(QueryProcessor.Context context, String item) {
+        public String getTypeFromEdge(GremlinQueryComposer.Context context, String item) {
             if(context.getActiveTypeName().equals("DB") && item.equals("Table")) {
                 return "Table";
             } else if(context.getActiveTypeName().equals("Table") && item.equals("Column")) {
@@ -389,7 +386,7 @@ public class QueryProcessorTest {
         }

         @Override
-        public boolean isDate(QueryProcessor.Context context, String attributeName) {
+        public boolean isDate(GremlinQueryComposer.Context context, String attributeName) {
             return attributeName.equals("createdTime") ||
                    attributeName.equals("createTime");
         }
......