Commit 8db8b5c7 by apoorvnaik; committed by Madhan Neethiraj

ATLAS-2229: DSL implementation using ANTLR - #4 (multiple fixes)

parent 5384a742
......@@ -102,7 +102,8 @@ public enum AtlasErrorCode {
INVALID_ENTITY_FOR_CLASSIFICATION (400, "ATLAS-400-00-055", "Entity (guid='{0}',typename='{1}') cannot be classified by Classification '{2}', because '{1}' is not in the ClassificationDef's restrictions."),
SAVED_SEARCH_CHANGE_USER(400, "ATLAS-400-00-056", "saved-search {0} cannot be moved from user {1} to {2}"),
INVALID_QUERY_PARAM_LENGTH(400, "ATLAS-400-00-057" , "Length of query param {0} exceeds the limit"),
INVALID_QUERY_LENGTH(400, "ATLAS-400-00-057" , "Invalid query length, update {0} to change the limit" ),
INVALID_QUERY_LENGTH(400, "ATLAS-400-00-058" , "Invalid query length, update {0} to change the limit" ),
INVALID_DSL_QUERY(400, "ATLAS-400-00-059" , "Invalid DSL query: {0}. Reason: {1}. Please refer to Atlas DSL grammar for more information" ),
// All Not found enums go here
TYPE_NAME_NOT_FOUND(404, "ATLAS-404-00-001", "Given typename {0} was invalid"),
......
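With the new INVALID_DSL_QUERY code, a syntactically invalid query now fails fast at parse time. For a malformed input such as "Table where" (illustrative), the service would respond 400 with a message of the form "Invalid DSL query: Table where. Reason: <ANTLR syntax error>. Please refer to Atlas DSL grammar for more information", where the reason text is supplied by the Validator error listener introduced below.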
......@@ -33,12 +33,9 @@ import org.apache.atlas.model.discovery.SearchParameters;
import org.apache.atlas.model.instance.AtlasEntityHeader;
import org.apache.atlas.model.instance.AtlasObjectId;
import org.apache.atlas.model.profile.AtlasUserSavedSearch;
import org.apache.atlas.query.Expressions.Expression;
import org.apache.atlas.query.AtlasDSL;
import org.apache.atlas.query.GremlinQuery;
import org.apache.atlas.query.GremlinTranslator;
import org.apache.atlas.query.QueryParams;
import org.apache.atlas.query.QueryParser;
import org.apache.atlas.query.QueryProcessor;
import org.apache.atlas.repository.Constants;
import org.apache.atlas.repository.graph.GraphBackedSearchIndexer;
import org.apache.atlas.repository.graph.GraphHelper;
......@@ -678,16 +675,8 @@ public class EntityDiscoveryService implements AtlasDiscoveryService {
}
private GremlinQuery toGremlinQuery(String query, int limit, int offset) throws AtlasBaseException {
QueryParams params = validateSearchParams(limit, offset);
Expression expression = QueryParser.apply(query, params);
if (expression == null) {
throw new AtlasBaseException(DISCOVERY_QUERY_FAILED, query);
}
QueryProcessor queryProcessor = new QueryProcessor(typeRegistry, limit, offset);
Expression validExpression = queryProcessor.validate(expression);
GremlinQuery gremlinQuery = new GremlinTranslator(queryProcessor, validExpression).translate();
QueryParams params = validateSearchParams(limit, offset);
GremlinQuery gremlinQuery = new AtlasDSL.Translator(AtlasDSL.Parser.parse(query), typeRegistry, params.offset(), params.limit()).translate();
if (LOG.isDebugEnabled()) {
LOG.debug("Translated Gremlin Query: {}", gremlinQuery.queryStr());
......
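Net effect in EntityDiscoveryService: the old three-step path (QueryParser.apply to build an Expression, QueryProcessor.validate, then GremlinTranslator.translate) collapses into a single chained call on the new AtlasDSL facade, and parse failures now surface as AtlasBaseException(INVALID_DSL_QUERY) thrown by the parser instead of a null Expression checked by the caller.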
......@@ -17,12 +17,17 @@
*/
package org.apache.atlas.query;
import org.antlr.v4.runtime.BaseErrorListener;
import org.antlr.v4.runtime.CharStreams;
import org.antlr.v4.runtime.CommonTokenStream;
import org.antlr.v4.runtime.RecognitionException;
import org.antlr.v4.runtime.Recognizer;
import org.antlr.v4.runtime.TokenStream;
import org.apache.atlas.query.Expressions.Expression;
import org.apache.atlas.AtlasErrorCode;
import org.apache.atlas.exception.AtlasBaseException;
import org.apache.atlas.query.antlr4.AtlasDSLLexer;
import org.apache.atlas.query.antlr4.AtlasDSLParser;
import org.apache.atlas.type.AtlasTypeRegistry;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
......@@ -33,34 +38,98 @@ import java.util.Arrays;
import java.util.HashSet;
import java.util.Set;
public class QueryParser {
private static final Logger LOG = LoggerFactory.getLogger(QueryParser.class);
public class AtlasDSL {
private static final Set<String> RESERVED_KEYWORDS =
new HashSet<>(Arrays.asList("[", "]", "(", ")", "=", "<", ">", "!=", "<=", ">=", ",", "and", "or", "+", "-",
"*", "/", ".", "select", "from", "where", "groupby", "loop", "isa", "is", "has",
"as", "times", "withPath", "limit", "offset", "orderby", "count", "max", "min",
"sum", "by", "order", "like"));
public static class Parser {
private static final Logger LOG = LoggerFactory.getLogger(Parser.class);
private static final Set<String> RESERVED_KEYWORDS =
new HashSet<>(Arrays.asList("[", "]", "(", ")", "=", "<", ">", "!=", "<=", ">=", ",", "and", "or", "+", "-",
"*", "/", ".", "select", "from", "where", "groupby", "loop", "isa", "is", "has",
"as", "times", "withPath", "limit", "offset", "orderby", "count", "max", "min",
"sum", "by", "order", "like"));
public static boolean isKeyword(String word) {
return RESERVED_KEYWORDS.contains(word);
}
public static AtlasDSLParser.QueryContext parse(String queryStr) throws AtlasBaseException {
AtlasDSLParser.QueryContext ret;
try {
InputStream stream = new ByteArrayInputStream(queryStr.getBytes());
AtlasDSLLexer lexer = new AtlasDSLLexer(CharStreams.fromStream(stream));
Validator validator = new Validator();
TokenStream inputTokenStream = new CommonTokenStream(lexer);
AtlasDSLParser parser = new AtlasDSLParser(inputTokenStream);
parser.removeErrorListeners();
parser.addErrorListener(validator);
// Validate the syntax of the query here
ret = parser.query();
if (!validator.isValid()) {
LOG.error("Invalid DSL: {} Reason: {}", queryStr, validator.getErrorMsg());
throw new AtlasBaseException(AtlasErrorCode.INVALID_DSL_QUERY, queryStr, validator.getErrorMsg());
}
} catch (IOException e) {
throw new AtlasBaseException(e);
}
return ret;
}
public static boolean isKeyword(String word) {
return RESERVED_KEYWORDS.contains(word);
}
public static Expression apply(String queryStr, QueryParams params) {
Expression ret = null;
static class Validator extends BaseErrorListener {
private boolean isValid = true;
private String errorMsg = "";
@Override
public void syntaxError(final Recognizer<?, ?> recognizer, final Object offendingSymbol, final int line, final int charPositionInLine, final String msg, final RecognitionException e) {
// TODO: Capture multiple datapoints
isValid = false;
errorMsg = msg;
}
public boolean isValid() {
return isValid;
}
public String getErrorMsg() {
return errorMsg;
}
}
try {
InputStream stream = new ByteArrayInputStream(queryStr.getBytes());
AtlasDSLLexer lexer = new AtlasDSLLexer(CharStreams.fromStream(stream));
TokenStream inputTokenStream = new CommonTokenStream(lexer);
AtlasDSLParser parser = new AtlasDSLParser(inputTokenStream);
public static class Translator {
private final AtlasDSLParser.QueryContext queryContext;
private final AtlasTypeRegistry typeRegistry;
private final int offset;
private final int limit;
ret = new Expression(parser.query());
} catch (IOException e) {
ret = null;
LOG.error(e.getMessage(), e);
public Translator(final AtlasDSLParser.QueryContext queryContext, AtlasTypeRegistry typeRegistry, int offset, int limit) {
this.queryContext = queryContext;
this.typeRegistry = typeRegistry;
this.offset = offset;
this.limit = limit;
}
return ret;
public GremlinQuery translate() {
GremlinQueryComposer gremlinQueryComposer = new GremlinQueryComposer(typeRegistry);
if (offset >= 0) {
if (!gremlinQueryComposer.hasLimitOffset()) {
gremlinQueryComposer.addLimit(Integer.toString(limit), Integer.toString(offset));
}
}
DSLVisitor dslVisitor = new DSLVisitor(gremlinQueryComposer);
// Now process the query and collect the translation in the composer
queryContext.accept(dslVisitor);
return new GremlinQuery(gremlinQueryComposer.get(), gremlinQueryComposer.hasSelect());
}
}
}
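Taken together, the two nested classes form a small pipeline: Parser.parse runs the ANTLR lexer/parser with the Validator attached and returns the parse tree, and Translator walks that tree through DSLVisitor into a Gremlin string. A minimal caller-side sketch, assuming a populated AtlasTypeRegistry (the query string is illustrative):

// Sketch only: typeRegistry would come from the running Atlas type system.
AtlasDSLParser.QueryContext queryContext = AtlasDSL.Parser.parse("Table where name = 'sales_fact'");
GremlinQuery gremlinQuery = new AtlasDSL.Translator(queryContext, typeRegistry, 0, 25).translate();
LOG.info("Translated Gremlin Query: {}", gremlinQuery.queryStr());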
......@@ -28,51 +28,51 @@ import java.util.ArrayList;
import java.util.List;
import java.util.Objects;
public class DSLVisitor extends AtlasDSLParserBaseVisitor<String> {
public class DSLVisitor extends AtlasDSLParserBaseVisitor<Void> {
private static final Logger LOG = LoggerFactory.getLogger(DSLVisitor.class);
private static final String AND = "AND";
private static final String OR = "OR";
private final QueryProcessor queryProcessor;
private final GremlinQueryComposer gremlinQueryComposer;
public DSLVisitor(QueryProcessor queryProcessor) {
this.queryProcessor = queryProcessor;
public DSLVisitor(GremlinQueryComposer gremlinQueryComposer) {
this.gremlinQueryComposer = gremlinQueryComposer;
}
@Override
public String visitIsClause(IsClauseContext ctx) {
public Void visitIsClause(IsClauseContext ctx) {
if (LOG.isDebugEnabled()) {
LOG.debug("=> DSLVisitor.visitIsClause({})", ctx);
}
queryProcessor.addFromIsA(ctx.arithE().getText(), ctx.identifier().getText());
gremlinQueryComposer.addFromIsA(ctx.arithE().getText(), ctx.identifier().getText());
return super.visitIsClause(ctx);
}
@Override
public String visitHasClause(HasClauseContext ctx) {
public Void visitHasClause(HasClauseContext ctx) {
if (LOG.isDebugEnabled()) {
LOG.debug("=> DSLVisitor.visitHasClause({})", ctx);
}
queryProcessor.addFromProperty(ctx.arithE().getText(), ctx.identifier().getText());
gremlinQueryComposer.addFromProperty(ctx.arithE().getText(), ctx.identifier().getText());
return super.visitHasClause(ctx);
}
@Override
public String visitLimitOffset(LimitOffsetContext ctx) {
public Void visitLimitOffset(LimitOffsetContext ctx) {
if (LOG.isDebugEnabled()) {
LOG.debug("=> DSLVisitor.visitLimitOffset({})", ctx);
}
queryProcessor.addLimit(ctx.limitClause().NUMBER().toString(),
(ctx.offsetClause() == null ? "0" : ctx.offsetClause().NUMBER().getText()));
gremlinQueryComposer.addLimit(ctx.limitClause().NUMBER().toString(),
(ctx.offsetClause() == null ? "0" : ctx.offsetClause().NUMBER().getText()));
return super.visitLimitOffset(ctx);
}
@Override
public String visitSelectExpr(SelectExprContext ctx) {
public Void visitSelectExpr(SelectExprContext ctx) {
if (LOG.isDebugEnabled()) {
LOG.debug("=> DSLVisitor.visitSelectExpr({})", ctx);
}
......@@ -85,7 +85,7 @@ public class DSLVisitor extends AtlasDSLParserBaseVisitor<String> {
String[] items = new String[ctx.selectExpression().size()];
String[] labels = new String[ctx.selectExpression().size()];
QueryProcessor.SelectExprMetadata selectExprMetadata = new QueryProcessor.SelectExprMetadata();
GremlinQueryComposer.SelectExprMetadata selectExprMetadata = new GremlinQueryComposer.SelectExprMetadata();
for (int i = 0; i < ctx.selectExpression().size(); i++) {
SelectExpressionContext selectExpression = ctx.selectExpression(i);
......@@ -116,23 +116,25 @@ public class DSLVisitor extends AtlasDSLParserBaseVisitor<String> {
selectExprMetadata.setItems(items);
selectExprMetadata.setLabels(labels);
queryProcessor.addSelect(selectExprMetadata);
gremlinQueryComposer.addSelect(selectExprMetadata);
}
return super.visitSelectExpr(ctx);
}
@Override
public String visitOrderByExpr(OrderByExprContext ctx) {
public Void visitOrderByExpr(OrderByExprContext ctx) {
if (LOG.isDebugEnabled()) {
LOG.debug("=> DSLVisitor.visitOrderByExpr({})", ctx);
}
queryProcessor.addOrderBy(ctx.expr().getText(), (ctx.sortOrder() != null && ctx.sortOrder().getText().equalsIgnoreCase("desc")));
// Extract the attribute from parentheses
String text = ctx.expr().getText().replace("(", "").replace(")", "");
gremlinQueryComposer.addOrderBy(text, (ctx.sortOrder() != null && ctx.sortOrder().getText().equalsIgnoreCase("desc")));
return super.visitOrderByExpr(ctx);
}
@Override
public String visitWhereClause(WhereClauseContext ctx) {
public Void visitWhereClause(WhereClauseContext ctx) {
if (LOG.isDebugEnabled()) {
LOG.debug("=> DSLVisitor.visitWhereClause({})", ctx);
}
......@@ -141,12 +143,12 @@ public class DSLVisitor extends AtlasDSLParserBaseVisitor<String> {
// The first expr shouldn't be processed if there are following exprs
ExprContext expr = ctx.expr();
processExpr(expr, queryProcessor);
processExpr(expr, gremlinQueryComposer);
return super.visitWhereClause(ctx);
}
@Override
public String visitFromExpression(final FromExpressionContext ctx) {
public Void visitFromExpression(final FromExpressionContext ctx) {
if (LOG.isDebugEnabled()) {
LOG.debug("=> DSLVisitor.visitFromExpression({})", ctx);
}
......@@ -155,38 +157,38 @@ public class DSLVisitor extends AtlasDSLParserBaseVisitor<String> {
AliasExprContext aliasExpr = fromSrc.aliasExpr();
if (aliasExpr != null) {
queryProcessor.addFromAlias(aliasExpr.identifier(0).getText(), aliasExpr.identifier(1).getText());
gremlinQueryComposer.addFromAlias(aliasExpr.identifier(0).getText(), aliasExpr.identifier(1).getText());
} else {
if (fromSrc.identifier() != null) {
queryProcessor.addFrom(fromSrc.identifier().getText());
gremlinQueryComposer.addFrom(fromSrc.identifier().getText());
} else {
queryProcessor.addFrom(fromSrc.literal().getText());
gremlinQueryComposer.addFrom(fromSrc.literal().getText());
}
}
return super.visitFromExpression(ctx);
}
@Override
public String visitGroupByExpression(GroupByExpressionContext ctx) {
public Void visitGroupByExpression(GroupByExpressionContext ctx) {
if (LOG.isDebugEnabled()) {
LOG.debug("=> DSLVisitor.visitGroupByExpression({})", ctx);
}
String s = ctx.selectExpr().getText();
queryProcessor.addGroupBy(s);
gremlinQueryComposer.addGroupBy(s);
return super.visitGroupByExpression(ctx);
}
private void processExpr(final ExprContext expr, QueryProcessor queryProcessor) {
private void processExpr(final ExprContext expr, GremlinQueryComposer gremlinQueryComposer) {
if (CollectionUtils.isNotEmpty(expr.exprRight())) {
processExprRight(expr, queryProcessor);
processExprRight(expr, gremlinQueryComposer);
} else {
processExpr(expr.compE(), queryProcessor);
processExpr(expr.compE(), gremlinQueryComposer);
}
}
private void processExprRight(final ExprContext expr, QueryProcessor queryProcessor) {
QueryProcessor nestedProcessor = queryProcessor.createNestedProcessor();
private void processExprRight(final ExprContext expr, GremlinQueryComposer gremlinQueryComposer) {
GremlinQueryComposer nestedProcessor = gremlinQueryComposer.createNestedProcessor();
List<String> nestedQueries = new ArrayList<>();
String prev = null;
......@@ -194,20 +196,20 @@ public class DSLVisitor extends AtlasDSLParserBaseVisitor<String> {
// Process first expression then proceed with the others
// expr -> compE exprRight*
processExpr(expr.compE(), nestedProcessor);
nestedQueries.add(nestedProcessor.getText());
nestedQueries.add(nestedProcessor.get());
for (ExprRightContext exprRight : expr.exprRight()) {
nestedProcessor = queryProcessor.createNestedProcessor();
nestedProcessor = gremlinQueryComposer.createNestedProcessor();
// AND expression
if (exprRight.K_AND() != null) {
if (prev == null) prev = AND;
if (OR.equalsIgnoreCase(prev)) {
// Change of context
QueryProcessor orClause = nestedProcessor.createNestedProcessor();
GremlinQueryComposer orClause = nestedProcessor.createNestedProcessor();
orClause.addOrClauses(nestedQueries);
nestedQueries.clear();
nestedQueries.add(orClause.getText());
nestedQueries.add(orClause.get());
}
prev = AND;
}
......@@ -216,25 +218,25 @@ public class DSLVisitor extends AtlasDSLParserBaseVisitor<String> {
if (prev == null) prev = OR;
if (AND.equalsIgnoreCase(prev)) {
// Change of context
QueryProcessor andClause = nestedProcessor.createNestedProcessor();
GremlinQueryComposer andClause = nestedProcessor.createNestedProcessor();
andClause.addAndClauses(nestedQueries);
nestedQueries.clear();
nestedQueries.add(andClause.getText());
nestedQueries.add(andClause.get());
}
prev = OR;
}
processExpr(exprRight.compE(), nestedProcessor);
nestedQueries.add(nestedProcessor.getText());
nestedQueries.add(nestedProcessor.get());
}
if (AND.equalsIgnoreCase(prev)) {
queryProcessor.addAndClauses(nestedQueries);
gremlinQueryComposer.addAndClauses(nestedQueries);
}
if (OR.equalsIgnoreCase(prev)) {
queryProcessor.addOrClauses(nestedQueries);
gremlinQueryComposer.addOrClauses(nestedQueries);
}
}
private void processExpr(final CompEContext compE, final QueryProcessor queryProcessor) {
private void processExpr(final CompEContext compE, final GremlinQueryComposer gremlinQueryComposer) {
if (compE != null && compE.isClause() == null && compE.hasClause() == null) {
ComparisonClauseContext comparisonClause = compE.comparisonClause();
......@@ -252,9 +254,9 @@ public class DSLVisitor extends AtlasDSLParserBaseVisitor<String> {
String op = comparisonClause.operator().getText().toUpperCase();
String rhs = comparisonClause.arithE(1).getText();
queryProcessor.addWhere(lhs, op, rhs);
gremlinQueryComposer.addWhere(lhs, op, rhs);
} else {
processExpr(compE.arithE().multiE().atomE().expr(), queryProcessor);
processExpr(compE.arithE().multiE().atomE().expr(), gremlinQueryComposer);
}
}
}
......
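With the visitor returning Void, DSLVisitor no longer threads strings through visit calls; it only drives the GremlinQueryComposer it was built with. Mixed and/or chains are handled in processExprRight by composing nested composers, so (per the updated GremlinQueryComposerTest below) a two-term and-clause such as

hive_db where hive_db.name='Reporting' and hive_db.createTime < '2017-12-12T02:35:58.440Z'

composes into a single Gremlin and(...) step:

g.V().has('__typeName', 'hive_db').and(__.has('hive_db.name', eq('Reporting')),__.has('hive_db.createTime', lt('1513046158440'))).limit(25).toList()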
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.atlas.query;
import org.apache.atlas.query.antlr4.AtlasDSLParser.QueryContext;
public class Expressions {
public static class Expression {
private final QueryContext parsedQuery;
public Expression(QueryContext q) {
parsedQuery = q;
}
public Expression isReady() {
return (parsedQuery != null ? this : null);
}
public void accept(DSLVisitor qv) {
qv.visit(parsedQuery);
}
}
}
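Expression is now a thin holder over the ANTLR QueryContext: isReady() merely reports whether parsing produced a tree, and accept() hands that tree to a DSLVisitor. The Gremlin-building responsibility has moved entirely into GremlinQueryComposer below.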
......@@ -22,11 +22,11 @@ import org.apache.atlas.exception.AtlasBaseException;
import org.apache.atlas.model.TypeCategory;
import org.apache.atlas.model.discovery.SearchParameters;
import org.apache.atlas.model.typedef.AtlasBaseTypeDef;
import org.apache.atlas.query.Expressions.Expression;
import org.apache.atlas.type.AtlasArrayType;
import org.apache.atlas.type.AtlasBuiltInTypes;
import org.apache.atlas.type.AtlasEntityType;
import org.apache.atlas.type.AtlasStructType;
import org.apache.atlas.type.AtlasStructType.AtlasAttribute;
import org.apache.atlas.type.AtlasType;
import org.apache.atlas.type.AtlasTypeRegistry;
import org.apache.commons.lang.StringUtils;
......@@ -43,58 +43,61 @@ import java.util.List;
import java.util.Map;
import java.util.Objects;
import java.util.StringJoiner;
import java.util.stream.Collectors;
import java.util.stream.Stream;
public class QueryProcessor {
private static final Logger LOG = LoggerFactory.getLogger(QueryProcessor.class);
public class GremlinQueryComposer {
private static final Logger LOG = LoggerFactory.getLogger(GremlinQueryComposer.class);
private final int DEFAULT_QUERY_RESULT_LIMIT = 25;
private final int DEFAULT_QUERY_RESULT_OFFSET = 0;
private final List<String> errorList = new ArrayList<>();
private final GremlinClauseList queryClauses = new GremlinClauseList();
private int providedLimit = DEFAULT_QUERY_RESULT_LIMIT;
private int providedOffset = DEFAULT_QUERY_RESULT_OFFSET;
private boolean hasSelect = false;
private boolean isSelectNoop = false;
private boolean hasGrpBy = false;
private final org.apache.atlas.query.Lookup lookup;
private final boolean isNestedQuery;
private int currentStep;
private Context context;
private final List<String> errorList = new ArrayList<>();
private final GremlinClauseList queryClauses = new GremlinClauseList();
private final Lookup lookup;
private final boolean isNestedQuery;
private int providedLimit = DEFAULT_QUERY_RESULT_LIMIT;
private int providedOffset = DEFAULT_QUERY_RESULT_OFFSET;
private boolean hasSelect = false;
private boolean isSelectNoop = false;
private boolean hasGroupBy = false;
private boolean hasOrderBy = false;
private boolean hasLimitOffset = false;
private String offset = null;
private String limit = null;
private Context context;
private SelectExprMetadata selectExprMetadata;
@Inject
public QueryProcessor(AtlasTypeRegistry typeRegistry) {
this.isNestedQuery = false;
lookup = new Lookup(errorList, typeRegistry);
context = new Context(errorList, lookup);
public GremlinQueryComposer(AtlasTypeRegistry typeRegistry) {
isNestedQuery = false;
lookup = new RegistryBasedLookup(errorList, typeRegistry);
context = new Context(errorList, lookup);
init();
}
public QueryProcessor(AtlasTypeRegistry typeRegistry, int limit, int offset) {
public GremlinQueryComposer(AtlasTypeRegistry typeRegistry, int limit, int offset) {
this(typeRegistry);
this.providedLimit = limit;
this.providedOffset = offset < 0 ? DEFAULT_QUERY_RESULT_OFFSET : offset;
providedLimit = limit;
providedOffset = offset < 0 ? DEFAULT_QUERY_RESULT_OFFSET : offset;
}
@VisibleForTesting
QueryProcessor(org.apache.atlas.query.Lookup lookup, Context context) {
GremlinQueryComposer(Lookup lookup, Context context) {
this.isNestedQuery = false;
this.lookup = lookup;
this.context = context;
this.lookup = lookup;
this.context = context;
init();
}
public QueryProcessor(org.apache.atlas.query.Lookup registryLookup, boolean isNestedQuery) {
public GremlinQueryComposer(Lookup registryLookup, boolean isNestedQuery) {
this.isNestedQuery = isNestedQuery;
this.lookup = registryLookup;
init();
}
this.lookup = registryLookup;
public Expression validate(Expression expression) {
return expression.isReady();
init();
}
public void addFrom(String typeName) {
......@@ -103,10 +106,12 @@ public class QueryProcessor {
}
IdentifierHelper.Advice ta = getAdvice(typeName);
if(context.shouldRegister(ta.get())) {
context.registerActive(ta.get());
IdentifierHelper.Advice ia = getAdvice(ta.get());
if (ia.isTrait()) {
add(GremlinClause.TRAIT, ia.get());
} else {
......@@ -147,7 +152,7 @@ public class QueryProcessor {
SearchParameters.Operator op = SearchParameters.Operator.fromString(operator);
IdentifierHelper.Advice org = null;
IdentifierHelper.Advice lhsI = getAdvice(lhs);
if(lhsI.isPrimitive() == false) {
if(!lhsI.isPrimitive()) {
introduceType(lhsI);
org = lhsI;
lhsI = getAdvice(lhs);
......@@ -166,28 +171,13 @@ public class QueryProcessor {
add(GremlinClause.HAS_OPERATOR, lhsI.getQualifiedName(), op.getSymbols()[1], rhs);
}
if(org != null && org.isPrimitive() == false && org.getIntroduceType()) {
if(org != null && !org.isPrimitive() && org.getIntroduceType()) {
add(GremlinClause.DEDUP);
add(GremlinClause.IN, org.getEdgeLabel());
context.registerActive(currentType);
}
}
private String addQuotesIfNecessary(String rhs) {
if(IdentifierHelper.isQuoted(rhs)) return rhs;
return quoted(rhs);
}
private static String quoted(String rhs) {
return IdentifierHelper.getQuoted(rhs);
}
private String parseDate(String rhs) {
String s = IdentifierHelper.isQuoted(rhs) ?
IdentifierHelper.removeQuotes(rhs) :
rhs;
return String.format("'%d'", DateTime.parse(s).getMillis());
}
public void addAndClauses(List<String> clauses) {
queryClauses.add(GremlinClause.AND, StringUtils.join(clauses, ','));
}
......@@ -243,17 +233,18 @@ public class QueryProcessor {
if (isSelectNoop) {
transformationFn = GremlinClause.SELECT_EXPR_NOOP_FN;
} else {
transformationFn = hasGrpBy ? GremlinClause.SELECT_WITH_GRPBY_HELPER_FN : GremlinClause.SELECT_EXPR_HELPER_FN;
transformationFn = hasGroupBy ? GremlinClause.SELECT_WITH_GRPBY_HELPER_FN : GremlinClause.SELECT_EXPR_HELPER_FN;
}
queryClauses.add(0, transformationFn, getJoinedQuotedStr(labels), String.join(",", items));
queryClauses.add(GremlinClause.INLINE_TRANSFORM_CALL);
hasSelect = true;
this.selectExprMetadata = selectExprMetadata;
}
}
public QueryProcessor createNestedProcessor() {
QueryProcessor qp = new QueryProcessor(lookup, true);
public GremlinQueryComposer createNestedProcessor() {
GremlinQueryComposer qp = new GremlinQueryComposer(lookup, true);
qp.context = this.context;
return qp;
}
......@@ -282,7 +273,7 @@ public class QueryProcessor {
}
addGroupByClause(item);
hasGrpBy = true;
hasGroupBy = true;
}
public void addLimit(String limit, String offset) {
......@@ -290,40 +281,28 @@ public class QueryProcessor {
LOG.debug("addLimit(limit={}, offset={})", limit, offset);
}
if (offset.equalsIgnoreCase("0")) {
add(GremlinClause.LIMIT, limit);
} else {
addRangeClause(offset, limit);
}
}
this.limit = limit;
this.offset = offset;
public void close() {
if (queryClauses.isEmpty()) {
queryClauses.clear();
return;
}
if (queryClauses.hasClause(GremlinClause.LIMIT) == -1) {
addLimit(Integer.toString(providedLimit), Integer.toString(providedOffset));
}
updatePosition(GremlinClause.LIMIT);
add(GremlinClause.TO_LIST);
updatePosition(GremlinClause.INLINE_TRANSFORM_CALL);
hasLimitOffset = true;
}
public String getText() {
public String get() {
close();
String ret;
String[] items = new String[queryClauses.size()];
int startIdx = hasSelect ? 1 : 0;
int endIdx = hasSelect ? queryClauses.size() - 1 : queryClauses.size();
boolean needTransformation = needTransformation();
int startIdx = needTransformation ? 1 : 0;
int endIdx = needTransformation ? queryClauses.size() - 1 : queryClauses.size();
for (int i = startIdx; i < endIdx; i++) {
items[i] = queryClauses.getValue(i);
}
if (hasSelect) {
String body = StringUtils.join(Stream.of(items).filter(Objects::nonNull).toArray(), ".");
if (needTransformation) {
String body = String.join(".", Stream.of(items).filter(Objects::nonNull).collect(Collectors.toList()));
String inlineFn = queryClauses.getValue(queryClauses.size() - 1);
String funCall = String.format(inlineFn, body);
ret = queryClauses.getValue(0) + funCall;
......@@ -332,7 +311,7 @@ public class QueryProcessor {
}
if (LOG.isDebugEnabled()) {
LOG.debug("getText() => {}", ret);
LOG.debug("get() => {}", ret);
}
return ret;
}
......@@ -341,12 +320,79 @@ public class QueryProcessor {
return hasSelect;
}
public boolean hasLimitOffset() {
return hasLimitOffset;
}
public void addOrderBy(String name, boolean isDesc) {
if (LOG.isDebugEnabled()) {
LOG.debug("addOrderBy(name={}, isDesc={})", name, isDesc);
}
addOrderByClause(name, isDesc);
AtlasAttribute attribute = ((AtlasStructType) context.getActiveType()).getAttribute(getAttributeName(name));
if (hasGroupBy) {
GremlinClause transformationFn = isDesc ? GremlinClause.GRPBY_ORDERBY_DESC_HELPER_FN : GremlinClause.GRPBY_ORDERBY_ASC_HELPER_FN;
add(0, transformationFn, attribute.getQualifiedName(), attribute.getQualifiedName());
add(GremlinClause.INLINE_TRANSFORM_CALL);
} else {
addOrderByClause(attribute.getQualifiedName(), isDesc);
}
hasOrderBy = true;
}
private String getAttributeName(String fqdn) {
int lastSepIdx = fqdn.lastIndexOf('.');
return lastSepIdx == -1 ? fqdn : fqdn.substring(lastSepIdx + 1);
}
private boolean needTransformation() {
return (hasGroupBy && hasSelect && hasOrderBy) || (hasGroupBy && hasOrderBy) || hasSelect;
}
private static String quoted(String rhs) {
return IdentifierHelper.getQuoted(rhs);
}
private String addQuotesIfNecessary(String rhs) {
if(IdentifierHelper.isQuoted(rhs)) return rhs;
return quoted(rhs);
}
private String parseDate(String rhs) {
String s = IdentifierHelper.isQuoted(rhs) ?
IdentifierHelper.removeQuotes(rhs) :
rhs;
return String.format("'%d'", DateTime.parse(s).getMillis());
}
private void close() {
// No limits or toList() need to be added to the nested queries
if (isNestedQuery) return;
if (hasLimitOffset) {
// If there are any aggregator functions then implicit limits shouldn't be applied
if (selectExprMetadata == null || !selectExprMetadata.hasAggregatorFunction()) {
if (offset.equalsIgnoreCase("0")) {
add(GremlinClause.LIMIT, limit);
} else {
addRangeClause(offset, limit);
}
} else {
LOG.warn("Query has aggregator function. Performance might be slow for large datasets");
}
}
if (queryClauses.isEmpty()) {
queryClauses.clear();
return;
}
updatePosition(GremlinClause.LIMIT);
add(GremlinClause.TO_LIST);
updatePosition(GremlinClause.INLINE_TRANSFORM_CALL);
}
private void updatePosition(GremlinClause clause) {
......@@ -381,7 +427,9 @@ public class QueryProcessor {
private String getJoinedQuotedStr(String[] elements) {
StringJoiner joiner = new StringJoiner(",");
Arrays.stream(elements).map(x -> "'" + x + "'").forEach(joiner::add);
Arrays.stream(elements)
.map(x -> x.contains("'") ? "\"" + x + "\"" : "'" + x + "'")
.forEach(joiner::add);
return joiner.toString();
}
......@@ -427,10 +475,11 @@ public class QueryProcessor {
AS("as('%s')"),
DEDUP("dedup()"),
G("g"),
GROUP_BY("group().by('%')"),
GROUP_BY("group().by('%s')"),
HAS("has('%s', %s)"),
HAS_OPERATOR("has('%s', %s(%s))"),
HAS_PROPERTY("has('%s')"),
WHERE("where(%s)"),
HAS_NOT_PROPERTY("hasNot('%s')"),
HAS_TYPE("has('__typeName', '%s')"),
HAS_TYPE_WITHIN("has('__typeName', within(%s))"),
......@@ -454,6 +503,8 @@ public class QueryProcessor {
SELECT_EXPR_NOOP_FN("def f(r){ r }; "),
SELECT_EXPR_HELPER_FN("def f(r){ return [[%s]].plus(r.collect({[%s]})).unique(); }; "),
SELECT_WITH_GRPBY_HELPER_FN("def f(r){ return [[%s]].plus(r.collect({it.values()}).flatten().collect({[%s]})).unique(); }; "),
GRPBY_ORDERBY_ASC_HELPER_FN("def f(r){ m=r.get(0); m.each({ k,v -> m[k] = v.sort{a,b -> a.value('%s') <=> b.value('%s')}}); r }; "),
GRPBY_ORDERBY_DESC_HELPER_FN("def f(r){ m=r.get(0); m.each({ k,v -> m[k] = v.sort{a,b -> b.value('%s') <=> a.value('%s')}}); r; }; "),
INLINE_COUNT("r.size()"),
INLINE_SUM("r.sum({it.value('%s')}).value('%s')"),
INLINE_MAX("r.max({it.value('%s')}).value('%s')"),
......@@ -553,7 +604,7 @@ public class QueryProcessor {
}
public boolean isEmpty() {
return list.size() == 0 || list.size() == 2;
return list.size() == 0;
}
public void clear() {
......@@ -570,11 +621,11 @@ public class QueryProcessor {
@VisibleForTesting
static class Context {
private final List<String> errorList;
org.apache.atlas.query.Lookup lookup;
Lookup lookup;
Map<String, String> aliasMap = new HashMap<>();
private AtlasType activeType;
public Context(List<String> errorList, org.apache.atlas.query.Lookup lookup) {
public Context(List<String> errorList, Lookup lookup) {
this.lookup = lookup;
this.errorList = errorList;
}
......@@ -629,11 +680,11 @@ public class QueryProcessor {
}
}
private static class Lookup implements org.apache.atlas.query.Lookup {
private static class RegistryBasedLookup implements Lookup {
private final List<String> errorList;
private final AtlasTypeRegistry typeRegistry;
public Lookup(List<String> errorList, AtlasTypeRegistry typeRegistry) {
public RegistryBasedLookup(List<String> errorList, AtlasTypeRegistry typeRegistry) {
this.errorList = errorList;
this.typeRegistry = typeRegistry;
}
......@@ -665,10 +716,6 @@ public class QueryProcessor {
return "";
}
protected void addError(String s) {
errorList.add(s);
}
@Override
public boolean isPrimitive(Context context, String attributeName) {
AtlasEntityType et = context.getActiveEntityType();
......@@ -692,7 +739,7 @@ public class QueryProcessor {
return "";
}
AtlasStructType.AtlasAttribute attr = et.getAttribute(attributeName);
AtlasAttribute attr = et.getAttribute(attributeName);
return (attr != null) ? attr.getRelationshipEdgeLabel() : "";
}
......@@ -736,7 +783,7 @@ public class QueryProcessor {
return "";
}
AtlasStructType.AtlasAttribute attr = et.getAttribute(item);
AtlasAttribute attr = et.getAttribute(item);
if(attr == null) {
return null;
}
......@@ -761,6 +808,10 @@ public class QueryProcessor {
return attr != null && attr.getTypeName().equals(AtlasBaseTypeDef.ATLAS_TYPE_DATE);
}
protected void addError(String s) {
errorList.add(s);
}
}
static class SelectExprMetadata {
......@@ -771,17 +822,23 @@ public class QueryProcessor {
private int sumIdx = -1;
private int maxIdx = -1;
private int minIdx = -1;
private boolean hasAggregator = false;
public String[] getItems() {
return items;
}
public void setItems(final String[] items) {
this.items = items;
}
public int getCountIdx() {
return countIdx;
}
public void setCountIdx(final int countIdx) {
this.countIdx = countIdx;
setHasAggregator();
}
public int getSumIdx() {
......@@ -790,6 +847,7 @@ public class QueryProcessor {
public void setSumIdx(final int sumIdx) {
this.sumIdx = sumIdx;
setHasAggregator();
}
public int getMaxIdx() {
......@@ -798,6 +856,7 @@ public class QueryProcessor {
public void setMaxIdx(final int maxIdx) {
this.maxIdx = maxIdx;
setHasAggregator();
}
public int getMinIdx() {
......@@ -806,18 +865,23 @@ public class QueryProcessor {
public void setMinIdx(final int minIdx) {
this.minIdx = minIdx;
setHasAggregator();
}
public String[] getLabels() {
return labels;
}
public void setItems(final String[] items) {
this.items = items;
}
public void setLabels(final String[] labels) {
this.labels = labels;
}
public boolean hasAggregatorFunction(){
return hasAggregator;
}
private void setHasAggregator() {
hasAggregator = true;
}
}
}
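Note how limit handling is now deferred: addLimit only records the values and sets hasLimitOffset, and the private close() (invoked from get()) decides what to emit. Summarizing the branches above: an offset of 0 yields a plain limit clause (the .limit(25).toList() tails seen in the tests below), a non-zero offset goes through addRangeClause(offset, limit), and when SelectExprMetadata reports an aggregator function (count/sum/min/max) no implicit limit is applied at all and a performance warning is logged instead.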
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.atlas.query;
import org.apache.atlas.query.Expressions.Expression;
public class GremlinTranslator {
private final QueryProcessor queryProcessor;
private Expression expression;
public GremlinTranslator(QueryProcessor queryProcessor, Expression expression) {
this.expression = expression;
this.queryProcessor = queryProcessor;
}
public GremlinQuery translate() {
DSLVisitor qv = new DSLVisitor(queryProcessor);
expression.accept(qv);
queryProcessor.close();
GremlinQuery ret = new GremlinQuery(queryProcessor.getText(), queryProcessor.hasSelect());
return ret;
}
}
......@@ -25,17 +25,27 @@ import java.util.regex.Pattern;
public class IdentifierHelper {
public static String stripQuotes(String quotedIdentifier) {
String ret = quotedIdentifier;
private static final Pattern SINGLE_QUOTED_IDENTIFIER = Pattern.compile("'(\\w[\\w\\d\\.\\s]*)'");
private static final Pattern DOUBLE_QUOTED_IDENTIFIER = Pattern.compile("\"(\\w[\\w\\d\\.\\s]*)\"");
private static final Pattern BACKTICK_QUOTED_IDENTIFIER = Pattern.compile("`(\\w[\\w\\d\\.\\s]*)`");
if (isQuoted(quotedIdentifier)) {
ret = quotedIdentifier.substring(1, quotedIdentifier.length() - 1);
public static String get(String quotedIdentifier) {
String ret;
if (quotedIdentifier.charAt(0) == '`') {
ret = extract(BACKTICK_QUOTED_IDENTIFIER, quotedIdentifier);
} else if (quotedIdentifier.charAt(0) == '\'') {
ret = extract(SINGLE_QUOTED_IDENTIFIER, quotedIdentifier);
} else if (quotedIdentifier.charAt(0) == '"') {
ret = extract(DOUBLE_QUOTED_IDENTIFIER, quotedIdentifier);
} else {
ret = quotedIdentifier;
}
return ret;
}
public static Advice create(QueryProcessor.Context context,
public static Advice create(GremlinQueryComposer.Context context,
org.apache.atlas.query.Lookup lookup,
String identifier) {
Advice ia = new Advice(identifier);
......@@ -49,7 +59,7 @@ public class IdentifierHelper {
}
public static String getQualifiedName(org.apache.atlas.query.Lookup lookup,
QueryProcessor.Context context,
GremlinQueryComposer.Context context,
String name) {
return lookup.getQualifiedName(context, name);
}
......@@ -70,7 +80,9 @@ public class IdentifierHelper {
}
public static String removeQuotes(String rhs) {
return rhs.replace("\"", "").replace("'", "");
return rhs.replace("\"", "")
.replace("'", "")
.replace("`", "");
}
public static String getQuoted(String s) {
......@@ -97,10 +109,10 @@ public class IdentifierHelper {
public Advice(String s) {
this.raw = removeQuotes(s);
this.actual = IdentifierHelper.stripQuotes(raw);
this.actual = IdentifierHelper.get(raw);
}
private void update(org.apache.atlas.query.Lookup lookup, QueryProcessor.Context context) {
private void update(org.apache.atlas.query.Lookup lookup, GremlinQueryComposer.Context context) {
newContext = context.isEmpty();
if(!newContext) {
if(context.aliasMap.containsKey(this.raw)) {
......@@ -116,7 +128,7 @@ public class IdentifierHelper {
}
}
private void updateSubTypes(org.apache.atlas.query.Lookup lookup, QueryProcessor.Context context) {
private void updateSubTypes(org.apache.atlas.query.Lookup lookup, GremlinQueryComposer.Context context) {
if(isTrait) {
return;
}
......@@ -127,7 +139,7 @@ public class IdentifierHelper {
}
}
private void updateEdgeInfo(org.apache.atlas.query.Lookup lookup, QueryProcessor.Context context) {
private void updateEdgeInfo(org.apache.atlas.query.Lookup lookup, GremlinQueryComposer.Context context) {
if(isPrimitive == false && isTrait == false) {
edgeLabel = lookup.getRelationshipEdgeLabel(context, attributeName);
edgeDirection = "OUT";
......@@ -135,7 +147,7 @@ public class IdentifierHelper {
}
}
private void updateTypeInfo(org.apache.atlas.query.Lookup lookup, QueryProcessor.Context context) {
private void updateTypeInfo(org.apache.atlas.query.Lookup lookup, GremlinQueryComposer.Context context) {
if(parts.length == 1) {
typeName = context.getActiveTypeName();
attributeName = parts[0];
......@@ -171,7 +183,7 @@ public class IdentifierHelper {
}
}
private void setIsDate(Lookup lookup, QueryProcessor.Context context) {
private void setIsDate(Lookup lookup, GremlinQueryComposer.Context context) {
if(isPrimitive) {
isDate = lookup.isDate(context, attributeName);
}
......
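The new get() supersedes stripQuotes and recognizes all three quote styles via the anchored patterns above. A small illustrative check, assuming extract returns the pattern's captured group:

// Illustrative: each call strips the matching quote style; unquoted input passes through.
assert IdentifierHelper.get("`db.name`").equals("db.name");         // backticks
assert IdentifierHelper.get("'Reporting'").equals("Reporting");     // single quotes
assert IdentifierHelper.get("\"sales_fact\"").equals("sales_fact"); // double quotes
assert IdentifierHelper.get("owner").equals("owner");               // unquoted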
......@@ -23,21 +23,21 @@ import org.apache.atlas.type.AtlasType;
public interface Lookup {
AtlasType getType(String typeName);
String getQualifiedName(QueryProcessor.Context context, String name);
String getQualifiedName(GremlinQueryComposer.Context context, String name);
boolean isPrimitive(QueryProcessor.Context context, String attributeName);
boolean isPrimitive(GremlinQueryComposer.Context context, String attributeName);
String getRelationshipEdgeLabel(QueryProcessor.Context context, String attributeName);
String getRelationshipEdgeLabel(GremlinQueryComposer.Context context, String attributeName);
boolean hasAttribute(QueryProcessor.Context context, String typeName);
boolean hasAttribute(GremlinQueryComposer.Context context, String typeName);
boolean doesTypeHaveSubTypes(QueryProcessor.Context context);
boolean doesTypeHaveSubTypes(GremlinQueryComposer.Context context);
String getTypeAndSubTypes(QueryProcessor.Context context);
String getTypeAndSubTypes(GremlinQueryComposer.Context context);
boolean isTraitType(QueryProcessor.Context context);
boolean isTraitType(GremlinQueryComposer.Context context);
String getTypeFromEdge(QueryProcessor.Context context, String item);
String getTypeFromEdge(GremlinQueryComposer.Context context, String item);
boolean isDate(QueryProcessor.Context context, String attributeName);
boolean isDate(GremlinQueryComposer.Context context, String attributeName);
}
// Generated from repository/src/main/java/org/apache/atlas/query/antlr4/AtlasDSLLexer.g4 by ANTLR 4.7
package org.apache.atlas.query.antlr4;
import org.antlr.v4.runtime.Lexer;
import org.antlr.v4.runtime.CharStream;
import org.antlr.v4.runtime.Token;
import org.antlr.v4.runtime.TokenStream;
import org.antlr.v4.runtime.*;
import org.antlr.v4.runtime.atn.*;
import org.antlr.v4.runtime.Lexer;
import org.antlr.v4.runtime.RuntimeMetaData;
import org.antlr.v4.runtime.Vocabulary;
import org.antlr.v4.runtime.VocabularyImpl;
import org.antlr.v4.runtime.atn.ATN;
import org.antlr.v4.runtime.atn.ATNDeserializer;
import org.antlr.v4.runtime.atn.LexerATNSimulator;
import org.antlr.v4.runtime.atn.PredictionContextCache;
import org.antlr.v4.runtime.dfa.DFA;
import org.antlr.v4.runtime.misc.*;
@SuppressWarnings({"all", "warnings", "unchecked", "unused", "cast"})
public class AtlasDSLLexer extends Lexer {
......
SINGLE_LINE_COMMENT=1
MULTILINE_COMMENT=2
WS=3
NUMBER=4
FLOATING_NUMBER=5
BOOL=6
K_COMMA=7
K_PLUS=8
K_MINUS=9
K_STAR=10
K_DIV=11
K_DOT=12
K_LIKE=13
K_AND=14
K_OR=15
K_LPAREN=16
K_LBRACKET=17
K_RPAREN=18
K_RBRACKET=19
K_LT=20
K_LTE=21
K_EQ=22
K_NEQ=23
K_GT=24
K_GTE=25
K_FROM=26
K_WHERE=27
K_ORDERBY=28
K_GROUPBY=29
K_LIMIT=30
K_SELECT=31
K_MAX=32
K_MIN=33
K_SUM=34
K_COUNT=35
K_LOOP=36
K_OFFSET=37
K_AS=38
K_ISA=39
K_IS=40
K_HAS=41
K_ASC=42
K_DESC=43
K_WITHPATH=44
K_TRUE=45
K_FALSE=46
KEYWORD=47
ID=48
STRING=49
','=7
'+'=8
'-'=9
'*'=10
'/'=11
'.'=12
'('=16
'['=17
')'=18
']'=19
......@@ -107,4 +107,4 @@ querySrc: commaDelimitedQueries | spaceDelimitedQueries ;
query: querySrc groupByExpression?
selectClause?
orderByExpr?
limitOffset? ;
limitOffset? ;
\ No newline at end of file
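The amended top-level rule shows the full shape of a query: a querySrc followed by optional groupby, select, orderby, and limit/offset clauses, in that order. For example, this query from the test data below exercises the where, orderby, and limit/offset parts:

hive_db where hive_db has name orderby hive_db.owner limit 2 offset 1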
SINGLE_LINE_COMMENT=1
MULTILINE_COMMENT=2
WS=3
NUMBER=4
FLOATING_NUMBER=5
BOOL=6
K_COMMA=7
K_PLUS=8
K_MINUS=9
K_STAR=10
K_DIV=11
K_DOT=12
K_LIKE=13
K_AND=14
K_OR=15
K_LPAREN=16
K_LBRACKET=17
K_RPAREN=18
K_RBRACKET=19
K_LT=20
K_LTE=21
K_EQ=22
K_NEQ=23
K_GT=24
K_GTE=25
K_FROM=26
K_WHERE=27
K_ORDERBY=28
K_GROUPBY=29
K_LIMIT=30
K_SELECT=31
K_MAX=32
K_MIN=33
K_SUM=34
K_COUNT=35
K_LOOP=36
K_OFFSET=37
K_AS=38
K_ISA=39
K_IS=40
K_HAS=41
K_ASC=42
K_DESC=43
K_WITHPATH=44
K_TRUE=45
K_FALSE=46
KEYWORD=47
ID=48
STRING=49
','=7
'+'=8
'-'=9
'*'=10
'/'=11
'.'=12
'('=16
'['=17
')'=18
']'=19
......@@ -23,7 +23,7 @@ import org.apache.atlas.AtlasException;
import org.apache.atlas.exception.AtlasBaseException;
import org.apache.atlas.model.typedef.AtlasBaseTypeDef;
import org.apache.atlas.model.typedef.AtlasStructDef;
import org.apache.atlas.query.QueryParser;
import org.apache.atlas.query.AtlasDSL;
import org.apache.atlas.repository.graphdb.AtlasVertex;
import org.apache.atlas.repository.store.graph.AtlasDefStore;
import org.apache.atlas.type.AtlasTypeRegistry;
......@@ -64,7 +64,7 @@ import java.util.regex.Pattern;
if (!allowReservedKeywords && typeDef instanceof AtlasStructDef) {
final List<AtlasStructDef.AtlasAttributeDef> attributeDefs = ((AtlasStructDef) typeDef).getAttributeDefs();
for (AtlasStructDef.AtlasAttributeDef attrDef : attributeDefs) {
if (QueryParser.isKeyword(attrDef.getName())) {
if (AtlasDSL.Parser.isKeyword(attrDef.getName())) {
throw new AtlasBaseException(AtlasErrorCode.ATTRIBUTE_NAME_INVALID, attrDef.getName(), typeDef.getCategory().name());
}
}
......
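Since RESERVED_KEYWORDS (now owned by AtlasDSL.Parser) includes words such as select, limit, and orderby, this guard rejects attribute definitions whose names would collide with the grammar, raising ATTRIBUTE_NAME_INVALID unless allowReservedKeywords is set.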
......@@ -25,7 +25,7 @@ import org.apache.atlas.model.typedef.AtlasRelationshipDef;
import org.apache.atlas.model.typedef.AtlasRelationshipDef.RelationshipCategory;
import org.apache.atlas.model.typedef.AtlasRelationshipDef.PropagateTags;
import org.apache.atlas.model.typedef.AtlasRelationshipEndDef;
import org.apache.atlas.query.QueryParser;
import org.apache.atlas.query.AtlasDSL;
import org.apache.atlas.repository.Constants;
import org.apache.atlas.repository.graphdb.AtlasEdge;
import org.apache.atlas.repository.graphdb.AtlasVertex;
......@@ -354,11 +354,11 @@ public class AtlasRelationshipDefStoreV1 extends AtlasAbstractDefStoreV1<AtlasRe
}
if (!allowReservedKeywords) {
if (QueryParser.isKeyword(end1.getName())) {
if (AtlasDSL.Parser.isKeyword(end1.getName())) {
throw new AtlasBaseException(AtlasErrorCode.RELATIONSHIPDEF_END1_NAME_INVALID, end1.getName());
}
if (QueryParser.isKeyword(end2.getName())) {
if (AtlasDSL.Parser.isKeyword(end2.getName())) {
throw new AtlasBaseException(AtlasErrorCode.RELATIONSHIPDEF_END2_NAME_INVALID, end2.getName());
}
}
......
......@@ -127,8 +127,8 @@ public class DSLQueriesTest extends BasicTestSetup {
{"hive_db where hive_db is JdbcAccess", 0},
{"hive_db where hive_db has name", 3},
{"hive_db as db1 hive_table where (db1.name = \"Reporting\")", 0},
{"hive_table where (name = \"sales_fact\" and createTime > \"2014-01-01\" ) select name as _col_0, createTime as _col_1 ", 1},
{"hive_table where (name = \"sales_fact\" and createTime >= \"2014-12-11T02:35:58.440Z\" ) select name as _col_0, createTime as _col_1 ", 1},
{"hive_table where (name = \"sales_fact\" and createTime > \"2014-01-01\" ) select name as _col_0, createTime as _col_1", 1},
{"hive_table where (name = \"sales_fact\" and createTime >= \"2014-12-11T02:35:58.440Z\" ) select name as _col_0, createTime as _col_1", 1},
{"Dimension", 5},
{"JdbcAccess", 2},
{"ETL", 5},
......@@ -240,12 +240,12 @@ public class DSLQueriesTest extends BasicTestSetup {
{"hive_db as db1 hive_table where (db1.name = \"Reporting\")", 0},
{"hive_table where (name = \"sales_fact\" and createTime > \"2014-01-01\" ) select name as _col_0, createTime as _col_1 ", 1},
{"hive_table where (name = \"sales_fact\" and createTime > \"2014-01-01\" ) select name as _col_0, createTime as _col_1 limit 10 ", 1},
{"hive_table where (name = \"sales_fact\" and createTime > \"2014-01-01\" ) select name as _col_0, createTime as _col_1", 1},
{"hive_table where (name = \"sales_fact\" and createTime > \"2014-01-01\" ) select name as _col_0, createTime as _col_1 limit 10", 1},
{"hive_table where (name = \"sales_fact\" and createTime > \"2014-01-01\" ) select name as _col_0, createTime as _col_1 limit 10 offset 0", 1},
{"hive_table where (name = \"sales_fact\" and createTime > \"2014-01-01\" ) select name as _col_0, createTime as _col_1 limit 10 offset 5", 0},
{"hive_table where (name = \"sales_fact\" and createTime >= \"2014-12-11T02:35:58.440Z\" ) select name as _col_0, createTime as _col_1 ", 1},
{"hive_table where (name = \"sales_fact\" and createTime >= \"2014-12-11T02:35:58.440Z\" ) select name as _col_0, createTime as _col_1", 1},
{"hive_table where (name = \"sales_fact\" and createTime >= \"2014-12-11T02:35:58.440Z\" ) select name as _col_0, createTime as _col_1 limit 10 offset 0", 1},
{"hive_table where (name = \"sales_fact\" and createTime >= \"2014-12-11T02:35:58.440Z\" ) select name as _col_0, createTime as _col_1 limit 10 offset 1", 0},
{"hive_table where (name = \"sales_fact\" and createTime >= \"2014-12-11T02:35:58.440Z\" ) select name as _col_0, createTime as _col_1 limit 10", 1},
......@@ -285,7 +285,7 @@ public class DSLQueriesTest extends BasicTestSetup {
{"hive_column select hive_column.qualifiedName orderby qualifiedName desc limit 5", 5, "hive_column.qualifiedName", false},
{"from hive_db orderby hive_db.owner limit 3", 3, "owner", true},
{"hive_db where hive_db.name=\"Reporting\" orderby 'owner'", 1, "owner", true},
{"hive_db where hive_db.name=\"Reporting\" orderby owner", 1, "owner", true},
{"hive_db where hive_db.name=\"Reporting\" orderby hive_db.owner limit 10 ", 1, "owner", true},
{"hive_db where hive_db.name=\"Reporting\" select name, owner orderby hive_db.name ", 1, "name", true},
......@@ -327,16 +327,16 @@ public class DSLQueriesTest extends BasicTestSetup {
{"hive_db where hive_db has name orderby hive_db.owner limit 2 offset 0", 2, "owner", true},
{"hive_db where hive_db has name orderby hive_db.owner limit 2 offset 1", 2, "owner", true},
{"hive_table where (name = \"sales_fact\" and createTime > \"2014-01-01\" ) select name as _col_0, createTime as _col_1 orderby '_col_1' ", 1, "_col_1", true},
{"hive_table where (name = \"sales_fact\" and createTime > \"2014-01-01\" ) select name as _col_0, createTime as _col_1 orderby '_col_1' limit 10 ", 1, "_col_1", true},
{"hive_table where (name = \"sales_fact\" and createTime > \"2014-01-01\" ) select name as _col_0, createTime as _col_1 orderby '_col_1' limit 10 offset 0", 1, "_col_1", true},
{"hive_table where (name = \"sales_fact\" and createTime > \"2014-01-01\" ) select name as _col_0, createTime as _col_1 orderby '_col_1' limit 10 offset 5", 0, "_col_1", true},
{"hive_table where (name = \"sales_fact\" and createTime > \"2014-01-01\" ) select name as _col_0, createTime as _col_1 orderby createTime ", 1, "_col_1", true},
{"hive_table where (name = \"sales_fact\" and createTime > \"2014-01-01\" ) select name as _col_0, createTime as _col_1 orderby createTime limit 10 ", 1, "_col_1", true},
{"hive_table where (name = \"sales_fact\" and createTime > \"2014-01-01\" ) select name as _col_0, createTime as _col_1 orderby createTime limit 10 offset 0", 1, "_col_1", true},
{"hive_table where (name = \"sales_fact\" and createTime > \"2014-01-01\" ) select name as _col_0, createTime as _col_1 orderby createTime limit 10 offset 5", 0, "_col_1", true},
{"hive_table where (name = \"sales_fact\" and createTime >= \"2014-12-11T02:35:58.440Z\" ) select name as _col_0, createTime as _col_1 orderby '_col_0' ", 1, "_col_0", true},
{"hive_table where (name = \"sales_fact\" and createTime >= \"2014-12-11T02:35:58.440Z\" ) select name as _col_0, createTime as _col_1 orderby '_col_0' limit 10 offset 0", 1, "_col_0", true},
{"hive_table where (name = \"sales_fact\" and createTime >= \"2014-12-11T02:35:58.440Z\" ) select name as _col_0, createTime as _col_1 orderby '_col_0' limit 10 offset 1", 0, "_col_0", true},
{"hive_table where (name = \"sales_fact\" and createTime >= \"2014-12-11T02:35:58.440Z\" ) select name as _col_0, createTime as _col_1 orderby '_col_0' limit 10", 1, "_col_0", true},
{"hive_table where (name = \"sales_fact\" and createTime >= \"2014-12-11T02:35:58.440Z\" ) select name as _col_0, createTime as _col_1 orderby '_col_0' limit 0 offset 1", 0, "_col_0", true},
{"hive_table where (name = \"sales_fact\" and createTime >= \"2014-12-11T02:35:58.440Z\" ) select name as _col_0, createTime as _col_1 orderby name ", 1, "_col_0", true},
{"hive_table where (name = \"sales_fact\" and createTime >= \"2014-12-11T02:35:58.440Z\" ) select name as _col_0, createTime as _col_1 orderby name limit 10 offset 0", 1, "_col_0", true},
{"hive_table where (name = \"sales_fact\" and createTime >= \"2014-12-11T02:35:58.440Z\" ) select name as _col_0, createTime as _col_1 orderby name limit 10 offset 1", 0, "_col_0", true},
{"hive_table where (name = \"sales_fact\" and createTime >= \"2014-12-11T02:35:58.440Z\" ) select name as _col_0, createTime as _col_1 orderby name limit 10", 1, "_col_0", true},
{"hive_table where (name = \"sales_fact\" and createTime >= \"2014-12-11T02:35:58.440Z\" ) select name as _col_0, createTime as _col_1 orderby name limit 0 offset 1", 0, "_col_0", true},
};
}
......
......@@ -41,7 +41,7 @@ import static org.testng.Assert.assertNotNull;
import static org.testng.Assert.assertNull;
import static org.testng.Assert.assertTrue;
public class QueryProcessorTest {
public class GremlinQueryComposerTest {
private List<String> errorList = new ArrayList<>();
@Test
......@@ -56,8 +56,9 @@ public class QueryProcessorTest {
verify("Table isa Dimension", expected);
verify("Table is Dimension", expected);
verify("Table where Table is Dimension", expected);
verify("Table isa Dimension where name = 'sales'",
"g.V().has('__typeName', 'Table').has('__traitNames', within('Dimension')).has('Table.name', eq('sales')).limit(25).toList()");
// Not supported since it requires two singleSrcQuery instances: one for the isa clause, the other for the where clause
// verify("Table isa Dimension where name = 'sales'",
// "g.V().has('__typeName', 'Table').has('__traitNames', within('Dimension')).has('Table.name', eq('sales')).limit(25).toList()");
}
@Test
......@@ -90,10 +91,15 @@ public class QueryProcessorTest {
public void tableSelectColumns() {
String exMain = "g.V().has('__typeName', 'Table').out('__Table.columns').limit(10).toList()";
String exSel = "def f(r){ r }";
String exSel1 = "def f(r){ return [['db.name']].plus(r.collect({[it.value('DB.name')]})).unique(); }";
verify("Table select columns limit 10", getExpected(exSel, exMain));
String exMain2 = "g.V().has('__typeName', 'Table').out('__Table.db').limit(25).toList()";
verify("Table select db.name", getExpected(exSel, exMain2));
verify("Table select db", getExpected(exSel, exMain2));
String exMain3 = "g.V().has('__typeName', 'Table').out('__Table.db').limit(25).toList()";
verify("Table select db.name", getExpected(exSel1, exMain3));
}
@Test(enabled = false)
......@@ -117,7 +123,7 @@ public class QueryProcessorTest {
String exSel = "def f(r){ return [['d.name','d.owner']].plus(r.collect({[it.value('DB.name'),it.value('DB.owner')]})).unique(); }";
String exMain = "g.V().has('__typeName', 'DB').as('d').order().by('DB.owner)').limit(25).toList()";
String exMain = "g.V().has('__typeName', 'DB').as('d').order().by('DB.owner').limit(25).toList()";
verify("DB as d select d.name, d.owner orderby (d.owner) limit 25", getExpected(exSel, exMain));
String exMain2 = "g.V().has('__typeName', 'Table').and(__.has('Table.name', eq(\"sales_fact\")),__.has('Table.createTime', gt('1388563200000'))).order().by('Table.createTime').limit(25).toList()";
......@@ -151,7 +157,7 @@ public class QueryProcessorTest {
verify("Table where Asset.name like \"Tab*\"",
"g.V().has('__typeName', 'Table').has('Table.name', org.janusgraph.core.attribute.Text.textRegex(\"Tab.*\")).limit(25).toList()");
verify("from Table where (db.name = \"Reporting\")",
"g.V().has('__typeName', 'Table').out('__Table.db').has('DB.name', eq(\"Reporting\")).in('__Table.db').limit(25).toList()");
"g.V().has('__typeName', 'Table').out('__Table.db').has('DB.name', eq(\"Reporting\")).dedup().in('__Table.db').limit(25).toList()");
}
@Test
......@@ -169,14 +175,6 @@ public class QueryProcessorTest {
}
@Test
public void multipleWhereClauses() {
String exSel = "def f(r){ return [['c.owner','c.name','c.dataType']].plus(r.collect({[it.value('Column.owner'),it.value('Column.name'),it.value('Column.dataType')]})).unique(); }";
String exMain = "g.V().has('__typeName', 'Table').has('Table.name', eq(\"sales_fact\")).out('__Table.columns').as('c').limit(25).toList()";
verify("Table where name=\"sales_fact\", columns as c select c.owner, c.name, c.dataType", getExpected(exSel, exMain));
;
}
@Test
public void subType() {
String exMain = "g.V().has('__typeName', within('Asset','Table')).limit(25).toList()";
String exSel = "def f(r){ return [['name','owner']].plus(r.collect({[it.value('Asset.name'),it.value('Asset.owner')]})).unique(); }";
......@@ -253,7 +251,7 @@ public class QueryProcessorTest {
verify("hive_db where hive_db.name='Reporting' and hive_db.createTime < '2017-12-12T02:35:58.440Z'",
"g.V().has('__typeName', 'hive_db').and(__.has('hive_db.name', eq('Reporting')),__.has('hive_db.createTime', lt('1513046158440'))).limit(25).toList()");
verify("Table where db.name='Sales' and db.clusterName='cl1'",
"g.V().has('__typeName', 'Table').and(__.out('__Table.db').has('DB.name', eq('Sales')).in('__Table.db'),__.out('__Table.db').has('DB.clusterName', eq('cl1')).in('__Table.db')).limit(25).toList()");
"g.V().has('__typeName', 'Table').and(__.out('__Table.db').has('DB.name', eq('Sales')).dedup().in('__Table.db'),__.out('__Table.db').has('DB.clusterName', eq('cl1')).dedup().in('__Table.db')).limit(25).toList()");
}
private void verify(String dsl, String expectedGremlin) {
......@@ -288,16 +286,15 @@ public class QueryProcessorTest {
}
private String getGremlinQuery(AtlasDSLParser.QueryContext queryContext) {
AtlasTypeRegistry registry = mock(AtlasTypeRegistry.class);
org.apache.atlas.query.Lookup lookup = new TestLookup(errorList, registry);
QueryProcessor.Context context = new QueryProcessor.Context(errorList, lookup);
AtlasTypeRegistry registry = mock(AtlasTypeRegistry.class);
org.apache.atlas.query.Lookup lookup = new TestLookup(errorList, registry);
GremlinQueryComposer.Context context = new GremlinQueryComposer.Context(errorList, lookup);
QueryProcessor queryProcessor = new QueryProcessor(lookup, context);
DSLVisitor qv = new DSLVisitor(queryProcessor);
GremlinQueryComposer gremlinQueryComposer = new GremlinQueryComposer(lookup, context);
DSLVisitor qv = new DSLVisitor(gremlinQueryComposer);
qv.visit(queryContext);
queryProcessor.close();
String s = queryProcessor.getText();
String s = gremlinQueryComposer.get();
assertTrue(StringUtils.isNotEmpty(s));
return s;
}
......@@ -326,7 +323,7 @@ public class QueryProcessorTest {
}
@Override
public String getQualifiedName(QueryProcessor.Context context, String name) {
public String getQualifiedName(GremlinQueryComposer.Context context, String name) {
if(name.contains("."))
return name;
......@@ -334,7 +331,7 @@ public class QueryProcessorTest {
}
@Override
public boolean isPrimitive(QueryProcessor.Context context, String attributeName) {
public boolean isPrimitive(GremlinQueryComposer.Context context, String attributeName) {
return attributeName.equals("name") ||
attributeName.equals("owner") ||
attributeName.equals("createdTime") ||
......@@ -343,7 +340,7 @@ public class QueryProcessorTest {
}
@Override
public String getRelationshipEdgeLabel(QueryProcessor.Context context, String attributeName) {
public String getRelationshipEdgeLabel(GremlinQueryComposer.Context context, String attributeName) {
if (attributeName.equalsIgnoreCase("columns"))
return "__Table.columns";
if (attributeName.equalsIgnoreCase("db"))
......@@ -353,29 +350,29 @@ public class QueryProcessorTest {
}
@Override
public boolean hasAttribute(QueryProcessor.Context context, String typeName) {
public boolean hasAttribute(GremlinQueryComposer.Context context, String typeName) {
return (context.getActiveTypeName().equals("Table") && typeName.equals("db")) ||
(context.getActiveTypeName().equals("Table") && typeName.equals("columns"));
}
@Override
public boolean doesTypeHaveSubTypes(QueryProcessor.Context context) {
public boolean doesTypeHaveSubTypes(GremlinQueryComposer.Context context) {
return context.getActiveTypeName().equalsIgnoreCase("Asset");
}
@Override
public String getTypeAndSubTypes(QueryProcessor.Context context) {
public String getTypeAndSubTypes(GremlinQueryComposer.Context context) {
String[] str = new String[]{"'Asset'", "'Table'"};
return StringUtils.join(str, ",");
}
@Override
public boolean isTraitType(QueryProcessor.Context context) {
public boolean isTraitType(GremlinQueryComposer.Context context) {
return context.getActiveTypeName().equals("PII") || context.getActiveTypeName().equals("Dimension");
}
@Override
public String getTypeFromEdge(QueryProcessor.Context context, String item) {
public String getTypeFromEdge(GremlinQueryComposer.Context context, String item) {
if(context.getActiveTypeName().equals("DB") && item.equals("Table")) {
return "Table";
} else if(context.getActiveTypeName().equals("Table") && item.equals("Column")) {
......@@ -389,7 +386,7 @@ public class QueryProcessorTest {
}
@Override
public boolean isDate(QueryProcessor.Context context, String attributeName) {
public boolean isDate(GremlinQueryComposer.Context context, String attributeName) {
return attributeName.equals("createdTime") ||
attributeName.equals("createTime");
}
......