Commit 8db8b5c7 by apoorvnaik Committed by Madhan Neethiraj

ATLAS-2229: DSL implementation using ANTLR - #4 (multiple fixes)

parent 5384a742
...@@ -102,7 +102,8 @@ public enum AtlasErrorCode { ...@@ -102,7 +102,8 @@ public enum AtlasErrorCode {
INVALID_ENTITY_FOR_CLASSIFICATION (400, "ATLAS-400-00-055", "Entity (guid=‘{0}‘,typename=‘{1}‘) cannot be classified by Classification ‘{2}‘, because ‘{1}‘ is not in the ClassificationDef's restrictions."), INVALID_ENTITY_FOR_CLASSIFICATION (400, "ATLAS-400-00-055", "Entity (guid=‘{0}‘,typename=‘{1}‘) cannot be classified by Classification ‘{2}‘, because ‘{1}‘ is not in the ClassificationDef's restrictions."),
SAVED_SEARCH_CHANGE_USER(400, "ATLAS-400-00-056", "saved-search {0} can not be moved from user {1} to {2}"), SAVED_SEARCH_CHANGE_USER(400, "ATLAS-400-00-056", "saved-search {0} can not be moved from user {1} to {2}"),
INVALID_QUERY_PARAM_LENGTH(400, "ATLAS-400-00-057" , "Length of query param {0} exceeds the limit"), INVALID_QUERY_PARAM_LENGTH(400, "ATLAS-400-00-057" , "Length of query param {0} exceeds the limit"),
INVALID_QUERY_LENGTH(400, "ATLAS-400-00-057" , "Invalid query length, update {0} to change the limit" ), INVALID_QUERY_LENGTH(400, "ATLAS-400-00-058" , "Invalid query length, update {0} to change the limit" ),
INVALID_DSL_QUERY(400, "ATLAS-400-00-059" , "Invalid DSL query: {0} Reason: {1}. Please refer to Atlas DSL grammar for more information" ),
// All Not found enums go here // All Not found enums go here
TYPE_NAME_NOT_FOUND(404, "ATLAS-404-00-001", "Given typename {0} was invalid"), TYPE_NAME_NOT_FOUND(404, "ATLAS-404-00-001", "Given typename {0} was invalid"),
......
...@@ -33,12 +33,9 @@ import org.apache.atlas.model.discovery.SearchParameters; ...@@ -33,12 +33,9 @@ import org.apache.atlas.model.discovery.SearchParameters;
import org.apache.atlas.model.instance.AtlasEntityHeader; import org.apache.atlas.model.instance.AtlasEntityHeader;
import org.apache.atlas.model.instance.AtlasObjectId; import org.apache.atlas.model.instance.AtlasObjectId;
import org.apache.atlas.model.profile.AtlasUserSavedSearch; import org.apache.atlas.model.profile.AtlasUserSavedSearch;
import org.apache.atlas.query.Expressions.Expression; import org.apache.atlas.query.AtlasDSL;
import org.apache.atlas.query.GremlinQuery; import org.apache.atlas.query.GremlinQuery;
import org.apache.atlas.query.GremlinTranslator;
import org.apache.atlas.query.QueryParams; import org.apache.atlas.query.QueryParams;
import org.apache.atlas.query.QueryParser;
import org.apache.atlas.query.QueryProcessor;
import org.apache.atlas.repository.Constants; import org.apache.atlas.repository.Constants;
import org.apache.atlas.repository.graph.GraphBackedSearchIndexer; import org.apache.atlas.repository.graph.GraphBackedSearchIndexer;
import org.apache.atlas.repository.graph.GraphHelper; import org.apache.atlas.repository.graph.GraphHelper;
...@@ -678,16 +675,8 @@ public class EntityDiscoveryService implements AtlasDiscoveryService { ...@@ -678,16 +675,8 @@ public class EntityDiscoveryService implements AtlasDiscoveryService {
} }
private GremlinQuery toGremlinQuery(String query, int limit, int offset) throws AtlasBaseException { private GremlinQuery toGremlinQuery(String query, int limit, int offset) throws AtlasBaseException {
QueryParams params = validateSearchParams(limit, offset); QueryParams params = validateSearchParams(limit, offset);
Expression expression = QueryParser.apply(query, params); GremlinQuery gremlinQuery = new AtlasDSL.Translator(AtlasDSL.Parser.parse(query), typeRegistry, params.offset(), params.limit()).translate();
if (expression == null) {
throw new AtlasBaseException(DISCOVERY_QUERY_FAILED, query);
}
QueryProcessor queryProcessor = new QueryProcessor(typeRegistry, limit, offset);
Expression validExpression = queryProcessor.validate(expression);
GremlinQuery gremlinQuery = new GremlinTranslator(queryProcessor, validExpression).translate();
if (LOG.isDebugEnabled()) { if (LOG.isDebugEnabled()) {
LOG.debug("Translated Gremlin Query: {}", gremlinQuery.queryStr()); LOG.debug("Translated Gremlin Query: {}", gremlinQuery.queryStr());
......
...@@ -17,12 +17,17 @@ ...@@ -17,12 +17,17 @@
*/ */
package org.apache.atlas.query; package org.apache.atlas.query;
import org.antlr.v4.runtime.BaseErrorListener;
import org.antlr.v4.runtime.CharStreams; import org.antlr.v4.runtime.CharStreams;
import org.antlr.v4.runtime.CommonTokenStream; import org.antlr.v4.runtime.CommonTokenStream;
import org.antlr.v4.runtime.RecognitionException;
import org.antlr.v4.runtime.Recognizer;
import org.antlr.v4.runtime.TokenStream; import org.antlr.v4.runtime.TokenStream;
import org.apache.atlas.query.Expressions.Expression; import org.apache.atlas.AtlasErrorCode;
import org.apache.atlas.exception.AtlasBaseException;
import org.apache.atlas.query.antlr4.AtlasDSLLexer; import org.apache.atlas.query.antlr4.AtlasDSLLexer;
import org.apache.atlas.query.antlr4.AtlasDSLParser; import org.apache.atlas.query.antlr4.AtlasDSLParser;
import org.apache.atlas.type.AtlasTypeRegistry;
import org.slf4j.Logger; import org.slf4j.Logger;
import org.slf4j.LoggerFactory; import org.slf4j.LoggerFactory;
...@@ -33,34 +38,98 @@ import java.util.Arrays; ...@@ -33,34 +38,98 @@ import java.util.Arrays;
import java.util.HashSet; import java.util.HashSet;
import java.util.Set; import java.util.Set;
public class QueryParser { public class AtlasDSL {
private static final Logger LOG = LoggerFactory.getLogger(QueryParser.class);
private static final Set<String> RESERVED_KEYWORDS = public static class Parser {
new HashSet<>(Arrays.asList("[", "]", "(", ")", "=", "<", ">", "!=", "<=", ">=", ",", "and", "or", "+", "-", private static final Logger LOG = LoggerFactory.getLogger(Parser.class);
"*", "/", ".", "select", "from", "where", "groupby", "loop", "isa", "is", "has",
"as", "times", "withPath", "limit", "offset", "orderby", "count", "max", "min", private static final Set<String> RESERVED_KEYWORDS =
"sum", "by", "order", "like")); new HashSet<>(Arrays.asList("[", "]", "(", ")", "=", "<", ">", "!=", "<=", ">=", ",", "and", "or", "+", "-",
"*", "/", ".", "select", "from", "where", "groupby", "loop", "isa", "is", "has",
"as", "times", "withPath", "limit", "offset", "orderby", "count", "max", "min",
"sum", "by", "order", "like"));
public static boolean isKeyword(String word) {
return RESERVED_KEYWORDS.contains(word);
}
public static AtlasDSLParser.QueryContext parse(String queryStr) throws AtlasBaseException {
AtlasDSLParser.QueryContext ret;
try {
InputStream stream = new ByteArrayInputStream(queryStr.getBytes());
AtlasDSLLexer lexer = new AtlasDSLLexer(CharStreams.fromStream(stream));
Validator validator = new Validator();
TokenStream inputTokenStream = new CommonTokenStream(lexer);
AtlasDSLParser parser = new AtlasDSLParser(inputTokenStream);
parser.removeErrorListeners();
parser.addErrorListener(validator);
// Validate the syntax of the query here
ret = parser.query();
if (!validator.isValid()) {
LOG.error("Invalid DSL: {} Reason: {}", queryStr, validator.getErrorMsg());
throw new AtlasBaseException(AtlasErrorCode.INVALID_DSL_QUERY, queryStr, validator.getErrorMsg());
}
} catch (IOException e) {
throw new AtlasBaseException(e);
}
return ret;
}
public static boolean isKeyword(String word) {
return RESERVED_KEYWORDS.contains(word);
} }
public static Expression apply(String queryStr, QueryParams params) { static class Validator extends BaseErrorListener {
Expression ret = null; private boolean isValid = true;
private String errorMsg = "";
@Override
public void syntaxError(final Recognizer<?, ?> recognizer, final Object offendingSymbol, final int line, final int charPositionInLine, final String msg, final RecognitionException e) {
// TODO: Capture multiple datapoints
isValid = false;
errorMsg = msg;
}
public boolean isValid() {
return isValid;
}
public String getErrorMsg() {
return errorMsg;
}
}
try { public static class Translator {
InputStream stream = new ByteArrayInputStream(queryStr.getBytes()); private final AtlasDSLParser.QueryContext queryContext;
AtlasDSLLexer lexer = new AtlasDSLLexer(CharStreams.fromStream(stream)); private final AtlasTypeRegistry typeRegistry;
TokenStream inputTokenStream = new CommonTokenStream(lexer); private final int offset;
AtlasDSLParser parser = new AtlasDSLParser(inputTokenStream); private final int limit;
ret = new Expression(parser.query()); public Translator(final AtlasDSLParser.QueryContext queryContext, AtlasTypeRegistry typeRegistry, int offset, int limit) {
} catch (IOException e) { this.queryContext = queryContext;
ret = null; this.typeRegistry = typeRegistry;
LOG.error(e.getMessage(), e); this.offset = offset;
this.limit = limit;
} }
return ret; public GremlinQuery translate() {
GremlinQueryComposer gremlinQueryComposer = new GremlinQueryComposer(typeRegistry);
if (offset >= 0) {
if (!gremlinQueryComposer.hasLimitOffset()) {
gremlinQueryComposer.addLimit(Integer.toString(limit), Integer.toString(offset));
}
}
DSLVisitor dslVisitor = new DSLVisitor(gremlinQueryComposer);
// Now process the Query and collect translation in
queryContext.accept(dslVisitor);
return new GremlinQuery(gremlinQueryComposer.get(), gremlinQueryComposer.hasSelect());
}
} }
} }
...@@ -28,51 +28,51 @@ import java.util.ArrayList; ...@@ -28,51 +28,51 @@ import java.util.ArrayList;
import java.util.List; import java.util.List;
import java.util.Objects; import java.util.Objects;
public class DSLVisitor extends AtlasDSLParserBaseVisitor<String> { public class DSLVisitor extends AtlasDSLParserBaseVisitor<Void> {
private static final Logger LOG = LoggerFactory.getLogger(DSLVisitor.class); private static final Logger LOG = LoggerFactory.getLogger(DSLVisitor.class);
private static final String AND = "AND"; private static final String AND = "AND";
private static final String OR = "OR"; private static final String OR = "OR";
private final QueryProcessor queryProcessor; private final GremlinQueryComposer gremlinQueryComposer;
public DSLVisitor(QueryProcessor queryProcessor) { public DSLVisitor(GremlinQueryComposer gremlinQueryComposer) {
this.queryProcessor = queryProcessor; this.gremlinQueryComposer = gremlinQueryComposer;
} }
@Override @Override
public String visitIsClause(IsClauseContext ctx) { public Void visitIsClause(IsClauseContext ctx) {
if (LOG.isDebugEnabled()) { if (LOG.isDebugEnabled()) {
LOG.debug("=> DSLVisitor.visitIsClause({})", ctx); LOG.debug("=> DSLVisitor.visitIsClause({})", ctx);
} }
queryProcessor.addFromIsA(ctx.arithE().getText(), ctx.identifier().getText()); gremlinQueryComposer.addFromIsA(ctx.arithE().getText(), ctx.identifier().getText());
return super.visitIsClause(ctx); return super.visitIsClause(ctx);
} }
@Override @Override
public String visitHasClause(HasClauseContext ctx) { public Void visitHasClause(HasClauseContext ctx) {
if (LOG.isDebugEnabled()) { if (LOG.isDebugEnabled()) {
LOG.debug("=> DSLVisitor.visitHasClause({})", ctx); LOG.debug("=> DSLVisitor.visitHasClause({})", ctx);
} }
queryProcessor.addFromProperty(ctx.arithE().getText(), ctx.identifier().getText()); gremlinQueryComposer.addFromProperty(ctx.arithE().getText(), ctx.identifier().getText());
return super.visitHasClause(ctx); return super.visitHasClause(ctx);
} }
@Override @Override
public String visitLimitOffset(LimitOffsetContext ctx) { public Void visitLimitOffset(LimitOffsetContext ctx) {
if (LOG.isDebugEnabled()) { if (LOG.isDebugEnabled()) {
LOG.debug("=> DSLVisitor.visitLimitOffset({})", ctx); LOG.debug("=> DSLVisitor.visitLimitOffset({})", ctx);
} }
queryProcessor.addLimit(ctx.limitClause().NUMBER().toString(), gremlinQueryComposer.addLimit(ctx.limitClause().NUMBER().toString(),
(ctx.offsetClause() == null ? "0" : ctx.offsetClause().NUMBER().getText())); (ctx.offsetClause() == null ? "0" : ctx.offsetClause().NUMBER().getText()));
return super.visitLimitOffset(ctx); return super.visitLimitOffset(ctx);
} }
@Override @Override
public String visitSelectExpr(SelectExprContext ctx) { public Void visitSelectExpr(SelectExprContext ctx) {
if (LOG.isDebugEnabled()) { if (LOG.isDebugEnabled()) {
LOG.debug("=> DSLVisitor.visitSelectExpr({})", ctx); LOG.debug("=> DSLVisitor.visitSelectExpr({})", ctx);
} }
...@@ -85,7 +85,7 @@ public class DSLVisitor extends AtlasDSLParserBaseVisitor<String> { ...@@ -85,7 +85,7 @@ public class DSLVisitor extends AtlasDSLParserBaseVisitor<String> {
String[] items = new String[ctx.selectExpression().size()]; String[] items = new String[ctx.selectExpression().size()];
String[] labels = new String[ctx.selectExpression().size()]; String[] labels = new String[ctx.selectExpression().size()];
QueryProcessor.SelectExprMetadata selectExprMetadata = new QueryProcessor.SelectExprMetadata(); GremlinQueryComposer.SelectExprMetadata selectExprMetadata = new GremlinQueryComposer.SelectExprMetadata();
for (int i = 0; i < ctx.selectExpression().size(); i++) { for (int i = 0; i < ctx.selectExpression().size(); i++) {
SelectExpressionContext selectExpression = ctx.selectExpression(i); SelectExpressionContext selectExpression = ctx.selectExpression(i);
...@@ -116,23 +116,25 @@ public class DSLVisitor extends AtlasDSLParserBaseVisitor<String> { ...@@ -116,23 +116,25 @@ public class DSLVisitor extends AtlasDSLParserBaseVisitor<String> {
selectExprMetadata.setItems(items); selectExprMetadata.setItems(items);
selectExprMetadata.setLabels(labels); selectExprMetadata.setLabels(labels);
queryProcessor.addSelect(selectExprMetadata); gremlinQueryComposer.addSelect(selectExprMetadata);
} }
return super.visitSelectExpr(ctx); return super.visitSelectExpr(ctx);
} }
@Override @Override
public String visitOrderByExpr(OrderByExprContext ctx) { public Void visitOrderByExpr(OrderByExprContext ctx) {
if (LOG.isDebugEnabled()) { if (LOG.isDebugEnabled()) {
LOG.debug("=> DSLVisitor.visitOrderByExpr({})", ctx); LOG.debug("=> DSLVisitor.visitOrderByExpr({})", ctx);
} }
queryProcessor.addOrderBy(ctx.expr().getText(), (ctx.sortOrder() != null && ctx.sortOrder().getText().equalsIgnoreCase("desc"))); // Extract the attribute from parentheses
String text = ctx.expr().getText().replace("(", "").replace(")", "");
gremlinQueryComposer.addOrderBy(text, (ctx.sortOrder() != null && ctx.sortOrder().getText().equalsIgnoreCase("desc")));
return super.visitOrderByExpr(ctx); return super.visitOrderByExpr(ctx);
} }
@Override @Override
public String visitWhereClause(WhereClauseContext ctx) { public Void visitWhereClause(WhereClauseContext ctx) {
if (LOG.isDebugEnabled()) { if (LOG.isDebugEnabled()) {
LOG.debug("=> DSLVisitor.visitWhereClause({})", ctx); LOG.debug("=> DSLVisitor.visitWhereClause({})", ctx);
} }
...@@ -141,12 +143,12 @@ public class DSLVisitor extends AtlasDSLParserBaseVisitor<String> { ...@@ -141,12 +143,12 @@ public class DSLVisitor extends AtlasDSLParserBaseVisitor<String> {
// The first expr shouldn't be processed if there are following exprs // The first expr shouldn't be processed if there are following exprs
ExprContext expr = ctx.expr(); ExprContext expr = ctx.expr();
processExpr(expr, queryProcessor); processExpr(expr, gremlinQueryComposer);
return super.visitWhereClause(ctx); return super.visitWhereClause(ctx);
} }
@Override @Override
public String visitFromExpression(final FromExpressionContext ctx) { public Void visitFromExpression(final FromExpressionContext ctx) {
if (LOG.isDebugEnabled()) { if (LOG.isDebugEnabled()) {
LOG.debug("=> DSLVisitor.visitFromExpression({})", ctx); LOG.debug("=> DSLVisitor.visitFromExpression({})", ctx);
} }
...@@ -155,38 +157,38 @@ public class DSLVisitor extends AtlasDSLParserBaseVisitor<String> { ...@@ -155,38 +157,38 @@ public class DSLVisitor extends AtlasDSLParserBaseVisitor<String> {
AliasExprContext aliasExpr = fromSrc.aliasExpr(); AliasExprContext aliasExpr = fromSrc.aliasExpr();
if (aliasExpr != null) { if (aliasExpr != null) {
queryProcessor.addFromAlias(aliasExpr.identifier(0).getText(), aliasExpr.identifier(1).getText()); gremlinQueryComposer.addFromAlias(aliasExpr.identifier(0).getText(), aliasExpr.identifier(1).getText());
} else { } else {
if (fromSrc.identifier() != null) { if (fromSrc.identifier() != null) {
queryProcessor.addFrom(fromSrc.identifier().getText()); gremlinQueryComposer.addFrom(fromSrc.identifier().getText());
} else { } else {
queryProcessor.addFrom(fromSrc.literal().getText()); gremlinQueryComposer.addFrom(fromSrc.literal().getText());
} }
} }
return super.visitFromExpression(ctx); return super.visitFromExpression(ctx);
} }
@Override @Override
public String visitGroupByExpression(GroupByExpressionContext ctx) { public Void visitGroupByExpression(GroupByExpressionContext ctx) {
if (LOG.isDebugEnabled()) { if (LOG.isDebugEnabled()) {
LOG.debug("=> DSLVisitor.visitGroupByExpression({})", ctx); LOG.debug("=> DSLVisitor.visitGroupByExpression({})", ctx);
} }
String s = ctx.selectExpr().getText(); String s = ctx.selectExpr().getText();
queryProcessor.addGroupBy(s); gremlinQueryComposer.addGroupBy(s);
return super.visitGroupByExpression(ctx); return super.visitGroupByExpression(ctx);
} }
private void processExpr(final ExprContext expr, QueryProcessor queryProcessor) { private void processExpr(final ExprContext expr, GremlinQueryComposer gremlinQueryComposer) {
if (CollectionUtils.isNotEmpty(expr.exprRight())) { if (CollectionUtils.isNotEmpty(expr.exprRight())) {
processExprRight(expr, queryProcessor); processExprRight(expr, gremlinQueryComposer);
} else { } else {
processExpr(expr.compE(), queryProcessor); processExpr(expr.compE(), gremlinQueryComposer);
} }
} }
private void processExprRight(final ExprContext expr, QueryProcessor queryProcessor) { private void processExprRight(final ExprContext expr, GremlinQueryComposer gremlinQueryComposer) {
QueryProcessor nestedProcessor = queryProcessor.createNestedProcessor(); GremlinQueryComposer nestedProcessor = gremlinQueryComposer.createNestedProcessor();
List<String> nestedQueries = new ArrayList<>(); List<String> nestedQueries = new ArrayList<>();
String prev = null; String prev = null;
...@@ -194,20 +196,20 @@ public class DSLVisitor extends AtlasDSLParserBaseVisitor<String> { ...@@ -194,20 +196,20 @@ public class DSLVisitor extends AtlasDSLParserBaseVisitor<String> {
// Process first expression then proceed with the others // Process first expression then proceed with the others
// expr -> compE exprRight* // expr -> compE exprRight*
processExpr(expr.compE(), nestedProcessor); processExpr(expr.compE(), nestedProcessor);
nestedQueries.add(nestedProcessor.getText()); nestedQueries.add(nestedProcessor.get());
for (ExprRightContext exprRight : expr.exprRight()) { for (ExprRightContext exprRight : expr.exprRight()) {
nestedProcessor = queryProcessor.createNestedProcessor(); nestedProcessor = gremlinQueryComposer.createNestedProcessor();
// AND expression // AND expression
if (exprRight.K_AND() != null) { if (exprRight.K_AND() != null) {
if (prev == null) prev = AND; if (prev == null) prev = AND;
if (OR.equalsIgnoreCase(prev)) { if (OR.equalsIgnoreCase(prev)) {
// Change of context // Change of context
QueryProcessor orClause = nestedProcessor.createNestedProcessor(); GremlinQueryComposer orClause = nestedProcessor.createNestedProcessor();
orClause.addOrClauses(nestedQueries); orClause.addOrClauses(nestedQueries);
nestedQueries.clear(); nestedQueries.clear();
nestedQueries.add(orClause.getText()); nestedQueries.add(orClause.get());
} }
prev = AND; prev = AND;
} }
...@@ -216,25 +218,25 @@ public class DSLVisitor extends AtlasDSLParserBaseVisitor<String> { ...@@ -216,25 +218,25 @@ public class DSLVisitor extends AtlasDSLParserBaseVisitor<String> {
if (prev == null) prev = OR; if (prev == null) prev = OR;
if (AND.equalsIgnoreCase(prev)) { if (AND.equalsIgnoreCase(prev)) {
// Change of context // Change of context
QueryProcessor andClause = nestedProcessor.createNestedProcessor(); GremlinQueryComposer andClause = nestedProcessor.createNestedProcessor();
andClause.addAndClauses(nestedQueries); andClause.addAndClauses(nestedQueries);
nestedQueries.clear(); nestedQueries.clear();
nestedQueries.add(andClause.getText()); nestedQueries.add(andClause.get());
} }
prev = OR; prev = OR;
} }
processExpr(exprRight.compE(), nestedProcessor); processExpr(exprRight.compE(), nestedProcessor);
nestedQueries.add(nestedProcessor.getText()); nestedQueries.add(nestedProcessor.get());
} }
if (AND.equalsIgnoreCase(prev)) { if (AND.equalsIgnoreCase(prev)) {
queryProcessor.addAndClauses(nestedQueries); gremlinQueryComposer.addAndClauses(nestedQueries);
} }
if (OR.equalsIgnoreCase(prev)) { if (OR.equalsIgnoreCase(prev)) {
queryProcessor.addOrClauses(nestedQueries); gremlinQueryComposer.addOrClauses(nestedQueries);
} }
} }
private void processExpr(final CompEContext compE, final QueryProcessor queryProcessor) { private void processExpr(final CompEContext compE, final GremlinQueryComposer gremlinQueryComposer) {
if (compE != null && compE.isClause() == null && compE.hasClause() == null && compE.isClause() == null) { if (compE != null && compE.isClause() == null && compE.hasClause() == null && compE.isClause() == null) {
ComparisonClauseContext comparisonClause = compE.comparisonClause(); ComparisonClauseContext comparisonClause = compE.comparisonClause();
...@@ -252,9 +254,9 @@ public class DSLVisitor extends AtlasDSLParserBaseVisitor<String> { ...@@ -252,9 +254,9 @@ public class DSLVisitor extends AtlasDSLParserBaseVisitor<String> {
String op = comparisonClause.operator().getText().toUpperCase(); String op = comparisonClause.operator().getText().toUpperCase();
String rhs = comparisonClause.arithE(1).getText(); String rhs = comparisonClause.arithE(1).getText();
queryProcessor.addWhere(lhs, op, rhs); gremlinQueryComposer.addWhere(lhs, op, rhs);
} else { } else {
processExpr(compE.arithE().multiE().atomE().expr(), queryProcessor); processExpr(compE.arithE().multiE().atomE().expr(), gremlinQueryComposer);
} }
} }
} }
......
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.atlas.query;
import org.apache.atlas.query.antlr4.AtlasDSLParser.QueryContext;
public class Expressions {

    /**
     * Thin wrapper around the ANTLR-generated parse tree for a DSL query.
     * Holds the tree produced by the parser and hands it to a visitor for
     * translation.
     */
    public static class Expression {
        // Parse tree from AtlasDSLParser; null when parsing did not succeed.
        private final QueryContext parsedQuery;

        public Expression(QueryContext q) {
            this.parsedQuery = q;
        }

        /**
         * @return this expression if a parse tree is present, null otherwise
         *         (callers use the null return to detect a failed parse)
         */
        public Expression isReady() {
            if (parsedQuery == null) {
                return null;
            }

            return this;
        }

        /**
         * Walks the underlying parse tree with the supplied visitor.
         *
         * @param qv visitor that collects the Gremlin translation
         */
        public void accept(DSLVisitor qv) {
            qv.visit(parsedQuery);
        }
    }
}
...@@ -22,11 +22,11 @@ import org.apache.atlas.exception.AtlasBaseException; ...@@ -22,11 +22,11 @@ import org.apache.atlas.exception.AtlasBaseException;
import org.apache.atlas.model.TypeCategory; import org.apache.atlas.model.TypeCategory;
import org.apache.atlas.model.discovery.SearchParameters; import org.apache.atlas.model.discovery.SearchParameters;
import org.apache.atlas.model.typedef.AtlasBaseTypeDef; import org.apache.atlas.model.typedef.AtlasBaseTypeDef;
import org.apache.atlas.query.Expressions.Expression;
import org.apache.atlas.type.AtlasArrayType; import org.apache.atlas.type.AtlasArrayType;
import org.apache.atlas.type.AtlasBuiltInTypes; import org.apache.atlas.type.AtlasBuiltInTypes;
import org.apache.atlas.type.AtlasEntityType; import org.apache.atlas.type.AtlasEntityType;
import org.apache.atlas.type.AtlasStructType; import org.apache.atlas.type.AtlasStructType;
import org.apache.atlas.type.AtlasStructType.AtlasAttribute;
import org.apache.atlas.type.AtlasType; import org.apache.atlas.type.AtlasType;
import org.apache.atlas.type.AtlasTypeRegistry; import org.apache.atlas.type.AtlasTypeRegistry;
import org.apache.commons.lang.StringUtils; import org.apache.commons.lang.StringUtils;
...@@ -43,58 +43,61 @@ import java.util.List; ...@@ -43,58 +43,61 @@ import java.util.List;
import java.util.Map; import java.util.Map;
import java.util.Objects; import java.util.Objects;
import java.util.StringJoiner; import java.util.StringJoiner;
import java.util.stream.Collectors;
import java.util.stream.Stream; import java.util.stream.Stream;
public class QueryProcessor { public class GremlinQueryComposer {
private static final Logger LOG = LoggerFactory.getLogger(QueryProcessor.class); private static final Logger LOG = LoggerFactory.getLogger(GremlinQueryComposer.class);
private final int DEFAULT_QUERY_RESULT_LIMIT = 25; private final int DEFAULT_QUERY_RESULT_LIMIT = 25;
private final int DEFAULT_QUERY_RESULT_OFFSET = 0; private final int DEFAULT_QUERY_RESULT_OFFSET = 0;
private final List<String> errorList = new ArrayList<>(); private final List<String> errorList = new ArrayList<>();
private final GremlinClauseList queryClauses = new GremlinClauseList(); private final GremlinClauseList queryClauses = new GremlinClauseList();
private final Lookup lookup;
private int providedLimit = DEFAULT_QUERY_RESULT_LIMIT; private final boolean isNestedQuery;
private int providedOffset = DEFAULT_QUERY_RESULT_OFFSET; private int providedLimit = DEFAULT_QUERY_RESULT_LIMIT;
private boolean hasSelect = false; private int providedOffset = DEFAULT_QUERY_RESULT_OFFSET;
private boolean isSelectNoop = false; private boolean hasSelect = false;
private boolean hasGrpBy = false; private boolean isSelectNoop = false;
private boolean hasGroupBy = false;
private final org.apache.atlas.query.Lookup lookup; private boolean hasOrderBy = false;
private final boolean isNestedQuery; private boolean hasLimitOffset = false;
private int currentStep; private String offset = null;
private Context context; private String limit = null;
private Context context;
private SelectExprMetadata selectExprMetadata;
@Inject @Inject
public QueryProcessor(AtlasTypeRegistry typeRegistry) { public GremlinQueryComposer(AtlasTypeRegistry typeRegistry) {
this.isNestedQuery = false; isNestedQuery = false;
lookup = new Lookup(errorList, typeRegistry); lookup = new RegistryBasedLookup(errorList, typeRegistry);
context = new Context(errorList, lookup); context = new Context(errorList, lookup);
init(); init();
} }
public QueryProcessor(AtlasTypeRegistry typeRegistry, int limit, int offset) { public GremlinQueryComposer(AtlasTypeRegistry typeRegistry, int limit, int offset) {
this(typeRegistry); this(typeRegistry);
this.providedLimit = limit;
this.providedOffset = offset < 0 ? DEFAULT_QUERY_RESULT_OFFSET : offset; providedLimit = limit;
providedOffset = offset < 0 ? DEFAULT_QUERY_RESULT_OFFSET : offset;
} }
@VisibleForTesting @VisibleForTesting
QueryProcessor(org.apache.atlas.query.Lookup lookup, Context context) { GremlinQueryComposer(Lookup lookup, Context context) {
this.isNestedQuery = false; this.isNestedQuery = false;
this.lookup = lookup; this.lookup = lookup;
this.context = context; this.context = context;
init(); init();
} }
public QueryProcessor(org.apache.atlas.query.Lookup registryLookup, boolean isNestedQuery) { public GremlinQueryComposer(Lookup registryLookup, boolean isNestedQuery) {
this.isNestedQuery = isNestedQuery; this.isNestedQuery = isNestedQuery;
this.lookup = registryLookup; this.lookup = registryLookup;
init();
}
public Expression validate(Expression expression) { init();
return expression.isReady();
} }
public void addFrom(String typeName) { public void addFrom(String typeName) {
...@@ -103,10 +106,12 @@ public class QueryProcessor { ...@@ -103,10 +106,12 @@ public class QueryProcessor {
} }
IdentifierHelper.Advice ta = getAdvice(typeName); IdentifierHelper.Advice ta = getAdvice(typeName);
if(context.shouldRegister(ta.get())) { if(context.shouldRegister(ta.get())) {
context.registerActive(ta.get()); context.registerActive(ta.get());
IdentifierHelper.Advice ia = getAdvice(ta.get()); IdentifierHelper.Advice ia = getAdvice(ta.get());
if (ia.isTrait()) { if (ia.isTrait()) {
add(GremlinClause.TRAIT, ia.get()); add(GremlinClause.TRAIT, ia.get());
} else { } else {
...@@ -147,7 +152,7 @@ public class QueryProcessor { ...@@ -147,7 +152,7 @@ public class QueryProcessor {
SearchParameters.Operator op = SearchParameters.Operator.fromString(operator); SearchParameters.Operator op = SearchParameters.Operator.fromString(operator);
IdentifierHelper.Advice org = null; IdentifierHelper.Advice org = null;
IdentifierHelper.Advice lhsI = getAdvice(lhs); IdentifierHelper.Advice lhsI = getAdvice(lhs);
if(lhsI.isPrimitive() == false) { if(!lhsI.isPrimitive()) {
introduceType(lhsI); introduceType(lhsI);
org = lhsI; org = lhsI;
lhsI = getAdvice(lhs); lhsI = getAdvice(lhs);
...@@ -166,28 +171,13 @@ public class QueryProcessor { ...@@ -166,28 +171,13 @@ public class QueryProcessor {
add(GremlinClause.HAS_OPERATOR, lhsI.getQualifiedName(), op.getSymbols()[1], rhs); add(GremlinClause.HAS_OPERATOR, lhsI.getQualifiedName(), op.getSymbols()[1], rhs);
} }
if(org != null && org.isPrimitive() == false && org.getIntroduceType()) { if(org != null && !org.isPrimitive() && org.getIntroduceType()) {
add(GremlinClause.DEDUP);
add(GremlinClause.IN, org.getEdgeLabel()); add(GremlinClause.IN, org.getEdgeLabel());
context.registerActive(currentType); context.registerActive(currentType);
} }
} }
private String addQuotesIfNecessary(String rhs) {
if(IdentifierHelper.isQuoted(rhs)) return rhs;
return quoted(rhs);
}
private static String quoted(String rhs) {
return IdentifierHelper.getQuoted(rhs);
}
private String parseDate(String rhs) {
String s = IdentifierHelper.isQuoted(rhs) ?
IdentifierHelper.removeQuotes(rhs) :
rhs;
return String.format("'%d'", DateTime.parse(s).getMillis());
}
public void addAndClauses(List<String> clauses) { public void addAndClauses(List<String> clauses) {
queryClauses.add(GremlinClause.AND, StringUtils.join(clauses, ',')); queryClauses.add(GremlinClause.AND, StringUtils.join(clauses, ','));
} }
...@@ -243,17 +233,18 @@ public class QueryProcessor { ...@@ -243,17 +233,18 @@ public class QueryProcessor {
if (isSelectNoop) { if (isSelectNoop) {
transformationFn = GremlinClause.SELECT_EXPR_NOOP_FN; transformationFn = GremlinClause.SELECT_EXPR_NOOP_FN;
} else { } else {
transformationFn = hasGrpBy ? GremlinClause.SELECT_WITH_GRPBY_HELPER_FN : GremlinClause.SELECT_EXPR_HELPER_FN; transformationFn = hasGroupBy ? GremlinClause.SELECT_WITH_GRPBY_HELPER_FN : GremlinClause.SELECT_EXPR_HELPER_FN;
} }
queryClauses.add(0, transformationFn, getJoinedQuotedStr(labels), String.join(",", items)); queryClauses.add(0, transformationFn, getJoinedQuotedStr(labels), String.join(",", items));
queryClauses.add(GremlinClause.INLINE_TRANSFORM_CALL); queryClauses.add(GremlinClause.INLINE_TRANSFORM_CALL);
hasSelect = true; hasSelect = true;
this.selectExprMetadata = selectExprMetadata;
} }
} }
public QueryProcessor createNestedProcessor() { public GremlinQueryComposer createNestedProcessor() {
QueryProcessor qp = new QueryProcessor(lookup, true); GremlinQueryComposer qp = new GremlinQueryComposer(lookup, true);
qp.context = this.context; qp.context = this.context;
return qp; return qp;
} }
...@@ -282,7 +273,7 @@ public class QueryProcessor { ...@@ -282,7 +273,7 @@ public class QueryProcessor {
} }
addGroupByClause(item); addGroupByClause(item);
hasGrpBy = true; hasGroupBy = true;
} }
public void addLimit(String limit, String offset) { public void addLimit(String limit, String offset) {
...@@ -290,40 +281,28 @@ public class QueryProcessor { ...@@ -290,40 +281,28 @@ public class QueryProcessor {
LOG.debug("addLimit(limit={}, offset={})", limit, offset); LOG.debug("addLimit(limit={}, offset={})", limit, offset);
} }
if (offset.equalsIgnoreCase("0")) { this.limit = limit;
add(GremlinClause.LIMIT, limit); this.offset = offset;
} else {
addRangeClause(offset, limit);
}
}
public void close() { hasLimitOffset = true;
if (queryClauses.isEmpty()) {
queryClauses.clear();
return;
}
if (queryClauses.hasClause(GremlinClause.LIMIT) == -1) {
addLimit(Integer.toString(providedLimit), Integer.toString(providedOffset));
}
updatePosition(GremlinClause.LIMIT);
add(GremlinClause.TO_LIST);
updatePosition(GremlinClause.INLINE_TRANSFORM_CALL);
} }
public String getText() { public String get() {
close();
String ret; String ret;
String[] items = new String[queryClauses.size()]; String[] items = new String[queryClauses.size()];
int startIdx = hasSelect ? 1 : 0; boolean needTransformation = needTransformation();
int endIdx = hasSelect ? queryClauses.size() - 1 : queryClauses.size(); int startIdx = needTransformation ? 1 : 0;
int endIdx = needTransformation ? queryClauses.size() - 1 : queryClauses.size();
for (int i = startIdx; i < endIdx; i++) { for (int i = startIdx; i < endIdx; i++) {
items[i] = queryClauses.getValue(i); items[i] = queryClauses.getValue(i);
} }
if (hasSelect) { if (needTransformation) {
String body = StringUtils.join(Stream.of(items).filter(Objects::nonNull).toArray(), "."); String body = String.join(".", Stream.of(items).filter(Objects::nonNull).collect(Collectors.toList()));
String inlineFn = queryClauses.getValue(queryClauses.size() - 1); String inlineFn = queryClauses.getValue(queryClauses.size() - 1);
String funCall = String.format(inlineFn, body); String funCall = String.format(inlineFn, body);
ret = queryClauses.getValue(0) + funCall; ret = queryClauses.getValue(0) + funCall;
...@@ -332,7 +311,7 @@ public class QueryProcessor { ...@@ -332,7 +311,7 @@ public class QueryProcessor {
} }
if (LOG.isDebugEnabled()) { if (LOG.isDebugEnabled()) {
LOG.debug("getText() => {}", ret); LOG.debug("get() => {}", ret);
} }
return ret; return ret;
} }
...@@ -341,12 +320,79 @@ public class QueryProcessor { ...@@ -341,12 +320,79 @@ public class QueryProcessor {
return hasSelect; return hasSelect;
} }
public boolean hasLimitOffset() {
    // True once addLimit() has recorded an explicit "limit ... offset ..." from the DSL query;
    // close() uses this to decide whether to emit a limit()/range() Gremlin clause.
    return hasLimitOffset;
}
public void addOrderBy(String name, boolean isDesc) { public void addOrderBy(String name, boolean isDesc) {
if (LOG.isDebugEnabled()) { if (LOG.isDebugEnabled()) {
LOG.debug("addOrderBy(name={}, isDesc={})", name, isDesc); LOG.debug("addOrderBy(name={}, isDesc={})", name, isDesc);
} }
addOrderByClause(name, isDesc); AtlasAttribute attribute = ((AtlasStructType) context.getActiveType()).getAttribute(getAttributeName(name));
if (hasGroupBy) {
GremlinClause transformationFn = isDesc ? GremlinClause.GRPBY_ORDERBY_DESC_HELPER_FN : GremlinClause.GRPBY_ORDERBY_ASC_HELPER_FN;
add(0, transformationFn, attribute.getQualifiedName(), attribute.getQualifiedName());
add(GremlinClause.INLINE_TRANSFORM_CALL);
} else {
addOrderByClause(attribute.getQualifiedName(), isDesc);
}
hasOrderBy = true;
}
/**
 * Returns the unqualified attribute name from a '.'-separated qualified name,
 * e.g. "hive_db.owner" becomes "owner". A name without a '.' separator is
 * returned unchanged. Removed an unused local variable ("final String ret;")
 * that was declared but never assigned or read.
 *
 * @param fqdn possibly qualified attribute name (never quoted at this point)
 * @return the segment after the last '.', or the input itself if there is none
 */
private String getAttributeName(String fqdn) {
    int lastSepIdx = fqdn.lastIndexOf('.');

    return lastSepIdx == -1 ? fqdn : fqdn.substring(lastSepIdx + 1);
}
/**
 * A Groovy transformation-function wrapper (emitted by get()) is needed when the
 * query has a SELECT clause, or when GROUP BY results must be re-sorted for ORDER BY.
 *
 * Simplified from the original three-term expression: the clause
 * (hasGroupBy && hasSelect && hasOrderBy) is logically subsumed by
 * (hasGroupBy && hasOrderBy), so the result is unchanged.
 */
private boolean needTransformation() {
    return (hasGroupBy && hasOrderBy) || hasSelect;
}
// Convenience wrapper: returns the value surrounded with quotes, delegating the
// quote-character choice to IdentifierHelper.getQuoted().
private static String quoted(String rhs) {
    return IdentifierHelper.getQuoted(rhs);
}
// Quotes the right-hand-side value unless it already carries quotes,
// so literals coming from the DSL are never double-quoted.
private String addQuotesIfNecessary(String rhs) {
    return IdentifierHelper.isQuoted(rhs) ? rhs : quoted(rhs);
}
// Converts a DSL date literal into a quoted epoch-millis string for the Gremlin query.
// Quotes (single, double or backtick) are stripped before parsing.
// NOTE(review): DateTime.parse uses the ISO-8601 parser; a malformed date will throw
// an unchecked IllegalArgumentException from joda-time — confirm callers expect that.
private String parseDate(String rhs) {
    String s = IdentifierHelper.isQuoted(rhs) ?
            IdentifierHelper.removeQuotes(rhs) :
            rhs;
    return String.format("'%d'", DateTime.parse(s).getMillis());
}
// Finalizes the composed query before rendering: applies the recorded limit/offset
// (unless an aggregator function is present), appends toList(), and repositions the
// LIMIT and inline-transform clauses. Called once from get().
private void close() {
    // No limits or toList() need to be added to the nested queries
    if (isNestedQuery) return;

    if (hasLimitOffset) {
        // If there are any aggregator functions then implicit limits shouldn't be applied
        if (selectExprMetadata == null || !selectExprMetadata.hasAggregatorFunction()) {
            if (offset.equalsIgnoreCase("0")) {
                add(GremlinClause.LIMIT, limit);
            } else {
                // Non-zero offset: emit a range clause instead of a plain limit()
                addRangeClause(offset, limit);
            }
        } else {
            LOG.warn("Query has aggregator function. Performance might be slow for large dataset");
        }
    }

    // Nothing composed: leave the clause list empty and skip clause reordering.
    if (queryClauses.isEmpty()) {
        queryClauses.clear();
        return;
    }

    // Keep LIMIT near the end of the pipeline and the inline transform call last,
    // so the rendered Gremlin string is well-formed.
    updatePosition(GremlinClause.LIMIT);
    add(GremlinClause.TO_LIST);
    updatePosition(GremlinClause.INLINE_TRANSFORM_CALL);
} }
private void updatePosition(GremlinClause clause) { private void updatePosition(GremlinClause clause) {
...@@ -381,7 +427,9 @@ public class QueryProcessor { ...@@ -381,7 +427,9 @@ public class QueryProcessor {
private String getJoinedQuotedStr(String[] elements) { private String getJoinedQuotedStr(String[] elements) {
StringJoiner joiner = new StringJoiner(","); StringJoiner joiner = new StringJoiner(",");
Arrays.stream(elements).map(x -> "'" + x + "'").forEach(joiner::add); Arrays.stream(elements)
.map(x -> x.contains("'") ? "\"" + x + "\"" : "'" + x + "'")
.forEach(joiner::add);
return joiner.toString(); return joiner.toString();
} }
...@@ -427,10 +475,11 @@ public class QueryProcessor { ...@@ -427,10 +475,11 @@ public class QueryProcessor {
AS("as('%s')"), AS("as('%s')"),
DEDUP("dedup()"), DEDUP("dedup()"),
G("g"), G("g"),
GROUP_BY("group().by('%')"), GROUP_BY("group().by('%s')"),
HAS("has('%s', %s)"), HAS("has('%s', %s)"),
HAS_OPERATOR("has('%s', %s(%s))"), HAS_OPERATOR("has('%s', %s(%s))"),
HAS_PROPERTY("has('%s')"), HAS_PROPERTY("has('%s')"),
WHERE("where(%s)"),
HAS_NOT_PROPERTY("hasNot('%s')"), HAS_NOT_PROPERTY("hasNot('%s')"),
HAS_TYPE("has('__typeName', '%s')"), HAS_TYPE("has('__typeName', '%s')"),
HAS_TYPE_WITHIN("has('__typeName', within(%s))"), HAS_TYPE_WITHIN("has('__typeName', within(%s))"),
...@@ -454,6 +503,8 @@ public class QueryProcessor { ...@@ -454,6 +503,8 @@ public class QueryProcessor {
SELECT_EXPR_NOOP_FN("def f(r){ r }; "), SELECT_EXPR_NOOP_FN("def f(r){ r }; "),
SELECT_EXPR_HELPER_FN("def f(r){ return [[%s]].plus(r.collect({[%s]})).unique(); }; "), SELECT_EXPR_HELPER_FN("def f(r){ return [[%s]].plus(r.collect({[%s]})).unique(); }; "),
SELECT_WITH_GRPBY_HELPER_FN("def f(r){ return [[%s]].plus(r.collect({it.values()}).flatten().collect({[%s]})).unique(); }; "), SELECT_WITH_GRPBY_HELPER_FN("def f(r){ return [[%s]].plus(r.collect({it.values()}).flatten().collect({[%s]})).unique(); }; "),
GRPBY_ORDERBY_ASC_HELPER_FN("def f(r){ m=r.get(0); m.each({ k,v -> m[k] = v.sort{a,b -> a.value('%s') <=> b.value('%s')}}); r }; "),
GRPBY_ORDERBY_DESC_HELPER_FN("def f(r){ m=r.get(0); m.each({ k,v -> m[k] = v.sort{a,b -> b.value('%s') <=> a.value('%s')}}); r; }; "),
INLINE_COUNT("r.size()"), INLINE_COUNT("r.size()"),
INLINE_SUM("r.sum({it.value('%s')}).value('%s')"), INLINE_SUM("r.sum({it.value('%s')}).value('%s')"),
INLINE_MAX("r.max({it.value('%s')}).value('%s')"), INLINE_MAX("r.max({it.value('%s')}).value('%s')"),
...@@ -553,7 +604,7 @@ public class QueryProcessor { ...@@ -553,7 +604,7 @@ public class QueryProcessor {
} }
public boolean isEmpty() { public boolean isEmpty() {
return list.size() == 0 || list.size() == 2; return list.size() == 0;
} }
public void clear() { public void clear() {
...@@ -570,11 +621,11 @@ public class QueryProcessor { ...@@ -570,11 +621,11 @@ public class QueryProcessor {
@VisibleForTesting @VisibleForTesting
static class Context { static class Context {
private final List<String> errorList; private final List<String> errorList;
org.apache.atlas.query.Lookup lookup; Lookup lookup;
Map<String, String> aliasMap = new HashMap<>(); Map<String, String> aliasMap = new HashMap<>();
private AtlasType activeType; private AtlasType activeType;
public Context(List<String> errorList, org.apache.atlas.query.Lookup lookup) { public Context(List<String> errorList, Lookup lookup) {
this.lookup = lookup; this.lookup = lookup;
this.errorList = errorList; this.errorList = errorList;
} }
...@@ -629,11 +680,11 @@ public class QueryProcessor { ...@@ -629,11 +680,11 @@ public class QueryProcessor {
} }
} }
private static class Lookup implements org.apache.atlas.query.Lookup { private static class RegistryBasedLookup implements Lookup {
private final List<String> errorList; private final List<String> errorList;
private final AtlasTypeRegistry typeRegistry; private final AtlasTypeRegistry typeRegistry;
public Lookup(List<String> errorList, AtlasTypeRegistry typeRegistry) { public RegistryBasedLookup(List<String> errorList, AtlasTypeRegistry typeRegistry) {
this.errorList = errorList; this.errorList = errorList;
this.typeRegistry = typeRegistry; this.typeRegistry = typeRegistry;
} }
...@@ -665,10 +716,6 @@ public class QueryProcessor { ...@@ -665,10 +716,6 @@ public class QueryProcessor {
return ""; return "";
} }
protected void addError(String s) {
errorList.add(s);
}
@Override @Override
public boolean isPrimitive(Context context, String attributeName) { public boolean isPrimitive(Context context, String attributeName) {
AtlasEntityType et = context.getActiveEntityType(); AtlasEntityType et = context.getActiveEntityType();
...@@ -692,7 +739,7 @@ public class QueryProcessor { ...@@ -692,7 +739,7 @@ public class QueryProcessor {
return ""; return "";
} }
AtlasStructType.AtlasAttribute attr = et.getAttribute(attributeName); AtlasAttribute attr = et.getAttribute(attributeName);
return (attr != null) ? attr.getRelationshipEdgeLabel() : ""; return (attr != null) ? attr.getRelationshipEdgeLabel() : "";
} }
...@@ -736,7 +783,7 @@ public class QueryProcessor { ...@@ -736,7 +783,7 @@ public class QueryProcessor {
return ""; return "";
} }
AtlasStructType.AtlasAttribute attr = et.getAttribute(item); AtlasAttribute attr = et.getAttribute(item);
if(attr == null) { if(attr == null) {
return null; return null;
} }
...@@ -761,6 +808,10 @@ public class QueryProcessor { ...@@ -761,6 +808,10 @@ public class QueryProcessor {
return attr != null && attr.getTypeName().equals(AtlasBaseTypeDef.ATLAS_TYPE_DATE); return attr != null && attr.getTypeName().equals(AtlasBaseTypeDef.ATLAS_TYPE_DATE);
} }
protected void addError(String s) {
errorList.add(s);
}
} }
static class SelectExprMetadata { static class SelectExprMetadata {
...@@ -771,17 +822,23 @@ public class QueryProcessor { ...@@ -771,17 +822,23 @@ public class QueryProcessor {
private int sumIdx = -1; private int sumIdx = -1;
private int maxIdx = -1; private int maxIdx = -1;
private int minIdx = -1; private int minIdx = -1;
private boolean hasAggregator = false;
public String[] getItems() { public String[] getItems() {
return items; return items;
} }
public void setItems(final String[] items) {
this.items = items;
}
public int getCountIdx() { public int getCountIdx() {
return countIdx; return countIdx;
} }
public void setCountIdx(final int countIdx) { public void setCountIdx(final int countIdx) {
this.countIdx = countIdx; this.countIdx = countIdx;
setHasAggregator();
} }
public int getSumIdx() { public int getSumIdx() {
...@@ -790,6 +847,7 @@ public class QueryProcessor { ...@@ -790,6 +847,7 @@ public class QueryProcessor {
public void setSumIdx(final int sumIdx) { public void setSumIdx(final int sumIdx) {
this.sumIdx = sumIdx; this.sumIdx = sumIdx;
setHasAggregator();
} }
public int getMaxIdx() { public int getMaxIdx() {
...@@ -798,6 +856,7 @@ public class QueryProcessor { ...@@ -798,6 +856,7 @@ public class QueryProcessor {
public void setMaxIdx(final int maxIdx) { public void setMaxIdx(final int maxIdx) {
this.maxIdx = maxIdx; this.maxIdx = maxIdx;
setHasAggregator();
} }
public int getMinIdx() { public int getMinIdx() {
...@@ -806,18 +865,23 @@ public class QueryProcessor { ...@@ -806,18 +865,23 @@ public class QueryProcessor {
public void setMinIdx(final int minIdx) { public void setMinIdx(final int minIdx) {
this.minIdx = minIdx; this.minIdx = minIdx;
setHasAggregator();
} }
public String[] getLabels() { public String[] getLabels() {
return labels; return labels;
} }
public void setItems(final String[] items) {
this.items = items;
}
public void setLabels(final String[] labels) { public void setLabels(final String[] labels) {
this.labels = labels; this.labels = labels;
} }
public boolean hasAggregatorFunction(){
return hasAggregator;
}
private void setHasAggregator() {
hasAggregator = true;
}
} }
} }
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.atlas.query;
import org.apache.atlas.query.Expressions.Expression;
/**
 * Translates a parsed DSL {@link Expression} into a Gremlin query by driving a
 * {@link DSLVisitor} over the expression tree and collecting clauses in the
 * supplied {@code QueryProcessor}.
 *
 * Changes: {@code expression} is now {@code final} (it is never reassigned),
 * and the redundant local before the return in {@link #translate()} was inlined.
 */
public class GremlinTranslator {
    private final QueryProcessor queryProcessor;
    private final Expression     expression;

    /**
     * @param queryProcessor accumulates the Gremlin clauses produced during the visit
     * @param expression     parsed DSL expression tree to translate
     */
    public GremlinTranslator(QueryProcessor queryProcessor, Expression expression) {
        this.expression     = expression;
        this.queryProcessor = queryProcessor;
    }

    /**
     * Walks the expression with a DSLVisitor, finalizes the query processor and
     * renders the resulting Gremlin query text.
     *
     * @return the translated query along with whether it contains a select clause
     */
    public GremlinQuery translate() {
        DSLVisitor qv = new DSLVisitor(queryProcessor);

        expression.accept(qv);
        queryProcessor.close();

        return new GremlinQuery(queryProcessor.getText(), queryProcessor.hasSelect());
    }
}
...@@ -25,17 +25,27 @@ import java.util.regex.Pattern; ...@@ -25,17 +25,27 @@ import java.util.regex.Pattern;
public class IdentifierHelper { public class IdentifierHelper {
public static String stripQuotes(String quotedIdentifier) { private static final Pattern SINGLE_QUOTED_IDENTIFIER = Pattern.compile("'(\\w[\\w\\d\\.\\s]*)'");
String ret = quotedIdentifier; private static final Pattern DOUBLE_QUOTED_IDENTIFIER = Pattern.compile("\"(\\w[\\w\\d\\.\\s]*)\"");
private static final Pattern BACKTICK_QUOTED_IDENTIFIER = Pattern.compile("`(\\w[\\w\\d\\.\\s]*)`");
if (isQuoted(quotedIdentifier)) { public static String get(String quotedIdentifier) {
ret = quotedIdentifier.substring(1, quotedIdentifier.length() - 1); String ret;
if (quotedIdentifier.charAt(0) == '`') {
ret = extract(BACKTICK_QUOTED_IDENTIFIER, quotedIdentifier);
} else if (quotedIdentifier.charAt(0) == '\'') {
ret = extract(SINGLE_QUOTED_IDENTIFIER, quotedIdentifier);
} else if (quotedIdentifier.charAt(0) == '"') {
ret = extract(DOUBLE_QUOTED_IDENTIFIER, quotedIdentifier);
} else {
ret = quotedIdentifier;
} }
return ret; return ret;
} }
public static Advice create(QueryProcessor.Context context, public static Advice create(GremlinQueryComposer.Context context,
org.apache.atlas.query.Lookup lookup, org.apache.atlas.query.Lookup lookup,
String identifier) { String identifier) {
Advice ia = new Advice(identifier); Advice ia = new Advice(identifier);
...@@ -49,7 +59,7 @@ public class IdentifierHelper { ...@@ -49,7 +59,7 @@ public class IdentifierHelper {
} }
public static String getQualifiedName(org.apache.atlas.query.Lookup lookup, public static String getQualifiedName(org.apache.atlas.query.Lookup lookup,
QueryProcessor.Context context, GremlinQueryComposer.Context context,
String name) { String name) {
return lookup.getQualifiedName(context, name); return lookup.getQualifiedName(context, name);
} }
...@@ -70,7 +80,9 @@ public class IdentifierHelper { ...@@ -70,7 +80,9 @@ public class IdentifierHelper {
} }
public static String removeQuotes(String rhs) { public static String removeQuotes(String rhs) {
return rhs.replace("\"", "").replace("'", ""); return rhs.replace("\"", "")
.replace("'", "")
.replace("`", "");
} }
public static String getQuoted(String s) { public static String getQuoted(String s) {
...@@ -97,10 +109,10 @@ public class IdentifierHelper { ...@@ -97,10 +109,10 @@ public class IdentifierHelper {
public Advice(String s) { public Advice(String s) {
this.raw = removeQuotes(s); this.raw = removeQuotes(s);
this.actual = IdentifierHelper.stripQuotes(raw); this.actual = IdentifierHelper.get(raw);
} }
private void update(org.apache.atlas.query.Lookup lookup, QueryProcessor.Context context) { private void update(org.apache.atlas.query.Lookup lookup, GremlinQueryComposer.Context context) {
newContext = context.isEmpty(); newContext = context.isEmpty();
if(!newContext) { if(!newContext) {
if(context.aliasMap.containsKey(this.raw)) { if(context.aliasMap.containsKey(this.raw)) {
...@@ -116,7 +128,7 @@ public class IdentifierHelper { ...@@ -116,7 +128,7 @@ public class IdentifierHelper {
} }
} }
private void updateSubTypes(org.apache.atlas.query.Lookup lookup, QueryProcessor.Context context) { private void updateSubTypes(org.apache.atlas.query.Lookup lookup, GremlinQueryComposer.Context context) {
if(isTrait) { if(isTrait) {
return; return;
} }
...@@ -127,7 +139,7 @@ public class IdentifierHelper { ...@@ -127,7 +139,7 @@ public class IdentifierHelper {
} }
} }
private void updateEdgeInfo(org.apache.atlas.query.Lookup lookup, QueryProcessor.Context context) { private void updateEdgeInfo(org.apache.atlas.query.Lookup lookup, GremlinQueryComposer.Context context) {
if(isPrimitive == false && isTrait == false) { if(isPrimitive == false && isTrait == false) {
edgeLabel = lookup.getRelationshipEdgeLabel(context, attributeName); edgeLabel = lookup.getRelationshipEdgeLabel(context, attributeName);
edgeDirection = "OUT"; edgeDirection = "OUT";
...@@ -135,7 +147,7 @@ public class IdentifierHelper { ...@@ -135,7 +147,7 @@ public class IdentifierHelper {
} }
} }
private void updateTypeInfo(org.apache.atlas.query.Lookup lookup, QueryProcessor.Context context) { private void updateTypeInfo(org.apache.atlas.query.Lookup lookup, GremlinQueryComposer.Context context) {
if(parts.length == 1) { if(parts.length == 1) {
typeName = context.getActiveTypeName(); typeName = context.getActiveTypeName();
attributeName = parts[0]; attributeName = parts[0];
...@@ -171,7 +183,7 @@ public class IdentifierHelper { ...@@ -171,7 +183,7 @@ public class IdentifierHelper {
} }
} }
private void setIsDate(Lookup lookup, QueryProcessor.Context context) { private void setIsDate(Lookup lookup, GremlinQueryComposer.Context context) {
if(isPrimitive) { if(isPrimitive) {
isDate = lookup.isDate(context, attributeName); isDate = lookup.isDate(context, attributeName);
} }
......
...@@ -23,21 +23,21 @@ import org.apache.atlas.type.AtlasType; ...@@ -23,21 +23,21 @@ import org.apache.atlas.type.AtlasType;
public interface Lookup { public interface Lookup {
AtlasType getType(String typeName); AtlasType getType(String typeName);
String getQualifiedName(QueryProcessor.Context context, String name); String getQualifiedName(GremlinQueryComposer.Context context, String name);
boolean isPrimitive(QueryProcessor.Context context, String attributeName); boolean isPrimitive(GremlinQueryComposer.Context context, String attributeName);
String getRelationshipEdgeLabel(QueryProcessor.Context context, String attributeName); String getRelationshipEdgeLabel(GremlinQueryComposer.Context context, String attributeName);
boolean hasAttribute(QueryProcessor.Context context, String typeName); boolean hasAttribute(GremlinQueryComposer.Context context, String typeName);
boolean doesTypeHaveSubTypes(QueryProcessor.Context context); boolean doesTypeHaveSubTypes(GremlinQueryComposer.Context context);
String getTypeAndSubTypes(QueryProcessor.Context context); String getTypeAndSubTypes(GremlinQueryComposer.Context context);
boolean isTraitType(QueryProcessor.Context context); boolean isTraitType(GremlinQueryComposer.Context context);
String getTypeFromEdge(QueryProcessor.Context context, String item); String getTypeFromEdge(GremlinQueryComposer.Context context, String item);
boolean isDate(QueryProcessor.Context context, String attributeName); boolean isDate(GremlinQueryComposer.Context context, String attributeName);
} }
// Generated from repository/src/main/java/org/apache/atlas/query/antlr4/AtlasDSLLexer.g4 by ANTLR 4.7 // Generated from repository/src/main/java/org/apache/atlas/query/antlr4/AtlasDSLLexer.g4 by ANTLR 4.7
package org.apache.atlas.query.antlr4; package org.apache.atlas.query.antlr4;
import org.antlr.v4.runtime.Lexer;
import org.antlr.v4.runtime.CharStream; import org.antlr.v4.runtime.CharStream;
import org.antlr.v4.runtime.Token; import org.antlr.v4.runtime.Lexer;
import org.antlr.v4.runtime.TokenStream; import org.antlr.v4.runtime.RuntimeMetaData;
import org.antlr.v4.runtime.*; import org.antlr.v4.runtime.Vocabulary;
import org.antlr.v4.runtime.atn.*; import org.antlr.v4.runtime.VocabularyImpl;
import org.antlr.v4.runtime.atn.ATN;
import org.antlr.v4.runtime.atn.ATNDeserializer;
import org.antlr.v4.runtime.atn.LexerATNSimulator;
import org.antlr.v4.runtime.atn.PredictionContextCache;
import org.antlr.v4.runtime.dfa.DFA; import org.antlr.v4.runtime.dfa.DFA;
import org.antlr.v4.runtime.misc.*;
@SuppressWarnings({"all", "warnings", "unchecked", "unused", "cast"}) @SuppressWarnings({"all", "warnings", "unchecked", "unused", "cast"})
public class AtlasDSLLexer extends Lexer { public class AtlasDSLLexer extends Lexer {
......
SINGLE_LINE_COMMENT=1
MULTILINE_COMMENT=2
WS=3
NUMBER=4
FLOATING_NUMBER=5
BOOL=6
K_COMMA=7
K_PLUS=8
K_MINUS=9
K_STAR=10
K_DIV=11
K_DOT=12
K_LIKE=13
K_AND=14
K_OR=15
K_LPAREN=16
K_LBRACKET=17
K_RPAREN=18
K_RBRACKET=19
K_LT=20
K_LTE=21
K_EQ=22
K_NEQ=23
K_GT=24
K_GTE=25
K_FROM=26
K_WHERE=27
K_ORDERBY=28
K_GROUPBY=29
K_LIMIT=30
K_SELECT=31
K_MAX=32
K_MIN=33
K_SUM=34
K_COUNT=35
K_LOOP=36
K_OFFSET=37
K_AS=38
K_ISA=39
K_IS=40
K_HAS=41
K_ASC=42
K_DESC=43
K_WITHPATH=44
K_TRUE=45
K_FALSE=46
KEYWORD=47
ID=48
STRING=49
','=7
'+'=8
'-'=9
'*'=10
'/'=11
'.'=12
'('=16
'['=17
')'=18
']'=19
...@@ -107,4 +107,4 @@ querySrc: commaDelimitedQueries | spaceDelimitedQueries ; ...@@ -107,4 +107,4 @@ querySrc: commaDelimitedQueries | spaceDelimitedQueries ;
query: querySrc groupByExpression? query: querySrc groupByExpression?
selectClause? selectClause?
orderByExpr? orderByExpr?
limitOffset? ; limitOffset? ;
\ No newline at end of file
SINGLE_LINE_COMMENT=1
MULTILINE_COMMENT=2
WS=3
NUMBER=4
FLOATING_NUMBER=5
BOOL=6
K_COMMA=7
K_PLUS=8
K_MINUS=9
K_STAR=10
K_DIV=11
K_DOT=12
K_LIKE=13
K_AND=14
K_OR=15
K_LPAREN=16
K_LBRACKET=17
K_RPAREN=18
K_RBRACKET=19
K_LT=20
K_LTE=21
K_EQ=22
K_NEQ=23
K_GT=24
K_GTE=25
K_FROM=26
K_WHERE=27
K_ORDERBY=28
K_GROUPBY=29
K_LIMIT=30
K_SELECT=31
K_MAX=32
K_MIN=33
K_SUM=34
K_COUNT=35
K_LOOP=36
K_OFFSET=37
K_AS=38
K_ISA=39
K_IS=40
K_HAS=41
K_ASC=42
K_DESC=43
K_WITHPATH=44
K_TRUE=45
K_FALSE=46
KEYWORD=47
ID=48
STRING=49
','=7
'+'=8
'-'=9
'*'=10
'/'=11
'.'=12
'('=16
'['=17
')'=18
']'=19
...@@ -23,7 +23,7 @@ import org.apache.atlas.AtlasException; ...@@ -23,7 +23,7 @@ import org.apache.atlas.AtlasException;
import org.apache.atlas.exception.AtlasBaseException; import org.apache.atlas.exception.AtlasBaseException;
import org.apache.atlas.model.typedef.AtlasBaseTypeDef; import org.apache.atlas.model.typedef.AtlasBaseTypeDef;
import org.apache.atlas.model.typedef.AtlasStructDef; import org.apache.atlas.model.typedef.AtlasStructDef;
import org.apache.atlas.query.QueryParser; import org.apache.atlas.query.AtlasDSL;
import org.apache.atlas.repository.graphdb.AtlasVertex; import org.apache.atlas.repository.graphdb.AtlasVertex;
import org.apache.atlas.repository.store.graph.AtlasDefStore; import org.apache.atlas.repository.store.graph.AtlasDefStore;
import org.apache.atlas.type.AtlasTypeRegistry; import org.apache.atlas.type.AtlasTypeRegistry;
...@@ -64,7 +64,7 @@ import java.util.regex.Pattern; ...@@ -64,7 +64,7 @@ import java.util.regex.Pattern;
if (!allowReservedKeywords && typeDef instanceof AtlasStructDef) { if (!allowReservedKeywords && typeDef instanceof AtlasStructDef) {
final List<AtlasStructDef.AtlasAttributeDef> attributeDefs = ((AtlasStructDef) typeDef).getAttributeDefs(); final List<AtlasStructDef.AtlasAttributeDef> attributeDefs = ((AtlasStructDef) typeDef).getAttributeDefs();
for (AtlasStructDef.AtlasAttributeDef attrDef : attributeDefs) { for (AtlasStructDef.AtlasAttributeDef attrDef : attributeDefs) {
if (QueryParser.isKeyword(attrDef.getName())) { if (AtlasDSL.Parser.isKeyword(attrDef.getName())) {
throw new AtlasBaseException(AtlasErrorCode.ATTRIBUTE_NAME_INVALID, attrDef.getName(), typeDef.getCategory().name()); throw new AtlasBaseException(AtlasErrorCode.ATTRIBUTE_NAME_INVALID, attrDef.getName(), typeDef.getCategory().name());
} }
} }
......
...@@ -25,7 +25,7 @@ import org.apache.atlas.model.typedef.AtlasRelationshipDef; ...@@ -25,7 +25,7 @@ import org.apache.atlas.model.typedef.AtlasRelationshipDef;
import org.apache.atlas.model.typedef.AtlasRelationshipDef.RelationshipCategory; import org.apache.atlas.model.typedef.AtlasRelationshipDef.RelationshipCategory;
import org.apache.atlas.model.typedef.AtlasRelationshipDef.PropagateTags; import org.apache.atlas.model.typedef.AtlasRelationshipDef.PropagateTags;
import org.apache.atlas.model.typedef.AtlasRelationshipEndDef; import org.apache.atlas.model.typedef.AtlasRelationshipEndDef;
import org.apache.atlas.query.QueryParser; import org.apache.atlas.query.AtlasDSL;
import org.apache.atlas.repository.Constants; import org.apache.atlas.repository.Constants;
import org.apache.atlas.repository.graphdb.AtlasEdge; import org.apache.atlas.repository.graphdb.AtlasEdge;
import org.apache.atlas.repository.graphdb.AtlasVertex; import org.apache.atlas.repository.graphdb.AtlasVertex;
...@@ -354,11 +354,11 @@ public class AtlasRelationshipDefStoreV1 extends AtlasAbstractDefStoreV1<AtlasRe ...@@ -354,11 +354,11 @@ public class AtlasRelationshipDefStoreV1 extends AtlasAbstractDefStoreV1<AtlasRe
} }
if (!allowReservedKeywords) { if (!allowReservedKeywords) {
if (QueryParser.isKeyword(end1.getName())) { if (AtlasDSL.Parser.isKeyword(end1.getName())) {
throw new AtlasBaseException(AtlasErrorCode.RELATIONSHIPDEF_END1_NAME_INVALID, end1.getName()); throw new AtlasBaseException(AtlasErrorCode.RELATIONSHIPDEF_END1_NAME_INVALID, end1.getName());
} }
if (QueryParser.isKeyword(end2.getName())) { if (AtlasDSL.Parser.isKeyword(end2.getName())) {
throw new AtlasBaseException(AtlasErrorCode.RELATIONSHIPDEF_END2_NAME_INVALID, end2.getName()); throw new AtlasBaseException(AtlasErrorCode.RELATIONSHIPDEF_END2_NAME_INVALID, end2.getName());
} }
} }
......
...@@ -127,8 +127,8 @@ public class DSLQueriesTest extends BasicTestSetup { ...@@ -127,8 +127,8 @@ public class DSLQueriesTest extends BasicTestSetup {
{"hive_db where hive_db is JdbcAccess", 0}, {"hive_db where hive_db is JdbcAccess", 0},
{"hive_db where hive_db has name", 3}, {"hive_db where hive_db has name", 3},
{"hive_db as db1 hive_table where (db1.name = \"Reporting\")", 0}, {"hive_db as db1 hive_table where (db1.name = \"Reporting\")", 0},
{"hive_table where (name = \"sales_fact\" and createTime > \"2014-01-01\" ) select name as _col_0, createTime as _col_1 ", 1}, {"hive_table where (name = \"sales_fact\" and createTime > \"2014-01-01\" ) select name as _col_0, createTime as _col_1", 1},
{"hive_table where (name = \"sales_fact\" and createTime >= \"2014-12-11T02:35:58.440Z\" ) select name as _col_0, createTime as _col_1 ", 1}, {"hive_table where (name = \"sales_fact\" and createTime >= \"2014-12-11T02:35:58.440Z\" ) select name as _col_0, createTime as _col_1", 1},
{"Dimension", 5}, {"Dimension", 5},
{"JdbcAccess", 2}, {"JdbcAccess", 2},
{"ETL", 5}, {"ETL", 5},
...@@ -240,12 +240,12 @@ public class DSLQueriesTest extends BasicTestSetup { ...@@ -240,12 +240,12 @@ public class DSLQueriesTest extends BasicTestSetup {
{"hive_db as db1 hive_table where (db1.name = \"Reporting\")", 0}, {"hive_db as db1 hive_table where (db1.name = \"Reporting\")", 0},
{"hive_table where (name = \"sales_fact\" and createTime > \"2014-01-01\" ) select name as _col_0, createTime as _col_1 ", 1}, {"hive_table where (name = \"sales_fact\" and createTime > \"2014-01-01\" ) select name as _col_0, createTime as _col_1", 1},
{"hive_table where (name = \"sales_fact\" and createTime > \"2014-01-01\" ) select name as _col_0, createTime as _col_1 limit 10 ", 1}, {"hive_table where (name = \"sales_fact\" and createTime > \"2014-01-01\" ) select name as _col_0, createTime as _col_1 limit 10", 1},
{"hive_table where (name = \"sales_fact\" and createTime > \"2014-01-01\" ) select name as _col_0, createTime as _col_1 limit 10 offset 0", 1}, {"hive_table where (name = \"sales_fact\" and createTime > \"2014-01-01\" ) select name as _col_0, createTime as _col_1 limit 10 offset 0", 1},
{"hive_table where (name = \"sales_fact\" and createTime > \"2014-01-01\" ) select name as _col_0, createTime as _col_1 limit 10 offset 5", 0}, {"hive_table where (name = \"sales_fact\" and createTime > \"2014-01-01\" ) select name as _col_0, createTime as _col_1 limit 10 offset 5", 0},
{"hive_table where (name = \"sales_fact\" and createTime >= \"2014-12-11T02:35:58.440Z\" ) select name as _col_0, createTime as _col_1 ", 1}, {"hive_table where (name = \"sales_fact\" and createTime >= \"2014-12-11T02:35:58.440Z\" ) select name as _col_0, createTime as _col_1", 1},
{"hive_table where (name = \"sales_fact\" and createTime >= \"2014-12-11T02:35:58.440Z\" ) select name as _col_0, createTime as _col_1 limit 10 offset 0", 1}, {"hive_table where (name = \"sales_fact\" and createTime >= \"2014-12-11T02:35:58.440Z\" ) select name as _col_0, createTime as _col_1 limit 10 offset 0", 1},
{"hive_table where (name = \"sales_fact\" and createTime >= \"2014-12-11T02:35:58.440Z\" ) select name as _col_0, createTime as _col_1 limit 10 offset 1", 0}, {"hive_table where (name = \"sales_fact\" and createTime >= \"2014-12-11T02:35:58.440Z\" ) select name as _col_0, createTime as _col_1 limit 10 offset 1", 0},
{"hive_table where (name = \"sales_fact\" and createTime >= \"2014-12-11T02:35:58.440Z\" ) select name as _col_0, createTime as _col_1 limit 10", 1}, {"hive_table where (name = \"sales_fact\" and createTime >= \"2014-12-11T02:35:58.440Z\" ) select name as _col_0, createTime as _col_1 limit 10", 1},
...@@ -285,7 +285,7 @@ public class DSLQueriesTest extends BasicTestSetup { ...@@ -285,7 +285,7 @@ public class DSLQueriesTest extends BasicTestSetup {
{"hive_column select hive_column.qualifiedName orderby qualifiedName desc limit 5", 5, "hive_column.qualifiedName", false}, {"hive_column select hive_column.qualifiedName orderby qualifiedName desc limit 5", 5, "hive_column.qualifiedName", false},
{"from hive_db orderby hive_db.owner limit 3", 3, "owner", true}, {"from hive_db orderby hive_db.owner limit 3", 3, "owner", true},
{"hive_db where hive_db.name=\"Reporting\" orderby 'owner'", 1, "owner", true}, {"hive_db where hive_db.name=\"Reporting\" orderby owner", 1, "owner", true},
{"hive_db where hive_db.name=\"Reporting\" orderby hive_db.owner limit 10 ", 1, "owner", true}, {"hive_db where hive_db.name=\"Reporting\" orderby hive_db.owner limit 10 ", 1, "owner", true},
{"hive_db where hive_db.name=\"Reporting\" select name, owner orderby hive_db.name ", 1, "name", true}, {"hive_db where hive_db.name=\"Reporting\" select name, owner orderby hive_db.name ", 1, "name", true},
...@@ -327,16 +327,16 @@ public class DSLQueriesTest extends BasicTestSetup { ...@@ -327,16 +327,16 @@ public class DSLQueriesTest extends BasicTestSetup {
{"hive_db where hive_db has name orderby hive_db.owner limit 2 offset 0", 2, "owner", true}, {"hive_db where hive_db has name orderby hive_db.owner limit 2 offset 0", 2, "owner", true},
{"hive_db where hive_db has name orderby hive_db.owner limit 2 offset 1", 2, "owner", true}, {"hive_db where hive_db has name orderby hive_db.owner limit 2 offset 1", 2, "owner", true},
{"hive_table where (name = \"sales_fact\" and createTime > \"2014-01-01\" ) select name as _col_0, createTime as _col_1 orderby '_col_1' ", 1, "_col_1", true}, {"hive_table where (name = \"sales_fact\" and createTime > \"2014-01-01\" ) select name as _col_0, createTime as _col_1 orderby createTime ", 1, "_col_1", true},
{"hive_table where (name = \"sales_fact\" and createTime > \"2014-01-01\" ) select name as _col_0, createTime as _col_1 orderby '_col_1' limit 10 ", 1, "_col_1", true}, {"hive_table where (name = \"sales_fact\" and createTime > \"2014-01-01\" ) select name as _col_0, createTime as _col_1 orderby createTime limit 10 ", 1, "_col_1", true},
{"hive_table where (name = \"sales_fact\" and createTime > \"2014-01-01\" ) select name as _col_0, createTime as _col_1 orderby '_col_1' limit 10 offset 0", 1, "_col_1", true}, {"hive_table where (name = \"sales_fact\" and createTime > \"2014-01-01\" ) select name as _col_0, createTime as _col_1 orderby createTime limit 10 offset 0", 1, "_col_1", true},
{"hive_table where (name = \"sales_fact\" and createTime > \"2014-01-01\" ) select name as _col_0, createTime as _col_1 orderby '_col_1' limit 10 offset 5", 0, "_col_1", true}, {"hive_table where (name = \"sales_fact\" and createTime > \"2014-01-01\" ) select name as _col_0, createTime as _col_1 orderby createTime limit 10 offset 5", 0, "_col_1", true},
{"hive_table where (name = \"sales_fact\" and createTime >= \"2014-12-11T02:35:58.440Z\" ) select name as _col_0, createTime as _col_1 orderby '_col_0' ", 1, "_col_0", true}, {"hive_table where (name = \"sales_fact\" and createTime >= \"2014-12-11T02:35:58.440Z\" ) select name as _col_0, createTime as _col_1 orderby name ", 1, "_col_0", true},
{"hive_table where (name = \"sales_fact\" and createTime >= \"2014-12-11T02:35:58.440Z\" ) select name as _col_0, createTime as _col_1 orderby '_col_0' limit 10 offset 0", 1, "_col_0", true}, {"hive_table where (name = \"sales_fact\" and createTime >= \"2014-12-11T02:35:58.440Z\" ) select name as _col_0, createTime as _col_1 orderby name limit 10 offset 0", 1, "_col_0", true},
{"hive_table where (name = \"sales_fact\" and createTime >= \"2014-12-11T02:35:58.440Z\" ) select name as _col_0, createTime as _col_1 orderby '_col_0' limit 10 offset 1", 0, "_col_0", true}, {"hive_table where (name = \"sales_fact\" and createTime >= \"2014-12-11T02:35:58.440Z\" ) select name as _col_0, createTime as _col_1 orderby name limit 10 offset 1", 0, "_col_0", true},
{"hive_table where (name = \"sales_fact\" and createTime >= \"2014-12-11T02:35:58.440Z\" ) select name as _col_0, createTime as _col_1 orderby '_col_0' limit 10", 1, "_col_0", true}, {"hive_table where (name = \"sales_fact\" and createTime >= \"2014-12-11T02:35:58.440Z\" ) select name as _col_0, createTime as _col_1 orderby name limit 10", 1, "_col_0", true},
{"hive_table where (name = \"sales_fact\" and createTime >= \"2014-12-11T02:35:58.440Z\" ) select name as _col_0, createTime as _col_1 orderby '_col_0' limit 0 offset 1", 0, "_col_0", true}, {"hive_table where (name = \"sales_fact\" and createTime >= \"2014-12-11T02:35:58.440Z\" ) select name as _col_0, createTime as _col_1 orderby name limit 0 offset 1", 0, "_col_0", true},
}; };
} }
......
...@@ -41,7 +41,7 @@ import static org.testng.Assert.assertNotNull; ...@@ -41,7 +41,7 @@ import static org.testng.Assert.assertNotNull;
import static org.testng.Assert.assertNull; import static org.testng.Assert.assertNull;
import static org.testng.Assert.assertTrue; import static org.testng.Assert.assertTrue;
public class QueryProcessorTest { public class GremlinQueryComposerTest {
private List<String> errorList = new ArrayList<>(); private List<String> errorList = new ArrayList<>();
@Test @Test
...@@ -56,8 +56,9 @@ public class QueryProcessorTest { ...@@ -56,8 +56,9 @@ public class QueryProcessorTest {
verify("Table isa Dimension", expected); verify("Table isa Dimension", expected);
verify("Table is Dimension", expected); verify("Table is Dimension", expected);
verify("Table where Table is Dimension", expected); verify("Table where Table is Dimension", expected);
verify("Table isa Dimension where name = 'sales'", // Not supported since it requires two singleSrcQuery, one for isa clause other for where clause
"g.V().has('__typeName', 'Table').has('__traitNames', within('Dimension')).has('Table.name', eq('sales')).limit(25).toList()"); // verify("Table isa Dimension where name = 'sales'",
// "g.V().has('__typeName', 'Table').has('__traitNames', within('Dimension')).has('Table.name', eq('sales')).limit(25).toList()");
} }
@Test @Test
...@@ -90,10 +91,15 @@ public class QueryProcessorTest { ...@@ -90,10 +91,15 @@ public class QueryProcessorTest {
public void tableSelectColumns() { public void tableSelectColumns() {
String exMain = "g.V().has('__typeName', 'Table').out('__Table.columns').limit(10).toList()"; String exMain = "g.V().has('__typeName', 'Table').out('__Table.columns').limit(10).toList()";
String exSel = "def f(r){ r }"; String exSel = "def f(r){ r }";
String exSel1 = "def f(r){ return [['db.name']].plus(r.collect({[it.value('DB.name')]})).unique(); }";
verify("Table select columns limit 10", getExpected(exSel, exMain)); verify("Table select columns limit 10", getExpected(exSel, exMain));
String exMain2 = "g.V().has('__typeName', 'Table').out('__Table.db').limit(25).toList()"; String exMain2 = "g.V().has('__typeName', 'Table').out('__Table.db').limit(25).toList()";
verify("Table select db.name", getExpected(exSel, exMain2)); verify("Table select db", getExpected(exSel, exMain2));
String exMain3 = "g.V().has('__typeName', 'Table').out('__Table.db').limit(25).toList()";
verify("Table select db.name", getExpected(exSel1, exMain3));
} }
@Test(enabled = false) @Test(enabled = false)
...@@ -117,7 +123,7 @@ public class QueryProcessorTest { ...@@ -117,7 +123,7 @@ public class QueryProcessorTest {
String exSel = "def f(r){ return [['d.name','d.owner']].plus(r.collect({[it.value('DB.name'),it.value('DB.owner')]})).unique(); }"; String exSel = "def f(r){ return [['d.name','d.owner']].plus(r.collect({[it.value('DB.name'),it.value('DB.owner')]})).unique(); }";
String exMain = "g.V().has('__typeName', 'DB').as('d').order().by('DB.owner)').limit(25).toList()"; String exMain = "g.V().has('__typeName', 'DB').as('d').order().by('DB.owner').limit(25).toList()";
verify("DB as d select d.name, d.owner orderby (d.owner) limit 25", getExpected(exSel, exMain)); verify("DB as d select d.name, d.owner orderby (d.owner) limit 25", getExpected(exSel, exMain));
String exMain2 = "g.V().has('__typeName', 'Table').and(__.has('Table.name', eq(\"sales_fact\")),__.has('Table.createTime', gt('1388563200000'))).order().by('Table.createTime').limit(25).toList()"; String exMain2 = "g.V().has('__typeName', 'Table').and(__.has('Table.name', eq(\"sales_fact\")),__.has('Table.createTime', gt('1388563200000'))).order().by('Table.createTime').limit(25).toList()";
...@@ -151,7 +157,7 @@ public class QueryProcessorTest { ...@@ -151,7 +157,7 @@ public class QueryProcessorTest {
verify("Table where Asset.name like \"Tab*\"", verify("Table where Asset.name like \"Tab*\"",
"g.V().has('__typeName', 'Table').has('Table.name', org.janusgraph.core.attribute.Text.textRegex(\"Tab.*\")).limit(25).toList()"); "g.V().has('__typeName', 'Table').has('Table.name', org.janusgraph.core.attribute.Text.textRegex(\"Tab.*\")).limit(25).toList()");
verify("from Table where (db.name = \"Reporting\")", verify("from Table where (db.name = \"Reporting\")",
"g.V().has('__typeName', 'Table').out('__Table.db').has('DB.name', eq(\"Reporting\")).in('__Table.db').limit(25).toList()"); "g.V().has('__typeName', 'Table').out('__Table.db').has('DB.name', eq(\"Reporting\")).dedup().in('__Table.db').limit(25).toList()");
} }
@Test @Test
...@@ -169,14 +175,6 @@ public class QueryProcessorTest { ...@@ -169,14 +175,6 @@ public class QueryProcessorTest {
} }
@Test @Test
public void multipleWhereClauses() {
String exSel = "def f(r){ return [['c.owner','c.name','c.dataType']].plus(r.collect({[it.value('Column.owner'),it.value('Column.name'),it.value('Column.dataType')]})).unique(); }";
String exMain = "g.V().has('__typeName', 'Table').has('Table.name', eq(\"sales_fact\")).out('__Table.columns').as('c').limit(25).toList()";
verify("Table where name=\"sales_fact\", columns as c select c.owner, c.name, c.dataType", getExpected(exSel, exMain));
;
}
@Test
public void subType() { public void subType() {
String exMain = "g.V().has('__typeName', within('Asset','Table')).limit(25).toList()"; String exMain = "g.V().has('__typeName', within('Asset','Table')).limit(25).toList()";
String exSel = "def f(r){ return [['name','owner']].plus(r.collect({[it.value('Asset.name'),it.value('Asset.owner')]})).unique(); }"; String exSel = "def f(r){ return [['name','owner']].plus(r.collect({[it.value('Asset.name'),it.value('Asset.owner')]})).unique(); }";
...@@ -253,7 +251,7 @@ public class QueryProcessorTest { ...@@ -253,7 +251,7 @@ public class QueryProcessorTest {
verify("hive_db where hive_db.name='Reporting' and hive_db.createTime < '2017-12-12T02:35:58.440Z'", verify("hive_db where hive_db.name='Reporting' and hive_db.createTime < '2017-12-12T02:35:58.440Z'",
"g.V().has('__typeName', 'hive_db').and(__.has('hive_db.name', eq('Reporting')),__.has('hive_db.createTime', lt('1513046158440'))).limit(25).toList()"); "g.V().has('__typeName', 'hive_db').and(__.has('hive_db.name', eq('Reporting')),__.has('hive_db.createTime', lt('1513046158440'))).limit(25).toList()");
verify("Table where db.name='Sales' and db.clusterName='cl1'", verify("Table where db.name='Sales' and db.clusterName='cl1'",
"g.V().has('__typeName', 'Table').and(__.out('__Table.db').has('DB.name', eq('Sales')).in('__Table.db'),__.out('__Table.db').has('DB.clusterName', eq('cl1')).in('__Table.db')).limit(25).toList()"); "g.V().has('__typeName', 'Table').and(__.out('__Table.db').has('DB.name', eq('Sales')).dedup().in('__Table.db'),__.out('__Table.db').has('DB.clusterName', eq('cl1')).dedup().in('__Table.db')).limit(25).toList()");
} }
private void verify(String dsl, String expectedGremlin) { private void verify(String dsl, String expectedGremlin) {
...@@ -288,16 +286,15 @@ public class QueryProcessorTest { ...@@ -288,16 +286,15 @@ public class QueryProcessorTest {
} }
private String getGremlinQuery(AtlasDSLParser.QueryContext queryContext) { private String getGremlinQuery(AtlasDSLParser.QueryContext queryContext) {
AtlasTypeRegistry registry = mock(AtlasTypeRegistry.class); AtlasTypeRegistry registry = mock(AtlasTypeRegistry.class);
org.apache.atlas.query.Lookup lookup = new TestLookup(errorList, registry); org.apache.atlas.query.Lookup lookup = new TestLookup(errorList, registry);
QueryProcessor.Context context = new QueryProcessor.Context(errorList, lookup); GremlinQueryComposer.Context context = new GremlinQueryComposer.Context(errorList, lookup);
QueryProcessor queryProcessor = new QueryProcessor(lookup, context); GremlinQueryComposer gremlinQueryComposer = new GremlinQueryComposer(lookup, context);
DSLVisitor qv = new DSLVisitor(queryProcessor); DSLVisitor qv = new DSLVisitor(gremlinQueryComposer);
qv.visit(queryContext); qv.visit(queryContext);
queryProcessor.close();
String s = queryProcessor.getText(); String s = gremlinQueryComposer.get();
assertTrue(StringUtils.isNotEmpty(s)); assertTrue(StringUtils.isNotEmpty(s));
return s; return s;
} }
...@@ -326,7 +323,7 @@ public class QueryProcessorTest { ...@@ -326,7 +323,7 @@ public class QueryProcessorTest {
} }
@Override @Override
public String getQualifiedName(QueryProcessor.Context context, String name) { public String getQualifiedName(GremlinQueryComposer.Context context, String name) {
if(name.contains(".")) if(name.contains("."))
return name; return name;
...@@ -334,7 +331,7 @@ public class QueryProcessorTest { ...@@ -334,7 +331,7 @@ public class QueryProcessorTest {
} }
@Override @Override
public boolean isPrimitive(QueryProcessor.Context context, String attributeName) { public boolean isPrimitive(GremlinQueryComposer.Context context, String attributeName) {
return attributeName.equals("name") || return attributeName.equals("name") ||
attributeName.equals("owner") || attributeName.equals("owner") ||
attributeName.equals("createdTime") || attributeName.equals("createdTime") ||
...@@ -343,7 +340,7 @@ public class QueryProcessorTest { ...@@ -343,7 +340,7 @@ public class QueryProcessorTest {
} }
@Override @Override
public String getRelationshipEdgeLabel(QueryProcessor.Context context, String attributeName) { public String getRelationshipEdgeLabel(GremlinQueryComposer.Context context, String attributeName) {
if (attributeName.equalsIgnoreCase("columns")) if (attributeName.equalsIgnoreCase("columns"))
return "__Table.columns"; return "__Table.columns";
if (attributeName.equalsIgnoreCase("db")) if (attributeName.equalsIgnoreCase("db"))
...@@ -353,29 +350,29 @@ public class QueryProcessorTest { ...@@ -353,29 +350,29 @@ public class QueryProcessorTest {
} }
@Override @Override
public boolean hasAttribute(QueryProcessor.Context context, String typeName) { public boolean hasAttribute(GremlinQueryComposer.Context context, String typeName) {
return (context.getActiveTypeName().equals("Table") && typeName.equals("db")) || return (context.getActiveTypeName().equals("Table") && typeName.equals("db")) ||
(context.getActiveTypeName().equals("Table") && typeName.equals("columns")); (context.getActiveTypeName().equals("Table") && typeName.equals("columns"));
} }
@Override @Override
public boolean doesTypeHaveSubTypes(QueryProcessor.Context context) { public boolean doesTypeHaveSubTypes(GremlinQueryComposer.Context context) {
return context.getActiveTypeName().equalsIgnoreCase("Asset"); return context.getActiveTypeName().equalsIgnoreCase("Asset");
} }
@Override @Override
public String getTypeAndSubTypes(QueryProcessor.Context context) { public String getTypeAndSubTypes(GremlinQueryComposer.Context context) {
String[] str = new String[]{"'Asset'", "'Table'"}; String[] str = new String[]{"'Asset'", "'Table'"};
return StringUtils.join(str, ","); return StringUtils.join(str, ",");
} }
@Override @Override
public boolean isTraitType(QueryProcessor.Context context) { public boolean isTraitType(GremlinQueryComposer.Context context) {
return context.getActiveTypeName().equals("PII") || context.getActiveTypeName().equals("Dimension"); return context.getActiveTypeName().equals("PII") || context.getActiveTypeName().equals("Dimension");
} }
@Override @Override
public String getTypeFromEdge(QueryProcessor.Context context, String item) { public String getTypeFromEdge(GremlinQueryComposer.Context context, String item) {
if(context.getActiveTypeName().equals("DB") && item.equals("Table")) { if(context.getActiveTypeName().equals("DB") && item.equals("Table")) {
return "Table"; return "Table";
} else if(context.getActiveTypeName().equals("Table") && item.equals("Column")) { } else if(context.getActiveTypeName().equals("Table") && item.equals("Column")) {
...@@ -389,7 +386,7 @@ public class QueryProcessorTest { ...@@ -389,7 +386,7 @@ public class QueryProcessorTest {
} }
@Override @Override
public boolean isDate(QueryProcessor.Context context, String attributeName) { public boolean isDate(GremlinQueryComposer.Context context, String attributeName) {
return attributeName.equals("createdTime") || return attributeName.equals("createdTime") ||
attributeName.equals("createTime"); attributeName.equals("createTime");
} }
......
Markdown is supported
0% or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or to comment