Commit b5989c84 by Ashutosh Mestry, committed by apoorvnaik

Bugfixes for ATLAS-2333, ATLAS-2334

ATLAS-2229: ValueArray clause implementation.
ATLAS-2229: DSL valueArray clause, added unit tests.
ATLAS-2229: DSLVisitor refactoring.
ATLAS-2229: Fixed REST call with classification.
ATLAS-2229: GroupBy, Select, Min, Max, Count.
ATLAS-2229: Additional unit tests for min, max, count. Updated test framework.
ATLAS-2229: Handling case where query is invalid.
ATLAS-2229: Updates related to boolean value handling.
ATLAS-2229: Count, Sum, Max, Min operations implemented.
ATLAS-2229: Addressed review comments.

Signed-off-by: apoorvnaik <apoorvnaik@apache.org>
parent ab316c9b
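
In short: the DSL grammar gains a value-array right-hand side that the visitor turns into a Gremlin within(...) step, query-shape detection moves into AtlasDSL.QueryMetadata, and limit/offset normalization moves into QueryParams. A minimal sketch of the new one-step entry point (typeRegistry assumed in scope; the qualified name Asset.owner and the translated string are expectations derived from the clause templates below, not captured output):

GremlinQuery gremlinQuery = new AtlasDSL.Translator(
"hive_db where owner = ['John ETL', 'Jane BI']", // valueArray RHS becomes an 'in' clause
typeRegistry, 0 /* offset */, 25 /* limit */).translate();
// expected shape:
// g.V().has('__typeName', 'hive_db').has('Asset.owner', within('John ETL','Jane BI')).limit(25).toList()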
......@@ -19,7 +19,6 @@ package org.apache.atlas.discovery;
import com.google.common.annotations.VisibleForTesting;
import org.apache.atlas.ApplicationProperties;
import org.apache.atlas.AtlasConfiguration;
import org.apache.atlas.AtlasErrorCode;
import org.apache.atlas.AtlasException;
import org.apache.atlas.SortOrder;
......@@ -36,6 +35,7 @@ import org.apache.atlas.model.profile.AtlasUserSavedSearch;
import org.apache.atlas.query.AtlasDSL;
import org.apache.atlas.query.GremlinQuery;
import org.apache.atlas.query.QueryParams;
import org.apache.atlas.query.antlr4.AtlasDSLParser;
import org.apache.atlas.repository.Constants;
import org.apache.atlas.repository.graph.GraphBackedSearchIndexer;
import org.apache.atlas.repository.graph.GraphHelper;
......@@ -67,21 +67,16 @@ import javax.inject.Inject;
import javax.script.Bindings;
import javax.script.ScriptEngine;
import javax.script.ScriptException;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collection;
import java.util.Collections;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.Set;
import static org.apache.atlas.AtlasErrorCode.*;
import java.util.*;
import static org.apache.atlas.AtlasErrorCode.CLASSIFICATION_NOT_FOUND;
import static org.apache.atlas.AtlasErrorCode.DISCOVERY_QUERY_FAILED;
import static org.apache.atlas.AtlasErrorCode.UNKNOWN_TYPENAME;
import static org.apache.atlas.SortOrder.ASCENDING;
import static org.apache.atlas.SortOrder.DESCENDING;
import static org.apache.atlas.model.TypeCategory.*;
import static org.apache.atlas.model.TypeCategory.ARRAY;
import static org.apache.atlas.model.TypeCategory.MAP;
import static org.apache.atlas.model.TypeCategory.OBJECT_ID_TYPE;
import static org.apache.atlas.model.instance.AtlasEntity.Status.ACTIVE;
import static org.apache.atlas.model.instance.AtlasEntity.Status.DELETED;
import static org.apache.atlas.repository.graph.GraphHelper.EDGE_LABEL_PREFIX;
......@@ -177,7 +172,7 @@ public class EntityDiscoveryService implements AtlasDiscoveryService {
public AtlasSearchResult searchUsingFullTextQuery(String fullTextQuery, boolean excludeDeletedEntities, int limit, int offset)
throws AtlasBaseException {
AtlasSearchResult ret = new AtlasSearchResult(fullTextQuery, AtlasQueryType.FULL_TEXT);
QueryParams params = validateSearchParams(limit, offset);
QueryParams params = QueryParams.getNormalizedParams(limit, offset);
AtlasIndexQuery idxQuery = toAtlasIndexQuery(fullTextQuery);
if (LOG.isDebugEnabled()) {
......@@ -200,7 +195,7 @@ public class EntityDiscoveryService implements AtlasDiscoveryService {
LOG.debug("Executing basic search query: {} with type: {} and classification: {}", query, typeName, classification);
}
final QueryParams params = validateSearchParams(limit, offset);
final QueryParams params = QueryParams.getNormalizedParams(limit, offset);
Set<String> typeNames = null;
Set<String> classificationNames = null;
String attrQualifiedName = null;
......@@ -417,7 +412,7 @@ public class EntityDiscoveryService implements AtlasDiscoveryService {
@GraphTransaction
public AtlasSearchResult searchWithParameters(SearchParameters searchParameters) throws AtlasBaseException {
AtlasSearchResult ret = new AtlasSearchResult(searchParameters);
final QueryParams params = validateSearchParams(searchParameters.getLimit(),searchParameters.getOffset());
final QueryParams params = QueryParams.getNormalizedParams(searchParameters.getLimit(),searchParameters.getOffset());
searchParameters.setLimit(params.limit());
searchParameters.setOffset(params.offset());
......@@ -554,7 +549,7 @@ public class EntityDiscoveryService implements AtlasDiscoveryService {
}
}
QueryParams params = validateSearchParams(limit, offset);
QueryParams params = QueryParams.getNormalizedParams(limit, offset);
ScriptEngine scriptEngine = graph.getGremlinScriptEngine();
Bindings bindings = scriptEngine.createBindings();
Set<String> states = getEntityStates();
......@@ -675,8 +670,8 @@ public class EntityDiscoveryService implements AtlasDiscoveryService {
}
private GremlinQuery toGremlinQuery(String query, int limit, int offset) throws AtlasBaseException {
QueryParams params = validateSearchParams(limit, offset);
GremlinQuery gremlinQuery = new AtlasDSL.Translator(AtlasDSL.Parser.parse(query), typeRegistry, params.offset(), params.limit()).translate();
QueryParams params = QueryParams.getNormalizedParams(limit, offset);
GremlinQuery gremlinQuery = new AtlasDSL.Translator(query, typeRegistry, params.offset(), params.limit()).translate();
if (LOG.isDebugEnabled()) {
LOG.debug("Translated Gremlin Query: {}", gremlinQuery.queryStr());
......@@ -685,23 +680,6 @@ public class EntityDiscoveryService implements AtlasDiscoveryService {
return gremlinQuery;
}
private QueryParams validateSearchParams(int limitParam, int offsetParam) {
int defaultLimit = AtlasConfiguration.SEARCH_DEFAULT_LIMIT.getInt();
int maxLimit = AtlasConfiguration.SEARCH_MAX_LIMIT.getInt();
int limit = defaultLimit;
if (limitParam > 0 && limitParam <= maxLimit) {
limit = limitParam;
}
int offset = 0;
if (offsetParam > 0) {
offset = offsetParam;
}
return new QueryParams(limit, offset);
}
private AtlasIndexQuery toAtlasIndexQuery(String fullTextQuery) {
String graphQuery = String.format("v.\"%s\":(%s)", Constants.ENTITY_TEXT_PROPERTY_KEY, fullTextQuery);
return graph.indexQuery(Constants.FULLTEXT_INDEX, graphQuery);
......@@ -709,8 +687,6 @@ public class EntityDiscoveryService implements AtlasDiscoveryService {
private AttributeSearchResult toAttributesResult(List results, GremlinQuery query) {
AttributeSearchResult ret = new AttributeSearchResult();
// List<String> names = extractNames(results);
// List<List<Object>> values = extractValues(results);
List<String> names = (List<String>) results.get(0);
List<List<Object>> values = extractValues(results.subList(1, results.size()));
......@@ -920,6 +896,8 @@ public class EntityDiscoveryService implements AtlasDiscoveryService {
@Override
public String getDslQueryUsingTypeNameClassification(String query, String typeName, String classification) {
final String whereDSLKeyword = "where";
final String isaDSLKeyword = "isa";
final String isDSLKeyword = "is";
final String limitDSLKeyword = "limit";
final String whereFormat = whereDSLKeyword + " %s";
......@@ -928,7 +906,10 @@ public class EntityDiscoveryService implements AtlasDiscoveryService {
if (StringUtils.isNotEmpty(typeName)) {
if(StringUtils.isNotEmpty(query)) {
String s = query.toLowerCase();
if(!s.startsWith(whereDSLKeyword) && !s.startsWith(limitDSLKeyword)) {
if(!s.startsWith(whereDSLKeyword) &&
!s.startsWith(limitDSLKeyword) &&
!s.startsWith(isaDSLKeyword) &&
!s.startsWith(isDSLKeyword)) {
queryStr = String.format(whereFormat, query);
}
}
......@@ -939,7 +920,7 @@ public class EntityDiscoveryService implements AtlasDiscoveryService {
if (StringUtils.isNotEmpty(classification)) {
// isa works with a type name only - like hive_column isa PII; it doesn't work with more complex queries
if (StringUtils.isEmpty(query)) {
queryStr += (" isa " + classification);
queryStr = String.format("%s %s %s", queryStr, isaDSLKeyword, classification);
}
}
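// Illustrative outcomes of the rules above (values hypothetical; the assembly of
// typeName into queryStr happens in lines elided from this hunk):
//   query="",             typeName="hive_column", classification="PII" -> "hive_column isa PII"
//   query="name='sales'", typeName="hive_table",  classification=null  -> "hive_table where name='sales'"
//   query="where name='sales'" / "limit 5" / "isa PII" / "is PII"      -> left unprefixed (already starts with a DSL keyword)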
return queryStr;
......
......@@ -53,7 +53,7 @@ public class AtlasDSL {
return RESERVED_KEYWORDS.contains(word);
}
public static AtlasDSLParser.QueryContext parse(String queryStr) throws AtlasBaseException {
private static AtlasDSLParser.QueryContext parse(String queryStr) throws AtlasBaseException {
AtlasDSLParser.QueryContext ret;
try {
......@@ -88,7 +88,6 @@ public class AtlasDSL {
@Override
public void syntaxError(final Recognizer<?, ?> recognizer, final Object offendingSymbol, final int line, final int charPositionInLine, final String msg, final RecognitionException e) {
// TODO: Capture multiple datapoints
isValid = false;
errorMsg = msg;
}
......@@ -108,7 +107,11 @@ public class AtlasDSL {
private final int offset;
private final int limit;
public Translator(final AtlasDSLParser.QueryContext queryContext, AtlasTypeRegistry typeRegistry, int offset, int limit) {
public Translator(String query, AtlasTypeRegistry typeRegistry, int offset, int limit) throws AtlasBaseException {
this(Parser.parse(query), typeRegistry, offset, limit);
}
private Translator(final AtlasDSLParser.QueryContext queryContext, AtlasTypeRegistry typeRegistry, int offset, int limit) {
this.queryContext = queryContext;
this.typeRegistry = typeRegistry;
this.offset = offset;
......@@ -116,20 +119,46 @@ public class AtlasDSL {
}
public GremlinQuery translate() {
GremlinQueryComposer gremlinQueryComposer = new GremlinQueryComposer(typeRegistry);
QueryMetadata queryMetadata = new QueryMetadata(queryContext);
GremlinQueryComposer gremlinQueryComposer = new GremlinQueryComposer(typeRegistry, queryMetadata, limit, offset);
DSLVisitor dslVisitor = new DSLVisitor(gremlinQueryComposer);
if (offset >= 0) {
if (!gremlinQueryComposer.hasLimitOffset()) {
gremlinQueryComposer.addLimit(Integer.toString(limit), Integer.toString(offset));
queryContext.accept(dslVisitor);
return new GremlinQuery(gremlinQueryComposer.get(), queryMetadata.hasSelect());
}
}
DSLVisitor dslVisitor = new DSLVisitor(gremlinQueryComposer);
public static class QueryMetadata {
private boolean hasSelect;
private boolean hasGroupBy;
private boolean hasOrderBy;
private boolean hasLimitOffset;
public QueryMetadata(AtlasDSLParser.QueryContext queryContext) {
hasSelect = queryContext != null && queryContext.selectClause() != null;
hasGroupBy = queryContext != null && queryContext.groupByExpression() != null;
hasOrderBy = queryContext != null && queryContext.orderByExpr() != null;
hasLimitOffset = queryContext != null && queryContext.limitOffset() != null;
}
// Now process the Query and collect translation in
queryContext.accept(dslVisitor);
public boolean hasSelect() {
return hasSelect;
}
public boolean hasGroupBy() {
return hasGroupBy;
}
public boolean hasOrderBy() {
return hasOrderBy;
}
public boolean hasLimitOffset() {
return hasLimitOffset;
}
return new GremlinQuery(gremlinQueryComposer.get(), gremlinQueryComposer.hasSelect());
public boolean needTransformation() {
return (hasGroupBy && hasSelect && hasOrderBy) || (hasGroupBy && hasOrderBy) || hasSelect;
}
}
}
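
For orientation, the flags QueryMetadata would report for a representative query (query text hypothetical, read off the constructor above):

// "from hive_db groupby (owner) select count() limit 5"
//   hasSelect()          -> true   (selectClause present)
//   hasGroupBy()         -> true   (groupByExpression present)
//   hasOrderBy()         -> false
//   hasLimitOffset()     -> true
//   needTransformation() -> true   (any select clause forces the f(...) wrapper)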
......@@ -66,7 +66,7 @@ public class DSLVisitor extends AtlasDSLParserBaseVisitor<Void> {
LOG.debug("=> DSLVisitor.visitLimitOffset({})", ctx);
}
gremlinQueryComposer.addLimit(ctx.limitClause().NUMBER().toString(),
gremlinQueryComposer.addLimit(ctx.limitClause().NUMBER().getText(),
(ctx.offsetClause() == null ? "0" : ctx.offsetClause().NUMBER().getText()));
return super.visitLimitOffset(ctx);
}
......@@ -85,7 +85,7 @@ public class DSLVisitor extends AtlasDSLParserBaseVisitor<Void> {
String[] items = new String[ctx.selectExpression().size()];
String[] labels = new String[ctx.selectExpression().size()];
GremlinQueryComposer.SelectExprMetadata selectExprMetadata = new GremlinQueryComposer.SelectExprMetadata();
SelectClauseComposer selectClauseComposer = new SelectClauseComposer();
for (int i = 0; i < ctx.selectExpression().size(); i++) {
SelectExpressionContext selectExpression = ctx.selectExpression(i);
......@@ -99,24 +99,24 @@ public class DSLVisitor extends AtlasDSLParserBaseVisitor<Void> {
if (Objects.nonNull(countClause)) {
items[i] = "count";
selectExprMetadata.setCountIdx(i);
selectClauseComposer.setCountIdx(i);
} else if (Objects.nonNull(sumClause)) {
items[i] = sumClause.expr().getText();
selectExprMetadata.setSumIdx(i);
selectClauseComposer.setSumIdx(i);
} else if (Objects.nonNull(minClause)) {
items[i] = minClause.expr().getText();
selectExprMetadata.setMinIdx(i);
selectClauseComposer.setMinIdx(i);
} else if (Objects.nonNull(maxClause)) {
items[i] = maxClause.expr().getText();
selectExprMetadata.setMaxIdx(i);
selectClauseComposer.setMaxIdx(i);
} else {
items[i] = selectExpression.expr().getText();
}
}
selectExprMetadata.setItems(items);
selectExprMetadata.setLabels(labels);
gremlinQueryComposer.addSelect(selectExprMetadata);
selectClauseComposer.setItems(items);
selectClauseComposer.setLabels(labels);
gremlinQueryComposer.addSelect(selectClauseComposer);
}
return super.visitSelectExpr(ctx);
}
......@@ -251,8 +251,16 @@ public class DSLVisitor extends AtlasDSLParserBaseVisitor<Void> {
if (comparisonClause != null) {
String lhs = comparisonClause.arithE(0).getText();
String op = comparisonClause.operator().getText().toUpperCase();
String rhs = comparisonClause.arithE(1).getText();
String op, rhs;
AtomEContext atomECtx = comparisonClause.arithE(1).multiE().atomE();
if (atomECtx.literal() == null || atomECtx.literal().valueArray() == null) {
op = comparisonClause.operator().getText().toUpperCase();
rhs = comparisonClause.arithE(1).getText();
} else {
op = "in";
rhs = getInClause(atomECtx);
}
gremlinQueryComposer.addWhere(lhs, op, rhs);
} else {
......@@ -260,4 +268,16 @@ public class DSLVisitor extends AtlasDSLParserBaseVisitor<Void> {
}
}
}
private String getInClause(AtomEContext atomEContext) {
StringBuilder sb = new StringBuilder();
ValueArrayContext valueArrayContext = atomEContext.literal().valueArray();
int startIdx = 1;
int endIdx = valueArrayContext.children.size() - 1;
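// children of valueArray are [ '[', v1, ',', v2, ..., ']' ]; skip the enclosing
// brackets and keep the comma-separated middle verbatim, e.g. ['a', 'b'] -> 'a','b'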
for (int i = startIdx; i < endIdx; i++) {
sb.append(valueArrayContext.getChild(i));
}
return sb.toString();
}
}
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.atlas.query;
enum GremlinClause {
AS("as('%s')"),
DEDUP("dedup()"),
G("g"),
GROUP_BY("group().by('%s')"),
HAS("has('%s', %s)"),
HAS_OPERATOR("has('%s', %s(%s))"),
HAS_PROPERTY("has('%s')"),
WHERE("where(%s)"),
HAS_NOT_PROPERTY("hasNot('%s')"),
HAS_TYPE("has('__typeName', '%s')"),
HAS_TYPE_WITHIN("has('__typeName', within(%s))"),
HAS_WITHIN("has('%s', within(%s))"),
IN("in('%s')"),
OR("or(%s)"),
AND("and(%s)"),
NESTED_START("__"),
NESTED_HAS_OPERATOR("has('%s', %s(%s))"),
LIMIT("limit(%s)"),
ORDER_BY("order().by('%s')"),
ORDER_BY_DESC("order().by('%s', decr)"),
OUT("out('%s')"),
RANGE("range(%s, %s + %s)"),
SELECT("select('%s')"),
TO_LIST("toList()"),
TEXT_CONTAINS("has('%s', org.janusgraph.core.attribute.Text.textRegex(%s))"),
TEXT_PREFIX("has('%s', org.janusgraph.core.attribute.Text.textPrefix(%s))"),
TEXT_SUFFIX("has('%s', org.janusgraph.core.attribute.Text.textRegex(\".*\" + %s))"),
TRAIT("has('__traitNames', within('%s'))"),
SELECT_NOOP_FN("def f(r){ r }; "),
SELECT_FN("def f(r){ t=[[%s]]; %s r.each({t.add([%s])}); t.unique(); }; "),
SELECT_ONLY_AGG_FN("def f(r){ t=[[%s]]; %s t.add([%s]); t;}; "),
SELECT_ONLY_AGG_GRP_FN("def f(l){ t=[[%s]]; l.get(0).each({k,r -> L:{ %s t.add([%s]); } }); t; }; "),
SELECT_MULTI_ATTR_GRP_FN("def f(l){ t=[[%s]]; l.get(0).each({k,r -> L:{ %s r.each({t.add([%s])}) } }); t.unique(); }; "),
INLINE_ASSIGNMENT("def %s=%s;"),
INLINE_LIST_RANGE("[%s..<%s]"),
INLINE_COUNT("r.size()"),
INLINE_SUM("r.sum({it.value('%s')})"),
INLINE_MAX("r.max({it.value('%s')}).value('%s')"),
INLINE_MIN("r.min({it.value('%s')}).value('%s')"),
INLINE_GET_PROPERTY("it.value('%s')"),
INLINE_TRANSFORM_CALL("f(%s)"),
V("V()"),
VALUE_MAP("valueMap(%s)");
private final String template;
GremlinClause(String template) {
this.template = template;
}
String get() {
return template;
}
String get(String... args) {
return (args == null || args.length == 0) ? template : String.format(template, args);
}
}
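
A minimal sketch of how these printf-style templates expand (argument values hypothetical):

GremlinClause.HAS_TYPE.get("hive_db");
// -> has('__typeName', 'hive_db')
GremlinClause.HAS_OPERATOR.get("Asset.owner", "within", "'John ETL','Jane BI'");
// -> has('Asset.owner', within('John ETL','Jane BI'))
GremlinClause.INLINE_ASSIGNMENT.get("min", GremlinClause.INLINE_MIN.get("Asset.name", "Asset.name"));
// -> def min=r.min({it.value('Asset.name')}).value('Asset.name');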
......@@ -18,15 +18,8 @@
package org.apache.atlas.query;
import com.google.common.annotations.VisibleForTesting;
import org.apache.atlas.exception.AtlasBaseException;
import org.apache.atlas.model.TypeCategory;
import org.apache.atlas.model.discovery.SearchParameters;
import org.apache.atlas.model.typedef.AtlasBaseTypeDef;
import org.apache.atlas.type.AtlasArrayType;
import org.apache.atlas.type.AtlasBuiltInTypes;
import org.apache.atlas.type.AtlasEntityType;
import org.apache.atlas.type.AtlasStructType;
import org.apache.atlas.type.AtlasStructType.AtlasAttribute;
import org.apache.atlas.type.AtlasType;
import org.apache.atlas.type.AtlasTypeRegistry;
import org.apache.commons.lang.StringUtils;
......@@ -36,13 +29,11 @@ import org.slf4j.LoggerFactory;
import javax.inject.Inject;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.HashMap;
import java.util.LinkedList;
import java.util.List;
import java.util.Map;
import java.util.Objects;
import java.util.StringJoiner;
import java.util.stream.Collectors;
import java.util.stream.Stream;
......@@ -56,46 +47,42 @@ public class GremlinQueryComposer {
private final GremlinClauseList queryClauses = new GremlinClauseList();
private final Lookup lookup;
private final boolean isNestedQuery;
private final AtlasDSL.QueryMetadata queryMetadata;
private int providedLimit = DEFAULT_QUERY_RESULT_LIMIT;
private int providedOffset = DEFAULT_QUERY_RESULT_OFFSET;
private boolean hasSelect = false;
private boolean isSelectNoop = false;
private boolean hasGroupBy = false;
private boolean hasOrderBy = false;
private boolean hasLimitOffset = false;
private String offset = null;
private String limit = null;
private Context context;
private SelectExprMetadata selectExprMetadata;
@Inject
public GremlinQueryComposer(AtlasTypeRegistry typeRegistry) {
public GremlinQueryComposer(AtlasTypeRegistry typeRegistry, final AtlasDSL.QueryMetadata qmd) {
isNestedQuery = false;
lookup = new RegistryBasedLookup(errorList, typeRegistry);
context = new Context(errorList, lookup);
this.context = new Context(errorList, lookup);
queryMetadata = qmd;
init();
}
public GremlinQueryComposer(AtlasTypeRegistry typeRegistry, int limit, int offset) {
this(typeRegistry);
public GremlinQueryComposer(AtlasTypeRegistry typeRegistry, final AtlasDSL.QueryMetadata queryMetadata, int limit, int offset) {
this(typeRegistry, queryMetadata);
providedLimit = limit;
providedOffset = offset < 0 ? DEFAULT_QUERY_RESULT_OFFSET : offset;
}
@VisibleForTesting
GremlinQueryComposer(Lookup lookup, Context context) {
GremlinQueryComposer(Lookup lookup, Context context, final AtlasDSL.QueryMetadata qmd) {
this.isNestedQuery = false;
this.lookup = lookup;
this.context = context;
this.queryMetadata = qmd;
init();
}
public GremlinQueryComposer(Lookup registryLookup, boolean isNestedQuery) {
public GremlinQueryComposer(Lookup registryLookup, final AtlasDSL.QueryMetadata qmd, boolean isNestedQuery) {
this.isNestedQuery = isNestedQuery;
this.lookup = registryLookup;
this.queryMetadata = qmd;
init();
}
......@@ -137,7 +124,6 @@ public class GremlinQueryComposer {
IdentifierHelper.getQualifiedName(lookup, context, attribute));
}
public void addFromIsA(String typeName, String traitName) {
addFrom(typeName);
add(GremlinClause.TRAIT, traitName);
......@@ -152,99 +138,85 @@ public class GremlinQueryComposer {
SearchParameters.Operator op = SearchParameters.Operator.fromString(operator);
IdentifierHelper.Advice org = null;
IdentifierHelper.Advice lhsI = getAdvice(lhs);
if(!lhsI.isPrimitive()) {
if (!lhsI.isPrimitive()) {
introduceType(lhsI);
org = lhsI;
lhsI = getAdvice(lhs);
}
if(lhsI.isDate()) {
if (lhsI.isDate()) {
rhs = parseDate(rhs);
}
rhs = addQuotesIfNecessary(rhs);
if(op == SearchParameters.Operator.LIKE) {
add(GremlinClause.TEXT_CONTAINS, lhsI.getQualifiedName(), rhs.replace("*", ".*").replace('?', '.'));
} else if(op == SearchParameters.Operator.IN) {
if (op == SearchParameters.Operator.LIKE) {
add(GremlinClause.TEXT_CONTAINS, lhsI.getQualifiedName(), getFixedRegEx(rhs));
} else if (op == SearchParameters.Operator.IN) {
add(GremlinClause.HAS_OPERATOR, lhsI.getQualifiedName(), "within", rhs);
} else {
add(GremlinClause.HAS_OPERATOR, lhsI.getQualifiedName(), op.getSymbols()[1], rhs);
}
if(org != null && !org.isPrimitive() && org.getIntroduceType()) {
if (org != null && org.getIntroduceType()) {
add(GremlinClause.DEDUP);
add(GremlinClause.IN, org.getEdgeLabel());
context.registerActive(currentType);
}
}
private String getFixedRegEx(String rhs) {
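// e.g. "*sales?fact" -> ".*sales.fact": the DSL wildcards '*' and '?' become the
// regex tokens '.*' and '.' consumed by Text.textRegex in the LIKE branch above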
return rhs.replace("*", ".*").replace('?', '.');
}
public void addAndClauses(List<String> clauses) {
queryClauses.add(GremlinClause.AND, StringUtils.join(clauses, ','));
queryClauses.add(GremlinClause.AND, String.join(",", clauses));
}
public void addOrClauses(List<String> clauses) {
queryClauses.add(GremlinClause.OR, StringUtils.join(clauses, ','));
queryClauses.add(GremlinClause.OR, String.join(",", clauses));
}
public void addSelect(SelectExprMetadata selectExprMetadata) {
String[] items = selectExprMetadata.getItems();
String[] labels = selectExprMetadata.getLabels();
if (LOG.isDebugEnabled()) {
LOG.debug("addSelect(items.length={})", items != null ? items.length : 0);
public void addSelect(SelectClauseComposer selectClauseComposer) {
process(selectClauseComposer);
if (!(queryMetadata.hasOrderBy() && queryMetadata.hasGroupBy())) {
addSelectTransformation(selectClauseComposer);
}
this.context.setSelectClauseComposer(selectClauseComposer);
}
if (items != null) {
for (int i = 0; i < items.length; i++) {
IdentifierHelper.Advice ia = getAdvice(items[i]);
private void process(SelectClauseComposer scc) {
if (LOG.isDebugEnabled()) {
LOG.debug("addSelect(items.length={})", scc.getItems() != null ? scc.getItems().length : 0);
}
if(!labels[i].equals(items[i])) {
context.aliasMap.put(labels[i], ia.getQualifiedName());
if (scc.getItems() == null) {
return;
}
if (i == selectExprMetadata.getCountIdx()) {
items[i] = GremlinClause.INLINE_COUNT.get();
} else if (i == selectExprMetadata.getMinIdx()) {
items[i] = GremlinClause.INLINE_MIN.get(ia.getQualifiedName(), ia.getQualifiedName());
} else if (i == selectExprMetadata.getMaxIdx()) {
items[i] = GremlinClause.INLINE_MAX.get(ia.getQualifiedName(), ia.getQualifiedName());
} else if (i == selectExprMetadata.getSumIdx()) {
items[i] = GremlinClause.INLINE_SUM.get(ia.getQualifiedName(), ia.getQualifiedName());
} else {
if (!ia.isPrimitive() && ia.getIntroduceType()) {
add(GremlinClause.OUT, ia.getEdgeLabel());
context.registerActive(ia.getTypeName());
for (int i = 0; i < scc.getItems().length; i++) {
IdentifierHelper.Advice ia = getAdvice(scc.getItem(i));
int dotIdx = ia.get().indexOf(".");
if (dotIdx != -1) {
IdentifierHelper.Advice iax = getAdvice(ia.get());
items[i] = GremlinClause.INLINE_GET_PROPERTY.get(iax.getQualifiedName());
} else {
isSelectNoop = true;
}
} else {
items[i] = GremlinClause.INLINE_GET_PROPERTY.get(ia.getQualifiedName());
}
if (!scc.getItem(i).equals(scc.getLabel(i))) {
context.addAlias(scc.getLabel(i), ia.getQualifiedName());
}
if (scc.updateAsApplicable(i, ia.getQualifiedName())) {
continue;
}
// If GroupBy clause exists then the query spits out a List<Map<String, List<AtlasVertex>>> otherwise the query returns List<AtlasVertex>
// Different transformations are needed for DSLs with groupby and w/o groupby
GremlinClause transformationFn;
if (isSelectNoop) {
transformationFn = GremlinClause.SELECT_EXPR_NOOP_FN;
if (introduceType(ia)) {
scc.isSelectNoop = !ia.hasParts();
if(ia.hasParts()) {
scc.assign(i, getAdvice(ia.get()).getQualifiedName(), GremlinClause.INLINE_GET_PROPERTY);
}
} else {
transformationFn = hasGroupBy ? GremlinClause.SELECT_WITH_GRPBY_HELPER_FN : GremlinClause.SELECT_EXPR_HELPER_FN;
scc.assign(i, ia.getQualifiedName(), GremlinClause.INLINE_GET_PROPERTY);
}
queryClauses.add(0, transformationFn, getJoinedQuotedStr(labels), String.join(",", items));
queryClauses.add(GremlinClause.INLINE_TRANSFORM_CALL);
hasSelect = true;
this.selectExprMetadata = selectExprMetadata;
}
}
public GremlinQueryComposer createNestedProcessor() {
GremlinQueryComposer qp = new GremlinQueryComposer(lookup, true);
GremlinQueryComposer qp = new GremlinQueryComposer(lookup, queryMetadata, true);
qp.context = this.context;
return qp;
}
......@@ -259,12 +231,12 @@ public class GremlinQueryComposer {
context.registerAlias(alias);
}
public void addAsClause(String stepName) {
public void addAsClause(String alias) {
if (LOG.isDebugEnabled()) {
LOG.debug("addAsClause(stepName={})", stepName);
LOG.debug("addAsClause(stepName={})", alias);
}
add(GremlinClause.AS, stepName);
add(GremlinClause.AS, alias);
}
public void addGroupBy(String item) {
......@@ -273,7 +245,6 @@ public class GremlinQueryComposer {
}
addGroupByClause(item);
hasGroupBy = true;
}
public void addLimit(String limit, String offset) {
......@@ -281,47 +252,48 @@ public class GremlinQueryComposer {
LOG.debug("addLimit(limit={}, offset={})", limit, offset);
}
this.limit = limit;
this.offset = offset;
if (offset.equalsIgnoreCase("0")) {
add(GremlinClause.LIMIT, limit);
} else {
addRangeClause(offset, limit);
}
}
hasLimitOffset = true;
public void addDefaultLimit() {
addLimit(Integer.toString(providedLimit), Integer.toString(providedOffset));
}
public String get() {
close();
String ret;
String[] items = new String[queryClauses.size()];
boolean needTransformation = needTransformation();
int startIdx = needTransformation ? 1 : 0;
int endIdx = needTransformation ? queryClauses.size() - 1 : queryClauses.size();
for (int i = startIdx; i < endIdx; i++) {
items[i] = queryClauses.getValue(i);
String[] items = getFormattedClauses(queryMetadata.needTransformation());
return queryMetadata.needTransformation() ?
getTransformedClauses(items) :
String.join(".", items);
}
if (needTransformation) {
private String getTransformedClauses(String[] items) {
String ret;
String body = String.join(".", Stream.of(items).filter(Objects::nonNull).collect(Collectors.toList()));
String inlineFn = queryClauses.getValue(queryClauses.size() - 1);
String funCall = String.format(inlineFn, body);
ret = queryClauses.getValue(0) + funCall;
if (isNestedQuery) {
ret = String.join(".", queryClauses.getValue(0), funCall);
} else {
ret = String.join(".", items);
}
if (LOG.isDebugEnabled()) {
LOG.debug("get() => {}", ret);
ret = queryClauses.getValue(0) + funCall;
}
return ret;
}
public boolean hasSelect() {
return hasSelect;
}
private String[] getFormattedClauses(boolean needTransformation) {
String[] items = new String[queryClauses.size()];
int startIdx = needTransformation ? 1 : 0;
int endIdx = needTransformation ? queryClauses.size() - 1 : queryClauses.size();
public boolean hasLimitOffset() {
return hasLimitOffset;
for (int i = startIdx; i < endIdx; i++) {
items[i] = queryClauses.getValue(i);
}
return items;
}
public void addOrderBy(String name, boolean isDesc) {
......@@ -329,34 +301,47 @@ public class GremlinQueryComposer {
LOG.debug("addOrderBy(name={}, isDesc={})", name, isDesc);
}
AtlasAttribute attribute = ((AtlasStructType) context.getActiveType()).getAttribute(getAttributeName(name));
if (hasGroupBy) {
GremlinClause transformationFn = isDesc ? GremlinClause.GRPBY_ORDERBY_DESC_HELPER_FN : GremlinClause.GRPBY_ORDERBY_ASC_HELPER_FN;
add(0, transformationFn, attribute.getQualifiedName(), attribute.getQualifiedName());
add(GremlinClause.INLINE_TRANSFORM_CALL);
IdentifierHelper.Advice ia = getAdvice(name);
if (queryMetadata.hasSelect() && queryMetadata.hasGroupBy()) {
addOrderByClause(ia.getQualifiedName(), isDesc);
moveToLast(GremlinClause.GROUP_BY);
addSelectTransformation(this.context.selectClauseComposer);
} else if (queryMetadata.hasGroupBy()) {
addOrderByClause(ia.getQualifiedName(), isDesc);
moveToLast(GremlinClause.GROUP_BY);
} else {
addOrderByClause(attribute.getQualifiedName(), isDesc);
addOrderByClause(ia.getQualifiedName(), isDesc);
}
hasOrderBy = true;
}
private String getAttributeName(String fqdn) {
final String ret;
int lastSepIdx = fqdn.lastIndexOf('.');
return lastSepIdx == -1 ? fqdn : fqdn.substring(lastSepIdx + 1);
private static String quoted(String rhs) {
return IdentifierHelper.getQuoted(rhs);
}
private boolean needTransformation() {
return (hasGroupBy && hasSelect && hasOrderBy) || (hasGroupBy && hasOrderBy) || hasSelect;
}
private void addSelectTransformation(final SelectClauseComposer selectClauseComposer) {
GremlinClause fn;
if (selectClauseComposer.isSelectNoop) {
fn = GremlinClause.SELECT_NOOP_FN;
} else if (queryMetadata.hasGroupBy()){
fn = selectClauseComposer.onlyAggregators() ?
GremlinClause.SELECT_ONLY_AGG_GRP_FN :
GremlinClause.SELECT_MULTI_ATTR_GRP_FN;
private static String quoted(String rhs) {
return IdentifierHelper.getQuoted(rhs);
} else {
fn = selectClauseComposer.onlyAggregators() ?
GremlinClause.SELECT_ONLY_AGG_FN :
GremlinClause.SELECT_FN;
}
queryClauses.add(0, fn,
selectClauseComposer.getLabelHeader(),
selectClauseComposer.hasAssignmentExpr() ? selectClauseComposer.getAssignmentExprString(): "",
selectClauseComposer.getItemsString());
queryClauses.add(GremlinClause.INLINE_TRANSFORM_CALL);
}
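// Function choice above, summarized (result shapes per the earlier comment:
// groupby yields List<Map<String, List<AtlasVertex>>>, otherwise List<AtlasVertex>):
//   isSelectNoop                 -> SELECT_NOOP_FN
//   groupby, only aggregators    -> SELECT_ONLY_AGG_GRP_FN
//   groupby, mixed select items  -> SELECT_MULTI_ATTR_GRP_FN
//   no groupby, only aggregators -> SELECT_ONLY_AGG_FN
//   no groupby, plain select     -> SELECT_FN
// The template receives the quoted label header, any inline assignments (e.g.
// "def min=...;") and the per-row item expressions; INLINE_TRANSFORM_CALL then
// wraps the composed traversal as f(<traversal>).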
private String addQuotesIfNecessary(String rhs) {
if(IdentifierHelper.isTrueOrFalse(rhs)) return rhs;
if(IdentifierHelper.isQuoted(rhs)) return rhs;
return quoted(rhs);
}
......@@ -369,20 +354,11 @@ public class GremlinQueryComposer {
}
private void close() {
// No limits or toList() need to be added to the nested queries
if (isNestedQuery) return;
if (isNestedQuery)
return;
if (hasLimitOffset) {
// If there are any aggregator functions then implicit limits shouldn't be applied
if (selectExprMetadata == null || !selectExprMetadata.hasAggregatorFunction()) {
if (offset.equalsIgnoreCase("0")) {
add(GremlinClause.LIMIT, limit);
} else {
addRangeClause(offset, limit);
}
} else {
LOG.warn("Query has aggregator function. Performance might be slow for large dataset");
}
if (!queryMetadata.hasLimitOffset()) {
addDefaultLimit();
}
if (queryClauses.isEmpty()) {
......@@ -390,14 +366,14 @@ public class GremlinQueryComposer {
return;
}
updatePosition(GremlinClause.LIMIT);
moveToLast(GremlinClause.LIMIT);
add(GremlinClause.TO_LIST);
updatePosition(GremlinClause.INLINE_TRANSFORM_CALL);
moveToLast(GremlinClause.INLINE_TRANSFORM_CALL);
}
private void updatePosition(GremlinClause clause) {
private void moveToLast(GremlinClause clause) {
int index = queryClauses.hasClause(clause);
if(-1 == index) {
if (-1 == index) {
return;
}
......@@ -414,25 +390,19 @@ public class GremlinQueryComposer {
}
}
private void introduceType(IdentifierHelper.Advice ia) {
if (!ia.isPrimitive() && ia.getIntroduceType()) {
private boolean introduceType(IdentifierHelper.Advice ia) {
if (ia.getIntroduceType()) {
add(GremlinClause.OUT, ia.getEdgeLabel());
context.registerActive(ia.getTypeName());
}
return ia.getIntroduceType();
}
private IdentifierHelper.Advice getAdvice(String actualTypeName) {
return IdentifierHelper.create(context, lookup, actualTypeName);
}
private String getJoinedQuotedStr(String[] elements) {
StringJoiner joiner = new StringJoiner(",");
Arrays.stream(elements)
.map(x -> x.contains("'") ? "\"" + x + "\"" : "'" + x + "'")
.forEach(joiner::add);
return joiner.toString();
}
private void add(GremlinClause clause, String... args) {
queryClauses.add(new GremlinClauseValue(clause, clause.get(args)));
}
......@@ -446,7 +416,7 @@ public class GremlinQueryComposer {
LOG.debug("addRangeClause(startIndex={}, endIndex={})", startIndex, endIndex);
}
if (hasSelect) {
if (queryMetadata.hasSelect()) {
add(queryClauses.size() - 1, GremlinClause.RANGE, startIndex, startIndex, endIndex);
} else {
add(GremlinClause.RANGE, startIndex, startIndex, endIndex);
......@@ -471,64 +441,6 @@ public class GremlinQueryComposer {
add(GremlinClause.GROUP_BY, ia.getQualifiedName());
}
private enum GremlinClause {
AS("as('%s')"),
DEDUP("dedup()"),
G("g"),
GROUP_BY("group().by('%s')"),
HAS("has('%s', %s)"),
HAS_OPERATOR("has('%s', %s(%s))"),
HAS_PROPERTY("has('%s')"),
WHERE("where(%s)"),
HAS_NOT_PROPERTY("hasNot('%s')"),
HAS_TYPE("has('__typeName', '%s')"),
HAS_TYPE_WITHIN("has('__typeName', within(%s))"),
HAS_WITHIN("has('%s', within(%s))"),
IN("in('%s')"),
OR("or(%s)"),
AND("and(%s)"),
NESTED_START("__"),
NESTED_HAS_OPERATOR("has('%s', %s(%s))"),
LIMIT("limit(%s)"),
ORDER_BY("order().by('%s')"),
ORDER_BY_DESC("order().by('%s', decr)"),
OUT("out('%s')"),
RANGE("range(%s, %s + %s)"),
SELECT("select('%s')"),
TO_LIST("toList()"),
TEXT_CONTAINS("has('%s', org.janusgraph.core.attribute.Text.textRegex(%s))"),
TEXT_PREFIX("has('%s', org.janusgraph.core.attribute.Text.textPrefix(%s))"),
TEXT_SUFFIX("has('%s', org.janusgraph.core.attribute.Text.textRegex(\".*\" + %s))"),
TRAIT("has('__traitNames', within('%s'))"),
SELECT_EXPR_NOOP_FN("def f(r){ r }; "),
SELECT_EXPR_HELPER_FN("def f(r){ return [[%s]].plus(r.collect({[%s]})).unique(); }; "),
SELECT_WITH_GRPBY_HELPER_FN("def f(r){ return [[%s]].plus(r.collect({it.values()}).flatten().collect({[%s]})).unique(); }; "),
GRPBY_ORDERBY_ASC_HELPER_FN("def f(r){ m=r.get(0); m.each({ k,v -> m[k] = v.sort{a,b -> a.value('%s') <=> b.value('%s')}}); r }; "),
GRPBY_ORDERBY_DESC_HELPER_FN("def f(r){ m=r.get(0); m.each({ k,v -> m[k] = v.sort{a,b -> b.value('%s') <=> a.value('%s')}}); r; }; "),
INLINE_COUNT("r.size()"),
INLINE_SUM("r.sum({it.value('%s')}).value('%s')"),
INLINE_MAX("r.max({it.value('%s')}).value('%s')"),
INLINE_MIN("r.min({it.value('%s')}).value('%s')"),
INLINE_GET_PROPERTY("it.value('%s')"),
INLINE_OUT_VERTEX("it.out('%s')"),
INLINE_OUT_VERTEX_VALUE("it.out('%s').value('%s')"), // This might require more closure introduction :(
INLINE_TRANSFORM_CALL("f(%s)"),
V("V()"),
VALUE_MAP("valueMap(%s)");
private final String format;
GremlinClause(String format) {
this.format = format;
}
String get(String... args) {
return (args == null || args.length == 0) ?
format :
String.format(format, args);
}
}
private static class GremlinClauseValue {
private final GremlinClause clause;
private final String value;
......@@ -604,7 +516,14 @@ public class GremlinQueryComposer {
}
public boolean isEmpty() {
return list.size() == 0;
return list.size() == 0 || containsGVLimit();
}
private boolean containsGVLimit() {
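// a clause list that is exactly g.V().limit(n) carries no user-supplied
// constraint, so isEmpty() above treats it as an empty (rejectable) query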
return list.size() == 3 &&
list.get(0).clause == GremlinClause.G &&
list.get(1).clause == GremlinClause.V &&
list.get(2).clause == GremlinClause.LIMIT;
}
public void clear() {
......@@ -624,6 +543,7 @@ public class GremlinQueryComposer {
Lookup lookup;
Map<String, String> aliasMap = new HashMap<>();
private AtlasType activeType;
private SelectClauseComposer selectClauseComposer;
public Context(List<String> errorList, Lookup lookup) {
this.lookup = lookup;
......@@ -659,12 +579,7 @@ public class GremlinQueryComposer {
}
public void registerAlias(String alias) {
if(aliasMap.containsKey(alias)) {
errorList.add(String.format("Duplicate alias found: %s for type %s already present.", alias, getActiveEntityType()));
return;
}
aliasMap.put(alias, getActiveTypeName());
addAlias(alias, getActiveTypeName());
}
public boolean hasAlias(String alias) {
......@@ -678,210 +593,18 @@ public class GremlinQueryComposer {
public boolean isEmpty() {
return activeType == null;
}
}
private static class RegistryBasedLookup implements Lookup {
private final List<String> errorList;
private final AtlasTypeRegistry typeRegistry;
public RegistryBasedLookup(List<String> errorList, AtlasTypeRegistry typeRegistry) {
this.errorList = errorList;
this.typeRegistry = typeRegistry;
}
@Override
public AtlasType getType(String typeName) {
try {
return typeRegistry.getType(typeName);
} catch (AtlasBaseException e) {
addError(e.getMessage());
}
return null;
}
@Override
public String getQualifiedName(Context context, String name) {
try {
AtlasEntityType et = context.getActiveEntityType();
if(et == null) {
return "";
}
return et.getQualifiedAttributeName(name);
} catch (AtlasBaseException e) {
addError(e.getMessage());
}
return "";
}
@Override
public boolean isPrimitive(Context context, String attributeName) {
AtlasEntityType et = context.getActiveEntityType();
if(et == null) {
return false;
}
AtlasType attr = et.getAttributeType(attributeName);
if(attr == null) {
return false;
}
TypeCategory attrTypeCategory = attr.getTypeCategory();
return (attrTypeCategory != null) && (attrTypeCategory == TypeCategory.PRIMITIVE || attrTypeCategory == TypeCategory.ENUM);
}
@Override
public String getRelationshipEdgeLabel(Context context, String attributeName) {
AtlasEntityType et = context.getActiveEntityType();
if(et == null) {
return "";
}
AtlasAttribute attr = et.getAttribute(attributeName);
return (attr != null) ? attr.getRelationshipEdgeLabel() : "";
}
@Override
public boolean hasAttribute(Context context, String typeName) {
return (context.getActiveEntityType() != null) && context.getActiveEntityType().getAttribute(typeName) != null;
}
@Override
public boolean doesTypeHaveSubTypes(Context context) {
return (context.getActiveEntityType() != null && context.getActiveEntityType().getAllSubTypes().size() > 0);
}
@Override
public String getTypeAndSubTypes(Context context) {
String[] str = context.getActiveEntityType() != null ?
context.getActiveEntityType().getTypeAndAllSubTypes().toArray(new String[]{}) :
new String[]{};
if(str.length == 0) {
return null;
}
String[] quoted = new String[str.length];
for (int i = 0; i < str.length; i++) {
quoted[i] = quoted(str[i]);
}
return StringUtils.join(quoted, ",");
}
@Override
public boolean isTraitType(Context context) {
return (context.getActiveType() != null &&
context.getActiveType().getTypeCategory() == TypeCategory.CLASSIFICATION);
}
@Override
public String getTypeFromEdge(Context context, String item) {
AtlasEntityType et = context.getActiveEntityType();
if(et == null) {
return "";
}
AtlasAttribute attr = et.getAttribute(item);
if(attr == null) {
return null;
}
AtlasType at = attr.getAttributeType();
if(at.getTypeCategory() == TypeCategory.ARRAY) {
AtlasArrayType arrType = ((AtlasArrayType)at);
return ((AtlasBuiltInTypes.AtlasObjectIdType) arrType.getElementType()).getObjectType();
public void setSelectClauseComposer(SelectClauseComposer selectClauseComposer) {
this.selectClauseComposer = selectClauseComposer;
}
return context.getActiveEntityType().getAttribute(item).getTypeName();
}
@Override
public boolean isDate(Context context, String attributeName) {
AtlasEntityType et = context.getActiveEntityType();
if (et == null) {
return false;
}
AtlasType attr = et.getAttributeType(attributeName);
return attr != null && attr.getTypeName().equals(AtlasBaseTypeDef.ATLAS_TYPE_DATE);
}
protected void addError(String s) {
errorList.add(s);
}
}
static class SelectExprMetadata {
private String[] items;
private String[] labels;
private int countIdx = -1;
private int sumIdx = -1;
private int maxIdx = -1;
private int minIdx = -1;
private boolean hasAggregator = false;
public String[] getItems() {
return items;
}
public void setItems(final String[] items) {
this.items = items;
}
public int getCountIdx() {
return countIdx;
}
public void setCountIdx(final int countIdx) {
this.countIdx = countIdx;
setHasAggregator();
}
public int getSumIdx() {
return sumIdx;
}
public void setSumIdx(final int sumIdx) {
this.sumIdx = sumIdx;
setHasAggregator();
}
public int getMaxIdx() {
return maxIdx;
}
public void setMaxIdx(final int maxIdx) {
this.maxIdx = maxIdx;
setHasAggregator();
}
public int getMinIdx() {
return minIdx;
}
public void setMinIdx(final int minIdx) {
this.minIdx = minIdx;
setHasAggregator();
}
public String[] getLabels() {
return labels;
}
public void setLabels(final String[] labels) {
this.labels = labels;
}
public boolean hasAggregatorFunction(){
return hasAggregator;
public void addAlias(String alias, String typeName) {
if(aliasMap.containsKey(alias)) {
errorList.add(String.format("Duplicate alias found: %s for type %s already present.", alias, getActiveEntityType()));
return;
}
private void setHasAggregator() {
hasAggregator = true;
aliasMap.put(alias, typeName);
}
}
}
......@@ -89,6 +89,10 @@ public class IdentifierHelper {
return String.format("'%s'", s);
}
public static boolean isTrueOrFalse(String rhs) {
return rhs.equalsIgnoreCase("true") || rhs.equalsIgnoreCase("false");
}
public static class Advice {
private String raw;
private String actual;
......@@ -123,7 +127,7 @@ public class IdentifierHelper {
updateTypeInfo(lookup, context);
isTrait = lookup.isTraitType(context);
updateEdgeInfo(lookup, context);
introduceType = !context.hasAlias(parts[0]);
introduceType = !isPrimitive() && !context.hasAlias(parts[0]);
updateSubTypes(lookup, context);
}
}
......@@ -237,12 +241,12 @@ public class IdentifierHelper {
return actual;
}
public boolean isNewContext() {
return newContext;
}
public boolean isDate() {
return isDate;
}
public boolean hasParts() {
return parts.length > 1;
}
}
}
......@@ -18,6 +18,8 @@
package org.apache.atlas.query;
import org.apache.atlas.AtlasConfiguration;
public class QueryParams {
private int limit;
private int offset;
......@@ -47,4 +49,21 @@ public class QueryParams {
public void offset(int offset) {
this.offset = offset;
}
public static QueryParams getNormalizedParams(int suppliedLimit, int suppliedOffset) {
int defaultLimit = AtlasConfiguration.SEARCH_DEFAULT_LIMIT.getInt();
int maxLimit = AtlasConfiguration.SEARCH_MAX_LIMIT.getInt();
int limit = defaultLimit;
if (suppliedLimit > 0 && suppliedLimit <= maxLimit) {
limit = suppliedLimit;
}
int offset = 0;
if (suppliedOffset > 0) {
offset = suppliedOffset;
}
return new QueryParams(limit, offset);
}
}
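
A sketch of the normalization behavior (the 100 and 10000 defaults for atlas.search.defaultlimit and atlas.search.maxlimit are assumptions, not values read from this build):

// getNormalizedParams(-1,    -5) -> limit=100 (default),              offset=0
// getNormalizedParams(25,     4) -> limit=25,                         offset=4
// getNormalizedParams(20000,  0) -> limit=100 (above max -> default), offset=0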
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.atlas.query;
import org.apache.atlas.exception.AtlasBaseException;
import org.apache.atlas.model.TypeCategory;
import org.apache.atlas.model.typedef.AtlasBaseTypeDef;
import org.apache.atlas.type.*;
import org.apache.commons.lang.StringUtils;
import java.util.List;
class RegistryBasedLookup implements Lookup {
private final List<String> errorList;
private final AtlasTypeRegistry typeRegistry;
public RegistryBasedLookup(List<String> errorList, AtlasTypeRegistry typeRegistry) {
this.errorList = errorList;
this.typeRegistry = typeRegistry;
}
@Override
public AtlasType getType(String typeName) {
try {
return typeRegistry.getType(typeName);
} catch (AtlasBaseException e) {
addError(e.getMessage());
}
return null;
}
@Override
public String getQualifiedName(GremlinQueryComposer.Context context, String name) {
try {
AtlasEntityType et = context.getActiveEntityType();
if(et == null) {
return "";
}
return et.getQualifiedAttributeName(name);
} catch (AtlasBaseException e) {
addError(e.getMessage());
}
return "";
}
@Override
public boolean isPrimitive(GremlinQueryComposer.Context context, String attributeName) {
AtlasEntityType et = context.getActiveEntityType();
if(et == null) {
return false;
}
AtlasType attr = et.getAttributeType(attributeName);
if(attr == null) {
return false;
}
TypeCategory attrTypeCategory = attr.getTypeCategory();
return (attrTypeCategory != null) && (attrTypeCategory == TypeCategory.PRIMITIVE || attrTypeCategory == TypeCategory.ENUM);
}
@Override
public String getRelationshipEdgeLabel(GremlinQueryComposer.Context context, String attributeName) {
AtlasEntityType et = context.getActiveEntityType();
if(et == null) {
return "";
}
AtlasStructType.AtlasAttribute attr = et.getAttribute(attributeName);
return (attr != null) ? attr.getRelationshipEdgeLabel() : "";
}
@Override
public boolean hasAttribute(GremlinQueryComposer.Context context, String typeName) {
return (context.getActiveEntityType() != null) && context.getActiveEntityType().getAttribute(typeName) != null;
}
@Override
public boolean doesTypeHaveSubTypes(GremlinQueryComposer.Context context) {
return (context.getActiveEntityType() != null && context.getActiveEntityType().getAllSubTypes().size() > 0);
}
@Override
public String getTypeAndSubTypes(GremlinQueryComposer.Context context) {
String[] str = context.getActiveEntityType() != null ?
context.getActiveEntityType().getTypeAndAllSubTypes().toArray(new String[]{}) :
new String[]{};
if(str.length == 0) {
return null;
}
String[] quoted = new String[str.length];
for (int i = 0; i < str.length; i++) {
quoted[i] = IdentifierHelper.getQuoted(str[i]);
}
return StringUtils.join(quoted, ",");
}
@Override
public boolean isTraitType(GremlinQueryComposer.Context context) {
return (context.getActiveType() != null &&
context.getActiveType().getTypeCategory() == TypeCategory.CLASSIFICATION);
}
@Override
public String getTypeFromEdge(GremlinQueryComposer.Context context, String item) {
AtlasEntityType et = context.getActiveEntityType();
if(et == null) {
return "";
}
AtlasStructType.AtlasAttribute attr = et.getAttribute(item);
if(attr == null) {
return null;
}
AtlasType at = attr.getAttributeType();
if(at.getTypeCategory() == TypeCategory.ARRAY) {
AtlasArrayType arrType = ((AtlasArrayType)at);
return ((AtlasBuiltInTypes.AtlasObjectIdType) arrType.getElementType()).getObjectType();
}
return context.getActiveEntityType().getAttribute(item).getTypeName();
}
@Override
public boolean isDate(GremlinQueryComposer.Context context, String attributeName) {
AtlasEntityType et = context.getActiveEntityType();
if (et == null) {
return false;
}
AtlasType attr = et.getAttributeType(attributeName);
return attr != null && attr.getTypeName().equals(AtlasBaseTypeDef.ATLAS_TYPE_DATE);
}
protected void addError(String s) {
errorList.add(s);
}
}
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.atlas.query;
import java.util.Arrays;
import java.util.LinkedHashMap;
import java.util.Map;
import java.util.StringJoiner;
class SelectClauseComposer {
private String[] labels;
private String[] items;
private Map<String, String> itemAssignmentExprs;
private int countIdx = -1;
private int sumIdx = -1;
private int maxIdx = -1;
private int minIdx = -1;
private int aggCount = 0;
public boolean isSelectNoop;
public SelectClauseComposer() {}
public String[] getItems() {
return items;
}
public void setItems(final String[] items) {
this.items = items;
}
public boolean updateAsApplicable(int currentIndex, String qualifiedName) {
boolean ret = false;
if (currentIndex == getCountIdx()) {
ret = assign(currentIndex, "count",
GremlinClause.INLINE_COUNT.get(), GremlinClause.INLINE_ASSIGNMENT);
} else if (currentIndex == getMinIdx()) {
ret = assign(currentIndex, "min", qualifiedName,
GremlinClause.INLINE_ASSIGNMENT, GremlinClause.INLINE_MIN);
} else if (currentIndex == getMaxIdx()) {
ret = assign(currentIndex, "max", qualifiedName,
GremlinClause.INLINE_ASSIGNMENT, GremlinClause.INLINE_MAX);
} else if (currentIndex == getSumIdx()) {
ret = assign(currentIndex, "sum", qualifiedName,
GremlinClause.INLINE_ASSIGNMENT, GremlinClause.INLINE_SUM);
}
return ret;
}
private boolean assign(String item, String assignExpr) {
if (itemAssignmentExprs == null) {
itemAssignmentExprs = new LinkedHashMap<>();
}
itemAssignmentExprs.put(item, assignExpr);
return true;
}
public boolean assign(int i, String qualifiedName, GremlinClause clause) {
items[i] = clause.get(qualifiedName);
return true;
}
private boolean assign(int i, String s, String p1, GremlinClause clause) {
items[i] = s;
return assign(items[i], clause.get(s, p1));
}
private boolean assign(int i, String s, String p1, GremlinClause inline, GremlinClause clause) {
items[i] = s;
return assign(items[i], inline.get(s, clause.get(p1, p1)));
}
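// Worked example (qualified name hypothetical): updateAsApplicable(i, "Asset.name")
// with i == minIdx sets items[i] = "min" and records
// itemAssignmentExprs["min"] = "def min=r.min({it.value('Asset.name')}).value('Asset.name');"
// which getAssignmentExprString() later splices into the SELECT_* helper functions.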
private int getCountIdx() {
return countIdx;
}
public void setCountIdx(final int countIdx) {
this.countIdx = countIdx;
aggCount++;
}
private int getSumIdx() {
return sumIdx;
}
public void setSumIdx(final int sumIdx) {
this.sumIdx = sumIdx;
aggCount++;
}
private int getMaxIdx() {
return maxIdx;
}
public void setMaxIdx(final int maxIdx) {
this.maxIdx = maxIdx;
aggCount++;
}
private int getMinIdx() {
return minIdx;
}
public void setMinIdx(final int minIdx) {
this.minIdx = minIdx;
aggCount++;
}
public String[] getLabels() {
return labels;
}
public void setLabels(final String[] labels) {
this.labels = labels;
}
public boolean hasAssignmentExpr() {
return itemAssignmentExprs != null && !itemAssignmentExprs.isEmpty();
}
public boolean onlyAggregators() {
return aggCount > 0 && aggCount == items.length;
}
public String getLabelHeader() {
return getJoinedQuotedStr(getLabels());
}
public String getItemsString() {
return String.join(",", getItems());
}
public String getAssignmentExprString(){
return String.join(" ", itemAssignmentExprs.values());
}
private String getJoinedQuotedStr(String[] elements) {
StringJoiner joiner = new StringJoiner(",");
Arrays.stream(elements)
.map(x -> x.contains("'") ? "\"" + x + "\"" : "'" + x + "'")
.forEach(joiner::add);
return joiner.toString();
}
public String getItem(int i) {
return items[i];
}
public String getLabel(int i) {
return labels[i];
}
}
// Generated from repository/src/main/java/org/apache/atlas/query/antlr4/AtlasDSLLexer.g4 by ANTLR 4.7
package org.apache.atlas.query.antlr4;
import org.antlr.v4.runtime.CharStream;
import org.antlr.v4.runtime.Lexer;
import org.antlr.v4.runtime.RuntimeMetaData;
import org.antlr.v4.runtime.Vocabulary;
import org.antlr.v4.runtime.VocabularyImpl;
import org.antlr.v4.runtime.atn.ATN;
import org.antlr.v4.runtime.atn.ATNDeserializer;
import org.antlr.v4.runtime.atn.LexerATNSimulator;
import org.antlr.v4.runtime.atn.PredictionContextCache;
import org.antlr.v4.runtime.CharStream;
import org.antlr.v4.runtime.Token;
import org.antlr.v4.runtime.TokenStream;
import org.antlr.v4.runtime.*;
import org.antlr.v4.runtime.atn.*;
import org.antlr.v4.runtime.dfa.DFA;
import org.antlr.v4.runtime.misc.*;
@SuppressWarnings({"all", "warnings", "unchecked", "unused", "cast"})
public class AtlasDSLLexer extends Lexer {
......
......@@ -27,9 +27,9 @@ operator: (K_LT | K_LTE | K_EQ | K_NEQ | K_GT | K_GTE | K_LIKE) ;
sortOrder: K_ASC | K_DESC ;
valueArray: K_LBRACKET STRING (K_COMMA STRING)* K_RBRACKET ;
valueArray: K_LBRACKET ID (K_COMMA ID)* K_RBRACKET ;
literal: BOOL | NUMBER | FLOATING_NUMBER | (STRING | valueArray) ;
literal: BOOL | NUMBER | FLOATING_NUMBER | (ID | valueArray) ;
// Composite rules
limitClause: K_LIMIT NUMBER ;
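
With ID replacing STRING, a bracketed list such as ['John ETL', 'Jane BI'] now parses as a valueArray. A minimal parse sketch using the generated classes (imports as in the test file below; query() assumed to be the entry rule, as suggested by Parser.parse returning QueryContext):

AtlasDSLLexer lexer = new AtlasDSLLexer(CharStreams.fromString("hive_db where owner = ['John ETL', 'Jane BI']"));
AtlasDSLParser parser = new AtlasDSLParser(new CommonTokenStream(lexer));
AtlasDSLParser.QueryContext queryContext = parser.query();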
......
// Generated from repository/src/main/java/org/apache/atlas/query/antlr4/AtlasDSLParser.g4 by ANTLR 4.7
package org.apache.atlas.query.antlr4;
import org.antlr.v4.runtime.atn.*;
import org.antlr.v4.runtime.dfa.DFA;
......@@ -239,9 +238,9 @@ public class AtlasDSLParser extends Parser {
public static class ValueArrayContext extends ParserRuleContext {
public TerminalNode K_LBRACKET() { return getToken(AtlasDSLParser.K_LBRACKET, 0); }
public List<TerminalNode> STRING() { return getTokens(AtlasDSLParser.STRING); }
public TerminalNode STRING(int i) {
return getToken(AtlasDSLParser.STRING, i);
public List<TerminalNode> ID() { return getTokens(AtlasDSLParser.ID); }
public TerminalNode ID(int i) {
return getToken(AtlasDSLParser.ID, i);
}
public TerminalNode K_RBRACKET() { return getToken(AtlasDSLParser.K_RBRACKET, 0); }
public List<TerminalNode> K_COMMA() { return getTokens(AtlasDSLParser.K_COMMA); }
......@@ -269,7 +268,7 @@ public class AtlasDSLParser extends Parser {
setState(82);
match(K_LBRACKET);
setState(83);
match(STRING);
match(ID);
setState(88);
_errHandler.sync(this);
_la = _input.LA(1);
......@@ -279,7 +278,7 @@ public class AtlasDSLParser extends Parser {
setState(84);
match(K_COMMA);
setState(85);
match(STRING);
match(ID);
}
}
setState(90);
......@@ -305,7 +304,7 @@ public class AtlasDSLParser extends Parser {
public TerminalNode BOOL() { return getToken(AtlasDSLParser.BOOL, 0); }
public TerminalNode NUMBER() { return getToken(AtlasDSLParser.NUMBER, 0); }
public TerminalNode FLOATING_NUMBER() { return getToken(AtlasDSLParser.FLOATING_NUMBER, 0); }
public TerminalNode STRING() { return getToken(AtlasDSLParser.STRING, 0); }
public TerminalNode ID() { return getToken(AtlasDSLParser.ID, 0); }
public ValueArrayContext valueArray() {
return getRuleContext(ValueArrayContext.class,0);
}
......@@ -349,16 +348,16 @@ public class AtlasDSLParser extends Parser {
}
break;
case K_LBRACKET:
case STRING:
case ID:
enterOuterAlt(_localctx, 4);
{
setState(98);
_errHandler.sync(this);
switch (_input.LA(1)) {
case STRING:
case ID:
{
setState(96);
match(STRING);
match(ID);
}
break;
case K_LBRACKET:
......@@ -496,30 +495,23 @@ public class AtlasDSLParser extends Parser {
case BOOL:
case K_LBRACKET:
case ID:
case STRING:
enterOuterAlt(_localctx, 1);
{
setState(110);
_errHandler.sync(this);
switch (_input.LA(1)) {
case ID:
switch ( getInterpreter().adaptivePredict(_input,3,_ctx) ) {
case 1:
{
setState(108);
identifier();
}
break;
case NUMBER:
case FLOATING_NUMBER:
case BOOL:
case K_LBRACKET:
case STRING:
case 2:
{
setState(109);
literal();
}
break;
default:
throw new NoViableAltException(this);
}
}
break;
......@@ -1491,25 +1483,19 @@ public class AtlasDSLParser extends Parser {
{
setState(208);
_errHandler.sync(this);
switch (_input.LA(1)) {
case ID:
switch ( getInterpreter().adaptivePredict(_input,12,_ctx) ) {
case 1:
{
setState(206);
identifier();
}
break;
case NUMBER:
case FLOATING_NUMBER:
case BOOL:
case K_LBRACKET:
case STRING:
case 2:
{
setState(207);
literal();
}
break;
default:
throw new NoViableAltException(this);
}
setState(210);
match(K_AS);
......@@ -1621,25 +1607,19 @@ public class AtlasDSLParser extends Parser {
{
setState(221);
_errHandler.sync(this);
switch (_input.LA(1)) {
case ID:
switch ( getInterpreter().adaptivePredict(_input,14,_ctx) ) {
case 1:
{
setState(219);
identifier();
}
break;
case NUMBER:
case FLOATING_NUMBER:
case BOOL:
case K_LBRACKET:
case STRING:
case 2:
{
setState(220);
literal();
}
break;
default:
throw new NoViableAltException(this);
}
}
break;
......@@ -2029,7 +2009,7 @@ public class AtlasDSLParser extends Parser {
setState(261);
_errHandler.sync(this);
_la = _input.LA(1);
while ((((_la) & ~0x3f) == 0 && ((1L << _la) & ((1L << NUMBER) | (1L << FLOATING_NUMBER) | (1L << BOOL) | (1L << K_LPAREN) | (1L << K_LBRACKET) | (1L << K_FROM) | (1L << K_WHERE) | (1L << K_MAX) | (1L << K_MIN) | (1L << K_SUM) | (1L << K_COUNT) | (1L << ID) | (1L << STRING))) != 0)) {
while ((((_la) & ~0x3f) == 0 && ((1L << _la) & ((1L << NUMBER) | (1L << FLOATING_NUMBER) | (1L << BOOL) | (1L << K_LPAREN) | (1L << K_LBRACKET) | (1L << K_FROM) | (1L << K_WHERE) | (1L << K_MAX) | (1L << K_MIN) | (1L << K_SUM) | (1L << K_COUNT) | (1L << ID))) != 0)) {
{
{
setState(258);
......@@ -2226,10 +2206,10 @@ public class AtlasDSLParser extends Parser {
"\2\64\u00d2\3\2\2\2\66\u00d7\3\2\2\28\u00e1\3\2\2\2:\u00e3\3\2\2\2<\u00e6"+
"\3\2\2\2>\u00ea\3\2\2\2@\u00ed\3\2\2\2B\u00f4\3\2\2\2D\u00f6\3\2\2\2F"+
"\u00fb\3\2\2\2H\u0103\3\2\2\2J\u010c\3\2\2\2L\u010e\3\2\2\2NO\7\62\2\2"+
"O\3\3\2\2\2PQ\t\2\2\2Q\5\3\2\2\2RS\t\3\2\2S\7\3\2\2\2TU\7\23\2\2UZ\7\63"+
"\2\2VW\7\t\2\2WY\7\63\2\2XV\3\2\2\2Y\\\3\2\2\2ZX\3\2\2\2Z[\3\2\2\2[]\3"+
"O\3\3\2\2\2PQ\t\2\2\2Q\5\3\2\2\2RS\t\3\2\2S\7\3\2\2\2TU\7\23\2\2UZ\7\62"+
"\2\2VW\7\t\2\2WY\7\62\2\2XV\3\2\2\2Y\\\3\2\2\2ZX\3\2\2\2Z[\3\2\2\2[]\3"+
"\2\2\2\\Z\3\2\2\2]^\7\25\2\2^\t\3\2\2\2_g\7\b\2\2`g\7\6\2\2ag\7\7\2\2"+
"be\7\63\2\2ce\5\b\5\2db\3\2\2\2dc\3\2\2\2eg\3\2\2\2f_\3\2\2\2f`\3\2\2"+
"be\7\62\2\2ce\5\b\5\2db\3\2\2\2dc\3\2\2\2eg\3\2\2\2f_\3\2\2\2f`\3\2\2"+
"\2fa\3\2\2\2fd\3\2\2\2g\13\3\2\2\2hi\7 \2\2ij\7\6\2\2j\r\3\2\2\2kl\7\'"+
"\2\2lm\7\6\2\2m\17\3\2\2\2nq\5\2\2\2oq\5\n\6\2pn\3\2\2\2po\3\2\2\2qw\3"+
"\2\2\2rs\7\22\2\2st\5,\27\2tu\7\24\2\2uw\3\2\2\2vp\3\2\2\2vr\3\2\2\2w"+
......
// Generated from repository/src/main/java/org/apache/atlas/query/antlr4/AtlasDSLParser.g4 by ANTLR 4.7
package org.apache.atlas.query.antlr4;
import org.antlr.v4.runtime.tree.AbstractParseTreeVisitor;
......
// Generated from repository/src/main/java/org/apache/atlas/query/antlr4/AtlasDSLParser.g4 by ANTLR 4.7
package org.apache.atlas.query.antlr4;
import org.antlr.v4.runtime.tree.ParseTreeVisitor;
......
......@@ -17,12 +17,18 @@
*/
package org.apache.atlas.query;
import org.antlr.v4.runtime.CharStreams;
import org.antlr.v4.runtime.CommonTokenStream;
import org.antlr.v4.runtime.TokenStream;
import org.apache.atlas.TestModules;
import org.apache.atlas.discovery.EntityDiscoveryService;
import org.apache.atlas.exception.AtlasBaseException;
import org.apache.atlas.model.discovery.AtlasSearchResult;
import org.apache.atlas.query.antlr4.AtlasDSLLexer;
import org.apache.atlas.query.antlr4.AtlasDSLParser;
import org.apache.atlas.runner.LocalSolrRunner;
import org.apache.commons.collections.CollectionUtils;
import org.apache.commons.lang.StringUtils;
import org.testng.annotations.AfterClass;
import org.testng.annotations.BeforeClass;
import org.testng.annotations.DataProvider;
......@@ -31,7 +37,22 @@ import org.testng.annotations.Test;
import javax.inject.Inject;
import static org.testng.Assert.*;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.io.ByteArrayInputStream;
import java.io.IOException;
import java.io.InputStream;
import java.nio.charset.StandardCharsets;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import static org.testng.Assert.assertEquals;
import static org.testng.Assert.assertNotNull;
import static org.testng.Assert.assertNull;
import static org.testng.Assert.assertTrue;
import static org.testng.Assert.fail;
@Guice(modules = TestModules.TestOnlyModule.class)
public class DSLQueriesTest extends BasicTestSetup {
......@@ -257,6 +278,11 @@ public class DSLQueriesTest extends BasicTestSetup {
{"hive_table where name='sales_fact', db where name='Reporting'", 0},
{"hive_table where name='sales_fact', db where name='Reporting' limit 10", 0},
{"hive_table where name='sales_fact', db where name='Reporting' limit 10 offset 1", 0},
{"hive_db as d where owner = ['John ETL', 'Jane BI']", 2},
{"hive_db as d where owner = ['John ETL', 'Jane BI'] limit 10", 2},
{"hive_db as d where owner = ['John ETL', 'Jane BI'] limit 10 offset 1", 1},
};
}
......@@ -360,12 +386,389 @@ public class DSLQueriesTest extends BasicTestSetup {
};
}
@DataProvider(name = "minMaxCountProvider")
private Object[][] minMaxCountQueries() {
return new Object[][]{
{"from hive_db groupby (owner) select count() ",
new FieldValueValidator()
.withFieldNames("count()")
.withExpectedValues(1)
.withExpectedValues(1)
.withExpectedValues(1) },
// FIXME
// { "from hive_db groupby (owner, name) select Asset.owner, Asset.name, count()",
// new FieldValueValidator()
// .withFieldNames("Asset.owner", "Asset.name", "count()")
// .withExpectedValues("Jane BI", "Reporting", 1)
// .withExpectedValues("Tim ETL", "Logging", 1)
// .withExpectedValues("John ETL", "Sales", 1) },
{ "from hive_db groupby (owner) select count() ",
new FieldValueValidator()
.withFieldNames("count()").
withExpectedValues(1).
withExpectedValues(1).
withExpectedValues(1) },
{ "from hive_db groupby (owner) select Asset.owner, count() ",
new FieldValueValidator()
.withFieldNames("Asset.owner", "count()")
.withExpectedValues("Jane BI", 1)
.withExpectedValues("Tim ETL", 1)
.withExpectedValues("John ETL", 1) },
{ "from hive_db groupby (owner) select count() ",
new FieldValueValidator()
.withFieldNames("count()")
.withExpectedValues(1)
.withExpectedValues(1)
.withExpectedValues(1) },
{ "from hive_db groupby (owner) select Asset.owner, count() ",
new FieldValueValidator()
.withFieldNames("Asset.owner", "count()")
.withExpectedValues("Jane BI", 1)
.withExpectedValues("Tim ETL", 1)
.withExpectedValues("John ETL", 1) },
{ "from hive_db groupby (owner) select Asset.owner, max(Asset.name) ",
new FieldValueValidator()
.withFieldNames("Asset.owner", "max(Asset.name)")
.withExpectedValues("Tim ETL", "Logging")
.withExpectedValues("Jane BI", "Reporting")
.withExpectedValues("John ETL", "Sales") },
{ "from hive_db groupby (owner) select max(Asset.name) ",
new FieldValueValidator()
.withFieldNames("max(Asset.name)")
.withExpectedValues("Logging")
.withExpectedValues("Reporting")
.withExpectedValues("Sales") },
{ "from hive_db groupby (owner) select owner, Asset.name, min(Asset.name) ",
new FieldValueValidator()
.withFieldNames("owner", "Asset.name", "min(Asset.name)")
.withExpectedValues("Tim ETL", "Logging", "Logging")
.withExpectedValues("Jane BI", "Reporting", "Reporting")
.withExpectedValues("John ETL", "Sales", "Sales") },
{ "from hive_db groupby (owner) select owner, min(Asset.name) ",
new FieldValueValidator()
.withFieldNames("owner", "min(Asset.name)")
.withExpectedValues("Tim ETL", "Logging")
.withExpectedValues("Jane BI", "Reporting")
.withExpectedValues("John ETL", "Sales") },
{ "from hive_db groupby (owner) select min(name) ",
new FieldValueValidator()
.withFieldNames("min(name)")
.withExpectedValues("Reporting")
.withExpectedValues("Logging")
.withExpectedValues("Sales") },
{ "from hive_db groupby (owner) select min('name') ",
new FieldValueValidator()
.withFieldNames("min('name')")
.withExpectedValues("name")
.withExpectedValues("name")
.withExpectedValues("name") },
{ "from hive_db select count() ",
new FieldValueValidator()
.withFieldNames("count()")
.withExpectedValues(3) },
{ "from Person select count() as 'count', max(Person.age) as 'max', min(Person.age) as 'min'",
new FieldValueValidator()
.withFieldNames("'count'", "'max'", "'min'")
.withExpectedValues(4, 50, 0) },
{ "from Person select count() as 'count', sum(Person.age) as 'sum'",
new FieldValueValidator()
.withFieldNames("'count'", "'sum'")
.withExpectedValues(4, 86) },
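// Cross-check on the Person fixture implied by these two rows: four entities whose
// ages sum to 86, with a maximum age of 50 and a minimum of 0.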
// tests to ensure that group by works with order by and limit
// FIXME:
// { "from hive_db groupby (owner) select min(name) orderby name limit 2 ",
// new FieldValueValidator()
// .withFieldNames("min(name)")
// .withExpectedValues("Logging")
// .withExpectedValues("Reporting") },
// { "from hive_db groupby (owner) select min(name) orderby name desc limit 2 ",
// new FieldValueValidator()
// .withFieldNames("min(name)")
// .withExpectedValues("Reporting")
// .withExpectedValues("Sales") }
};
}
@Test(dataProvider = "minMaxCountProvider")
public void minMaxCount(String query, FieldValueValidator fv) throws AtlasBaseException {
AtlasSearchResult searchResult = discoveryService.searchUsingDslQuery(query, 25, 0);
assertSearchResult(searchResult, fv);
}
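// Each aggregate query above runs end to end: the DSL is parsed, composed into Gremlin,
// executed against the graph, and the tabular result in AtlasSearchResult.getAttributes()
// is checked for the declared column names and row count by assertSearchResult.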
@Test(dataProvider = "likeQueriesProvider")
public void likeQueries(String query, int expected) throws AtlasBaseException {
AtlasSearchResult searchResult = discoveryService.searchUsingDslQuery(query, 25, 0);
assertSearchResult(searchResult, expected);
}
@Test
public void classification() {
String expected = "g.V().has('__traitNames', within('PII')).limit(25).toList()";
verify("PII", expected);
}
@Test
public void dimension() {
String expected = "g.V().has('__typeName', 'hive_table').has('__traitNames', within('Dimension')).limit(25).toList()";
verify("hive_table isa Dimension", expected);
verify("hive_table is Dimension", expected);
verify("hive_table where hive_table is Dimension", expected);
// Not supported since it requires two singleSrcQuery, one for isa clause other for where clause
// verify("Table isa Dimension where name = 'sales'",
// "g.V().has('__typeName', 'Table').has('__traitNames', within('Dimension')).has('Table.name', eq('sales')).limit(25).toList()");
}
@Test
public void fromDB() {
verify("from hive_db", "g.V().has('__typeName', 'hive_db').limit(25).toList()");
verify("from hive_db limit 10", "g.V().has('__typeName', 'hive_db').limit(10).toList()");
verify("hive_db limit 10", "g.V().has('__typeName', 'hive_db').limit(10).toList()");
}
@Test
public void hasName() {
String expected = "g.V().has('__typeName', within('DataSet','hive_column_lineage','Infrastructure','Asset','Process','hive_table','hive_column','hive_db','hive_process')).has('Asset.name').limit(25).toList()";
verify("Asset has name", expected);
verify("Asset where Asset has name", expected);
}
@Test
public void simpleAlias() {
verify("Asset as a", "g.V().has('__typeName', within('DataSet','hive_column_lineage','Infrastructure','Asset','Process','hive_table','hive_column','hive_db','hive_process')).as('a').limit(25).toList()");
}
@Test
public void selectQueries() {
String expected = "def f(r){ t=[['d.name','d.owner']]; r.each({t.add([it.value('Asset.name'),it.value('Asset.owner')])}); t.unique(); }; " +
"f(g.V().has('__typeName', within('DataSet','hive_column_lineage','Infrastructure','Asset','Process','hive_table','hive_column','hive_db','hive_process')).as('d')";
verify("Asset as d select d.name, d.owner", expected + ".limit(25).toList())");
verify("Asset as d select d.name, d.owner limit 10", expected + ".limit(10).toList())");
}
@Test
public void tableSelectColumns() {
String exMain = "g.V().has('__typeName', 'hive_table').out('__hive_table.columns').limit(10).toList()";
String exSel = "def f(r){ r };";
String exSel1 = "def f(r){ t=[['db.name']]; r.each({t.add([it.value('Asset.name')])}); t.unique(); };";
verify("hive_table select columns limit 10", getExpected(exSel, exMain));
String exMain2 = "g.V().has('__typeName', 'hive_table').out('__hive_table.db').limit(25).toList()";
verify("hive_table select db", getExpected(exSel, exMain2));
String exMain3 = "g.V().has('__typeName', 'hive_table').out('__hive_table.db').limit(25).toList()";
verify("hive_table select db.name", getExpected(exSel1, exMain3));
}
@Test(enabled = false)
public void selectLimit() {
verify("from hive_db limit 5", "g.V().has('__typeName', 'hive_db').limit(5).toList()");
verify("from hive_db limit 5 offset 2", "g.V().has('__typeName', 'hive_db').range(2, 7).toList()");
}
@Test
public void orderBy() {
String expected = "g.V().has('__typeName', 'hive_db').order().by('Asset.name').limit(25).toList()";
verify("hive_db orderby name", expected);
verify("from hive_db orderby name", expected);
verify("from hive_db as d orderby d.owner limit 3", "g.V().has('__typeName', 'hive_db').as('d').order().by('Asset.owner').limit(3).toList()");
verify("hive_db as d orderby d.owner limit 3", "g.V().has('__typeName', 'hive_db').as('d').order().by('Asset.owner').limit(3).toList()");
String exSel = "def f(r){ t=[['d.name','d.owner']]; r.each({t.add([it.value('Asset.name'),it.value('Asset.owner')])}); t.unique(); };";
String exMain = "g.V().has('__typeName', 'hive_db').as('d').order().by('Asset.owner').limit(25).toList()";
verify("hive_db as d select d.name, d.owner orderby (d.owner) limit 25", getExpected(exSel, exMain));
String exMain2 = "g.V().has('__typeName', 'hive_table').and(__.has('Asset.name', eq(\"sales_fact\")),__.has('hive_table.createTime', gt('1388563200000'))).order().by('hive_table.createTime').limit(25).toList()";
String exSel2 = "def f(r){ t=[['_col_0','_col_1']]; r.each({t.add([it.value('Asset.name'),it.value('hive_table.createTime')])}); t.unique(); };";
verify("hive_table where (name = \"sales_fact\" and createTime > \"2014-01-01\" ) select name as _col_0, createTime as _col_1 orderby _col_1",
getExpected(exSel2, exMain2));
}
@Test
public void fromDBOrderByNameDesc() {
verify("from hive_db orderby name DESC", "g.V().has('__typeName', 'hive_db').order().by('Asset.name', decr).limit(25).toList()");
}
@Test
public void fromDBSelect() {
String expected = "def f(r){ t=[['Asset.name','Asset.owner']]; r.each({t.add([it.value('Asset.name'),it.value('Asset.owner')])}); t.unique(); };" +
" f(g.V().has('__typeName', 'hive_db').limit(25).toList())";
verify("from hive_db select Asset.name, Asset.owner", expected);
expected = "def f(r){ t=[['min(name)','max(owner)']]; " +
"def min=r.min({it.value('Asset.name')}).value('Asset.name'); " +
"def max=r.max({it.value('Asset.owner')}).value('Asset.owner'); " +
"t.add([min,max]); t;}; " +
"f(g.V().has('__typeName', 'hive_db').limit(25).toList())";
verify("hive_db select min(name), max(owner)", expected);
expected = "def f(r){ t=[['owner','min(name)','max(owner)']]; " +
"def min=r.min({it.value('Asset.name')}).value('Asset.name'); " +
"def max=r.max({it.value('Asset.owner')}).value('Asset.owner'); " +
"r.each({t.add([it.value('Asset.owner'),min,max])}); t.unique(); }; " +
"f(g.V().has('__typeName', 'hive_db').limit(25).toList())";
verify("hive_db select owner, min(name), max(owner)", expected);
}
@Test
public void fromDBGroupBy() {
verify("from hive_db groupby (Asset.owner)", "g.V().has('__typeName', 'hive_db').group().by('Asset.owner').limit(25).toList()");
}
@Test
public void whereClauseTextContains() {
String exMain = "g.V().has('__typeName', 'hive_db').has('Asset.name', eq(\"Reporting\")).limit(25).toList()";
String exSel = "def f(r){ t=[['name','owner']]; r.each({t.add([it.value('Asset.name'),it.value('Asset.owner')])}); t.unique(); };";
verify("from hive_db where name = \"Reporting\" select name, owner", getExpected(exSel, exMain));
verify("from hive_db where (name = \"Reporting\") select name, owner", getExpected(exSel, exMain));
verify("hive_table where Asset.name like \"Tab*\"",
"g.V().has('__typeName', 'hive_table').has('Asset.name', org.janusgraph.core.attribute.Text.textRegex(\"Tab.*\")).limit(25).toList()");
verify("from hive_table where (db.name = \"Reporting\")",
"g.V().has('__typeName', 'hive_table').out('__hive_table.db').has('Asset.name', eq(\"Reporting\")).dedup().in('__hive_table.db').limit(25).toList()");
}
@Test
public void whereClauseWithAsTextContains() {
String exSel = "def f(r){ t=[['t.name','t.owner']]; r.each({t.add([it.value('Asset.name'),it.value('Asset.owner')])}); t.unique(); };";
String exMain = "g.V().has('__typeName', 'hive_table').as('t').has('Asset.name', eq(\"testtable_1\")).limit(25).toList()";
verify("hive_table as t where t.name = \"testtable_1\" select t.name, t.owner)", getExpected(exSel, exMain));
}
@Test
public void whereClauseWithDateCompare() {
String exSel = "def f(r){ t=[['t.name','t.owner']]; r.each({t.add([it.value('Asset.name'),it.value('Asset.owner')])}); t.unique(); };";
String exMain = "g.V().has('__typeName', 'hive_table').as('t').has('hive_table.createTime', eq('1513046158440')).limit(25).toList()";
verify("hive_table as t where t.createTime = \"2017-12-12T02:35:58.440Z\" select t.name, t.owner)", getExpected(exSel, exMain));
}
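// ISO-8601 timestamps in the DSL are normalized to epoch-millisecond strings before
// comparison: "2017-12-12T02:35:58.440Z" becomes '1513046158440' in the traversal.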
@Test
public void subType() {
String exMain = "g.V().has('__typeName', within('DataSet','hive_column_lineage','Infrastructure','Asset','Process','hive_table','hive_column','hive_db','hive_process')).limit(25).toList()";
String exSel = "def f(r){ t=[['name','owner']]; r.each({t.add([it.value('Asset.name'),it.value('Asset.owner')])}); t.unique(); };";
verify("Asset select name, owner", getExpected(exSel, exMain));
}
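// Querying a supertype (Asset) expands to a within(...) over the type itself plus every
// subtype registered by the test setup, so one DSL name can match many __typeName values.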
@Test
public void traitWithSpace() {
verify("`Log Data`", "g.V().has('__traitNames', within('Log Data')).limit(25).toList()");
}
@Test
public void nestedQueries() {
verify("hive_table where name=\"sales_fact\" or name=\"testtable_1\"",
"g.V().has('__typeName', 'hive_table').or(__.has('Asset.name', eq(\"sales_fact\")),__.has('Asset.name', eq(\"testtable_1\"))).limit(25).toList()");
verify("hive_table where name=\"sales_fact\" and name=\"testtable_1\"",
"g.V().has('__typeName', 'hive_table').and(__.has('Asset.name', eq(\"sales_fact\")),__.has('Asset.name', eq(\"testtable_1\"))).limit(25).toList()");
verify("hive_table where name=\"sales_fact\" or name=\"testtable_1\" or name=\"testtable_2\"",
"g.V().has('__typeName', 'hive_table')" +
".or(" +
"__.has('Asset.name', eq(\"sales_fact\"))," +
"__.has('Asset.name', eq(\"testtable_1\"))," +
"__.has('Asset.name', eq(\"testtable_2\"))" +
").limit(25).toList()");
verify("hive_table where name=\"sales_fact\" and name=\"testtable_1\" and name=\"testtable_2\"",
"g.V().has('__typeName', 'hive_table')" +
".and(" +
"__.has('Asset.name', eq(\"sales_fact\"))," +
"__.has('Asset.name', eq(\"testtable_1\"))," +
"__.has('Asset.name', eq(\"testtable_2\"))" +
").limit(25).toList()");
verify("hive_table where (name=\"sales_fact\" or name=\"testtable_1\") and name=\"testtable_2\"",
"g.V().has('__typeName', 'hive_table')" +
".and(" +
"__.or(" +
"__.has('Asset.name', eq(\"sales_fact\"))," +
"__.has('Asset.name', eq(\"testtable_1\"))" +
")," +
"__.has('Asset.name', eq(\"testtable_2\")))" +
".limit(25).toList()");
verify("hive_table where name=\"sales_fact\" or (name=\"testtable_1\" and name=\"testtable_2\")",
"g.V().has('__typeName', 'hive_table')" +
".or(" +
"__.has('Asset.name', eq(\"sales_fact\"))," +
"__.and(" +
"__.has('Asset.name', eq(\"testtable_1\"))," +
"__.has('Asset.name', eq(\"testtable_2\")))" +
")" +
".limit(25).toList()");
verify("hive_table where name=\"sales_fact\" or name=\"testtable_1\" and name=\"testtable_2\"",
"g.V().has('__typeName', 'hive_table')" +
".and(" +
"__.or(" +
"__.has('Asset.name', eq(\"sales_fact\"))," +
"__.has('Asset.name', eq(\"testtable_1\"))" +
")," +
"__.has('Asset.name', eq(\"testtable_2\")))" +
".limit(25).toList()");
verify("hive_table where (name=\"sales_fact\" and owner=\"Joe\") OR (name=\"sales_fact_daily_mv\" and owner=\"Joe BI\")",
"g.V().has('__typeName', 'hive_table')" +
".or(" +
"__.and(" +
"__.has('Asset.name', eq(\"sales_fact\"))," +
"__.has('Asset.owner', eq(\"Joe\"))" +
")," +
"__.and(" +
"__.has('Asset.name', eq(\"sales_fact_daily_mv\"))," +
"__.has('Asset.owner', eq(\"Joe BI\"))" +
"))" +
".limit(25).toList()");
verify("hive_table where owner=\"hdfs\" or ((name=\"testtable_1\" or name=\"testtable_2\") and createTime < \"2017-12-12T02:35:58.440Z\")",
"g.V().has('__typeName', 'hive_table').or(__.has('Asset.owner', eq(\"hdfs\")),__.and(__.or(__.has('Asset.name', eq(\"testtable_1\")),__.has('Asset.name', eq(\"testtable_2\"))),__.has('hive_table.createTime', lt('1513046158440')))).limit(25).toList()");
verify("hive_table where hive_table.name='Reporting' and hive_table.createTime < '2017-12-12T02:35:58.440Z'",
"g.V().has('__typeName', 'hive_table').and(__.has('Asset.name', eq('Reporting')),__.has('hive_table.createTime', lt('1513046158440'))).limit(25).toList()");
verify("hive_table where db.name='Sales' and db.clusterName='cl1'",
"g.V().has('__typeName', 'hive_table').and(__.out('__hive_table.db').has('Asset.name', eq('Sales')).dedup().in('__hive_table.db'),__.out('__hive_table.db').has('hive_db.clusterName', eq('cl1')).dedup().in('__hive_table.db')).limit(25).toList()");
}
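// Note the precedence behaviour pinned above: "a or b and c" composes left to right as
// and(or(a, b), c) rather than giving 'and' the higher precedence, while explicit
// parentheses nest exactly as written.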
private void verify(String dsl, String expectedGremlin) {
AtlasDSLParser.QueryContext queryContext = getParsedQuery(dsl);
String actualGremlin = getGremlinQuery(queryContext);
assertEquals(actualGremlin, expectedGremlin);
}
private String getExpected(String select, String main) {
return String.format("%s f(%s)", select, main);
}
private AtlasDSLParser.QueryContext getParsedQuery(String query) {
AtlasDSLParser.QueryContext queryContext = null;
InputStream stream = new ByteArrayInputStream(query.getBytes(StandardCharsets.UTF_8));
AtlasDSLLexer lexer = null;
try {
lexer = new AtlasDSLLexer(CharStreams.fromStream(stream));
} catch (IOException e) {
fail("Unexpected IOException while reading the query string: " + e.getMessage());
}
TokenStream inputTokenStream = new CommonTokenStream(lexer);
AtlasDSLParser parser = new AtlasDSLParser(inputTokenStream);
queryContext = parser.query();
assertNotNull(queryContext);
assertNull(queryContext.exception);
return queryContext;
}
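// Parsing pipeline: query bytes -> CharStream -> AtlasDSLLexer -> CommonTokenStream ->
// AtlasDSLParser.query(); a successful parse leaves queryContext.exception null, which
// is asserted before the context is handed to the Gremlin composer.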
private String getGremlinQuery(AtlasDSLParser.QueryContext queryContext) {
GremlinQueryComposer gremlinQueryComposer = new GremlinQueryComposer(typeRegistry, new AtlasDSL.QueryMetadata(queryContext));
DSLVisitor qv = new DSLVisitor(gremlinQueryComposer);
qv.visit(queryContext);
String s = gremlinQueryComposer.get();
assertTrue(StringUtils.isNotEmpty(s));
return s;
}
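// Translation is a single visitor pass: DSLVisitor walks the ANTLR parse tree and drives
// GremlinQueryComposer, which accumulates the traversal and returns it as one string.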
private void assertSearchResult(AtlasSearchResult searchResult, int expected) {
assertNotNull(searchResult);
if(expected == 0) {
......@@ -379,4 +782,49 @@ public class DSLQueriesTest extends BasicTestSetup {
assertEquals(searchResult.getAttributes().getValues().size(), expected);
}
}
private void assertSearchResult(AtlasSearchResult searchResult, FieldValueValidator expected) {
assertNotNull(searchResult);
assertNull(searchResult.getEntities());
assertEquals(searchResult.getAttributes().getName().size(), expected.getFieldNamesCount());
for (int i = 0; i < searchResult.getAttributes().getName().size(); i++) {
String s = searchResult.getAttributes().getName().get(i);
assertEquals(s, expected.fieldNames[i]);
}
assertEquals(searchResult.getAttributes().getValues().size(), expected.values.size());
}
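// Note: this overload validates the column names and the number of result rows, not the
// individual cell values recorded via withExpectedValues().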
private class FieldValueValidator {
class ResultObject {
Map<String, Object> fieldValues = new HashMap<>();
public void setFieldValue(String string, Object object) {
fieldValues.put(string, object);
}
}
private String[] fieldNames;
private List<ResultObject> values = new ArrayList<>();
public FieldValueValidator withFieldNames(String... fieldNames) {
this.fieldNames = fieldNames;
return this;
}
public FieldValueValidator withExpectedValues(Object... values) {
ResultObject obj = new ResultObject();
for (int i = 0; i < fieldNames.length; i++) {
obj.setFieldValue(fieldNames[i], values[i]);
}
this.values.add(obj);
return this;
}
public int getFieldNamesCount() {
return (fieldNames != null) ? fieldNames.length : 0;
}
}
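// Usage sketch for the validator above (the field names and values are illustrative):
//   new FieldValueValidator()
//       .withFieldNames("owner", "count()")
//       .withExpectedValues("Jane BI", 1)   // each call appends one expected result row
//       .withExpectedValues("Tim ETL", 1);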
}
......@@ -56,9 +56,6 @@ public class GremlinQueryComposerTest {
verify("Table isa Dimension", expected);
verify("Table is Dimension", expected);
verify("Table where Table is Dimension", expected);
// Not supported since it requires two singleSrcQuery, one for isa clause other for where clause
// verify("Table isa Dimension where name = 'sales'",
// "g.V().has('__typeName', 'Table').has('__traitNames', within('Dimension')).has('Table.name', eq('sales')).limit(25).toList()");
}
@Test
......@@ -82,7 +79,8 @@ public class GremlinQueryComposerTest {
@Test
public void DBasDSelect() {
String expected = "def f(r){ return [['d.name','d.owner']].plus(r.collect({[it.value('DB.name'),it.value('DB.owner')]})).unique(); }; f(g.V().has('__typeName', 'DB').as('d')";
String expected = "def f(r){ t=[['d.name','d.owner']]; r.each({t.add([it.value('DB.name'),it.value('DB.owner')])}); t.unique(); }; " +
"f(g.V().has('__typeName', 'DB').as('d')";
verify("DB as d select d.name, d.owner", expected + ".limit(25).toList())");
verify("DB as d select d.name, d.owner limit 10", expected + ".limit(10).toList())");
}
......@@ -91,7 +89,7 @@ public class GremlinQueryComposerTest {
public void tableSelectColumns() {
String exMain = "g.V().has('__typeName', 'Table').out('__Table.columns').limit(10).toList()";
String exSel = "def f(r){ r }";
String exSel1 = "def f(r){ return [['db.name']].plus(r.collect({[it.value('DB.name')]})).unique(); }";
String exSel1 = "def f(r){ t=[['db.name']]; r.each({t.add([it.value('DB.name')])}); t.unique(); }";
verify("Table select columns limit 10", getExpected(exSel, exMain));
String exMain2 = "g.V().has('__typeName', 'Table').out('__Table.db').limit(25).toList()";
......@@ -102,15 +100,28 @@ public class GremlinQueryComposerTest {
}
@Test(enabled = false)
public void DBTableFrom() {
verify("Table, db", "g.V().has('__typeName', 'Table').out('__DB.Table').limit(25).toList()");
@Test
public void valueArray() {
verify("DB where owner = ['hdfs', 'anon']", "g.V().has('__typeName', 'DB').has('DB.owner', within('hdfs','anon')).limit(25).toList()");
}
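// The list literal on the right-hand side of '=' compiles to TinkerPop's within()
// predicate, e.g. has('DB.owner', within('hdfs','anon')), matching any listed value.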
@Test
public void groupByMin() {
verify("from DB groupby (owner) select min(name) orderby name limit 2",
"def f(l){ t=[['min(name)']]; l.get(0).each({k,r -> L:{ def min=r.min({it.value('DB.name')}).value('DB.name'); t.add([min]); } }); t; }; " +
"f(g.V().has('__typeName', 'DB').order().by('DB.name').group().by('DB.owner').limit(2).toList())");
}
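// Shape of the generated post-processing closure: l.get(0) is the single group map
// produced by group().by('DB.owner'); each({k,r -> ...}) then reduces each owner's row
// list r to its min('DB.name') and appends one output row per group.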
@Test
public void groupByOrderBy() {
verify("Table groupby(owner) select name, owner, clusterName orderby name",
"def f(l){ t=[['name','owner','clusterName']]; l.get(0).each({k,r -> L:{ r.each({t.add([it.value('Table.name'),it.value('Table.owner'),it.value('Table.clusterName')])}) } }); t.unique(); }; " +
"f(g.V().has('__typeName', 'Table').order().by('Table.name').group().by('Table.owner').limit(25).toList())");
}
@Test
public void DBAsDSelectLimit() {
verify("from DB limit 5", "g.V().has('__typeName', 'DB').limit(5).toList()");
verify("from DB limit 5 offset 2", "g.V().has('__typeName', 'DB').range(2, 2 + 5).limit(25).toList()");
verify("from DB limit 5 offset 2", "g.V().has('__typeName', 'DB').range(2, 2 + 5).toList()");
}
@Test
......@@ -122,12 +133,12 @@ public class GremlinQueryComposerTest {
verify("DB as d orderby d.owner limit 3", "g.V().has('__typeName', 'DB').as('d').order().by('DB.owner').limit(3).toList()");
String exSel = "def f(r){ return [['d.name','d.owner']].plus(r.collect({[it.value('DB.name'),it.value('DB.owner')]})).unique(); }";
String exSel = "def f(r){ t=[['d.name','d.owner']]; r.each({t.add([it.value('DB.name'),it.value('DB.owner')])}); t.unique(); }";
String exMain = "g.V().has('__typeName', 'DB').as('d').order().by('DB.owner').limit(25).toList()";
verify("DB as d select d.name, d.owner orderby (d.owner) limit 25", getExpected(exSel, exMain));
String exMain2 = "g.V().has('__typeName', 'Table').and(__.has('Table.name', eq(\"sales_fact\")),__.has('Table.createTime', gt('1388563200000'))).order().by('Table.createTime').limit(25).toList()";
String exSel2 = "def f(r){ return [['_col_0','_col_1']].plus(r.collect({[it.value('Table.name'),it.value('Table.createTime')]})).unique(); }";
String exSel2 = "def f(r){ t=[['_col_0','_col_1']]; r.each({t.add([it.value('Table.name'),it.value('Table.createTime')])}); t.unique(); }";
verify("Table where (name = \"sales_fact\" and createTime > \"2014-01-01\" ) select name as _col_0, createTime as _col_1 orderby _col_1",
getExpected(exSel2, exMain2));
}
......@@ -139,7 +150,7 @@ public class GremlinQueryComposerTest {
@Test
public void fromDBSelect() {
String expected = "def f(r){ return [['DB.name','DB.owner']].plus(r.collect({[it.value('DB.name'),it.value('DB.owner')]})).unique(); }; f(g.V().has('__typeName', 'DB').limit(25).toList())";
String expected = "def f(r){ t=[['DB.name','DB.owner']]; r.each({t.add([it.value('DB.name'),it.value('DB.owner')])}); t.unique(); }; f(g.V().has('__typeName', 'DB').limit(25).toList())";
verify("from DB select DB.name, DB.owner", expected);
}
......@@ -151,7 +162,7 @@ public class GremlinQueryComposerTest {
@Test
public void whereClauseTextContains() {
String exMain = "g.V().has('__typeName', 'DB').has('DB.name', eq(\"Reporting\")).limit(25).toList()";
String exSel = "def f(r){ return [['name','owner']].plus(r.collect({[it.value('DB.name'),it.value('DB.owner')]})).unique(); }";
String exSel = "def f(r){ t=[['name','owner']]; r.each({t.add([it.value('DB.name'),it.value('DB.owner')])}); t.unique(); }";
verify("from DB where name = \"Reporting\" select name, owner", getExpected(exSel, exMain));
verify("from DB where (name = \"Reporting\") select name, owner", getExpected(exSel, exMain));
verify("Table where Asset.name like \"Tab*\"",
......@@ -162,14 +173,14 @@ public class GremlinQueryComposerTest {
@Test
public void whereClauseWithAsTextContains() {
String exSel = "def f(r){ return [['t.name','t.owner']].plus(r.collect({[it.value('Table.name'),it.value('Table.owner')]})).unique(); }";
String exSel = "def f(r){ t=[['t.name','t.owner']]; r.each({t.add([it.value('Table.name'),it.value('Table.owner')])}); t.unique(); }";
String exMain = "g.V().has('__typeName', 'Table').as('t').has('Table.name', eq(\"testtable_1\")).limit(25).toList()";
verify("Table as t where t.name = \"testtable_1\" select t.name, t.owner)", getExpected(exSel, exMain));
}
@Test
public void whereClauseWithDateCompare() {
String exSel = "def f(r){ return [['t.name','t.owner']].plus(r.collect({[it.value('Table.name'),it.value('Table.owner')]})).unique(); }";
String exSel = "def f(r){ t=[['t.name','t.owner']]; r.each({t.add([it.value('Table.name'),it.value('Table.owner')])}); t.unique(); }";
String exMain = "g.V().has('__typeName', 'Table').as('t').has('Table.createdTime', eq('1513046158440')).limit(25).toList()";
verify("Table as t where t.createdTime = \"2017-12-12T02:35:58.440Z\" select t.name, t.owner)", getExpected(exSel, exMain));
}
......@@ -177,17 +188,39 @@ public class GremlinQueryComposerTest {
@Test
public void subType() {
String exMain = "g.V().has('__typeName', within('Asset','Table')).limit(25).toList()";
String exSel = "def f(r){ return [['name','owner']].plus(r.collect({[it.value('Asset.name'),it.value('Asset.owner')]})).unique(); }";
String exSel = "def f(r){ t=[['name','owner']]; r.each({t.add([it.value('Asset.name'),it.value('Asset.owner')])}); t.unique(); }";
verify("Asset select name, owner", getExpected(exSel, exMain));
}
@Test
public void TraitWithSpace() {
public void countMinMax() {
verify("from DB groupby (owner) select count()",
"def f(l){ t=[['count()']]; l.get(0).each({k,r -> L:{ def count=r.size(); t.add([count]); } }); t; }; f(g.V().has('__typeName', 'DB').group().by('DB.owner').limit(25).toList())");
verify("from DB groupby (owner) select max(name)",
"def f(l){ t=[['max(name)']]; l.get(0).each({k,r -> L:{ def max=r.max({it.value('DB.name')}).value('DB.name'); t.add([max]); } }); t; }; f(g.V().has('__typeName', 'DB').group().by('DB.owner').limit(25).toList())");
verify("from DB groupby (owner) select min(name)",
"def f(l){ t=[['min(name)']]; l.get(0).each({k,r -> L:{ def min=r.min({it.value('DB.name')}).value('DB.name'); t.add([min]); } }); t; }; f(g.V().has('__typeName', 'DB').group().by('DB.owner').limit(25).toList())");
verify("from Table select sum(createTime)",
"def f(r){ t=[['sum(createTime)']]; def sum=r.sum({it.value('Table.createTime')}); t.add([sum]); t;}; f(g.V().has('__typeName', 'Table').limit(25).toList())");
}
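// Aggregate translation summary: count() -> r.size(), max/min -> r.max/r.min with a
// value-extracting closure, sum -> r.sum; with a groupby clause the same reductions run
// once per group inside the l.get(0).each({k,r -> ...}) loop shown above.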
@Test
public void traitWithSpace() {
verify("`Log Data`", "g.V().has('__typeName', 'Log Data').limit(25).toList()");
}
@Test
public void whereClauseWithBooleanCondition() {
String queryFormat = "Table as t where name ='Reporting' or t.isFile = %s";
String expectedFormat = "g.V().has('__typeName', 'Table').as('t').or(__.has('Table.name', eq('Reporting')),__.has('Table.isFile', eq(%s))).limit(25).toList()";
verify(String.format(queryFormat, "true"), String.format(expectedFormat, "true"));
verify(String.format(queryFormat, "false"), String.format(expectedFormat, "false"));
verify(String.format(queryFormat, "True"), String.format(expectedFormat, "True"));
verify(String.format(queryFormat, "FALSE"), String.format(expectedFormat, "FALSE"));
}
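// The four verify calls above pin down that boolean literals reach the eq() predicate
// with their original casing (true, false, True, FALSE) rather than being normalized.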
@Test
public void nestedQueries() {
verify("Table where name=\"sales_fact\" or name=\"testtable_1\"",
"g.V().has('__typeName', 'Table').or(__.has('Table.name', eq(\"sales_fact\")),__.has('Table.name', eq(\"testtable_1\"))).limit(25).toList()");
......@@ -254,6 +287,11 @@ public class GremlinQueryComposerTest {
"g.V().has('__typeName', 'Table').and(__.out('__Table.db').has('DB.name', eq('Sales')).dedup().in('__Table.db'),__.out('__Table.db').has('DB.clusterName', eq('cl1')).dedup().in('__Table.db')).limit(25).toList()");
}
@Test
public void invalidQueries() {
verify("hdfs_path like h1", "");
}
private void verify(String dsl, String expectedGremlin) {
AtlasDSLParser.QueryContext queryContext = getParsedQuery(dsl);
String actualGremlin = getGremlinQuery(queryContext);
......@@ -289,13 +327,13 @@ public class GremlinQueryComposerTest {
AtlasTypeRegistry registry = mock(AtlasTypeRegistry.class);
org.apache.atlas.query.Lookup lookup = new TestLookup(errorList, registry);
GremlinQueryComposer.Context context = new GremlinQueryComposer.Context(errorList, lookup);
AtlasDSL.QueryMetadata queryMetadata = new AtlasDSL.QueryMetadata(queryContext);
GremlinQueryComposer gremlinQueryComposer = new GremlinQueryComposer(lookup, context);
GremlinQueryComposer gremlinQueryComposer = new GremlinQueryComposer(lookup, context, queryMetadata);
DSLVisitor qv = new DSLVisitor(gremlinQueryComposer);
qv.visit(queryContext);
String s = gremlinQueryComposer.get();
assertTrue(StringUtils.isNotEmpty(s));
return s;
}
......
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.atlas.services;
import com.google.inject.Inject;
import org.apache.atlas.TestModules;
import org.apache.atlas.discovery.EntityDiscoveryService;
import org.apache.atlas.exception.AtlasBaseException;
import org.apache.atlas.model.discovery.AtlasSearchResult;
import org.apache.atlas.repository.graphdb.AtlasGraph;
import org.apache.atlas.store.AtlasTypeDefStore;
import org.apache.atlas.type.AtlasTypeRegistry;
import org.junit.Assert;
import org.testng.annotations.Guice;
import org.testng.annotations.Test;
@Guice(modules = TestModules.TestOnlyModule.class)
public class EntityDiscoveryServiceTest {
@Inject
AtlasTypeRegistry typeRegistry;
@Inject
private AtlasTypeDefStore typeDefStore;
@Inject
private AtlasGraph atlasGraph;
@Inject
EntityDiscoveryService entityDiscoveryService;
@Test
public void dslTest() throws AtlasBaseException {
//String dslQuery = "DB where name = \"Reporting\"";
String dslQuery = "hive_table where Asset.name = \"testtable_x_0\"";
AtlasSearchResult result = entityDiscoveryService.searchUsingDslQuery(dslQuery, 20, 0);
Assert.assertNotNull(result);
}
}