Commit b5989c84 by Ashutosh Mestry Committed by apoorvnaik

Bugfixes for ATLAS-2333, ATLAS-2334

ATLAS-2229: ValueArray clause implementation. ATLAS-2229: DSL valueArray clause, added unit tests. ATLAS-2229: DSLVisitor refactoring. ATLAS-2229: Fixed REST call with classification. ATLAS-2229: GroupBy, Select, Min, Max, Count ATLAS-2229: Additional unit tests for min, max, count. Updated test framework. ATLAS-2229: Handling case where query is invalid. ATLAS-2229: Updates related to boolean value handling. ATLAS-2229: Count, Sum, Max, Min operations implemented. ATLAS-2229: Addressed review comments. Signed-off-by: apoorvnaik <apoorvnaik@apache.org>
parent ab316c9b
......@@ -19,7 +19,6 @@ package org.apache.atlas.discovery;
import com.google.common.annotations.VisibleForTesting;
import org.apache.atlas.ApplicationProperties;
import org.apache.atlas.AtlasConfiguration;
import org.apache.atlas.AtlasErrorCode;
import org.apache.atlas.AtlasException;
import org.apache.atlas.SortOrder;
......@@ -36,6 +35,7 @@ import org.apache.atlas.model.profile.AtlasUserSavedSearch;
import org.apache.atlas.query.AtlasDSL;
import org.apache.atlas.query.GremlinQuery;
import org.apache.atlas.query.QueryParams;
import org.apache.atlas.query.antlr4.AtlasDSLParser;
import org.apache.atlas.repository.Constants;
import org.apache.atlas.repository.graph.GraphBackedSearchIndexer;
import org.apache.atlas.repository.graph.GraphHelper;
......@@ -67,21 +67,16 @@ import javax.inject.Inject;
import javax.script.Bindings;
import javax.script.ScriptEngine;
import javax.script.ScriptException;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collection;
import java.util.Collections;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.Set;
import static org.apache.atlas.AtlasErrorCode.*;
import java.util.*;
import static org.apache.atlas.AtlasErrorCode.CLASSIFICATION_NOT_FOUND;
import static org.apache.atlas.AtlasErrorCode.DISCOVERY_QUERY_FAILED;
import static org.apache.atlas.AtlasErrorCode.UNKNOWN_TYPENAME;
import static org.apache.atlas.SortOrder.ASCENDING;
import static org.apache.atlas.SortOrder.DESCENDING;
import static org.apache.atlas.model.TypeCategory.*;
import static org.apache.atlas.model.TypeCategory.ARRAY;
import static org.apache.atlas.model.TypeCategory.MAP;
import static org.apache.atlas.model.TypeCategory.OBJECT_ID_TYPE;
import static org.apache.atlas.model.instance.AtlasEntity.Status.ACTIVE;
import static org.apache.atlas.model.instance.AtlasEntity.Status.DELETED;
import static org.apache.atlas.repository.graph.GraphHelper.EDGE_LABEL_PREFIX;
......@@ -177,7 +172,7 @@ public class EntityDiscoveryService implements AtlasDiscoveryService {
public AtlasSearchResult searchUsingFullTextQuery(String fullTextQuery, boolean excludeDeletedEntities, int limit, int offset)
throws AtlasBaseException {
AtlasSearchResult ret = new AtlasSearchResult(fullTextQuery, AtlasQueryType.FULL_TEXT);
QueryParams params = validateSearchParams(limit, offset);
QueryParams params = QueryParams.getNormalizedParams(limit, offset);
AtlasIndexQuery idxQuery = toAtlasIndexQuery(fullTextQuery);
if (LOG.isDebugEnabled()) {
......@@ -200,7 +195,7 @@ public class EntityDiscoveryService implements AtlasDiscoveryService {
LOG.debug("Executing basic search query: {} with type: {} and classification: {}", query, typeName, classification);
}
final QueryParams params = validateSearchParams(limit, offset);
final QueryParams params = QueryParams.getNormalizedParams(limit, offset);
Set<String> typeNames = null;
Set<String> classificationNames = null;
String attrQualifiedName = null;
......@@ -417,7 +412,7 @@ public class EntityDiscoveryService implements AtlasDiscoveryService {
@GraphTransaction
public AtlasSearchResult searchWithParameters(SearchParameters searchParameters) throws AtlasBaseException {
AtlasSearchResult ret = new AtlasSearchResult(searchParameters);
final QueryParams params = validateSearchParams(searchParameters.getLimit(),searchParameters.getOffset());
final QueryParams params = QueryParams.getNormalizedParams(searchParameters.getLimit(),searchParameters.getOffset());
searchParameters.setLimit(params.limit());
searchParameters.setOffset(params.offset());
......@@ -554,7 +549,7 @@ public class EntityDiscoveryService implements AtlasDiscoveryService {
}
}
QueryParams params = validateSearchParams(limit, offset);
QueryParams params = QueryParams.getNormalizedParams(limit, offset);
ScriptEngine scriptEngine = graph.getGremlinScriptEngine();
Bindings bindings = scriptEngine.createBindings();
Set<String> states = getEntityStates();
......@@ -675,8 +670,8 @@ public class EntityDiscoveryService implements AtlasDiscoveryService {
}
private GremlinQuery toGremlinQuery(String query, int limit, int offset) throws AtlasBaseException {
QueryParams params = validateSearchParams(limit, offset);
GremlinQuery gremlinQuery = new AtlasDSL.Translator(AtlasDSL.Parser.parse(query), typeRegistry, params.offset(), params.limit()).translate();
QueryParams params = QueryParams.getNormalizedParams(limit, offset);
GremlinQuery gremlinQuery = new AtlasDSL.Translator(query, typeRegistry, params.offset(), params.limit()).translate();
if (LOG.isDebugEnabled()) {
LOG.debug("Translated Gremlin Query: {}", gremlinQuery.queryStr());
......@@ -685,23 +680,6 @@ public class EntityDiscoveryService implements AtlasDiscoveryService {
return gremlinQuery;
}
/**
 * Normalizes caller-supplied paging values against the configured search bounds.
 * A limit outside (0, SEARCH_MAX_LIMIT] falls back to SEARCH_DEFAULT_LIMIT;
 * a non-positive offset is coerced to 0.
 *
 * @param limitParam  requested page size (may be out of range)
 * @param offsetParam requested starting offset (may be negative)
 * @return a QueryParams holding the sanitized limit and offset
 */
private QueryParams validateSearchParams(int limitParam, int offsetParam) {
    final int maxLimit = AtlasConfiguration.SEARCH_MAX_LIMIT.getInt();
    final boolean limitInRange = limitParam > 0 && limitParam <= maxLimit;
    final int effectiveLimit = limitInRange
            ? limitParam
            : AtlasConfiguration.SEARCH_DEFAULT_LIMIT.getInt();
    // Negative or zero offsets collapse to the start of the result set.
    final int effectiveOffset = Math.max(offsetParam, 0);
    return new QueryParams(effectiveLimit, effectiveOffset);
}
private AtlasIndexQuery toAtlasIndexQuery(String fullTextQuery) {
String graphQuery = String.format("v.\"%s\":(%s)", Constants.ENTITY_TEXT_PROPERTY_KEY, fullTextQuery);
return graph.indexQuery(Constants.FULLTEXT_INDEX, graphQuery);
......@@ -709,8 +687,6 @@ public class EntityDiscoveryService implements AtlasDiscoveryService {
private AttributeSearchResult toAttributesResult(List results, GremlinQuery query) {
AttributeSearchResult ret = new AttributeSearchResult();
// List<String> names = extractNames(results);
// List<List<Object>> values = extractValues(results);
List<String> names = (List<String>) results.get(0);
List<List<Object>> values = extractValues(results.subList(1, results.size()));
......@@ -920,6 +896,8 @@ public class EntityDiscoveryService implements AtlasDiscoveryService {
@Override
public String getDslQueryUsingTypeNameClassification(String query, String typeName, String classification) {
final String whereDSLKeyword = "where";
final String isaDSLKeyword = "isa";
final String isDSLKeyword = "is";
final String limitDSLKeyword = "limit";
final String whereFormat = whereDSLKeyword + " %s";
......@@ -928,7 +906,10 @@ public class EntityDiscoveryService implements AtlasDiscoveryService {
if (StringUtils.isNotEmpty(typeName)) {
if(StringUtils.isNotEmpty(query)) {
String s = query.toLowerCase();
if(!s.startsWith(whereDSLKeyword) && !s.startsWith(limitDSLKeyword)) {
if(!s.startsWith(whereDSLKeyword) &&
!s.startsWith(limitDSLKeyword) &&
!s.startsWith(isaDSLKeyword) &&
!s.startsWith(isDSLKeyword)) {
queryStr = String.format(whereFormat, query);
}
}
......@@ -939,7 +920,7 @@ public class EntityDiscoveryService implements AtlasDiscoveryService {
if (StringUtils.isNotEmpty(classification)) {
// isa works with a type name only - like hive_column isa PII; it doesn't work with more complex query
if (StringUtils.isEmpty(query)) {
queryStr += (" isa " + classification);
queryStr += String.format("%s %s %s", queryStr, isaDSLKeyword, classification);
}
}
return queryStr;
......
......@@ -53,7 +53,7 @@ public class AtlasDSL {
return RESERVED_KEYWORDS.contains(word);
}
public static AtlasDSLParser.QueryContext parse(String queryStr) throws AtlasBaseException {
private static AtlasDSLParser.QueryContext parse(String queryStr) throws AtlasBaseException {
AtlasDSLParser.QueryContext ret;
try {
......@@ -88,7 +88,6 @@ public class AtlasDSL {
@Override
public void syntaxError(final Recognizer<?, ?> recognizer, final Object offendingSymbol, final int line, final int charPositionInLine, final String msg, final RecognitionException e) {
// TODO: Capture multiple datapoints
isValid = false;
errorMsg = msg;
}
......@@ -108,7 +107,11 @@ public class AtlasDSL {
private final int offset;
private final int limit;
public Translator(final AtlasDSLParser.QueryContext queryContext, AtlasTypeRegistry typeRegistry, int offset, int limit) {
public Translator(String query, AtlasTypeRegistry typeRegistry, int offset, int limit) throws AtlasBaseException {
this(Parser.parse(query), typeRegistry, offset, limit);
}
private Translator(final AtlasDSLParser.QueryContext queryContext, AtlasTypeRegistry typeRegistry, int offset, int limit) {
this.queryContext = queryContext;
this.typeRegistry = typeRegistry;
this.offset = offset;
......@@ -116,20 +119,46 @@ public class AtlasDSL {
}
public GremlinQuery translate() {
GremlinQueryComposer gremlinQueryComposer = new GremlinQueryComposer(typeRegistry);
if (offset >= 0) {
if (!gremlinQueryComposer.hasLimitOffset()) {
gremlinQueryComposer.addLimit(Integer.toString(limit), Integer.toString(offset));
}
}
QueryMetadata queryMetadata = new QueryMetadata(queryContext);
GremlinQueryComposer gremlinQueryComposer = new GremlinQueryComposer(typeRegistry, queryMetadata, limit, offset);
DSLVisitor dslVisitor = new DSLVisitor(gremlinQueryComposer);
// Now process the Query and collect translation in
queryContext.accept(dslVisitor);
return new GremlinQuery(gremlinQueryComposer.get(), queryMetadata.hasSelect());
}
}
public static class QueryMetadata {
private boolean hasSelect;
private boolean hasGroupBy;
private boolean hasOrderBy;
private boolean hasLimitOffset;
public QueryMetadata(AtlasDSLParser.QueryContext queryContext) {
hasSelect = queryContext != null && queryContext.selectClause() != null;
hasGroupBy = queryContext != null && queryContext.groupByExpression() != null;
hasOrderBy = queryContext != null && queryContext.orderByExpr() != null;
hasLimitOffset = queryContext != null && queryContext.limitOffset() != null;
}
public boolean hasSelect() {
return hasSelect;
}
public boolean hasGroupBy() {
return hasGroupBy;
}
public boolean hasOrderBy() {
return hasOrderBy;
}
public boolean hasLimitOffset() {
return hasLimitOffset;
}
return new GremlinQuery(gremlinQueryComposer.get(), gremlinQueryComposer.hasSelect());
public boolean needTransformation() {
return (hasGroupBy && hasSelect && hasOrderBy) || (hasGroupBy && hasOrderBy) || hasSelect;
}
}
}
......@@ -66,7 +66,7 @@ public class DSLVisitor extends AtlasDSLParserBaseVisitor<Void> {
LOG.debug("=> DSLVisitor.visitLimitOffset({})", ctx);
}
gremlinQueryComposer.addLimit(ctx.limitClause().NUMBER().toString(),
gremlinQueryComposer.addLimit(ctx.limitClause().NUMBER().getText(),
(ctx.offsetClause() == null ? "0" : ctx.offsetClause().NUMBER().getText()));
return super.visitLimitOffset(ctx);
}
......@@ -85,7 +85,7 @@ public class DSLVisitor extends AtlasDSLParserBaseVisitor<Void> {
String[] items = new String[ctx.selectExpression().size()];
String[] labels = new String[ctx.selectExpression().size()];
GremlinQueryComposer.SelectExprMetadata selectExprMetadata = new GremlinQueryComposer.SelectExprMetadata();
SelectClauseComposer selectClauseComposer = new SelectClauseComposer();
for (int i = 0; i < ctx.selectExpression().size(); i++) {
SelectExpressionContext selectExpression = ctx.selectExpression(i);
......@@ -99,24 +99,24 @@ public class DSLVisitor extends AtlasDSLParserBaseVisitor<Void> {
if (Objects.nonNull(countClause)) {
items[i] = "count";
selectExprMetadata.setCountIdx(i);
selectClauseComposer.setCountIdx(i);
} else if (Objects.nonNull(sumClause)) {
items[i] = sumClause.expr().getText();
selectExprMetadata.setSumIdx(i);
selectClauseComposer.setSumIdx(i);
} else if (Objects.nonNull(minClause)) {
items[i] = minClause.expr().getText();
selectExprMetadata.setMinIdx(i);
selectClauseComposer.setMinIdx(i);
} else if (Objects.nonNull(maxClause)) {
items[i] = maxClause.expr().getText();
selectExprMetadata.setMaxIdx(i);
selectClauseComposer.setMaxIdx(i);
} else {
items[i] = selectExpression.expr().getText();
}
}
selectExprMetadata.setItems(items);
selectExprMetadata.setLabels(labels);
gremlinQueryComposer.addSelect(selectExprMetadata);
selectClauseComposer.setItems(items);
selectClauseComposer.setLabels(labels);
gremlinQueryComposer.addSelect(selectClauseComposer);
}
return super.visitSelectExpr(ctx);
}
......@@ -251,8 +251,16 @@ public class DSLVisitor extends AtlasDSLParserBaseVisitor<Void> {
if (comparisonClause != null) {
String lhs = comparisonClause.arithE(0).getText();
String op = comparisonClause.operator().getText().toUpperCase();
String rhs = comparisonClause.arithE(1).getText();
String op, rhs;
AtomEContext atomECtx = comparisonClause.arithE(1).multiE().atomE();
if(atomECtx.literal() == null ||
(atomECtx.literal() != null && atomECtx.literal().valueArray() == null)) {
op = comparisonClause.operator().getText().toUpperCase();
rhs = comparisonClause.arithE(1).getText();
} else {
op = "in";
rhs = getInClause(atomECtx);
}
gremlinQueryComposer.addWhere(lhs, op, rhs);
} else {
......@@ -260,4 +268,16 @@ public class DSLVisitor extends AtlasDSLParserBaseVisitor<Void> {
}
}
}
/**
 * Builds the right-hand side of a generated "in" clause from a DSL value array,
 * e.g. for {@code name = ["a", "b"]}. Concatenates every child token of the
 * valueArray rule except the first and last (the '[' and ']' brackets), so the
 * comma separators from the parse tree are preserved as-is.
 *
 * @param atomEContext atom whose literal is known to contain a valueArray
 * @return the array's interior tokens joined into a single string
 */
private String getInClause(AtomEContext atomEContext) {
    ValueArrayContext arrayCtx = atomEContext.literal().valueArray();
    StringBuilder joined = new StringBuilder();
    // Child 0 is '[' and the final child is ']'; everything between is
    // the alternating value/comma token sequence.
    int lastIdx = arrayCtx.children.size() - 1;
    for (int childIdx = 1; childIdx < lastIdx; childIdx++) {
        joined.append(arrayCtx.getChild(childIdx));
    }
    return joined.toString();
}
}
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.atlas.query;
/**
 * Gremlin/Groovy snippet templates used by the DSL-to-Gremlin translator.
 * Each constant wraps a {@link String#format}-style template; callers fill the
 * {@code %s} placeholders via {@link #get(String...)}.
 */
enum GremlinClause {
    AS("as('%s')"),
    DEDUP("dedup()"),
    G("g"),
    GROUP_BY("group().by('%s')"),
    HAS("has('%s', %s)"),
    HAS_OPERATOR("has('%s', %s(%s))"),
    HAS_PROPERTY("has('%s')"),
    WHERE("where(%s)"),
    HAS_NOT_PROPERTY("hasNot('%s')"),
    HAS_TYPE("has('__typeName', '%s')"),
    HAS_TYPE_WITHIN("has('__typeName', within(%s))"),
    HAS_WITHIN("has('%s', within(%s))"),
    IN("in('%s')"),
    OR("or(%s)"),
    AND("and(%s)"),
    NESTED_START("__"),
    NESTED_HAS_OPERATOR("has('%s', %s(%s))"),
    LIMIT("limit(%s)"),
    ORDER_BY("order().by('%s')"),
    // NOTE(review): 'decr' is the pre-TinkerPop-3.3 ordering token; newer
    // TinkerPop releases replace it with Order.desc — confirm the target version.
    ORDER_BY_DESC("order().by('%s', decr)"),
    OUT("out('%s')"),
    RANGE("range(%s, %s + %s)"),
    SELECT("select('%s')"),
    TO_LIST("toList()"),
    // Text predicates below are JanusGraph-specific (org.janusgraph.core.attribute.Text).
    TEXT_CONTAINS("has('%s', org.janusgraph.core.attribute.Text.textRegex(%s))"),
    TEXT_PREFIX("has('%s', org.janusgraph.core.attribute.Text.textPrefix(%s))"),
    TEXT_SUFFIX("has('%s', org.janusgraph.core.attribute.Text.textRegex(\".*\" + %s))"),
    TRAIT("has('__traitNames', within('%s'))"),
    // SELECT_* templates are inline Groovy transform functions applied to the
    // raw traversal result; see the SELECT_FN usage in GremlinQueryComposer.
    SELECT_NOOP_FN("def f(r){ r }; "),
    SELECT_FN("def f(r){ t=[[%s]]; %s r.each({t.add([%s])}); t.unique(); }; "),
    SELECT_ONLY_AGG_FN("def f(r){ t=[[%s]]; %s t.add([%s]); t;}; "),
    SELECT_ONLY_AGG_GRP_FN("def f(l){ t=[[%s]]; l.get(0).each({k,r -> L:{ %s t.add([%s]); } }); t; }; "),
    SELECT_MULTI_ATTR_GRP_FN("def f(l){ t=[[%s]]; l.get(0).each({k,r -> L:{ %s r.each({t.add([%s])}) } }); t.unique(); }; "),
    INLINE_ASSIGNMENT("def %s=%s;"),
    INLINE_LIST_RANGE("[%s..<%s]"),
    INLINE_COUNT("r.size()"),
    INLINE_SUM("r.sum({it.value('%s')})"),
    INLINE_MAX("r.max({it.value('%s')}).value('%s')"),
    INLINE_MIN("r.min({it.value('%s')}).value('%s')"),
    INLINE_GET_PROPERTY("it.value('%s')"),
    INLINE_TRANSFORM_CALL("f(%s)"),
    V("V()"),
    VALUE_MAP("valueMap(%s)");

    // Raw format template backing this clause.
    private final String template;

    GremlinClause(String template) {
        this.template = template;
    }

    /** Returns the raw template with placeholders unfilled. */
    String get() {
        return template;
    }

    /**
     * Returns the template with {@code %s} placeholders substituted by {@code args};
     * with no args (or null) the raw template is returned unchanged.
     */
    String get(String... args) {
        return (args == null || args.length == 0) ? template : String.format(template, args);
    }
}
......@@ -89,6 +89,10 @@ public class IdentifierHelper {
return String.format("'%s'", s);
}
/**
 * Reports whether the given DSL right-hand-side token is a boolean literal,
 * ignoring case ("true"/"TRUE"/"False" all match).
 *
 * @param rhs token text to inspect (must be non-null)
 * @return true when the token is "true" or "false" in any casing
 */
public static boolean isTrueOrFalse(String rhs) {
    boolean isTrueLiteral = rhs.equalsIgnoreCase("true");
    boolean isFalseLiteral = rhs.equalsIgnoreCase("false");
    return isTrueLiteral || isFalseLiteral;
}
public static class Advice {
private String raw;
private String actual;
......@@ -123,7 +127,7 @@ public class IdentifierHelper {
updateTypeInfo(lookup, context);
isTrait = lookup.isTraitType(context);
updateEdgeInfo(lookup, context);
introduceType = !context.hasAlias(parts[0]);
introduceType = !isPrimitive() && !context.hasAlias(parts[0]);
updateSubTypes(lookup, context);
}
}
......@@ -237,12 +241,12 @@ public class IdentifierHelper {
return actual;
}
public boolean isNewContext() {
return newContext;
}
public boolean isDate() {
return isDate;
}
}
// True when the raw identifier was dot-qualified (e.g. "Asset.name"),
// i.e. splitting it yielded more than one segment.
public boolean hasParts() {
    return parts.length > 1;
}
}
}
......@@ -18,6 +18,8 @@
package org.apache.atlas.query;
import org.apache.atlas.AtlasConfiguration;
public class QueryParams {
private int limit;
private int offset;
......@@ -47,4 +49,21 @@ public class QueryParams {
public void offset(int offset) {
this.offset = offset;
}
/**
 * Factory that clamps caller-supplied paging values to the configured search
 * bounds: a limit outside (0, SEARCH_MAX_LIMIT] is replaced by
 * SEARCH_DEFAULT_LIMIT, and a non-positive offset becomes 0.
 *
 * @param suppliedLimit  requested page size (may be out of range)
 * @param suppliedOffset requested starting offset (may be negative)
 * @return QueryParams carrying the sanitized limit/offset pair
 */
public static QueryParams getNormalizedParams(int suppliedLimit, int suppliedOffset) {
    final int maxLimit = AtlasConfiguration.SEARCH_MAX_LIMIT.getInt();
    final int limit = (suppliedLimit > 0 && suppliedLimit <= maxLimit)
            ? suppliedLimit
            : AtlasConfiguration.SEARCH_DEFAULT_LIMIT.getInt();
    // Anything at or below zero starts from the beginning of the result set.
    final int offset = Math.max(suppliedOffset, 0);
    return new QueryParams(limit, offset);
}
}
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.atlas.query;
import org.apache.atlas.exception.AtlasBaseException;
import org.apache.atlas.model.TypeCategory;
import org.apache.atlas.model.typedef.AtlasBaseTypeDef;
import org.apache.atlas.type.*;
import org.apache.commons.lang.StringUtils;
import java.util.List;
/**
 * {@link Lookup} implementation backed by the {@link AtlasTypeRegistry}.
 * Resolution failures are not thrown to the caller; instead the message is
 * appended to the shared {@code errorList} and a neutral value (null / "" /
 * false) is returned, so query translation can continue and report all
 * problems at once.
 */
class RegistryBasedLookup implements Lookup {
    // Accumulates human-readable resolution errors; shared with the composer that owns it.
    private final List<String> errorList;
    private final AtlasTypeRegistry typeRegistry;

    public RegistryBasedLookup(List<String> errorList, AtlasTypeRegistry typeRegistry) {
        this.errorList = errorList;
        this.typeRegistry = typeRegistry;
    }

    /** Resolves a type by name; records the error and returns null when unknown. */
    @Override
    public AtlasType getType(String typeName) {
        try {
            return typeRegistry.getType(typeName);
        } catch (AtlasBaseException e) {
            addError(e.getMessage());
        }
        return null;
    }

    /**
     * Returns the fully-qualified attribute name for {@code name} on the
     * context's active entity type; "" when there is no active type or the
     * attribute cannot be resolved (the error is recorded in the latter case).
     */
    @Override
    public String getQualifiedName(GremlinQueryComposer.Context context, String name) {
        try {
            AtlasEntityType et = context.getActiveEntityType();
            if(et == null) {
                return "";
            }
            return et.getQualifiedAttributeName(name);
        } catch (AtlasBaseException e) {
            addError(e.getMessage());
        }
        return "";
    }

    /** True when the attribute on the active entity type is PRIMITIVE or ENUM. */
    @Override
    public boolean isPrimitive(GremlinQueryComposer.Context context, String attributeName) {
        AtlasEntityType et = context.getActiveEntityType();
        if(et == null) {
            return false;
        }
        AtlasType attr = et.getAttributeType(attributeName);
        if(attr == null) {
            return false;
        }
        TypeCategory attrTypeCategory = attr.getTypeCategory();
        return (attrTypeCategory != null) && (attrTypeCategory == TypeCategory.PRIMITIVE || attrTypeCategory == TypeCategory.ENUM);
    }

    /** Edge label used to traverse the given relationship attribute; "" when unresolvable. */
    @Override
    public String getRelationshipEdgeLabel(GremlinQueryComposer.Context context, String attributeName) {
        AtlasEntityType et = context.getActiveEntityType();
        if(et == null) {
            return "";
        }
        AtlasStructType.AtlasAttribute attr = et.getAttribute(attributeName);
        return (attr != null) ? attr.getRelationshipEdgeLabel() : "";
    }

    /** True when the active entity type declares an attribute with this name. */
    @Override
    public boolean hasAttribute(GremlinQueryComposer.Context context, String typeName) {
        return (context.getActiveEntityType() != null) && context.getActiveEntityType().getAttribute(typeName) != null;
    }

    @Override
    public boolean doesTypeHaveSubTypes(GremlinQueryComposer.Context context) {
        return (context.getActiveEntityType() != null && context.getActiveEntityType().getAllSubTypes().size() > 0);
    }

    /**
     * Comma-joined, single-quoted list of the active type and all its subtypes
     * (for a Gremlin within(...) clause); null when there is no active type.
     */
    @Override
    public String getTypeAndSubTypes(GremlinQueryComposer.Context context) {
        String[] str = context.getActiveEntityType() != null ?
                        context.getActiveEntityType().getTypeAndAllSubTypes().toArray(new String[]{}) :
                        new String[]{};
        if(str.length == 0) {
            return null;
        }
        String[] quoted = new String[str.length];
        for (int i = 0; i < str.length; i++) {
            quoted[i] = IdentifierHelper.getQuoted(str[i]);
        }
        return StringUtils.join(quoted, ",");
    }

    /** True when the context's active type is a classification (trait). */
    @Override
    public boolean isTraitType(GremlinQueryComposer.Context context) {
        return (context.getActiveType() != null &&
                context.getActiveType().getTypeCategory() == TypeCategory.CLASSIFICATION);
    }

    /**
     * Type name reached by following the attribute {@code item} from the active
     * entity type; for an ARRAY attribute the element's object type is returned.
     * Returns "" with no active type, or null when the attribute is unknown.
     */
    @Override
    public String getTypeFromEdge(GremlinQueryComposer.Context context, String item) {
        AtlasEntityType et = context.getActiveEntityType();
        if(et == null) {
            return "";
        }
        AtlasStructType.AtlasAttribute attr = et.getAttribute(item);
        if(attr == null) {
            return null;
        }
        AtlasType at = attr.getAttributeType();
        if(at.getTypeCategory() == TypeCategory.ARRAY) {
            AtlasArrayType arrType = ((AtlasArrayType)at);
            // NOTE(review): assumes every array-typed attribute reached here has
            // object-id elements; a primitive-element array would throw
            // ClassCastException — confirm callers only pass relationship attributes.
            return ((AtlasBuiltInTypes.AtlasObjectIdType) arrType.getElementType()).getObjectType();
        }
        return context.getActiveEntityType().getAttribute(item).getTypeName();
    }

    /** True when the attribute's type is the built-in Atlas date type. */
    @Override
    public boolean isDate(GremlinQueryComposer.Context context, String attributeName) {
        AtlasEntityType et = context.getActiveEntityType();
        if (et == null) {
            return false;
        }
        AtlasType attr = et.getAttributeType(attributeName);
        return attr != null && attr.getTypeName().equals(AtlasBaseTypeDef.ATLAS_TYPE_DATE);
    }

    // Protected so subclasses (e.g. test doubles) can intercept error recording.
    protected void addError(String s) {
        errorList.add(s);
    }
}
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.atlas.query;
import java.util.Arrays;
import java.util.LinkedHashMap;
import java.util.Map;
import java.util.StringJoiner;
/**
 * Collects the pieces of a DSL {@code select} clause — the selected items,
 * their display labels, and which positions are aggregate functions
 * (count/sum/min/max) — and builds the inline Groovy assignment expressions
 * the Gremlin translator splices into its transform function.
 *
 * Populated by DSLVisitor.visitSelectExpr and consumed by GremlinQueryComposer.
 */
class SelectClauseComposer {
    private String[] labels;                        // display labels, parallel to items
    private String[] items;                         // selected expressions / aggregate markers
    private Map<String, String> itemAssignmentExprs; // item -> inline Groovy "def x=...;" expr (insertion-ordered)

    // Index of each aggregate within items; -1 means that aggregate is absent.
    private int countIdx = -1;
    private int sumIdx   = -1;
    private int maxIdx   = -1;
    private int minIdx   = -1;
    private int aggCount = 0;   // how many aggregate setters have been invoked

    // Set by the composer when the select can be satisfied without a transform.
    public boolean isSelectNoop;

    public SelectClauseComposer() {}

    public String[] getItems() {
        return items;
    }

    public void setItems(final String[] items) {
        this.items = items;
    }

    /**
     * If {@code currentIndex} is one of the registered aggregate positions,
     * rewrites items[currentIndex] to the aggregate name and records the
     * corresponding inline assignment expression built from {@code qualifiedName}.
     *
     * @return true when the index matched an aggregate and was rewritten
     */
    public boolean updateAsApplicable(int currentIndex, String qualifiedName) {
        boolean ret = false;
        if (currentIndex == getCountIdx()) {
            ret = assign(currentIndex, "count",
                    GremlinClause.INLINE_COUNT.get(), GremlinClause.INLINE_ASSIGNMENT);
        } else if (currentIndex == getMinIdx()) {
            ret = assign(currentIndex, "min", qualifiedName,
                    GremlinClause.INLINE_ASSIGNMENT, GremlinClause.INLINE_MIN);
        } else if (currentIndex == getMaxIdx()) {
            ret = assign(currentIndex, "max", qualifiedName,
                    GremlinClause.INLINE_ASSIGNMENT, GremlinClause.INLINE_MAX);
        } else if (currentIndex == getSumIdx()) {
            ret = assign(currentIndex, "sum", qualifiedName,
                    GremlinClause.INLINE_ASSIGNMENT, GremlinClause.INLINE_SUM);
        }
        return ret;
    }

    // Records an inline assignment for an item, lazily creating the ordered map.
    private boolean assign(String item, String assignExpr) {
        if (itemAssignmentExprs == null) {
            itemAssignmentExprs = new LinkedHashMap<>();
        }
        itemAssignmentExprs.put(item, assignExpr);
        return true;
    }

    // Replaces items[i] with the clause template filled with qualifiedName.
    public boolean assign(int i, String qualifiedName, GremlinClause clause) {
        items[i] = clause.get(qualifiedName);
        return true;
    }

    // Sets items[i] to the aggregate name s and stores clause(s, p1) as its assignment.
    private boolean assign(int i, String s, String p1, GremlinClause clause) {
        items[i] = s;
        return assign(items[i], clause.get(s, p1));
    }

    // Sets items[i] to s and stores inline(s, clause(p1, p1)) — e.g. "def min=r.min({...}).value(...);".
    private boolean assign(int i, String s, String p1, GremlinClause inline, GremlinClause clause) {
        items[i] = s;
        return assign(items[i], inline.get(s, clause.get(p1, p1)));
    }

    private int getCountIdx() {
        return countIdx;
    }

    public void setCountIdx(final int countIdx) {
        this.countIdx = countIdx;
        aggCount++;
    }

    private int getSumIdx() {
        return sumIdx;
    }

    public void setSumIdx(final int sumIdx) {
        this.sumIdx = sumIdx;
        aggCount++;
    }

    private int getMaxIdx() {
        return maxIdx;
    }

    public void setMaxIdx(final int maxIdx) {
        this.maxIdx = maxIdx;
        aggCount++;
    }

    private int getMinIdx() {
        return minIdx;
    }

    public void setMinIdx(final int minIdx) {
        this.minIdx = minIdx;
        aggCount++;
    }

    public String[] getLabels() {
        return labels;
    }

    public void setLabels(final String[] labels) {
        this.labels = labels;
    }

    public boolean hasAssignmentExpr() {
        return itemAssignmentExprs != null && !itemAssignmentExprs.isEmpty();
    }

    /** True when every selected item is an aggregate (count/sum/min/max). */
    public boolean onlyAggregators() {
        return aggCount > 0 && aggCount == items.length;
    }

    /** Quoted, comma-joined labels for the result header row. */
    public String getLabelHeader() {
        return getJoinedQuotedStr(getLabels());
    }

    public String getItemsString() {
        return String.join(",", getItems());
    }

    // Caller must ensure at least one assignment exists (see hasAssignmentExpr).
    public String getAssignmentExprString(){
        return String.join(" ", itemAssignmentExprs.values());
    }

    // Quotes each element with ' unless it already contains one, then with ".
    private String getJoinedQuotedStr(String[] elements) {
        StringJoiner joiner = new StringJoiner(",");
        Arrays.stream(elements)
              .map(x -> x.contains("'") ? "\"" + x + "\"" : "'" + x + "'")
              .forEach(joiner::add);
        return joiner.toString();
    }

    public String getItem(int i) {
        return items[i];
    }

    public String getLabel(int i) {
        return labels[i];
    }
}
// Generated from repository/src/main/java/org/apache/atlas/query/antlr4/AtlasDSLLexer.g4 by ANTLR 4.7
package org.apache.atlas.query.antlr4;
import org.antlr.v4.runtime.CharStream;
import org.antlr.v4.runtime.Lexer;
import org.antlr.v4.runtime.RuntimeMetaData;
import org.antlr.v4.runtime.Vocabulary;
import org.antlr.v4.runtime.VocabularyImpl;
import org.antlr.v4.runtime.atn.ATN;
import org.antlr.v4.runtime.atn.ATNDeserializer;
import org.antlr.v4.runtime.atn.LexerATNSimulator;
import org.antlr.v4.runtime.atn.PredictionContextCache;
import org.antlr.v4.runtime.CharStream;
import org.antlr.v4.runtime.Token;
import org.antlr.v4.runtime.TokenStream;
import org.antlr.v4.runtime.*;
import org.antlr.v4.runtime.atn.*;
import org.antlr.v4.runtime.dfa.DFA;
import org.antlr.v4.runtime.misc.*;
@SuppressWarnings({"all", "warnings", "unchecked", "unused", "cast"})
public class AtlasDSLLexer extends Lexer {
......
......@@ -27,9 +27,9 @@ operator: (K_LT | K_LTE | K_EQ | K_NEQ | K_GT | K_GTE | K_LIKE) ;
sortOrder: K_ASC | K_DESC ;
valueArray: K_LBRACKET STRING (K_COMMA STRING)* K_RBRACKET ;
valueArray: K_LBRACKET ID (K_COMMA ID)* K_RBRACKET ;
literal: BOOL | NUMBER | FLOATING_NUMBER | (STRING | valueArray) ;
literal: BOOL | NUMBER | FLOATING_NUMBER | (ID | valueArray) ;
// Composite rules
limitClause: K_LIMIT NUMBER ;
......
// Generated from repository/src/main/java/org/apache/atlas/query/antlr4/AtlasDSLParser.g4 by ANTLR 4.7
package org.apache.atlas.query.antlr4;
import org.antlr.v4.runtime.atn.*;
import org.antlr.v4.runtime.dfa.DFA;
......@@ -239,9 +238,9 @@ public class AtlasDSLParser extends Parser {
public static class ValueArrayContext extends ParserRuleContext {
public TerminalNode K_LBRACKET() { return getToken(AtlasDSLParser.K_LBRACKET, 0); }
public List<TerminalNode> STRING() { return getTokens(AtlasDSLParser.STRING); }
public TerminalNode STRING(int i) {
return getToken(AtlasDSLParser.STRING, i);
public List<TerminalNode> ID() { return getTokens(AtlasDSLParser.ID); }
public TerminalNode ID(int i) {
return getToken(AtlasDSLParser.ID, i);
}
public TerminalNode K_RBRACKET() { return getToken(AtlasDSLParser.K_RBRACKET, 0); }
public List<TerminalNode> K_COMMA() { return getTokens(AtlasDSLParser.K_COMMA); }
......@@ -269,7 +268,7 @@ public class AtlasDSLParser extends Parser {
setState(82);
match(K_LBRACKET);
setState(83);
match(STRING);
match(ID);
setState(88);
_errHandler.sync(this);
_la = _input.LA(1);
......@@ -279,7 +278,7 @@ public class AtlasDSLParser extends Parser {
setState(84);
match(K_COMMA);
setState(85);
match(STRING);
match(ID);
}
}
setState(90);
......@@ -305,7 +304,7 @@ public class AtlasDSLParser extends Parser {
public TerminalNode BOOL() { return getToken(AtlasDSLParser.BOOL, 0); }
public TerminalNode NUMBER() { return getToken(AtlasDSLParser.NUMBER, 0); }
public TerminalNode FLOATING_NUMBER() { return getToken(AtlasDSLParser.FLOATING_NUMBER, 0); }
public TerminalNode STRING() { return getToken(AtlasDSLParser.STRING, 0); }
public TerminalNode ID() { return getToken(AtlasDSLParser.ID, 0); }
public ValueArrayContext valueArray() {
return getRuleContext(ValueArrayContext.class,0);
}
......@@ -349,16 +348,16 @@ public class AtlasDSLParser extends Parser {
}
break;
case K_LBRACKET:
case STRING:
case ID:
enterOuterAlt(_localctx, 4);
{
setState(98);
_errHandler.sync(this);
switch (_input.LA(1)) {
case STRING:
case ID:
{
setState(96);
match(STRING);
match(ID);
}
break;
case K_LBRACKET:
......@@ -496,30 +495,23 @@ public class AtlasDSLParser extends Parser {
case BOOL:
case K_LBRACKET:
case ID:
case STRING:
enterOuterAlt(_localctx, 1);
{
setState(110);
_errHandler.sync(this);
switch (_input.LA(1)) {
case ID:
switch ( getInterpreter().adaptivePredict(_input,3,_ctx) ) {
case 1:
{
setState(108);
identifier();
}
break;
case NUMBER:
case FLOATING_NUMBER:
case BOOL:
case K_LBRACKET:
case STRING:
case 2:
{
setState(109);
literal();
}
break;
default:
throw new NoViableAltException(this);
}
}
break;
......@@ -1491,25 +1483,19 @@ public class AtlasDSLParser extends Parser {
{
setState(208);
_errHandler.sync(this);
switch (_input.LA(1)) {
case ID:
switch ( getInterpreter().adaptivePredict(_input,12,_ctx) ) {
case 1:
{
setState(206);
identifier();
}
break;
case NUMBER:
case FLOATING_NUMBER:
case BOOL:
case K_LBRACKET:
case STRING:
case 2:
{
setState(207);
literal();
}
break;
default:
throw new NoViableAltException(this);
}
setState(210);
match(K_AS);
......@@ -1621,25 +1607,19 @@ public class AtlasDSLParser extends Parser {
{
setState(221);
_errHandler.sync(this);
switch (_input.LA(1)) {
case ID:
switch ( getInterpreter().adaptivePredict(_input,14,_ctx) ) {
case 1:
{
setState(219);
identifier();
}
break;
case NUMBER:
case FLOATING_NUMBER:
case BOOL:
case K_LBRACKET:
case STRING:
case 2:
{
setState(220);
literal();
}
break;
default:
throw new NoViableAltException(this);
}
}
break;
......@@ -2029,7 +2009,7 @@ public class AtlasDSLParser extends Parser {
setState(261);
_errHandler.sync(this);
_la = _input.LA(1);
while ((((_la) & ~0x3f) == 0 && ((1L << _la) & ((1L << NUMBER) | (1L << FLOATING_NUMBER) | (1L << BOOL) | (1L << K_LPAREN) | (1L << K_LBRACKET) | (1L << K_FROM) | (1L << K_WHERE) | (1L << K_MAX) | (1L << K_MIN) | (1L << K_SUM) | (1L << K_COUNT) | (1L << ID) | (1L << STRING))) != 0)) {
while ((((_la) & ~0x3f) == 0 && ((1L << _la) & ((1L << NUMBER) | (1L << FLOATING_NUMBER) | (1L << BOOL) | (1L << K_LPAREN) | (1L << K_LBRACKET) | (1L << K_FROM) | (1L << K_WHERE) | (1L << K_MAX) | (1L << K_MIN) | (1L << K_SUM) | (1L << K_COUNT) | (1L << ID))) != 0)) {
{
{
setState(258);
......@@ -2226,10 +2206,10 @@ public class AtlasDSLParser extends Parser {
"\2\64\u00d2\3\2\2\2\66\u00d7\3\2\2\28\u00e1\3\2\2\2:\u00e3\3\2\2\2<\u00e6"+
"\3\2\2\2>\u00ea\3\2\2\2@\u00ed\3\2\2\2B\u00f4\3\2\2\2D\u00f6\3\2\2\2F"+
"\u00fb\3\2\2\2H\u0103\3\2\2\2J\u010c\3\2\2\2L\u010e\3\2\2\2NO\7\62\2\2"+
"O\3\3\2\2\2PQ\t\2\2\2Q\5\3\2\2\2RS\t\3\2\2S\7\3\2\2\2TU\7\23\2\2UZ\7\63"+
"\2\2VW\7\t\2\2WY\7\63\2\2XV\3\2\2\2Y\\\3\2\2\2ZX\3\2\2\2Z[\3\2\2\2[]\3"+
"O\3\3\2\2\2PQ\t\2\2\2Q\5\3\2\2\2RS\t\3\2\2S\7\3\2\2\2TU\7\23\2\2UZ\7\62"+
"\2\2VW\7\t\2\2WY\7\62\2\2XV\3\2\2\2Y\\\3\2\2\2ZX\3\2\2\2Z[\3\2\2\2[]\3"+
"\2\2\2\\Z\3\2\2\2]^\7\25\2\2^\t\3\2\2\2_g\7\b\2\2`g\7\6\2\2ag\7\7\2\2"+
"be\7\63\2\2ce\5\b\5\2db\3\2\2\2dc\3\2\2\2eg\3\2\2\2f_\3\2\2\2f`\3\2\2"+
"be\7\62\2\2ce\5\b\5\2db\3\2\2\2dc\3\2\2\2eg\3\2\2\2f_\3\2\2\2f`\3\2\2"+
"\2fa\3\2\2\2fd\3\2\2\2g\13\3\2\2\2hi\7 \2\2ij\7\6\2\2j\r\3\2\2\2kl\7\'"+
"\2\2lm\7\6\2\2m\17\3\2\2\2nq\5\2\2\2oq\5\n\6\2pn\3\2\2\2po\3\2\2\2qw\3"+
"\2\2\2rs\7\22\2\2st\5,\27\2tu\7\24\2\2uw\3\2\2\2vp\3\2\2\2vr\3\2\2\2w"+
......
// Generated from repository/src/main/java/org/apache/atlas/query/antlr4/AtlasDSLParser.g4 by ANTLR 4.7
package org.apache.atlas.query.antlr4;
import org.antlr.v4.runtime.tree.AbstractParseTreeVisitor;
......
// Generated from repository/src/main/java/org/apache/atlas/query/antlr4/AtlasDSLParser.g4 by ANTLR 4.7
package org.apache.atlas.query.antlr4;
import org.antlr.v4.runtime.tree.ParseTreeVisitor;
......
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.atlas.services;
import com.google.inject.Inject;
import org.apache.atlas.TestModules;
import org.apache.atlas.discovery.EntityDiscoveryService;
import org.apache.atlas.exception.AtlasBaseException;
import org.apache.atlas.model.discovery.AtlasSearchResult;
import org.apache.atlas.repository.graphdb.AtlasGraph;
import org.apache.atlas.store.AtlasTypeDefStore;
import org.apache.atlas.type.AtlasTypeRegistry;
import org.junit.Assert;
import org.testng.annotations.Guice;
import org.testng.annotations.Test;
@Guice(modules = TestModules.TestOnlyModule.class)
public class EntityDiscoveryServiceTest {

    @Inject
    AtlasTypeRegistry typeRegistry;

    // NOTE(review): typeDefStore and atlasGraph are not referenced by the test
    // body; presumably injected to force store/graph initialization — confirm
    // before removing.
    @Inject
    private AtlasTypeDefStore typeDefStore;

    @Inject
    private AtlasGraph atlasGraph;

    @Inject
    EntityDiscoveryService entityDiscoveryService;

    /**
     * Verifies that a DSL query using a qualified attribute filter
     * ("hive_table where Asset.name = ...") parses, translates and executes
     * without throwing, and returns a non-null result.
     */
    @Test
    public void dslTest() throws AtlasBaseException {
        String dslQuery = "hive_table where Asset.name = \"testtable_x_0\"";

        AtlasSearchResult result = entityDiscoveryService.searchUsingDslQuery(dslQuery, 20, 0);

        Assert.assertNotNull(result);
    }
}
Markdown is supported
0% or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment