Commit 4c9a3bf7 by Ashutosh Mestry, committed by Madhan Neethiraj

ATLAS-2360: error handling improvements in DSL

parent 6b4c3aa7
@@ -103,7 +103,7 @@ public enum AtlasErrorCode {
     SAVED_SEARCH_CHANGE_USER(400, "ATLAS-400-00-056", "saved-search {0} can not be moved from user {1} to {2}"),
     INVALID_QUERY_PARAM_LENGTH(400, "ATLAS-400-00-057", "Length of query param {0} exceeds the limit"),
     INVALID_QUERY_LENGTH(400, "ATLAS-400-00-058", "Invalid query length, update {0} to change the limit"),
-    INVALID_DSL_QUERY(400, "ATLAS-400-00-059", "Invalid DSL query: {0} Reason: {1}. Please refer to Atlas DSL grammar for more information"),
+    INVALID_DSL_QUERY(400, "ATLAS-400-00-059", "Invalid DSL query: {0} | Reason: {1}. Please refer to Atlas DSL grammar for more information"),

     // All Not found enums go here
     TYPE_NAME_NOT_FOUND(404, "ATLAS-404-00-001", "Given typename {0} was invalid"),
...
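The only change in this file is the `|` separator between the failing query and the reason. A standalone sketch (not part of the commit) of how such a {0}/{1} template renders, assuming java.text.MessageFormat-style substitution, which the placeholder syntax suggests; the query and reason strings are made-up examples:

import java.text.MessageFormat;

public class InvalidDslMessageDemo {
    public static void main(String[] args) {
        // Template copied from INVALID_DSL_QUERY above.
        String template = "Invalid DSL query: {0} | Reason: {1}. Please refer to Atlas DSL grammar for more information";

        System.out.println(MessageFormat.format(template, "hive_db where badAttr = 1", "Invalid attribute"));
        // prints: Invalid DSL query: hive_db where badAttr = 1 | Reason: Invalid attribute. Please refer to ...
    }
}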
@@ -29,6 +29,8 @@ import org.apache.atlas.exception.AtlasBaseException;
 import org.apache.atlas.query.antlr4.AtlasDSLLexer;
 import org.apache.atlas.query.antlr4.AtlasDSLParser;
 import org.apache.atlas.type.AtlasTypeRegistry;
+import org.apache.commons.collections.CollectionUtils;
+import org.apache.commons.lang3.StringUtils;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;

@@ -58,7 +60,6 @@ public class AtlasDSL {
     static AtlasDSLParser.QueryContext parse(String queryStr) throws AtlasBaseException {
         AtlasDSLParser.QueryContext ret;
         try {
             InputStream stream = new ByteArrayInputStream(queryStr.getBytes());
             AtlasDSLLexer lexer = new AtlasDSLLexer(CharStreams.fromStream(stream));
             Validator validator = new Validator();

@@ -81,7 +82,6 @@ public class AtlasDSL {
             return ret;
         }
     }

     static class Validator extends BaseErrorListener {
@@ -108,25 +108,50 @@ public class AtlasDSL {
         private final AtlasTypeRegistry typeRegistry;
         private final int offset;
         private final int limit;
+        private final String query;

         public Translator(String query, AtlasTypeRegistry typeRegistry, int offset, int limit) throws AtlasBaseException {
-            this(Parser.parse(query), typeRegistry, offset, limit);
-        }
-
-        private Translator(final AtlasDSLParser.QueryContext queryContext, AtlasTypeRegistry typeRegistry, int offset, int limit) {
-            this.queryContext = queryContext;
+            this.query        = query;
+            this.queryContext = Parser.parse(query);
             this.typeRegistry = typeRegistry;
             this.offset       = offset;
             this.limit        = limit;
         }

-        public GremlinQuery translate() {
+        public GremlinQuery translate() throws AtlasBaseException {
             QueryMetadata        queryMetadata        = new QueryMetadata(queryContext);
             GremlinQueryComposer gremlinQueryComposer = new GremlinQueryComposer(typeRegistry, queryMetadata, limit, offset);
             DSLVisitor           dslVisitor           = new DSLVisitor(gremlinQueryComposer);

-            queryContext.accept(dslVisitor);
-            return new GremlinQuery(gremlinQueryComposer.get(), queryMetadata.hasSelect());
+            try {
+                queryContext.accept(dslVisitor);
+                processErrorList(gremlinQueryComposer, null);
+
+                return new GremlinQuery(gremlinQueryComposer.get(), queryMetadata.hasSelect());
+            } catch (Exception e) {
+                processErrorList(gremlinQueryComposer, e);
+            }
+
+            return null;
+        }
+
+        private void processErrorList(GremlinQueryComposer gremlinQueryComposer, Exception e) throws AtlasBaseException {
+            final String errorMessage;
+
+            if (CollectionUtils.isNotEmpty(gremlinQueryComposer.getErrorList())) {
+                errorMessage = StringUtils.join(gremlinQueryComposer.getErrorList(), ", ");
+            } else {
+                errorMessage = e != null ? (e.getMessage() != null ? e.getMessage() : e.toString()) : null;
+            }
+
+            if (errorMessage != null) {
+                if (e != null) {
+                    throw new AtlasBaseException(AtlasErrorCode.INVALID_DSL_QUERY, e, this.query, errorMessage);
+                }
+
+                throw new AtlasBaseException(AtlasErrorCode.INVALID_DSL_QUERY, this.query, errorMessage);
+            }
         }
     }
...
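Taken together, the Translator changes mean a caller now sees a single AtlasBaseException whether the visitor throws outright or the composer merely accumulates errors. A minimal caller-side sketch (not part of the commit), assuming AtlasDSL.Translator and GremlinQuery are visible from the caller's package and that an initialized AtlasTypeRegistry is at hand:

import org.apache.atlas.exception.AtlasBaseException;
import org.apache.atlas.query.AtlasDSL;
import org.apache.atlas.query.GremlinQuery;
import org.apache.atlas.type.AtlasTypeRegistry;

class TranslateSketch {
    // Returns the composed Gremlin query, or null after logging the failure.
    static GremlinQuery tryTranslate(AtlasTypeRegistry typeRegistry, String dsl) {
        try {
            // constructor order per the diff: (query, typeRegistry, offset, limit)
            return new AtlasDSL.Translator(dsl, typeRegistry, 0, 25).translate();
        } catch (AtlasBaseException e) {
            // message: "Invalid DSL query: <dsl> | Reason: <comma-joined errors>. Please refer to ..."
            System.err.println(e.getMessage());
            return null;
        }
    }
}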
@@ -18,6 +18,7 @@
 package org.apache.atlas.query;

+import org.apache.atlas.exception.AtlasBaseException;
 import org.apache.commons.lang.StringUtils;

 import java.util.regex.Matcher;

@@ -61,7 +62,13 @@ public class IdentifierHelper {
     public static String getQualifiedName(org.apache.atlas.query.Lookup lookup,
                                           GremlinQueryComposer.Context context,
                                           String name) {
-        return lookup.getQualifiedName(context, name);
+        try {
+            return lookup.getQualifiedName(context, name);
+        } catch (AtlasBaseException e) {
+            context.getErrorList().add(String.format("Error for %s.%s: %s", context.getActiveTypeName(), name, e.getMessage()));
+        }
+
+        return "";
     }

     public static boolean isQuoted(String val) {

@@ -101,7 +108,6 @@ public class IdentifierHelper {
         private String  attributeName;
         private boolean isPrimitive;
         private String  edgeLabel;
-        private String  edgeDirection;
         private boolean introduceType;
         private boolean hasSubtypes;
         private String  subTypes;

@@ -117,18 +123,22 @@ public class IdentifierHelper {
         }

         private void update(org.apache.atlas.query.Lookup lookup, GremlinQueryComposer.Context context) {
-            newContext = context.isEmpty();
-            if(!newContext) {
-                if(context.aliasMap.containsKey(this.raw)) {
-                    raw = context.aliasMap.get(this.raw);
-                }
-
-                updateParts();
-                updateTypeInfo(lookup, context);
-                isTrait = lookup.isTraitType(context);
-                updateEdgeInfo(lookup, context);
-                introduceType = !isPrimitive() && !context.hasAlias(parts[0]);
-                updateSubTypes(lookup, context);
+            try {
+                newContext = context.isEmpty();
+                if (!newContext) {
+                    if (context.hasAlias(this.raw)) {
+                        raw = context.getTypeNameFromAlias(this.raw);
+                    }
+
+                    updateParts();
+                    updateTypeInfo(lookup, context);
+                    isTrait = lookup.isTraitType(context);
+                    updateEdgeInfo(lookup, context);
+                    introduceType = !isPrimitive() && !context.hasAlias(parts[0]);
+                    updateSubTypes(lookup, context);
+                }
+            } catch (NullPointerException ex) {
+                context.getErrorList().add(ex.getMessage());
             }
         }
@@ -146,50 +156,65 @@ public class IdentifierHelper {
         private void updateEdgeInfo(org.apache.atlas.query.Lookup lookup, GremlinQueryComposer.Context context) {
             if(isPrimitive == false && isTrait == false) {
                 edgeLabel = lookup.getRelationshipEdgeLabel(context, attributeName);
-                edgeDirection = "OUT";
                 typeName = lookup.getTypeFromEdge(context, attributeName);
             }
         }

         private void updateTypeInfo(org.apache.atlas.query.Lookup lookup, GremlinQueryComposer.Context context) {
             if(parts.length == 1) {
-                typeName = context.getActiveTypeName();
-                attributeName = parts[0];
-                isAttribute = lookup.hasAttribute(context, typeName);
-                qualifiedName = lookup.getQualifiedName(context, attributeName);
-                isPrimitive = lookup.isPrimitive(context, attributeName);
-                setIsDate(lookup, context);
+                typeName = context.hasAlias(parts[0]) ?
+                                context.getTypeNameFromAlias(parts[0]) :
+                                context.getActiveTypeName();
+                qualifiedName = getDefaultQualifiedNameForSinglePartName(context, parts[0]);
+                attributeName = parts[0];
             }

             if(parts.length == 2) {
-                if(context.hasAlias(parts[0])) {
-                    typeName = context.getTypeNameFromAlias(parts[0]);
-                    attributeName = parts[1];
-                    isPrimitive = lookup.isPrimitive(context, attributeName);
-                    setIsDate(lookup, context);
-                }
-                else {
-                    isAttribute = lookup.hasAttribute(context, parts[0]);
-                    if(isAttribute) {
-                        attributeName = parts[0];
-                        isPrimitive = lookup.isPrimitive(context, attributeName);
-                        setIsDate(lookup, context);
-                    } else {
-                        typeName = parts[0];
-                        attributeName = parts[1];
-                        isPrimitive = lookup.isPrimitive(context, attributeName);
-                        setIsDate(lookup, context);
-                    }
+                boolean isAttrOfActiveType = lookup.hasAttribute(context, parts[0]);
+
+                if(isAttrOfActiveType) {
+                    attributeName = parts[0];
+                } else {
+                    typeName = context.hasAlias(parts[0]) ?
+                                    context.getTypeNameFromAlias(parts[0]) :
+                                    parts[0];
+                    attributeName = parts[1];
                 }
             }
-
-            qualifiedName = lookup.getQualifiedName(context, attributeName);
+
+            isAttribute = lookup.hasAttribute(context, attributeName);
+            isPrimitive = lookup.isPrimitive(context, attributeName);
+            setQualifiedName(lookup, context, isAttribute, attributeName);
+            setIsDate(lookup, context, isPrimitive, attributeName);
+        }
+
+        private String getDefaultQualifiedNameForSinglePartName(GremlinQueryComposer.Context context, String s) {
+            String qn = context.getTypeNameFromAlias(s);
+            if(StringUtils.isEmpty(qn) && SelectClauseComposer.isKeyword(s)) {
+                return s;
+            }
+
+            return qn;
+        }
+
+        private void setQualifiedName(Lookup lookup, GremlinQueryComposer.Context context, boolean isAttribute, String attrName) {
+            if(isAttribute) {
+                qualifiedName = getQualifiedName(lookup, context, attrName);
+            }
         }

-        private void setIsDate(Lookup lookup, GremlinQueryComposer.Context context) {
+        private String getQualifiedName(Lookup lookup, GremlinQueryComposer.Context context, String name) {
+            try {
+                return lookup.getQualifiedName(context, name);
+            } catch (AtlasBaseException e) {
+                context.getErrorList().add(String.format("Error for %s.%s: %s", context.getActiveTypeName(), name, e.getMessage()));
+                return "";
+            }
+        }
+
+        private void setIsDate(Lookup lookup, GremlinQueryComposer.Context context, boolean isPrimitive, String attrName) {
             if(isPrimitive) {
-                isDate = lookup.isDate(context, attributeName);
+                isDate = lookup.isDate(context, attrName);
             }
         }

@@ -248,5 +273,9 @@ public class IdentifierHelper {
         public boolean hasParts() {
             return parts.length > 1;
         }
+
+        public String getRaw() {
+            return raw;
+        }
     }
 }
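Both getQualifiedName paths above record failures in the same shape instead of throwing to the caller. A standalone sketch (all names are illustrative) of the entry that lands in the context's error list, using the same format string as the diff:

public class ErrorEntryDemo {
    public static void main(String[] args) {
        String activeTypeName = "Table";             // hypothetical active type
        String attributeName  = "badAttr";           // hypothetical unresolved attribute
        String cause          = "Invalid attribute"; // message from the caught AtlasBaseException

        System.out.println(String.format("Error for %s.%s: %s", activeTypeName, attributeName, cause));
        // prints: Error for Table.badAttr: Invalid attribute
    }
}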
@@ -18,15 +18,16 @@
 package org.apache.atlas.query;

+import org.apache.atlas.exception.AtlasBaseException;
 import org.apache.atlas.type.AtlasType;

 import java.util.Collection;
 import java.util.List;

 public interface Lookup {
-    AtlasType getType(String typeName);
+    AtlasType getType(String typeName) throws AtlasBaseException;

-    String getQualifiedName(GremlinQueryComposer.Context context, String name);
+    String getQualifiedName(GremlinQueryComposer.Context context, String name) throws AtlasBaseException;

     boolean isPrimitive(GremlinQueryComposer.Context context, String attributeName);

@@ -43,6 +44,4 @@ public interface Lookup {
     String getTypeFromEdge(GremlinQueryComposer.Context context, String item);

     boolean isDate(GremlinQueryComposer.Context context, String attributeName);
-
-    List<String> getErrorList();
 }
@@ -20,6 +20,7 @@ package org.apache.atlas.query;

 import org.apache.atlas.exception.AtlasBaseException;
 import org.apache.atlas.model.TypeCategory;
+import org.apache.atlas.model.instance.AtlasObjectId;
 import org.apache.atlas.model.typedef.AtlasBaseTypeDef;
 import org.apache.atlas.type.*;
 import org.apache.commons.lang.StringUtils;

@@ -37,30 +38,18 @@ class RegistryBasedLookup implements Lookup {
     }

     @Override
-    public AtlasType getType(String typeName) {
-        try {
-            return typeRegistry.getType(typeName);
-        } catch (AtlasBaseException e) {
-            addError(e.getMessage());
-        }
-
-        return null;
+    public AtlasType getType(String typeName) throws AtlasBaseException {
+        return typeRegistry.getType(typeName);
     }

     @Override
-    public String getQualifiedName(GremlinQueryComposer.Context context, String name) {
-        try {
-            AtlasEntityType et = context.getActiveEntityType();
-            if(et == null) {
-                return "";
-            }
-
-            return et.getQualifiedAttributeName(name);
-        } catch (AtlasBaseException e) {
-            addError(e.getMessage());
-        }
-
-        return "";
+    public String getQualifiedName(GremlinQueryComposer.Context context, String name) throws AtlasBaseException {
+        AtlasEntityType et = context.getActiveEntityType();
+        if (et == null) {
+            return "";
+        }
+
+        return et.getQualifiedAttributeName(name);
     }

     @Override
@@ -70,13 +59,29 @@ class RegistryBasedLookup implements Lookup {
             return false;
         }

-        AtlasType attr = et.getAttributeType(attributeName);
-        if(attr == null) {
+        AtlasType at = et.getAttributeType(attributeName);
+        if(at == null) {
             return false;
         }

-        TypeCategory attrTypeCategory = attr.getTypeCategory();
-        return (attrTypeCategory != null) && (attrTypeCategory == TypeCategory.PRIMITIVE || attrTypeCategory == TypeCategory.ENUM);
+        TypeCategory tc = at.getTypeCategory();
+        if (isPrimitiveUsingTypeCategory(tc)) return true;
+
+        if ((tc != null) && (tc == TypeCategory.ARRAY)) {
+            AtlasArrayType ct = ((AtlasArrayType)at);
+
+            return isPrimitiveUsingTypeCategory(ct.getElementType().getTypeCategory());
+        }
+
+        if ((tc != null) && (tc == TypeCategory.MAP)) {
+            AtlasMapType ct = ((AtlasMapType)at);
+
+            return isPrimitiveUsingTypeCategory(ct.getValueType().getTypeCategory());
+        }
+
+        return false;
+    }
+
+    private boolean isPrimitiveUsingTypeCategory(TypeCategory tc) {
+        return ((tc != null) && (tc == TypeCategory.PRIMITIVE || tc == TypeCategory.ENUM));
     }

     @Override
@@ -136,14 +141,27 @@ class RegistryBasedLookup implements Lookup {
         }

         AtlasType at = attr.getAttributeType();
-        if(at.getTypeCategory() == TypeCategory.ARRAY) {
-            AtlasArrayType arrType = ((AtlasArrayType)at);
-
-            return ((AtlasBuiltInTypes.AtlasObjectIdType) arrType.getElementType()).getObjectType();
+        switch (at.getTypeCategory()) {
+            case ARRAY:
+                AtlasArrayType arrType = ((AtlasArrayType)at);
+
+                return getCollectionElementType(arrType.getElementType());
+
+            case MAP:
+                AtlasMapType mapType = ((AtlasMapType)at);
+
+                return getCollectionElementType(mapType.getValueType());
         }

         return context.getActiveEntityType().getAttribute(item).getTypeName();
     }

+    private String getCollectionElementType(AtlasType elemType) {
+        if(elemType.getTypeCategory() == TypeCategory.OBJECT_ID_TYPE) {
+            return ((AtlasBuiltInTypes.AtlasObjectIdType)elemType).getObjectType();
+        } else {
+            return elemType.getTypeName();
+        }
+    }
+
     @Override
     public boolean isDate(GremlinQueryComposer.Context context, String attributeName) {
         AtlasEntityType et = context.getActiveEntityType();

@@ -155,13 +173,4 @@ class RegistryBasedLookup implements Lookup {
         return attr != null && attr.getTypeName().equals(AtlasBaseTypeDef.ATLAS_TYPE_DATE);
     }
-
-    protected void addError(String s) {
-        errorList.add(s);
-    }
-
-    @Override
-    public List<String> getErrorList() {
-        return errorList;
-    }
 }
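The widened isPrimitive check now accepts arrays of primitives or enums, and maps whose values are primitives or enums. A minimal sketch of the category predicate, assuming only the TypeCategory enum (its PRIMITIVE, ENUM, and ARRAY constants all appear in the diff above):

import org.apache.atlas.model.TypeCategory;

public class PrimitiveCategoryDemo {
    // Same predicate as isPrimitiveUsingTypeCategory() above.
    static boolean isPrimitiveCategory(TypeCategory tc) {
        return tc != null && (tc == TypeCategory.PRIMITIVE || tc == TypeCategory.ENUM);
    }

    public static void main(String[] args) {
        System.out.println(isPrimitiveCategory(TypeCategory.PRIMITIVE)); // true
        System.out.println(isPrimitiveCategory(TypeCategory.ENUM));      // true
        System.out.println(isPrimitiveCategory(TypeCategory.ARRAY));     // false: caller unwraps the element type first
    }
}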
@@ -24,6 +24,11 @@ import java.util.Map;
 import java.util.StringJoiner;

 class SelectClauseComposer {
+    private static final String COUNT_STR = "count";
+    private static final String MIN_STR   = "min";
+    private static final String MAX_STR   = "max";
+    private static final String SUM_STR   = "sum";
+
     public boolean isSelectNoop;

     private String[] labels;

@@ -50,16 +55,16 @@ class SelectClauseComposer {
     public boolean updateAsApplicable(int currentIndex, String qualifiedName) {
         boolean ret = false;
         if (currentIndex == getCountIdx()) {
-            ret = assign(currentIndex, "count",
+            ret = assign(currentIndex, COUNT_STR,
                     GremlinClause.INLINE_COUNT.get(), GremlinClause.INLINE_ASSIGNMENT);
         } else if (currentIndex == getMinIdx()) {
-            ret = assign(currentIndex, "min", qualifiedName,
+            ret = assign(currentIndex, MIN_STR, qualifiedName,
                     GremlinClause.INLINE_ASSIGNMENT, GremlinClause.INLINE_MIN);
         } else if (currentIndex == getMaxIdx()) {
-            ret = assign(currentIndex, "max", qualifiedName,
+            ret = assign(currentIndex, MAX_STR, qualifiedName,
                     GremlinClause.INLINE_ASSIGNMENT, GremlinClause.INLINE_MAX);
         } else if (currentIndex == getSumIdx()) {
-            ret = assign(currentIndex, "sum", qualifiedName,
+            ret = assign(currentIndex, SUM_STR, qualifiedName,
                     GremlinClause.INLINE_ASSIGNMENT, GremlinClause.INLINE_SUM);
         } else {
             attributes[currentIndex] = qualifiedName;

@@ -68,6 +73,13 @@ class SelectClauseComposer {
         return ret;
     }

+    public static boolean isKeyword(String s) {
+        return COUNT_STR.equals(s) ||
+                 MIN_STR.equals(s) ||
+                 MAX_STR.equals(s) ||
+                 SUM_STR.equals(s);
+    }
+
     public String[] getAttributes() {
         return attributes;
     }
...
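The new isKeyword helper is what lets IdentifierHelper.getDefaultQualifiedNameForSinglePartName pass aggregate names through untouched instead of resolving them as attributes. A quick sketch; since SelectClauseComposer is package-private, this assumes the demo lives in the same org.apache.atlas.query package:

package org.apache.atlas.query;

public class KeywordDemo {
    public static void main(String[] args) {
        System.out.println(SelectClauseComposer.isKeyword("count")); // true
        System.out.println(SelectClauseComposer.isKeyword("sum"));   // true
        System.out.println(SelectClauseComposer.isKeyword("name"));  // false: resolved as an attribute
    }
}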
@@ -116,6 +116,7 @@ public class DSLQueriesTest extends BasicTestSetup {
         return new Object[][]{
                 {"from hive_db", 3},
                 {"hive_db", 3},
+                {"hive_db as d select d", 3},
                 {"hive_db where hive_db.name=\"Reporting\"", 1},
                 {"hive_db where hive_db.name=\"Reporting\" select name, owner", 1},
                 {"hive_db has name", 3},

@@ -133,7 +134,7 @@ public class DSLQueriesTest extends BasicTestSetup {
                 {"hive_db where hive_db is JdbcAccess", 0},
                 {"hive_db where hive_db has name", 3},
                 {"hive_db as db1 hive_table where (db1.name = \"Reporting\")", 0},
-                {"hive_table where (name = \"sales_fact\" and createTime > \"2014-01-01\" ) select name as _col_0, createTime as _col_1", 1},
+                {"hive_table where (name = \"sales_fact\" and createTime > \"2014-01-01T0:0:0.0Z\" ) select name as _col_0, createTime as _col_1", 1},
                 {"hive_table where (name = \"sales_fact\" and createTime >= \"2014-12-11T02:35:58.440Z\" ) select name as _col_0, createTime as _col_1", 1},
                 {"Dimension", 5},
                 {"JdbcAccess", 2},

@@ -141,8 +142,6 @@ public class DSLQueriesTest extends BasicTestSetup {
                 {"Metric", 5},
                 {"PII", 4},
                 {"`Log Data`", 3},
-                {"`isa`", 0},
-                {"hive_table as t, sd, hive_column as c where t.name=\"sales_fact\" select c.name as colName, c.dataType as colType", 0},
                 {"DataSet where name='sales_fact'", 1},
                 {"Asset where name='sales_fact'", 1}
         };

@@ -151,7 +150,7 @@ public class DSLQueriesTest extends BasicTestSetup {
     @Test(dataProvider = "basicProvider")
     public void basic(String query, int expected) throws AtlasBaseException {
         queryAssert(query, expected);
-        //queryAssert(query.replace("where", " "), expected);
+        queryAssert(query.replace("where", " "), expected);
     }

     private void queryAssert(String query, int expected) throws AtlasBaseException {

@@ -249,10 +248,10 @@ public class DSLQueriesTest extends BasicTestSetup {
                 {"hive_db as db1 hive_table where (db1.name = \"Reporting\")", 0},
-                {"hive_table where (name = \"sales_fact\" and createTime > \"2014-01-01\" ) select name as _col_0, createTime as _col_1", 1},
-                {"hive_table where (name = \"sales_fact\" and createTime > \"2014-01-01\" ) select name as _col_0, createTime as _col_1 limit 10", 1},
-                {"hive_table where (name = \"sales_fact\" and createTime > \"2014-01-01\" ) select name as _col_0, createTime as _col_1 limit 10 offset 0", 1},
-                {"hive_table where (name = \"sales_fact\" and createTime > \"2014-01-01\" ) select name as _col_0, createTime as _col_1 limit 10 offset 5", 0},
+                {"hive_table where (name = \"sales_fact\" and createTime > \"2014-01-01T0:0:0.0Z\" ) select name as _col_0, createTime as _col_1", 1},
+                {"hive_table where (name = \"sales_fact\" and createTime > \"2014-01-01T0:0:0.0Z\" ) select name as _col_0, createTime as _col_1 limit 10", 1},
+                {"hive_table where (name = \"sales_fact\" and createTime > \"2014-01-01T0:0:0.0Z\" ) select name as _col_0, createTime as _col_1 limit 10 offset 0", 1},
+                {"hive_table where (name = \"sales_fact\" and createTime > \"2014-01-01T0:0:0.0Z\" ) select name as _col_0, createTime as _col_1 limit 10 offset 5", 0},
                 {"hive_table where (name = \"sales_fact\" and createTime >= \"2014-12-11T02:35:58.440Z\" ) select name as _col_0, createTime as _col_1", 1},
                 {"hive_table where (name = \"sales_fact\" and createTime >= \"2014-12-11T02:35:58.440Z\" ) select name as _col_0, createTime as _col_1 limit 10 offset 0", 1},

@@ -332,10 +331,10 @@ public class DSLQueriesTest extends BasicTestSetup {
                 {"hive_db where hive_db has name orderby hive_db.owner limit 2 offset 0", 2, "owner", true},
                 {"hive_db where hive_db has name orderby hive_db.owner limit 2 offset 1", 2, "owner", true},
-                {"hive_table where (name = \"sales_fact\" and createTime > \"2014-01-01\" ) select name as _col_0, createTime as _col_1 orderby createTime ", 1, "_col_1", true},
-                {"hive_table where (name = \"sales_fact\" and createTime > \"2014-01-01\" ) select name as _col_0, createTime as _col_1 orderby createTime limit 10 ", 1, "_col_1", true},
-                {"hive_table where (name = \"sales_fact\" and createTime > \"2014-01-01\" ) select name as _col_0, createTime as _col_1 orderby createTime limit 10 offset 0", 1, "_col_1", true},
-                {"hive_table where (name = \"sales_fact\" and createTime > \"2014-01-01\" ) select name as _col_0, createTime as _col_1 orderby createTime limit 10 offset 5", 0, "_col_1", true},
+                {"hive_table where (name = \"sales_fact\" and createTime > \"2014-01-01T0:0:0.0Z\" ) select name as _col_0, createTime as _col_1 orderby createTime ", 1, "_col_1", true},
+                {"hive_table where (name = \"sales_fact\" and createTime > \"2014-01-01T0:0:0.0Z\" ) select name as _col_0, createTime as _col_1 orderby createTime limit 10 ", 1, "_col_1", true},
+                {"hive_table where (name = \"sales_fact\" and createTime > \"2014-01-01T0:0:0.0Z\" ) select name as _col_0, createTime as _col_1 orderby createTime limit 10 offset 0", 1, "_col_1", true},
+                {"hive_table where (name = \"sales_fact\" and createTime > \"2014-01-01T0:0:0.0Z\" ) select name as _col_0, createTime as _col_1 orderby createTime limit 10 offset 5", 0, "_col_1", true},
                 {"hive_table where (name = \"sales_fact\" and createTime >= \"2014-12-11T02:35:58.440Z\" ) select name as _col_0, createTime as _col_1 orderby name ", 1, "_col_0", true},
                 {"hive_table where (name = \"sales_fact\" and createTime >= \"2014-12-11T02:35:58.440Z\" ) select name as _col_0, createTime as _col_1 orderby name limit 10 offset 0", 1, "_col_0", true},

@@ -469,8 +468,6 @@ public class DSLQueriesTest extends BasicTestSetup {
                         new FieldValueValidator()
                                 .withFieldNames("'count'", "'sum'")
                                 .withExpectedValues(4, 86) },
-                // tests to ensure that group by works with order by and limit
-                // FIXME:
                 // { "from hive_db groupby (owner) select min(name) orderby name limit 2 ",
                 //         new FieldValueValidator()
                 //                 .withFieldNames("min(name)")

@@ -490,6 +487,22 @@ public class DSLQueriesTest extends BasicTestSetup {
         queryAssert(query.replace("where", " "), fv);
     }

+    @DataProvider(name = "errorQueriesProvider")
+    private Object[][] errorQueries() {
+        return new Object[][]{
+                {"`isa`"},
+                {"PIII"},
+                {"DBBB as d select d"},
+                {"hive_table where (name = \"sales_fact\" and createTime >= \"2014-12-11\" ) select name as _col_0, createTime as _col_1 orderby name limit 0 offset 1"},
+                {"hive_table as t, sd, hive_column as c where t.name=\"sales_fact\" select c.name as colName, c.dataType as colType"}
+        };
+    }
+
+    @Test(dataProvider = "errorQueriesProvider", expectedExceptions = { AtlasBaseException.class })
+    public void errorQueries(String query) throws AtlasBaseException {
+        discoveryService.searchUsingDslQuery(query, 25, 0);
+    }
+
     private void queryAssert(String query, FieldValueValidator fv) throws AtlasBaseException {
         AtlasSearchResult searchResult = discoveryService.searchUsingDslQuery(query, 25, 0);
         assertSearchResult(searchResult, fv);
...
@@ -76,6 +76,7 @@ public class GremlinQueryComposerTest {
                 "f(g.V().has('__typeName', 'DB').as('d')";
         verify("DB as d select d.name, d.owner", expected + ".limit(25).toList())");
         verify("DB as d select d.name, d.owner limit 10", expected + ".limit(10).toList())");
+        verify("DB as d select d", "def f(r){ r }; f(g.V().has('__typeName', 'DB').as('d').limit(25).toList())");
     }

     @Test

@@ -178,8 +179,8 @@ public class GremlinQueryComposerTest {
     @Test
     public void whereClauseWithDateCompare() {
         String exSel = "def f(r){ t=[['t.name','t.owner']]; r.each({t.add([it.value('Table.name'),it.value('Table.owner')])}); t.unique(); }";
-        String exMain = "g.V().has('__typeName', 'Table').as('t').has('Table.createdTime', eq('1513046158440')).limit(25).toList()";
-        verify("Table as t where t.createdTime = \"2017-12-12T02:35:58.440Z\" select t.name, t.owner)", getExpected(exSel, exMain));
+        String exMain = "g.V().has('__typeName', 'Table').as('t').has('Table.createTime', eq('1513046158440')).limit(25).toList()";
+        verify("Table as t where t.createTime = \"2017-12-12T02:35:58.440Z\" select t.name, t.owner)", getExpected(exSel, exMain));
     }

     @Test

@@ -277,8 +278,8 @@ public class GremlinQueryComposerTest {
                         "__.has('Table.owner', eq(\"Joe BI\"))" +
                         "))" +
                         ".limit(25).toList()"},
-                {"Table where owner=\"hdfs\" or ((name=\"testtable_1\" or name=\"testtable_2\") and createdTime < \"2017-12-12T02:35:58.440Z\")",
-                        "g.V().has('__typeName', 'Table').or(__.has('Table.owner', eq(\"hdfs\")),__.and(__.or(__.has('Table.name', eq(\"testtable_1\")),__.has('Table.name', eq(\"testtable_2\"))),__.has('Table.createdTime', lt('1513046158440')))).limit(25).toList()"},
+                {"Table where owner=\"hdfs\" or ((name=\"testtable_1\" or name=\"testtable_2\") and createTime < \"2017-12-12T02:35:58.440Z\")",
+                        "g.V().has('__typeName', 'Table').or(__.has('Table.owner', eq(\"hdfs\")),__.and(__.or(__.has('Table.name', eq(\"testtable_1\")),__.has('Table.name', eq(\"testtable_2\"))),__.has('Table.createTime', lt('1513046158440')))).limit(25).toList()"},
                 {"hive_db where hive_db.name='Reporting' and hive_db.createTime < '2017-12-12T02:35:58.440Z'",
                         "g.V().has('__typeName', 'hive_db').and(__.has('hive_db.name', eq('Reporting')),__.has('hive_db.createTime', lt('1513046158440'))).limit(25).toList()"},
                 {"Table where db.name='Sales' and db.clusterName='cl1'",

@@ -293,9 +294,10 @@ public class GremlinQueryComposerTest {
     }

     @Test
-    public void hasInWhereClause() {
+    public void keywordsInWhereClause() {
         verify("Table as t where t has name and t isa Dimension",
                 "g.V().has('__typeName', 'Table').as('t').and(__.has('Table.name'),__.has('__traitNames', within('Dimension'))).limit(25).toList()");
         verify("Table as t where t has name and t.name = 'sales_fact'",
                 "g.V().has('__typeName', 'Table').as('t').and(__.has('Table.name'),__.has('Table.name', eq('sales_fact'))).limit(25).toList()");
         verify("Table as t where t is Dimension and t.name = 'sales_fact'",

@@ -306,6 +308,7 @@ public class GremlinQueryComposerTest {
     @Test
     public void invalidQueries() {
         verify("hdfs_path like h1", "");
+        // verify("hdfs_path select xxx", "");
     }

     private void verify(String dsl, String expectedGremlin) {

@@ -332,7 +335,7 @@ public class GremlinQueryComposerTest {
     private String getGremlinQuery(AtlasDSLParser.QueryContext queryContext) {
         AtlasTypeRegistry registry = mock(AtlasTypeRegistry.class);
         org.apache.atlas.query.Lookup lookup = new TestLookup(errorList, registry);
-        GremlinQueryComposer.Context context = new GremlinQueryComposer.Context(errorList, lookup);
+        GremlinQueryComposer.Context context = new GremlinQueryComposer.Context(lookup);
         AtlasDSL.QueryMetadata queryMetadata = new AtlasDSL.QueryMetadata(queryContext);
         GremlinQueryComposer gremlinQueryComposer = new GremlinQueryComposer(lookup, context, queryMetadata);

@@ -340,6 +343,7 @@ public class GremlinQueryComposerTest {
         qv.visit(queryContext);

         String s = gremlinQueryComposer.get();
+        assertEquals(gremlinQueryComposer.getErrorList().size(), 0);
         return s;
     }

@@ -367,18 +371,24 @@ public class GremlinQueryComposerTest {
         }

         @Override
-        public String getQualifiedName(GremlinQueryComposer.Context context, String name) {
+        public String getQualifiedName(GremlinQueryComposer.Context context, String name) throws AtlasBaseException {
+            if(!hasAttribute(context, name)) {
+                throw new AtlasBaseException("Invalid attribute");
+            }
+
             if(name.contains("."))
                 return name;

-            return String.format("%s.%s", context.getActiveTypeName(), name);
+            if(!context.getActiveTypeName().equals(name))
+                return String.format("%s.%s", context.getActiveTypeName(), name);
+            else
+                return name;
         }

         @Override
         public boolean isPrimitive(GremlinQueryComposer.Context context, String attributeName) {
             return attributeName.equals("name") ||
                     attributeName.equals("owner") ||
-                    attributeName.equals("createdTime") ||
                     attributeName.equals("createTime") ||
                     attributeName.equals("clusterName");
         }

@@ -394,9 +404,22 @@ public class GremlinQueryComposerTest {
         }

         @Override
-        public boolean hasAttribute(GremlinQueryComposer.Context context, String typeName) {
-            return (context.getActiveTypeName().equals("Table") && typeName.equals("db")) ||
-                    (context.getActiveTypeName().equals("Table") && typeName.equals("columns"));
+        public boolean hasAttribute(GremlinQueryComposer.Context context, String attributeName) {
+            return (context.getActiveTypeName().equals("Table") && attributeName.equals("db")) ||
+                    (context.getActiveTypeName().equals("Table") && attributeName.equals("columns")) ||
+                    (context.getActiveTypeName().equals("Table") && attributeName.equals("createTime")) ||
+                    (context.getActiveTypeName().equals("Table") && attributeName.equals("name")) ||
+                    (context.getActiveTypeName().equals("Table") && attributeName.equals("owner")) ||
+                    (context.getActiveTypeName().equals("Table") && attributeName.equals("clusterName")) ||
+                    (context.getActiveTypeName().equals("Table") && attributeName.equals("isFile")) ||
+                    (context.getActiveTypeName().equals("hive_db") && attributeName.equals("name")) ||
+                    (context.getActiveTypeName().equals("hive_db") && attributeName.equals("owner")) ||
+                    (context.getActiveTypeName().equals("hive_db") && attributeName.equals("createTime")) ||
+                    (context.getActiveTypeName().equals("DB") && attributeName.equals("name")) ||
+                    (context.getActiveTypeName().equals("DB") && attributeName.equals("owner")) ||
+                    (context.getActiveTypeName().equals("DB") && attributeName.equals("clusterName")) ||
+                    (context.getActiveTypeName().equals("Asset") && attributeName.equals("name")) ||
+                    (context.getActiveTypeName().equals("Asset") && attributeName.equals("owner"));
         }

         @Override

@@ -425,19 +448,15 @@ public class GremlinQueryComposerTest {
                 return "DB";
             } else if(context.getActiveTypeName().equals("Table") && item.equals("columns")) {
                 return "Column";
+            } else if(context.getActiveTypeName().equals(item)) {
+                return null;
             }

             return context.getActiveTypeName();
         }

         @Override
         public boolean isDate(GremlinQueryComposer.Context context, String attributeName) {
-            return attributeName.equals("createdTime") ||
-                    attributeName.equals("createTime");
+            return attributeName.equals("createTime");
         }
-
-        @Override
-        public List<String> getErrorList() {
-            return errorList;
-        }
     }
 }