Commit f28d0f54 by Ashutosh Mestry Committed by apoorvnaik

ATLAS-2356: Optional where clause.

parent ecf8095f
......@@ -35,7 +35,6 @@ import org.apache.atlas.model.profile.AtlasUserSavedSearch;
import org.apache.atlas.query.AtlasDSL;
import org.apache.atlas.query.GremlinQuery;
import org.apache.atlas.query.QueryParams;
import org.apache.atlas.query.antlr4.AtlasDSLParser;
import org.apache.atlas.repository.Constants;
import org.apache.atlas.repository.graph.GraphBackedSearchIndexer;
import org.apache.atlas.repository.graph.GraphHelper;
......@@ -68,7 +67,6 @@ import javax.script.Bindings;
import javax.script.ScriptEngine;
import javax.script.ScriptException;
import java.util.*;
import java.util.stream.Stream;
import static org.apache.atlas.AtlasErrorCode.CLASSIFICATION_NOT_FOUND;
import static org.apache.atlas.AtlasErrorCode.DISCOVERY_QUERY_FAILED;
......@@ -897,27 +895,16 @@ public class EntityDiscoveryService implements AtlasDiscoveryService {
@Override
public String getDslQueryUsingTypeNameClassification(String query, String typeName, String classification) {
final String whereDSLKeyword = "where";
final String[] keywords = new String[]{whereDSLKeyword, "isa", "is", "limit", "orderby", "has"};
final String whereFormat = whereDSLKeyword + " %s";
String queryStr = query == null ? "" : query;
if (StringUtils.isNotEmpty(typeName)) {
if(StringUtils.isNotEmpty(query)) {
String s = query.toLowerCase();
if(!Stream.of(keywords).anyMatch(x -> s.startsWith(x))) {
queryStr = String.format(whereFormat, query);
}
}
queryStr = escapeTypeName(typeName) + " " + queryStr;
}
if (StringUtils.isNotEmpty(classification)) {
// isa works with a type name only - like hive_column isa PII; it doesn't work with more complex query
if (StringUtils.isEmpty(query)) {
queryStr += String.format("%s %s %s", queryStr, "isa", classification);
queryStr += (" isa " + classification);
}
}
return queryStr;
......
......@@ -17,6 +17,7 @@
*/
package org.apache.atlas.query;
import com.google.common.annotations.VisibleForTesting;
import org.antlr.v4.runtime.BaseErrorListener;
import org.antlr.v4.runtime.CharStreams;
import org.antlr.v4.runtime.CommonTokenStream;
......@@ -53,7 +54,8 @@ public class AtlasDSL {
return RESERVED_KEYWORDS.contains(word);
}
private static AtlasDSLParser.QueryContext parse(String queryStr) throws AtlasBaseException {
@VisibleForTesting
static AtlasDSLParser.QueryContext parse(String queryStr) throws AtlasBaseException {
AtlasDSLParser.QueryContext ret;
try {
......
......@@ -24,9 +24,9 @@ import org.apache.commons.collections.CollectionUtils;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.util.ArrayList;
import java.util.List;
import java.util.Objects;
import java.util.*;
import static org.apache.atlas.query.antlr4.AtlasDSLParser.RULE_whereClause;
public class DSLVisitor extends AtlasDSLParserBaseVisitor<Void> {
private static final Logger LOG = LoggerFactory.getLogger(DSLVisitor.class);
......@@ -34,6 +34,7 @@ public class DSLVisitor extends AtlasDSLParserBaseVisitor<Void> {
private static final String AND = "AND";
private static final String OR = "OR";
private Set<Integer> visitedRuleIndexes = new HashSet<>();
private final GremlinQueryComposer gremlinQueryComposer;
public DSLVisitor(GremlinQueryComposer gremlinQueryComposer) {
......@@ -41,12 +42,27 @@ public class DSLVisitor extends AtlasDSLParserBaseVisitor<Void> {
}
@Override
public Void visitSpaceDelimitedQueries(SpaceDelimitedQueriesContext ctx) {
    // Record that this rule was entered so later clause visitors can check
    // (via hasVisitedRule) which grammar context they appear in.
    addVisitedRule(ctx.getRuleIndex());
    return super.visitSpaceDelimitedQueries(ctx);
}
@Override
public Void visitCommaDelimitedQueries(CommaDelimitedQueriesContext ctx) {
    // Record that this rule was entered so later clause visitors can check
    // (via hasVisitedRule) which grammar context they appear in.
    addVisitedRule(ctx.getRuleIndex());
    return super.visitCommaDelimitedQueries(ctx);
}
@Override
public Void visitIsClause(IsClauseContext ctx) {
    if (LOG.isDebugEnabled()) {
        LOG.debug("=> DSLVisitor.visitIsClause({})", ctx);
    }

    // Only treat is/isa as a FROM-level trait constraint when no where clause
    // has been visited; inside a where clause the is/isa is handled as part of
    // the filter expression. The diff residue here had an extra unguarded
    // addFromIsA call before the guard, which would add the trait twice.
    if (!hasVisitedRule(RULE_whereClause)) {
        gremlinQueryComposer.addFromIsA(ctx.arithE().getText(), ctx.identifier().getText());
    }

    return super.visitIsClause(ctx);
}
......@@ -56,7 +72,10 @@ public class DSLVisitor extends AtlasDSLParserBaseVisitor<Void> {
LOG.debug("=> DSLVisitor.visitHasClause({})", ctx);
}
gremlinQueryComposer.addFromProperty(ctx.arithE().getText(), ctx.identifier().getText());
if(!hasVisitedRule(RULE_whereClause)) {
gremlinQueryComposer.addFromProperty(ctx.arithE().getText(), ctx.identifier().getText());
}
return super.visitHasClause(ctx);
}
......@@ -140,10 +159,8 @@ public class DSLVisitor extends AtlasDSLParserBaseVisitor<Void> {
LOG.debug("=> DSLVisitor.visitWhereClause({})", ctx);
}
// The first expr shouldn't be processed if there are following exprs
addVisitedRule(ctx.getRuleIndex());
ExprContext expr = ctx.expr();
processExpr(expr, gremlinQueryComposer);
return super.visitWhereClause(ctx);
}
......@@ -170,6 +187,19 @@ public class DSLVisitor extends AtlasDSLParserBaseVisitor<Void> {
}
@Override
public Void visitSingleQrySrc(SingleQrySrcContext ctx) {
    // A bare expression acts as an implicit filter only when: no explicit
    // where clause was seen, this source is not a from-expression, an
    // expression is actually present, and a type (FROM) clause already
    // exists on the composer. Condition order preserves short-circuiting.
    if (!hasVisitedRule(RULE_whereClause)
            && ctx.fromExpression() == null
            && ctx.expr() != null
            && gremlinQueryComposer.hasFromClause()) {
        processExpr(ctx.expr(), gremlinQueryComposer);
    }

    return super.visitSingleQrySrc(ctx);
}
@Override
public Void visitGroupByExpression(GroupByExpressionContext ctx) {
if (LOG.isDebugEnabled()) {
LOG.debug("=> DSLVisitor.visitGroupByExpression({})", ctx);
......@@ -238,11 +268,11 @@ public class DSLVisitor extends AtlasDSLParserBaseVisitor<Void> {
}
private void processExpr(final CompEContext compE, final GremlinQueryComposer gremlinQueryComposer) {
if (compE != null && compE.isClause() == null && compE.hasClause() == null && compE.isClause() == null) {
if (compE != null && compE.isClause() == null && compE.hasClause() == null) {
ComparisonClauseContext comparisonClause = compE.comparisonClause();
// The nested expression might have ANDs/ORs
if(comparisonClause == null) {
if (comparisonClause == null) {
ExprContext exprContext = compE.arithE().multiE().atomE().expr();
// Only extract comparison clause if there are no nested exprRight clauses
if (CollectionUtils.isEmpty(exprContext.exprRight())) {
......@@ -254,7 +284,7 @@ public class DSLVisitor extends AtlasDSLParserBaseVisitor<Void> {
String lhs = comparisonClause.arithE(0).getText();
String op, rhs;
AtomEContext atomECtx = comparisonClause.arithE(1).multiE().atomE();
if(atomECtx.literal() == null ||
if (atomECtx.literal() == null ||
(atomECtx.literal() != null && atomECtx.literal().valueArray() == null)) {
op = comparisonClause.operator().getText().toUpperCase();
rhs = comparisonClause.arithE(1).getText();
......@@ -268,6 +298,14 @@ public class DSLVisitor extends AtlasDSLParserBaseVisitor<Void> {
processExpr(compE.arithE().multiE().atomE().expr(), gremlinQueryComposer);
}
}
if (compE != null && compE.isClause() != null) {
gremlinQueryComposer.addFromIsA(compE.isClause().arithE().getText(), compE.isClause().identifier().getText());
}
if (compE != null && compE.hasClause() != null) {
gremlinQueryComposer.addFromProperty(compE.hasClause().arithE().getText(), compE.hasClause().identifier().getText());
}
}
private String getInClause(AtomEContext atomEContext) {
......@@ -281,4 +319,12 @@ public class DSLVisitor extends AtlasDSLParserBaseVisitor<Void> {
return sb.toString();
}
// Remembers that a grammar rule (by ANTLR rule index) was visited.
private void addVisitedRule(int ruleIndex) {
    visitedRuleIndexes.add(ruleIndex);
}
// True if a rule with this ANTLR rule index was recorded via addVisitedRule.
private boolean hasVisitedRule(int ruleIndex) {
    return visitedRuleIndexes.contains(ruleIndex);
}
}
......@@ -56,16 +56,13 @@ public class GremlinQueryComposer {
private int providedOffset = DEFAULT_QUERY_RESULT_OFFSET;
private Context context;
private static final ThreadLocal<DateFormat> DSL_DATE_FORMAT = new ThreadLocal<DateFormat>() {
@Override
public DateFormat initialValue() {
DateFormat ret = new SimpleDateFormat(ISO8601_FORMAT);
private static final ThreadLocal<DateFormat> DSL_DATE_FORMAT = ThreadLocal.withInitial(() -> {
DateFormat ret = new SimpleDateFormat(ISO8601_FORMAT);
ret.setTimeZone(TimeZone.getTimeZone("UTC"));
ret.setTimeZone(TimeZone.getTimeZone("UTC"));
return ret;
}
};
return ret;
});
public GremlinQueryComposer(Lookup registryLookup, final AtlasDSL.QueryMetadata qmd, boolean isNestedQuery) {
this.isNestedQuery = isNestedQuery;
......@@ -124,13 +121,19 @@ public class GremlinQueryComposer {
LOG.debug("addFromProperty(typeName={}, attribute={})", typeName, attribute);
}
addFrom(typeName);
if(!isNestedQuery) {
addFrom(typeName);
}
add(GremlinClause.HAS_PROPERTY,
IdentifierHelper.getQualifiedName(lookup, context, attribute));
}
/**
 * Adds a trait (classification) constraint, optionally preceded by a type
 * (FROM) clause.
 *
 * For nested queries the type constraint is established by the enclosing
 * query, so the FROM clause is added only at the top level. The diff residue
 * here left the old unguarded addFrom call alongside the guarded one, which
 * would add the type clause twice.
 */
public void addFromIsA(String typeName, String traitName) {
    if (!isNestedQuery) {
        addFrom(typeName);
    }

    add(GremlinClause.TRAIT, traitName);
}
......@@ -284,6 +287,15 @@ public class GremlinQueryComposer {
return s;
}
/**
 * Returns the composer's errors merged with the context's errors.
 *
 * The merge skips entries already present so that calling this method more
 * than once does not duplicate the context errors in the returned list
 * (the previous addAll-based merge re-appended them on every call).
 */
public List<String> getErrorList() {
    combineErrorLists();
    return errorList;
}

// Folds context errors into errorList without duplicating entries.
private void combineErrorLists() {
    for (String error : context.getErrorList()) {
        if (!errorList.contains(error)) {
            errorList.add(error);
        }
    }
}
private String getTransformedClauses(String[] items) {
String ret;
String body = String.join(".", Stream.of(items).filter(Objects::nonNull).collect(Collectors.toList()));
......@@ -406,7 +418,7 @@ public class GremlinQueryComposer {
}
private void moveToLast(GremlinClause clause) {
int index = queryClauses.hasClause(clause);
int index = queryClauses.contains(clause);
if (-1 == index) {
return;
}
......@@ -475,6 +487,11 @@ public class GremlinQueryComposer {
add(GremlinClause.GROUP_BY, ia.getQualifiedName());
}
// True when a type constraint (FROM) has already been added to the query,
// either as a plain type clause or a type-within clause.
public boolean hasFromClause() {
    final boolean hasType       = queryClauses.contains(GremlinClause.HAS_TYPE) != -1;
    final boolean hasTypeWithin = queryClauses.contains(GremlinClause.HAS_TYPE_WITHIN) != -1;

    return hasType || hasTypeWithin;
}
private static class GremlinClauseValue {
private final GremlinClause clause;
private final String value;
......@@ -540,7 +557,7 @@ public class GremlinQueryComposer {
return list.size();
}
public int hasClause(GremlinClause clause) {
public int contains(GremlinClause clause) {
for (int i = 0; i < list.size(); i++) {
if (list.get(i).getClause() == clause)
return i;
......@@ -640,5 +657,10 @@ public class GremlinQueryComposer {
aliasMap.put(alias, typeName);
}
/**
 * Returns the accumulated errors, merged with the lookup's errors.
 *
 * Skips entries already present so repeated calls do not duplicate the
 * lookup errors (the previous addAll-based merge re-appended them each call).
 */
public List<String> getErrorList() {
    for (String error : lookup.getErrorList()) {
        if (!errorList.contains(error)) {
            errorList.add(error);
        }
    }

    return errorList;
}
}
}
......@@ -20,6 +20,9 @@ package org.apache.atlas.query;
import org.apache.atlas.type.AtlasType;
import java.util.Collection;
import java.util.List;
public interface Lookup {
AtlasType getType(String typeName);
......@@ -40,4 +43,6 @@ public interface Lookup {
String getTypeFromEdge(GremlinQueryComposer.Context context, String item);
boolean isDate(GremlinQueryComposer.Context context, String attributeName);
List<String> getErrorList();
}
......@@ -159,4 +159,9 @@ class RegistryBasedLookup implements Lookup {
// Records an error message; surfaced to callers through getErrorList().
protected void addError(String s) {
    errorList.add(s);
}
@Override
public List<String> getErrorList() {
    // Exposes the errors accumulated via addError().
    return errorList;
}
}
......@@ -17,19 +17,12 @@
*/
package org.apache.atlas.query;
import org.antlr.v4.runtime.CharStreams;
import org.antlr.v4.runtime.CommonTokenStream;
import org.antlr.v4.runtime.TokenStream;
import org.apache.atlas.TestModules;
import org.apache.atlas.discovery.EntityDiscoveryService;
import org.apache.atlas.exception.AtlasBaseException;
import org.apache.atlas.model.discovery.AtlasSearchResult;
import org.apache.atlas.query.antlr4.AtlasDSLLexer;
import org.apache.atlas.query.antlr4.AtlasDSLParser;
import org.apache.atlas.runner.LocalSolrRunner;
import org.apache.commons.collections.CollectionUtils;
import org.apache.commons.lang.StringUtils;
import org.testng.ITestContext;
import org.testng.annotations.*;
import javax.inject.Inject;
......@@ -38,13 +31,6 @@ import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.io.ByteArrayInputStream;
import java.io.IOException;
import java.io.InputStream;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import static org.testng.Assert.assertEquals;
import static org.testng.Assert.assertNotNull;
......@@ -120,6 +106,9 @@ public class DSLQueriesTest extends BasicTestSetup {
public void comparison(String query, int expected) throws AtlasBaseException {
    AtlasSearchResult searchResult = discoveryService.searchUsingDslQuery(query, 25, 0);
    assertSearchResult(searchResult, expected);

    // ATLAS-2356: the same query with the optional "where" keyword removed
    // must return the same result count.
    AtlasSearchResult searchResult2 = discoveryService.searchUsingDslQuery(query.replace("where", " "), 25, 0);
    assertSearchResult(searchResult2, expected);
}
@DataProvider(name = "basicProvider")
......@@ -128,7 +117,6 @@ public class DSLQueriesTest extends BasicTestSetup {
{"from hive_db", 3},
{"hive_db", 3},
{"hive_db where hive_db.name=\"Reporting\"", 1},
{"hive_db hive_db.name = \"Reporting\"", 3},
{"hive_db where hive_db.name=\"Reporting\" select name, owner", 1},
{"hive_db has name", 3},
{"from hive_table", 10},
......@@ -155,8 +143,6 @@ public class DSLQueriesTest extends BasicTestSetup {
{"`Log Data`", 3},
{"`isa`", 0},
{"hive_table as t, sd, hive_column as c where t.name=\"sales_fact\" select c.name as colName, c.dataType as colType", 0},
{"hive_table where name='sales_fact', db where name='Sales'", 1},
{"hive_table where name='sales_fact', db where name='Reporting'", 0},
{"DataSet where name='sales_fact'", 1},
{"Asset where name='sales_fact'", 1}
};
......@@ -164,6 +150,11 @@ public class DSLQueriesTest extends BasicTestSetup {
@Test(dataProvider = "basicProvider")
public void basic(String query, int expected) throws AtlasBaseException {
    queryAssert(query, expected);
    // NOTE(review): the optional-"where" variant is disabled here, unlike the
    // other tests — presumably some basicProvider rows still fail without
    // "where"; confirm and enable, or document which rows block it.
    //queryAssert(query.replace("where", " "), expected);
}
// Runs the DSL query with default paging (limit 25, offset 0) and verifies
// the returned entity count.
private void queryAssert(String query, int expected) throws AtlasBaseException {
    final AtlasSearchResult result = discoveryService.searchUsingDslQuery(query, 25, 0);

    assertSearchResult(result, expected);
}
......@@ -182,8 +173,8 @@ public class DSLQueriesTest extends BasicTestSetup {
@Test(dataProvider = "limitProvider")
public void limit(String query, int expected, int limit, int offset) throws AtlasBaseException {
    // Use the provider-supplied limit/offset directly: queryAssert pins the
    // API-level paging to 25/0 and would silently ignore these parameters.
    assertSearchResult(discoveryService.searchUsingDslQuery(query, limit, offset), expected);

    // ATLAS-2356: same query must behave identically with the optional
    // "where" keyword removed.
    assertSearchResult(discoveryService.searchUsingDslQuery(query.replace("where", " "), limit, offset), expected);
}
@DataProvider(name = "syntaxVerifierProvider")
......@@ -268,25 +259,16 @@ public class DSLQueriesTest extends BasicTestSetup {
{"hive_table where (name = \"sales_fact\" and createTime >= \"2014-12-11T02:35:58.440Z\" ) select name as _col_0, createTime as _col_1 limit 10 offset 1", 0},
{"hive_table where (name = \"sales_fact\" and createTime >= \"2014-12-11T02:35:58.440Z\" ) select name as _col_0, createTime as _col_1 limit 10", 1},
{"hive_table where (name = \"sales_fact\" and createTime >= \"2014-12-11T02:35:58.440Z\" ) select name as _col_0, createTime as _col_1 limit 0 offset 1", 0},
{"hive_table where name='sales_fact', db where name='Sales'", 1},
{"hive_table where name='sales_fact', db where name='Sales' limit 10", 1},
{"hive_table where name='sales_fact', db where name='Sales' limit 10 offset 1", 0},
{"hive_table where name='sales_fact', db where name='Reporting'", 0},
{"hive_table where name='sales_fact', db where name='Reporting' limit 10", 0},
{"hive_table where name='sales_fact', db where name='Reporting' limit 10 offset 1", 0},
{"hive_db as d where owner = ['John ETL', 'Jane BI']", 2},
{"hive_db as d where owner = ['John ETL', 'Jane BI'] limit 10", 2},
{"hive_db as d where owner = ['John ETL', 'Jane BI'] limit 10 offset 1", 1},
};
}
@Test(dataProvider = "syntaxVerifierProvider")
public void syntax(String query, int expected) throws AtlasBaseException {
    // The diff residue duplicated the old inline search/assert lines next to
    // the new queryAssert calls, running each query twice; keep the new form.
    queryAssert(query, expected);

    // ATLAS-2356: same query with the optional "where" removed.
    queryAssert(query.replace("where", " "), expected);
}
@DataProvider(name = "orderByProvider")
......@@ -365,8 +347,8 @@ public class DSLQueriesTest extends BasicTestSetup {
@Test(dataProvider = "orderByProvider")
public void orderBy(String query, int expected, String orderBy, boolean ascending) throws AtlasBaseException {
    // The diff residue duplicated the old inline search/assert lines next to
    // the new queryAssert calls; keep the new form. The orderBy/ascending
    // parameters stay in the signature for provider compatibility —
    // NOTE(review): ordering itself is not asserted here; confirm whether a
    // result-order check is intended.
    queryAssert(query, expected);

    // ATLAS-2356: same query with the optional "where" removed.
    queryAssert(query.replace("where", " "), expected);
}
@DataProvider(name = "likeQueriesProvider")
......@@ -376,13 +358,18 @@ public class DSLQueriesTest extends BasicTestSetup {
{"hive_db where name like \"R*\"", 1},
{"hive_db where hive_db.name like \"R???rt?*\" or hive_db.name like \"S?l?s\" or hive_db.name like\"Log*\"", 3},
{"hive_db where hive_db.name like \"R???rt?*\" and hive_db.name like \"S?l?s\" and hive_db.name like\"Log*\"", 0},
{"hive_table where name like 'sales*', db where name like 'Sa?es'", 1},
{"hive_table where name like 'sales*' and db.name like 'Sa?es'", 1},
{"hive_table where db.name like \"Sa*\"", 4},
{"hive_table where db.name like \"Sa*\" and name like \"*dim\"", 3},
};
}
@Test(dataProvider = "likeQueriesProvider")
public void likeQueries(String query, int expected) throws AtlasBaseException {
    queryAssert(query, expected);
    // ATLAS-2356: the like-query must return the same count with the
    // optional "where" keyword removed.
    queryAssert(query.replace("where", " "), expected);
}
@DataProvider(name = "minMaxCountProvider")
private Object[][] minMaxCountQueries() {
return new Object[][]{
......@@ -499,14 +486,26 @@ public class DSLQueriesTest extends BasicTestSetup {
@Test(dataProvider = "minMaxCountProvider")
public void minMaxCount(String query, FieldValueValidator fv) throws AtlasBaseException {
    queryAssert(query, fv);
    // ATLAS-2356: aggregate queries must return the same attribute results
    // with the optional "where" keyword removed.
    queryAssert(query.replace("where", " "), fv);
}
// Runs the DSL query with default paging (limit 25, offset 0) and validates
// the attribute-style result against the expected field/value layout.
private void queryAssert(String query, FieldValueValidator fv) throws AtlasBaseException {
    assertSearchResult(discoveryService.searchUsingDslQuery(query, 25, 0), fv);
}
@Test(dataProvider = "likeQueriesProvider")
public void likeQueries(String query, int expected) throws AtlasBaseException {
AtlasSearchResult searchResult = discoveryService.searchUsingDslQuery(query, 25, 0);
assertSearchResult(searchResult, expected);
// Validates an attribute-style search result (select/aggregate queries):
// no entity list, and attribute names/value rows matching the validator.
private void assertSearchResult(AtlasSearchResult searchResult, FieldValueValidator expected) {
    assertNotNull(searchResult);
    // Attribute-style results carry no entity list.
    assertNull(searchResult.getEntities());

    assertEquals(searchResult.getAttributes().getName().size(), expected.getFieldNamesCount());
    // Column names must match, in order.
    for (int i = 0; i < searchResult.getAttributes().getName().size(); i++) {
        String s = searchResult.getAttributes().getName().get(i);
        assertEquals(s, expected.fieldNames[i]);
    }

    // Row count must match; individual cell values are not compared here.
    assertEquals(searchResult.getAttributes().getValues().size(), expected.values.size());
}
private void assertSearchResult(AtlasSearchResult searchResult, int expected) {
......@@ -523,19 +522,6 @@ public class DSLQueriesTest extends BasicTestSetup {
}
}
private void assertSearchResult(AtlasSearchResult searchResult, FieldValueValidator expected) {
assertNotNull(searchResult);
assertNull(searchResult.getEntities());
assertEquals(searchResult.getAttributes().getName().size(), expected.getFieldNamesCount());
for (int i = 0; i < searchResult.getAttributes().getName().size(); i++) {
String s = searchResult.getAttributes().getName().get(i);
assertEquals(s, expected.fieldNames[i]);
}
assertEquals(searchResult.getAttributes().getValues().size(), expected.values.size());
}
private class FieldValueValidator {
class ResultObject {
Map<String, Object> fieldValues = new HashMap<>();
......
......@@ -17,29 +17,22 @@
*/
package org.apache.atlas.query;
import org.antlr.v4.runtime.CharStreams;
import org.antlr.v4.runtime.CommonTokenStream;
import org.antlr.v4.runtime.TokenStream;
import org.apache.atlas.query.antlr4.AtlasDSLLexer;
import org.apache.atlas.exception.AtlasBaseException;
import org.apache.atlas.query.antlr4.AtlasDSLParser;
import org.apache.atlas.type.AtlasEntityType;
import org.apache.atlas.type.AtlasType;
import org.apache.atlas.type.AtlasTypeRegistry;
import org.apache.commons.lang.StringUtils;
import org.testng.annotations.DataProvider;
import org.testng.annotations.Test;
import java.io.ByteArrayInputStream;
import java.io.IOException;
import java.io.InputStream;
import java.util.ArrayList;
import java.util.List;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.when;
import static org.testng.Assert.assertEquals;
import static org.testng.Assert.assertNotNull;
import static org.testng.Assert.assertNull;
import static org.testng.Assert.assertTrue;
import static org.testng.Assert.assertFalse;
public class GremlinQueryComposerTest {
private List<String> errorList = new ArrayList<>();
......@@ -103,6 +96,8 @@ public class GremlinQueryComposerTest {
@Test
public void valueArray() {
    verify("DB where owner = ['hdfs', 'anon']", "g.V().has('__typeName', 'DB').has('DB.owner', within('hdfs','anon')).limit(25).toList()");
    // ATLAS-2356: value-array filters compose the same Gremlin without the
    // optional "where" keyword, and with an alias in place.
    verify("DB owner = ['hdfs', 'anon']", "g.V().has('__typeName', 'DB').has('DB.owner', within('hdfs','anon')).limit(25).toList()");
    verify("hive_db as d owner = ['hdfs', 'anon']", "g.V().has('__typeName', 'hive_db').as('d').has('hive_db.owner', within('hdfs','anon')).limit(25).toList()");
}
@Test
public void groupByMin() {
......@@ -222,71 +217,90 @@ public class GremlinQueryComposerTest {
verify(String.format(queryFormat, "FALSE"), String.format(expectedFormat, "FALSE"));
}
// Pairs of {DSL query, expected Gremlin} covering nested boolean filters:
// or/and chains, parenthesized grouping, mixed precedence, date comparison,
// and referred-entity (db.name) traversals.
@DataProvider(name = "nestedQueriesProvider")
private Object[][] nestedQueriesSource() {
    return new Object[][]{
        {"Table where name=\"sales_fact\" or name=\"testtable_1\"",
            "g.V().has('__typeName', 'Table').or(__.has('Table.name', eq(\"sales_fact\")),__.has('Table.name', eq(\"testtable_1\"))).limit(25).toList()"},
        {"Table where name=\"sales_fact\" and name=\"testtable_1\"",
            "g.V().has('__typeName', 'Table').and(__.has('Table.name', eq(\"sales_fact\")),__.has('Table.name', eq(\"testtable_1\"))).limit(25).toList()"},
        {"Table where name=\"sales_fact\" or name=\"testtable_1\" or name=\"testtable_2\"",
            "g.V().has('__typeName', 'Table')" +
                ".or(" +
                "__.has('Table.name', eq(\"sales_fact\"))," +
                "__.has('Table.name', eq(\"testtable_1\"))," +
                "__.has('Table.name', eq(\"testtable_2\"))" +
                ").limit(25).toList()"},
        {"Table where name=\"sales_fact\" and name=\"testtable_1\" and name=\"testtable_2\"",
            "g.V().has('__typeName', 'Table')" +
                ".and(" +
                "__.has('Table.name', eq(\"sales_fact\"))," +
                "__.has('Table.name', eq(\"testtable_1\"))," +
                "__.has('Table.name', eq(\"testtable_2\"))" +
                ").limit(25).toList()"},
        {"Table where (name=\"sales_fact\" or name=\"testtable_1\") and name=\"testtable_2\"",
            "g.V().has('__typeName', 'Table')" +
                ".and(" +
                "__.or(" +
                "__.has('Table.name', eq(\"sales_fact\"))," +
                "__.has('Table.name', eq(\"testtable_1\"))" +
                ")," +
                "__.has('Table.name', eq(\"testtable_2\")))" +
                ".limit(25).toList()"},
        {"Table where name=\"sales_fact\" or (name=\"testtable_1\" and name=\"testtable_2\")",
            "g.V().has('__typeName', 'Table')" +
                ".or(" +
                "__.has('Table.name', eq(\"sales_fact\"))," +
                "__.and(" +
                "__.has('Table.name', eq(\"testtable_1\"))," +
                "__.has('Table.name', eq(\"testtable_2\")))" +
                ")" +
                ".limit(25).toList()"},
        // Note: bare "or ... and" binds like "(a or b) and c" in this grammar.
        {"Table where name=\"sales_fact\" or name=\"testtable_1\" and name=\"testtable_2\"",
            "g.V().has('__typeName', 'Table')" +
                ".and(" +
                "__.or(" +
                "__.has('Table.name', eq(\"sales_fact\"))," +
                "__.has('Table.name', eq(\"testtable_1\"))" +
                ")," +
                "__.has('Table.name', eq(\"testtable_2\")))" +
                ".limit(25).toList()"},
        {"Table where (name=\"sales_fact\" and owner=\"Joe\") OR (name=\"sales_fact_daily_mv\" and owner=\"Joe BI\")",
            "g.V().has('__typeName', 'Table')" +
                ".or(" +
                "__.and(" +
                "__.has('Table.name', eq(\"sales_fact\"))," +
                "__.has('Table.owner', eq(\"Joe\"))" +
                ")," +
                "__.and(" +
                "__.has('Table.name', eq(\"sales_fact_daily_mv\"))," +
                "__.has('Table.owner', eq(\"Joe BI\"))" +
                "))" +
                ".limit(25).toList()"},
        {"Table where owner=\"hdfs\" or ((name=\"testtable_1\" or name=\"testtable_2\") and createdTime < \"2017-12-12T02:35:58.440Z\")",
            "g.V().has('__typeName', 'Table').or(__.has('Table.owner', eq(\"hdfs\")),__.and(__.or(__.has('Table.name', eq(\"testtable_1\")),__.has('Table.name', eq(\"testtable_2\"))),__.has('Table.createdTime', lt('1513046158440')))).limit(25).toList()"},
        {"hive_db where hive_db.name='Reporting' and hive_db.createTime < '2017-12-12T02:35:58.440Z'",
            "g.V().has('__typeName', 'hive_db').and(__.has('hive_db.name', eq('Reporting')),__.has('hive_db.createTime', lt('1513046158440'))).limit(25).toList()"},
        {"Table where db.name='Sales' and db.clusterName='cl1'",
            "g.V().has('__typeName', 'Table').and(__.out('__Table.db').has('DB.name', eq('Sales')).dedup().in('__Table.db'),__.out('__Table.db').has('DB.clusterName', eq('cl1')).dedup().in('__Table.db')).limit(25).toList()"},
    };
}
@Test(dataProvider = "nestedQueriesProvider")
public void nestedQueries(String query, String expectedGremlin) {
    // ATLAS-2356: each nested query must compose the same Gremlin both with
    // and without the optional "where" keyword.
    verify(query, expectedGremlin);

    final String withoutWhere = query.replace("where", " ");
    verify(withoutWhere, expectedGremlin);
}
@Test
public void nestedQueries() {
verify("Table where name=\"sales_fact\" or name=\"testtable_1\"",
"g.V().has('__typeName', 'Table').or(__.has('Table.name', eq(\"sales_fact\")),__.has('Table.name', eq(\"testtable_1\"))).limit(25).toList()");
verify("Table where name=\"sales_fact\" and name=\"testtable_1\"",
"g.V().has('__typeName', 'Table').and(__.has('Table.name', eq(\"sales_fact\")),__.has('Table.name', eq(\"testtable_1\"))).limit(25).toList()");
verify("Table where name=\"sales_fact\" or name=\"testtable_1\" or name=\"testtable_2\"",
"g.V().has('__typeName', 'Table')" +
".or(" +
"__.has('Table.name', eq(\"sales_fact\"))," +
"__.has('Table.name', eq(\"testtable_1\"))," +
"__.has('Table.name', eq(\"testtable_2\"))" +
").limit(25).toList()");
verify("Table where name=\"sales_fact\" and name=\"testtable_1\" and name=\"testtable_2\"",
"g.V().has('__typeName', 'Table')" +
".and(" +
"__.has('Table.name', eq(\"sales_fact\"))," +
"__.has('Table.name', eq(\"testtable_1\"))," +
"__.has('Table.name', eq(\"testtable_2\"))" +
").limit(25).toList()");
verify("Table where (name=\"sales_fact\" or name=\"testtable_1\") and name=\"testtable_2\"",
"g.V().has('__typeName', 'Table')" +
".and(" +
"__.or(" +
"__.has('Table.name', eq(\"sales_fact\"))," +
"__.has('Table.name', eq(\"testtable_1\"))" +
")," +
"__.has('Table.name', eq(\"testtable_2\")))" +
".limit(25).toList()");
verify("Table where name=\"sales_fact\" or (name=\"testtable_1\" and name=\"testtable_2\")",
"g.V().has('__typeName', 'Table')" +
".or(" +
"__.has('Table.name', eq(\"sales_fact\"))," +
"__.and(" +
"__.has('Table.name', eq(\"testtable_1\"))," +
"__.has('Table.name', eq(\"testtable_2\")))" +
")" +
".limit(25).toList()");
verify("Table where name=\"sales_fact\" or name=\"testtable_1\" and name=\"testtable_2\"",
"g.V().has('__typeName', 'Table')" +
".and(" +
"__.or(" +
"__.has('Table.name', eq(\"sales_fact\"))," +
"__.has('Table.name', eq(\"testtable_1\"))" +
")," +
"__.has('Table.name', eq(\"testtable_2\")))" +
".limit(25).toList()");
verify("Table where (name=\"sales_fact\" and owner=\"Joe\") OR (name=\"sales_fact_daily_mv\" and owner=\"Joe BI\")",
"g.V().has('__typeName', 'Table')" +
".or(" +
"__.and(" +
"__.has('Table.name', eq(\"sales_fact\"))," +
"__.has('Table.owner', eq(\"Joe\"))" +
")," +
"__.and(" +
"__.has('Table.name', eq(\"sales_fact_daily_mv\"))," +
"__.has('Table.owner', eq(\"Joe BI\"))" +
"))" +
".limit(25).toList()");
verify("Table where owner=\"hdfs\" or ((name=\"testtable_1\" or name=\"testtable_2\") and createdTime < \"2017-12-12T02:35:58.440Z\")",
"g.V().has('__typeName', 'Table').or(__.has('Table.owner', eq(\"hdfs\")),__.and(__.or(__.has('Table.name', eq(\"testtable_1\")),__.has('Table.name', eq(\"testtable_2\"))),__.has('Table.createdTime', lt('1513046158440')))).limit(25).toList()");
verify("hive_db where hive_db.name='Reporting' and hive_db.createTime < '2017-12-12T02:35:58.440Z'",
"g.V().has('__typeName', 'hive_db').and(__.has('hive_db.name', eq('Reporting')),__.has('hive_db.createTime', lt('1513046158440'))).limit(25).toList()");
verify("Table where db.name='Sales' and db.clusterName='cl1'",
"g.V().has('__typeName', 'Table').and(__.out('__Table.db').has('DB.name', eq('Sales')).dedup().in('__Table.db'),__.out('__Table.db').has('DB.clusterName', eq('cl1')).dedup().in('__Table.db')).limit(25).toList()");
public void hasInWhereClause() {
verify("Table as t where t has name and t isa Dimension",
"g.V().has('__typeName', 'Table').as('t').and(__.has('Table.name'),__.has('__traitNames', within('Dimension'))).limit(25).toList()");
verify("Table as t where t has name and t.name = 'sales_fact'",
"g.V().has('__typeName', 'Table').as('t').and(__.has('Table.name'),__.has('Table.name', eq('sales_fact'))).limit(25).toList()");
verify("Table as t where t is Dimension and t.name = 'sales_fact'",
"g.V().has('__typeName', 'Table').as('t').and(__.has('__traitNames', within('Dimension')),__.has('Table.name', eq('sales_fact'))).limit(25).toList()");
verify("Table isa 'Dimension' and t.name = 'sales_fact'", "g.V().has('__typeName', 'Table').has('__traitNames', within(''Dimension'')).limit(25).toList()");
}
@Test
......@@ -305,23 +319,13 @@ public class GremlinQueryComposerTest {
}
// Parses a DSL query via the AtlasDSL.Parser entry point and asserts the
// parse succeeded. The diff residue interleaved the removed hand-wired
// ANTLR lexer/token-stream setup with the new parser call; keep the new path.
private AtlasDSLParser.QueryContext getParsedQuery(String query) {
    AtlasDSL.Parser             parser       = new AtlasDSL.Parser();
    AtlasDSLParser.QueryContext queryContext = null;

    try {
        queryContext = parser.parse(query);
    } catch (AtlasBaseException e) {
        // e is never null here, so this unconditionally fails the test with
        // the parser's error message.
        assertFalse(e != null, e.getMessage());
    }

    assertNotNull(queryContext);
    assertNull(queryContext.exception);

    return queryContext;
}
......@@ -430,5 +434,10 @@ public class GremlinQueryComposerTest {
return attributeName.equals("createdTime") ||
attributeName.equals("createTime");
}
@Override
public List<String> getErrorList() {
return errorList;
}
}
}
Markdown is supported
0% or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment