Commit f28d0f54 by Ashutosh Mestry, committed by apoorvnaik

ATLAS-2356: Optional where clause.

parent ecf8095f
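Note: this commit makes the `where` keyword in Atlas DSL queries optional, so a filter can follow the type name directly. For example, the two forms below are now expected to return the same result (the second form previously appeared in the basic test data with an expected count of 3, i.e. its filter was ignored); the updated tests assert the equivalence by re-running each query with `where` replaced by a space:

    hive_db where hive_db.name="Reporting"
    hive_db hive_db.name = "Reporting"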
@@ -35,7 +35,6 @@ import org.apache.atlas.model.profile.AtlasUserSavedSearch;
import org.apache.atlas.query.AtlasDSL;
import org.apache.atlas.query.GremlinQuery;
import org.apache.atlas.query.QueryParams;
-import org.apache.atlas.query.antlr4.AtlasDSLParser;
import org.apache.atlas.repository.Constants;
import org.apache.atlas.repository.graph.GraphBackedSearchIndexer;
import org.apache.atlas.repository.graph.GraphHelper;
@@ -68,7 +67,6 @@ import javax.script.Bindings;
import javax.script.ScriptEngine;
import javax.script.ScriptException;
import java.util.*;
-import java.util.stream.Stream;
import static org.apache.atlas.AtlasErrorCode.CLASSIFICATION_NOT_FOUND;
import static org.apache.atlas.AtlasErrorCode.DISCOVERY_QUERY_FAILED;
@@ -897,27 +895,16 @@ public class EntityDiscoveryService implements AtlasDiscoveryService {
    @Override
    public String getDslQueryUsingTypeNameClassification(String query, String typeName, String classification) {
-        final String whereDSLKeyword = "where";
-        final String[] keywords = new String[]{whereDSLKeyword, "isa", "is", "limit", "orderby", "has"};
-        final String whereFormat = whereDSLKeyword + " %s";
        String queryStr = query == null ? "" : query;
        if (StringUtils.isNotEmpty(typeName)) {
-            if(StringUtils.isNotEmpty(query)) {
-                String s = query.toLowerCase();
-                if(!Stream.of(keywords).anyMatch(x -> s.startsWith(x))) {
-                    queryStr = String.format(whereFormat, query);
-                }
-            }
            queryStr = escapeTypeName(typeName) + " " + queryStr;
        }
        if (StringUtils.isNotEmpty(classification)) {
            // isa works with a type name only - like hive_column isa PII; it doesn't work with more complex query
            if (StringUtils.isEmpty(query)) {
-                queryStr += String.format("%s %s %s", queryStr, "isa", classification);
+                queryStr += (" isa " + classification);
            }
        }
        return queryStr;
...
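Note: getDslQueryUsingTypeNameClassification above no longer injects a `where` keyword in front of the caller's filter; it simply prefixes the escaped type name and, when no query text is given, appends `isa <classification>`. A minimal standalone sketch of that behavior, assuming a pass-through stand-in for escapeTypeName and folding the two nested classification checks into one condition:

    // Illustrative sketch only; escapeTypeName() below is a stand-in, not the real EntityDiscoveryService method.
    public class DslQueryBuilderSketch {
        private static boolean isEmpty(String s) { return s == null || s.isEmpty(); }

        private static String escapeTypeName(String typeName) {
            return typeName; // the real method escapes the type name; omitted here
        }

        static String build(String query, String typeName, String classification) {
            String queryStr = query == null ? "" : query;
            if (!isEmpty(typeName)) {
                queryStr = escapeTypeName(typeName) + " " + queryStr;
            }
            // isa works with a type name only, so it is appended only when no query text was supplied
            if (!isEmpty(classification) && isEmpty(query)) {
                queryStr += (" isa " + classification);
            }
            return queryStr;
        }

        public static void main(String[] args) {
            System.out.println(build(null, "hive_column", "PII"));              // hive_column  isa PII
            System.out.println(build("name = \"Reporting\"", "hive_db", null)); // hive_db name = "Reporting"
        }
    }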
@@ -17,6 +17,7 @@
 */
package org.apache.atlas.query;
+import com.google.common.annotations.VisibleForTesting;
import org.antlr.v4.runtime.BaseErrorListener;
import org.antlr.v4.runtime.CharStreams;
import org.antlr.v4.runtime.CommonTokenStream;
@@ -53,7 +54,8 @@ public class AtlasDSL {
        return RESERVED_KEYWORDS.contains(word);
    }
-    private static AtlasDSLParser.QueryContext parse(String queryStr) throws AtlasBaseException {
+    @VisibleForTesting
+    static AtlasDSLParser.QueryContext parse(String queryStr) throws AtlasBaseException {
        AtlasDSLParser.QueryContext ret;
        try {
...
@@ -24,9 +24,9 @@ import org.apache.commons.collections.CollectionUtils;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
-import java.util.ArrayList;
-import java.util.List;
-import java.util.Objects;
+import java.util.*;
+import static org.apache.atlas.query.antlr4.AtlasDSLParser.RULE_whereClause;
public class DSLVisitor extends AtlasDSLParserBaseVisitor<Void> {
    private static final Logger LOG = LoggerFactory.getLogger(DSLVisitor.class);
@@ -34,6 +34,7 @@ public class DSLVisitor extends AtlasDSLParserBaseVisitor<Void> {
    private static final String AND = "AND";
    private static final String OR = "OR";
+    private Set<Integer> visitedRuleIndexes = new HashSet<>();
    private final GremlinQueryComposer gremlinQueryComposer;
    public DSLVisitor(GremlinQueryComposer gremlinQueryComposer) {
@@ -41,12 +42,27 @@ public class DSLVisitor extends AtlasDSLParserBaseVisitor<Void> {
    }
    @Override
+    public Void visitSpaceDelimitedQueries(SpaceDelimitedQueriesContext ctx) {
+        addVisitedRule(ctx.getRuleIndex());
+        return super.visitSpaceDelimitedQueries(ctx);
+    }
+    @Override
+    public Void visitCommaDelimitedQueries(CommaDelimitedQueriesContext ctx) {
+        addVisitedRule(ctx.getRuleIndex());
+        return super.visitCommaDelimitedQueries(ctx);
+    }
+    @Override
    public Void visitIsClause(IsClauseContext ctx) {
        if (LOG.isDebugEnabled()) {
            LOG.debug("=> DSLVisitor.visitIsClause({})", ctx);
        }
+        if(!hasVisitedRule(RULE_whereClause)) {
            gremlinQueryComposer.addFromIsA(ctx.arithE().getText(), ctx.identifier().getText());
+        }
        return super.visitIsClause(ctx);
    }
@@ -56,7 +72,10 @@ public class DSLVisitor extends AtlasDSLParserBaseVisitor<Void> {
            LOG.debug("=> DSLVisitor.visitHasClause({})", ctx);
        }
+        if(!hasVisitedRule(RULE_whereClause)) {
            gremlinQueryComposer.addFromProperty(ctx.arithE().getText(), ctx.identifier().getText());
+        }
        return super.visitHasClause(ctx);
    }
@@ -140,10 +159,8 @@
            LOG.debug("=> DSLVisitor.visitWhereClause({})", ctx);
        }
-        // The first expr shouldn't be processed if there are following exprs
+        addVisitedRule(ctx.getRuleIndex());
        ExprContext expr = ctx.expr();
        processExpr(expr, gremlinQueryComposer);
        return super.visitWhereClause(ctx);
    }
@@ -170,6 +187,19 @@
    }
    @Override
+    public Void visitSingleQrySrc(SingleQrySrcContext ctx) {
+        if (!hasVisitedRule(RULE_whereClause)) {
+            if (ctx.fromExpression() == null) {
+                if (ctx.expr() != null && gremlinQueryComposer.hasFromClause()) {
+                    processExpr(ctx.expr(), gremlinQueryComposer);
+                }
+            }
+        }
+        return super.visitSingleQrySrc(ctx);
+    }
+    @Override
    public Void visitGroupByExpression(GroupByExpressionContext ctx) {
        if (LOG.isDebugEnabled()) {
            LOG.debug("=> DSLVisitor.visitGroupByExpression({})", ctx);
@@ -238,11 +268,11 @@
    }
    private void processExpr(final CompEContext compE, final GremlinQueryComposer gremlinQueryComposer) {
-        if (compE != null && compE.isClause() == null && compE.hasClause() == null && compE.isClause() == null) {
+        if (compE != null && compE.isClause() == null && compE.hasClause() == null) {
            ComparisonClauseContext comparisonClause = compE.comparisonClause();
            // The nested expression might have ANDs/ORs
-            if(comparisonClause == null) {
+            if (comparisonClause == null) {
                ExprContext exprContext = compE.arithE().multiE().atomE().expr();
                // Only extract comparison clause if there are no nested exprRight clauses
                if (CollectionUtils.isEmpty(exprContext.exprRight())) {
@@ -254,7 +284,7 @@
            String lhs = comparisonClause.arithE(0).getText();
            String op, rhs;
            AtomEContext atomECtx = comparisonClause.arithE(1).multiE().atomE();
-            if(atomECtx.literal() == null ||
+            if (atomECtx.literal() == null ||
                    (atomECtx.literal() != null && atomECtx.literal().valueArray() == null)) {
                op = comparisonClause.operator().getText().toUpperCase();
                rhs = comparisonClause.arithE(1).getText();
@@ -268,6 +298,14 @@
                processExpr(compE.arithE().multiE().atomE().expr(), gremlinQueryComposer);
            }
        }
+        if (compE != null && compE.isClause() != null) {
+            gremlinQueryComposer.addFromIsA(compE.isClause().arithE().getText(), compE.isClause().identifier().getText());
+        }
+        if (compE != null && compE.hasClause() != null) {
+            gremlinQueryComposer.addFromProperty(compE.hasClause().arithE().getText(), compE.hasClause().identifier().getText());
+        }
    }
@@ -281,4 +319,12 @@
        return sb.toString();
    }
+    private void addVisitedRule(int ruleIndex) {
+        visitedRuleIndexes.add(ruleIndex);
+    }
+    private boolean hasVisitedRule(int ruleIndex) {
+        return visitedRuleIndexes.contains(ruleIndex);
+    }
}
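Note: the visitedRuleIndexes set is what makes the where-less form work: visitWhereClause (and the new query-list visitors) record their rule index before visiting children, and visitIsClause/visitHasClause treat isa/has as a from-clause shortcut only when no whereClause has been recorded, leaving expressions inside a where clause to processExpr. A self-contained sketch of just that gating pattern, with a placeholder constant instead of the generated AtlasDSLParser.RULE_whereClause:

    import java.util.HashSet;
    import java.util.Set;

    // Simplified illustration of the rule tracking used by DSLVisitor; the real class
    // extends the generated AtlasDSLParserBaseVisitor and uses ANTLR rule indexes.
    public class VisitedRuleGateSketch {
        static final int RULE_WHERE_CLAUSE = 1; // placeholder value

        private final Set<Integer> visitedRuleIndexes = new HashSet<>();

        void addVisitedRule(int ruleIndex) {
            visitedRuleIndexes.add(ruleIndex);
        }

        boolean hasVisitedRule(int ruleIndex) {
            return visitedRuleIndexes.contains(ruleIndex);
        }

        public static void main(String[] args) {
            VisitedRuleGateSketch gate = new VisitedRuleGateSketch();
            // Outside a where clause, an isa/has clause is treated as a from-clause shortcut.
            System.out.println(!gate.hasVisitedRule(RULE_WHERE_CLAUSE)); // true
            gate.addVisitedRule(RULE_WHERE_CLAUSE);
            // Once a where clause has been entered, the shortcut is skipped.
            System.out.println(!gate.hasVisitedRule(RULE_WHERE_CLAUSE)); // false
        }
    }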
@@ -56,16 +56,13 @@ public class GremlinQueryComposer {
    private int providedOffset = DEFAULT_QUERY_RESULT_OFFSET;
    private Context context;
-    private static final ThreadLocal<DateFormat> DSL_DATE_FORMAT = new ThreadLocal<DateFormat>() {
-        @Override
-        public DateFormat initialValue() {
+    private static final ThreadLocal<DateFormat> DSL_DATE_FORMAT = ThreadLocal.withInitial(() -> {
        DateFormat ret = new SimpleDateFormat(ISO8601_FORMAT);
        ret.setTimeZone(TimeZone.getTimeZone("UTC"));
        return ret;
-        }
-    };
+    });
    public GremlinQueryComposer(Lookup registryLookup, final AtlasDSL.QueryMetadata qmd, boolean isNestedQuery) {
        this.isNestedQuery = isNestedQuery;
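Note: the anonymous ThreadLocal subclass above is replaced with ThreadLocal.withInitial, which is behaviorally equivalent but shorter. A standalone sketch of the same pattern; the format string is a stand-in for the ISO8601_FORMAT constant, whose value is not shown in this diff:

    import java.text.DateFormat;
    import java.text.SimpleDateFormat;
    import java.util.Date;
    import java.util.TimeZone;

    public class DateFormatThreadLocalSketch {
        // One SimpleDateFormat per thread, since SimpleDateFormat is not thread-safe.
        private static final ThreadLocal<DateFormat> DSL_DATE_FORMAT = ThreadLocal.withInitial(() -> {
            DateFormat ret = new SimpleDateFormat("yyyy-MM-dd'T'HH:mm:ss.SSS'Z'"); // stand-in format
            ret.setTimeZone(TimeZone.getTimeZone("UTC"));
            return ret;
        });

        public static void main(String[] args) {
            System.out.println(DSL_DATE_FORMAT.get().format(new Date()));
        }
    }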
@@ -124,13 +121,19 @@
            LOG.debug("addFromProperty(typeName={}, attribute={})", typeName, attribute);
        }
+        if(!isNestedQuery) {
            addFrom(typeName);
+        }
        add(GremlinClause.HAS_PROPERTY,
                IdentifierHelper.getQualifiedName(lookup, context, attribute));
    }
    public void addFromIsA(String typeName, String traitName) {
+        if (!isNestedQuery) {
            addFrom(typeName);
+        }
        add(GremlinClause.TRAIT, traitName);
    }
@@ -284,6 +287,15 @@
        return s;
    }
+    public List<String> getErrorList() {
+        combineErrorLists();
+        return errorList;
+    }
+    private void combineErrorLists() {
+        errorList.addAll(context.getErrorList());
+    }
    private String getTransformedClauses(String[] items) {
        String ret;
        String body = String.join(".", Stream.of(items).filter(Objects::nonNull).collect(Collectors.toList()));
@@ -406,7 +418,7 @@
    }
    private void moveToLast(GremlinClause clause) {
-        int index = queryClauses.hasClause(clause);
+        int index = queryClauses.contains(clause);
        if (-1 == index) {
            return;
        }
@@ -475,6 +487,11 @@
        add(GremlinClause.GROUP_BY, ia.getQualifiedName());
    }
+    public boolean hasFromClause() {
+        return queryClauses.contains(GremlinClause.HAS_TYPE) != -1 ||
+                queryClauses.contains(GremlinClause.HAS_TYPE_WITHIN) != -1;
+    }
    private static class GremlinClauseValue {
        private final GremlinClause clause;
        private final String value;
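Note: the new hasFromClause reports whether a HAS_TYPE or HAS_TYPE_WITHIN clause is already present (hasClause is renamed to contains in the inner clause list below, returning the index of the first match or -1); DSLVisitor.visitSingleQrySrc uses it to decide whether a bare expression can be attached to an existing from clause. A self-contained sketch of that pairing, with a small enum standing in for GremlinClause:

    import java.util.ArrayList;
    import java.util.List;

    // Minimal sketch of the contains()/hasFromClause() pairing; not the real GremlinQueryComposer.
    public class ClauseListSketch {
        enum Clause { HAS_TYPE, HAS_TYPE_WITHIN, HAS_PROPERTY, TRAIT } // stand-in for GremlinClause

        private final List<Clause> list = new ArrayList<>();

        void add(Clause clause) {
            list.add(clause);
        }

        // Index of the first occurrence of the clause, or -1 if absent.
        int contains(Clause clause) {
            for (int i = 0; i < list.size(); i++) {
                if (list.get(i) == clause) {
                    return i;
                }
            }
            return -1;
        }

        boolean hasFromClause() {
            return contains(Clause.HAS_TYPE) != -1 || contains(Clause.HAS_TYPE_WITHIN) != -1;
        }

        public static void main(String[] args) {
            ClauseListSketch q = new ClauseListSketch();
            System.out.println(q.hasFromClause()); // false
            q.add(Clause.HAS_TYPE);
            System.out.println(q.hasFromClause()); // true
        }
    }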
@@ -540,7 +557,7 @@
            return list.size();
        }
-        public int hasClause(GremlinClause clause) {
+        public int contains(GremlinClause clause) {
            for (int i = 0; i < list.size(); i++) {
                if (list.get(i).getClause() == clause)
                    return i;
@@ -640,5 +657,10 @@
            aliasMap.put(alias, typeName);
        }
+        public List<String> getErrorList() {
+            errorList.addAll(lookup.getErrorList());
+            return errorList;
+        }
    }
}
@@ -20,6 +20,9 @@ package org.apache.atlas.query;
import org.apache.atlas.type.AtlasType;
+import java.util.Collection;
+import java.util.List;
public interface Lookup {
    AtlasType getType(String typeName);
@@ -40,4 +43,6 @@
    String getTypeFromEdge(GremlinQueryComposer.Context context, String item);
    boolean isDate(GremlinQueryComposer.Context context, String attributeName);
+    List<String> getErrorList();
}
@@ -159,4 +159,9 @@ class RegistryBasedLookup implements Lookup {
    protected void addError(String s) {
        errorList.add(s);
    }
+    @Override
+    public List<String> getErrorList() {
+        return errorList;
+    }
}
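Note: error reporting is now threaded through the query pipeline: Lookup exposes getErrorList(), RegistryBasedLookup returns the errors it accumulated via addError, and GremlinQueryComposer folds the lookup/context errors into the list it hands back to callers. A minimal sketch of that aggregation; the class names are illustrative stand-ins:

    import java.util.ArrayList;
    import java.util.List;

    // Sketch of the error aggregation: a lookup collects its own errors and the composer
    // merges them into the list it returns. Not the real Atlas classes.
    public class ErrorListSketch {
        interface Lookup {
            List<String> getErrorList();
        }

        static class RecordingLookup implements Lookup {        // stand-in for RegistryBasedLookup
            private final List<String> errorList = new ArrayList<>();

            void addError(String s) {
                errorList.add(s);
            }

            @Override
            public List<String> getErrorList() {
                return errorList;
            }
        }

        static class Composer {                                 // stand-in for GremlinQueryComposer
            private final Lookup lookup;
            private final List<String> errorList = new ArrayList<>();

            Composer(Lookup lookup) {
                this.lookup = lookup;
            }

            List<String> getErrorList() {
                errorList.addAll(lookup.getErrorList());        // combine before returning
                return errorList;
            }
        }

        public static void main(String[] args) {
            RecordingLookup lookup = new RecordingLookup();
            lookup.addError("Unknown type: hive_xyz");
            System.out.println(new Composer(lookup).getErrorList()); // [Unknown type: hive_xyz]
        }
    }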
@@ -17,19 +17,12 @@
 */
package org.apache.atlas.query;
-import org.antlr.v4.runtime.CharStreams;
-import org.antlr.v4.runtime.CommonTokenStream;
-import org.antlr.v4.runtime.TokenStream;
import org.apache.atlas.TestModules;
import org.apache.atlas.discovery.EntityDiscoveryService;
import org.apache.atlas.exception.AtlasBaseException;
import org.apache.atlas.model.discovery.AtlasSearchResult;
-import org.apache.atlas.query.antlr4.AtlasDSLLexer;
-import org.apache.atlas.query.antlr4.AtlasDSLParser;
import org.apache.atlas.runner.LocalSolrRunner;
import org.apache.commons.collections.CollectionUtils;
-import org.apache.commons.lang.StringUtils;
-import org.testng.ITestContext;
import org.testng.annotations.*;
import javax.inject.Inject;
@@ -38,13 +31,6 @@ import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
-import java.io.ByteArrayInputStream;
-import java.io.IOException;
-import java.io.InputStream;
-import java.util.ArrayList;
-import java.util.HashMap;
-import java.util.List;
-import java.util.Map;
import static org.testng.Assert.assertEquals;
import static org.testng.Assert.assertNotNull;
@@ -120,6 +106,9 @@ public class DSLQueriesTest extends BasicTestSetup {
    public void comparison(String query, int expected) throws AtlasBaseException {
        AtlasSearchResult searchResult = discoveryService.searchUsingDslQuery(query, 25, 0);
        assertSearchResult(searchResult, expected);
+        AtlasSearchResult searchResult2 = discoveryService.searchUsingDslQuery(query.replace("where", " "), 25, 0);
+        assertSearchResult(searchResult2, expected);
    }
    @DataProvider(name = "basicProvider")
@@ -128,7 +117,6 @@
                {"from hive_db", 3},
                {"hive_db", 3},
                {"hive_db where hive_db.name=\"Reporting\"", 1},
-                {"hive_db hive_db.name = \"Reporting\"", 3},
                {"hive_db where hive_db.name=\"Reporting\" select name, owner", 1},
                {"hive_db has name", 3},
                {"from hive_table", 10},
@@ -155,8 +143,6 @@
                {"`Log Data`", 3},
                {"`isa`", 0},
                {"hive_table as t, sd, hive_column as c where t.name=\"sales_fact\" select c.name as colName, c.dataType as colType", 0},
-                {"hive_table where name='sales_fact', db where name='Sales'", 1},
-                {"hive_table where name='sales_fact', db where name='Reporting'", 0},
                {"DataSet where name='sales_fact'", 1},
                {"Asset where name='sales_fact'", 1}
        };
@@ -164,6 +150,11 @@
    @Test(dataProvider = "basicProvider")
    public void basic(String query, int expected) throws AtlasBaseException {
+        queryAssert(query, expected);
+        //queryAssert(query.replace("where", " "), expected);
+    }
+    private void queryAssert(String query, int expected) throws AtlasBaseException {
        AtlasSearchResult searchResult = discoveryService.searchUsingDslQuery(query, 25, 0);
        assertSearchResult(searchResult, expected);
    }
@@ -182,8 +173,8 @@
    @Test(dataProvider = "limitProvider")
    public void limit(String query, int expected, int limit, int offset) throws AtlasBaseException {
-        AtlasSearchResult searchResult = discoveryService.searchUsingDslQuery(query, limit, offset);
-        assertSearchResult(searchResult, expected);
+        queryAssert(query, expected);
+        queryAssert(query.replace("where", " "), expected);
    }
    @DataProvider(name = "syntaxVerifierProvider")
@@ -268,25 +259,16 @@
                {"hive_table where (name = \"sales_fact\" and createTime >= \"2014-12-11T02:35:58.440Z\" ) select name as _col_0, createTime as _col_1 limit 10 offset 1", 0},
                {"hive_table where (name = \"sales_fact\" and createTime >= \"2014-12-11T02:35:58.440Z\" ) select name as _col_0, createTime as _col_1 limit 10", 1},
                {"hive_table where (name = \"sales_fact\" and createTime >= \"2014-12-11T02:35:58.440Z\" ) select name as _col_0, createTime as _col_1 limit 0 offset 1", 0},
-                {"hive_table where name='sales_fact', db where name='Sales'", 1},
-                {"hive_table where name='sales_fact', db where name='Sales' limit 10", 1},
-                {"hive_table where name='sales_fact', db where name='Sales' limit 10 offset 1", 0},
-                {"hive_table where name='sales_fact', db where name='Reporting'", 0},
-                {"hive_table where name='sales_fact', db where name='Reporting' limit 10", 0},
-                {"hive_table where name='sales_fact', db where name='Reporting' limit 10 offset 1", 0},
                {"hive_db as d where owner = ['John ETL', 'Jane BI']", 2},
                {"hive_db as d where owner = ['John ETL', 'Jane BI'] limit 10", 2},
                {"hive_db as d where owner = ['John ETL', 'Jane BI'] limit 10 offset 1", 1},
        };
    }
    @Test(dataProvider = "syntaxVerifierProvider")
    public void syntax(String query, int expected) throws AtlasBaseException {
-        AtlasSearchResult searchResult = discoveryService.searchUsingDslQuery(query, 25, 0);
-        assertSearchResult(searchResult, expected);
+        queryAssert(query, expected);
+        queryAssert(query.replace("where", " "), expected);
    }
    @DataProvider(name = "orderByProvider")
@@ -365,8 +347,8 @@
    @Test(dataProvider = "orderByProvider")
    public void orderBy(String query, int expected, String orderBy, boolean ascending) throws AtlasBaseException {
-        AtlasSearchResult searchResult = discoveryService.searchUsingDslQuery(query, 25, 0);
-        assertSearchResult(searchResult, expected);
+        queryAssert(query, expected);
+        queryAssert(query.replace("where", " "), expected);
    }
    @DataProvider(name = "likeQueriesProvider")
@@ -376,13 +358,18 @@
                {"hive_db where name like \"R*\"", 1},
                {"hive_db where hive_db.name like \"R???rt?*\" or hive_db.name like \"S?l?s\" or hive_db.name like\"Log*\"", 3},
                {"hive_db where hive_db.name like \"R???rt?*\" and hive_db.name like \"S?l?s\" and hive_db.name like\"Log*\"", 0},
-                {"hive_table where name like 'sales*', db where name like 'Sa?es'", 1},
                {"hive_table where name like 'sales*' and db.name like 'Sa?es'", 1},
                {"hive_table where db.name like \"Sa*\"", 4},
                {"hive_table where db.name like \"Sa*\" and name like \"*dim\"", 3},
        };
    }
+    @Test(dataProvider = "likeQueriesProvider")
+    public void likeQueries(String query, int expected) throws AtlasBaseException {
+        queryAssert(query, expected);
+        queryAssert(query.replace("where", " "), expected);
+    }
    @DataProvider(name = "minMaxCountProvider")
    private Object[][] minMaxCountQueries() {
        return new Object[][]{
@@ -499,14 +486,26 @@
    @Test(dataProvider = "minMaxCountProvider")
    public void minMaxCount(String query, FieldValueValidator fv) throws AtlasBaseException {
+        queryAssert(query, fv);
+        queryAssert(query.replace("where", " "), fv);
+    }
+    private void queryAssert(String query, FieldValueValidator fv) throws AtlasBaseException {
        AtlasSearchResult searchResult = discoveryService.searchUsingDslQuery(query, 25, 0);
        assertSearchResult(searchResult, fv);
    }
-    @Test(dataProvider = "likeQueriesProvider")
-    public void likeQueries(String query, int expected) throws AtlasBaseException {
-        AtlasSearchResult searchResult = discoveryService.searchUsingDslQuery(query, 25, 0);
-        assertSearchResult(searchResult, expected);
+    private void assertSearchResult(AtlasSearchResult searchResult, FieldValueValidator expected) {
+        assertNotNull(searchResult);
+        assertNull(searchResult.getEntities());
+        assertEquals(searchResult.getAttributes().getName().size(), expected.getFieldNamesCount());
+        for (int i = 0; i < searchResult.getAttributes().getName().size(); i++) {
+            String s = searchResult.getAttributes().getName().get(i);
+            assertEquals(s, expected.fieldNames[i]);
+        }
+        assertEquals(searchResult.getAttributes().getValues().size(), expected.values.size());
    }
    private void assertSearchResult(AtlasSearchResult searchResult, int expected) {
@@ -523,19 +522,6 @@
        }
    }
-    private void assertSearchResult(AtlasSearchResult searchResult, FieldValueValidator expected) {
-        assertNotNull(searchResult);
-        assertNull(searchResult.getEntities());
-        assertEquals(searchResult.getAttributes().getName().size(), expected.getFieldNamesCount());
-        for (int i = 0; i < searchResult.getAttributes().getName().size(); i++) {
-            String s = searchResult.getAttributes().getName().get(i);
-            assertEquals(s, expected.fieldNames[i]);
-        }
-        assertEquals(searchResult.getAttributes().getValues().size(), expected.values.size());
-    }
    private class FieldValueValidator {
        class ResultObject {
            Map<String, Object> fieldValues = new HashMap<>();
...