Commit 5384a742 by Madhan Neethiraj

ATLAS-2229: fix for unit test failures in DSL tests; also fixed incorrect handling of orderby/groupby when limit/offset is present
parent 2a2b07e9
@@ -344,7 +344,7 @@ public class AtlasJanusGraph implements AtlasGraph<AtlasJanusVertex, AtlasJanusE
return result;
} catch (ScriptException e) {
throw new AtlasBaseException(AtlasErrorCode.GREMLIN_SCRIPT_EXECUTION_FAILED, gremlinQuery);
throw new AtlasBaseException(AtlasErrorCode.GREMLIN_SCRIPT_EXECUTION_FAILED, e, gremlinQuery);
} finally {
releaseGremlinScriptEngine(scriptEngine);
}
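For context, a minimal standalone sketch (generic Java, not Atlas classes; the message and query text are made up) of what the one-line change above buys: passing the caught ScriptException as the cause keeps the original Gremlin failure visible in the wrapped exception's stack trace instead of discarding it.

    // Sketch only: RuntimeException stands in for AtlasBaseException here.
    import javax.script.ScriptException;

    public class CauseChainingDemo {
        public static void main(String[] args) {
            try {
                throw new ScriptException("gremlin: unexpected token");
            } catch (ScriptException e) {
                // before: new RuntimeException("GREMLIN_SCRIPT_EXECUTION_FAILED: <query>")     -- cause lost
                // after:  new RuntimeException("GREMLIN_SCRIPT_EXECUTION_FAILED: <query>", e)  -- cause chained
                new RuntimeException("GREMLIN_SCRIPT_EXECUTION_FAILED: <query>", e).printStackTrace();
            }
        }
    }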
@@ -281,8 +281,7 @@ public class QueryProcessor {
LOG.debug("addGroupBy(item={})", item);
}
add(GremlinClause.GROUP);
addByClause(item, false);
addGroupByClause(item);
hasGrpBy = true;
}
@@ -347,8 +346,7 @@ public class QueryProcessor {
LOG.debug("addOrderBy(name={}, isDesc={})", name, isDesc);
}
add(GremlinClause.ORDER);
addByClause(name, isDesc);
addOrderByClause(name, isDesc);
}
private void updatePosition(GremlinClause clause) {
@@ -407,22 +405,29 @@ public class QueryProcessor {
}
}
private void addByClause(String name, boolean descr) {
private void addOrderByClause(String name, boolean descr) {
if (LOG.isDebugEnabled()) {
LOG.debug("addByClause(name={})", name, descr);
LOG.debug("addOrderByClause(name={})", name, descr);
}
IdentifierHelper.Advice ia = getAdvice(name);
add((!descr) ? GremlinClause.BY : GremlinClause.BY_DESC, ia.getQualifiedName());
add((!descr) ? GremlinClause.ORDER_BY : GremlinClause.ORDER_BY_DESC, ia.getQualifiedName());
}
private void addGroupByClause(String name) {
if (LOG.isDebugEnabled()) {
LOG.debug("addGroupByClause(name={})", name);
}
IdentifierHelper.Advice ia = getAdvice(name);
add(GremlinClause.GROUP_BY, ia.getQualifiedName());
}
private enum GremlinClause {
AS("as('%s')"),
BY("by('%s')"),
BY_DESC("by('%s', decr)"),
DEDUP("dedup()"),
G("g"),
GROUP("group()"),
GROUP_BY("group().by('%')"),
HAS("has('%s', %s)"),
HAS_OPERATOR("has('%s', %s(%s))"),
HAS_PROPERTY("has('%s')"),
@@ -436,7 +441,8 @@ public class QueryProcessor {
NESTED_START("__"),
NESTED_HAS_OPERATOR("has('%s', %s(%s))"),
LIMIT("limit(%s)"),
ORDER("order()"),
ORDER_BY("order().by('%s')"),
ORDER_BY_DESC("order().by('%s', decr)"),
OUT("out('%s')"),
RANGE("range(%s, %s + %s)"),
SELECT("select('%s')"),
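For context, a small standalone sketch (not Atlas code; the class, DSL query, and pipeline below are illustrative) of why the new self-contained ORDER_BY and GROUP_BY format strings matter: each clause now carries its own by(...) step, so a trailing range()/limit() clause emitted for limit/offset no longer separates order() or group() from the attribute it sorts or groups on.

    // Sketch only: format strings mirror the GremlinClause entries above.
    public class ClauseFormatDemo {
        private static final String ORDER_BY = "order().by('%s')";
        private static final String GROUP_BY = "group().by('%s')";
        private static final String RANGE    = "range(%s, %s + %s)";

        public static void main(String[] args) {
            // e.g. DSL: hive_column select qualifiedName orderby qualifiedName limit 5 offset 2
            String orderBy = String.format(ORDER_BY, "hive_column.qualifiedName");
            String groupBy = String.format(GROUP_BY, "hive_table.owner");
            String range   = String.format(RANGE, 2, 2, 5);
            // order()/group() and their by(...) stay together even when a range clause follows
            System.out.println("g.V().has('__typeName', 'hive_column')." + orderBy + "." + range + ".toList()");
            System.out.println("g.V().has('__typeName', 'hive_table')." + groupBy + ".toList()");
        }
    }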
@@ -148,6 +148,7 @@ public abstract class BasicTestSetup {
entities.addAll(salesFactColumns);
AtlasEntity salesFact = table("sales_fact", "sales fact table", salesDB, sd, "Joe", "Managed", salesFactColumns, "Fact");
salesFact.setAttribute("createTime", new Date(2018, 01, 01));
entities.add(salesFact);
List<AtlasEntity> logFactColumns = ImmutableList
@@ -179,7 +180,7 @@ public abstract class BasicTestSetup {
AtlasEntity circularLineageTable1 = table("table1", "", reportingDB, sd, "Vimal", "Managed", salesFactColumns, "Metric");
entities.add(circularLineageTable1);
AtlasEntity circularLineageTable2 = table("table2", "", reportingDB, sd, "Vimal", "Managed", salesFactColumns, "Metric");
AtlasEntity circularLineageTable2 = table("table2", "", reportingDB, sd, "Vimal 2", "Managed", salesFactColumns, "Metric");
entities.add(circularLineageTable2);
AtlasEntity circularLineage1Process = loadProcess("circularLineage1", "hive query for daily summary", "John ETL", ImmutableList.of(circularLineageTable1),
@@ -209,7 +210,7 @@ public abstract class BasicTestSetup {
entities.addAll(productDimColumns);
AtlasEntity productDim =
table("product_dim", "product dimension table", salesDB, sd, "John Doe", "Managed", productDimColumns,
table("product_dim", "product dimension table", salesDB, sd, "John Doe 2", "Managed", productDimColumns,
"Dimension");
entities.add(productDim);
@@ -240,7 +241,7 @@ public abstract class BasicTestSetup {
entities.add(loadSalesMonthly);
AtlasEntity loggingFactMonthly =
table("logging_fact_monthly_mv", "logging fact monthly materialized view", logDB, sd, "Tim ETL",
table("logging_fact_monthly_mv", "logging fact monthly materialized view", logDB, sd, "Tim ETL 2",
"Managed", logFactColumns, "Log Data");
entities.add(loggingFactMonthly);
@@ -318,9 +319,12 @@ public abstract class BasicTestSetup {
protected AtlasEntity table(String name, String description, AtlasEntity db, AtlasEntity sd, String owner, String tableType,
List<AtlasEntity> columns, String... traitNames) {
String dbName = db.getAttribute(AtlasClient.NAME).toString();
String clusterName = db.getAttribute("clusterName").toString();
AtlasEntity table = new AtlasEntity(HIVE_TABLE_TYPE);
table.setAttribute("name", name);
table.setAttribute(AtlasClient.REFERENCEABLE_ATTRIBUTE_NAME, "qualified:" + name);
table.setAttribute(AtlasClient.REFERENCEABLE_ATTRIBUTE_NAME, dbName + "." + name);
table.setAttribute("description", description);
table.setAttribute("owner", owner);
table.setAttribute("tableType", tableType);
@@ -335,6 +339,12 @@ public abstract class BasicTestSetup {
table.setAttribute("columns", getAtlasObjectIds(columns));
table.setClassifications(Stream.of(traitNames).map(AtlasClassification::new).collect(Collectors.toList()));
sd.setAttribute(AtlasClient.REFERENCEABLE_ATTRIBUTE_NAME, dbName + "." + name + "@" + clusterName + "_storage");
for (AtlasEntity column : columns) {
column.setAttribute(AtlasClient.REFERENCEABLE_ATTRIBUTE_NAME, dbName + "." + name + "." + column.getAttribute(AtlasClient.NAME).toString() + "@" + clusterName);
}
return table;
}
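A brief sketch (assumed helper names and sample values, not the test class itself) of the qualifiedName convention the updated table() setup now applies: tables become dbName.tableName, columns dbName.tableName.columnName@clusterName, and the storage descriptor dbName.tableName@clusterName_storage.

    // Sketch only: mirrors the qualifiedName patterns assigned in table() above.
    public class QualifiedNameDemo {
        static String tableQualifiedName(String db, String table) {
            return db + "." + table;
        }
        static String columnQualifiedName(String db, String table, String column, String cluster) {
            return db + "." + table + "." + column + "@" + cluster;
        }
        static String storageDescQualifiedName(String db, String table, String cluster) {
            return db + "." + table + "@" + cluster + "_storage";
        }
        public static void main(String[] args) {
            System.out.println(tableQualifiedName("Sales", "sales_fact"));                        // Sales.sales_fact
            System.out.println(columnQualifiedName("Sales", "sales_fact", "customer_id", "cl1")); // Sales.sales_fact.customer_id@cl1
            System.out.println(storageDescQualifiedName("Sales", "sales_fact", "cl1"));           // Sales.sales_fact@cl1_storage
        }
    }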
@@ -22,6 +22,7 @@ import org.apache.atlas.discovery.EntityDiscoveryService;
import org.apache.atlas.exception.AtlasBaseException;
import org.apache.atlas.model.discovery.AtlasSearchResult;
import org.apache.atlas.runner.LocalSolrRunner;
import org.apache.commons.collections.CollectionUtils;
import org.testng.annotations.AfterClass;
import org.testng.annotations.BeforeClass;
import org.testng.annotations.DataProvider;
@@ -116,11 +117,11 @@ public class DSLQueriesTest extends BasicTestSetup {
{"hive_table", 10},
{"hive_table isa Dimension", 3},
{"hive_column where hive_column isa PII", 4},
{"hive_column where hive_column isa PII select hive_column.name", 4},
{"hive_column select hive_column.name", 17},
{"hive_column select name", 17},
{"hive_column where hive_column isa PII select hive_column.qualifiedName", 4},
{"hive_column select hive_column.qualifiedName", 17},
{"hive_column select qualifiedName", 17},
{"hive_column where hive_column.name=\"customer_id\"", 2},
{"from hive_table select hive_table.name", 10},
{"from hive_table select hive_table.qualifiedName", 10},
{"hive_db where (name = \"Reporting\")", 1},
{"hive_db where (name = \"Reporting\") select name as _col_0, owner as _col_1", 1},
{"hive_db where hive_db is JdbcAccess", 0},
@@ -154,8 +155,8 @@ public class DSLQueriesTest extends BasicTestSetup {
return new Object[][]{
{"hive_column", 17, 40, 0},
{"hive_column limit 10", 10, 50, 0},
{"hive_column select hive_column.name limit 10", 10, 5, 0},
{"hive_column select hive_column.name limit 40 offset 10", 7, 40, 0},
{"hive_column select hive_column.qualifiedName limit 10", 10, 5, 0},
{"hive_column select hive_column.qualifiedName limit 40 offset 10", 7, 40, 0},
{"hive_db where name = 'Reporting' limit 10 offset 0", 1, 40, 0},
{"hive_table where db.name = 'Reporting' limit 10", 4, 1, 0},
};
@@ -171,7 +172,7 @@ public class DSLQueriesTest extends BasicTestSetup {
private Object[][] syntaxVerifierQueries() {
return new Object[][]{
{"hive_column limit 10 ", 10},
{"hive_column select hive_column.name limit 10 ", 10},
{"hive_column select hive_column.qualifiedName limit 10 ", 10},
{"from hive_db", 3},
{"from hive_db limit 2", 2},
{"from hive_db limit 2 offset 0", 2},
@@ -210,13 +211,13 @@ public class DSLQueriesTest extends BasicTestSetup {
{"hive_column where hive_column isa PII limit 5 offset 1", 3},
{"hive_column where hive_column isa PII limit 5 offset 5", 0},
{"hive_column select hive_column.name", 17},
{"hive_column select hive_column.name limit 5", 5},
{"hive_column select hive_column.name limit 5 offset 36", 0},
{"hive_column select hive_column.qualifiedName", 17},
{"hive_column select hive_column.qualifiedName limit 5", 5},
{"hive_column select hive_column.qualifiedName limit 5 offset 36", 0},
{"hive_column select name", 17},
{"hive_column select name limit 5", 5},
{"hive_column select name limit 5 offset 36 ", 0},
{"hive_column select qualifiedName", 17},
{"hive_column select qualifiedName limit 5", 5},
{"hive_column select qualifiedName limit 5 offset 36 ", 0},
{"hive_column where hive_column.name=\"customer_id\"", 2},
{"hive_column where hive_column.name=\"customer_id\" limit 2", 2},
@@ -269,19 +270,19 @@ public class DSLQueriesTest extends BasicTestSetup {
private Object[][] orderByQueries() {
return new Object[][]{
{"from hive_db as h orderby h.owner limit 3", 3, "owner", true},
{"hive_column as c select c.name orderby hive_column.name ", 17, "c.name", true},
{"hive_column as c select c.name orderby hive_column.name limit 5", 5, "c.name", true},
{"hive_column as c select c.name orderby hive_column.name desc limit 5", 5, "c.name", false},
{"hive_column as c select c.qualifiedName orderby hive_column.qualifiedName ", 17, "c.qualifiedName", true},
{"hive_column as c select c.qualifiedName orderby hive_column.qualifiedName limit 5", 5, "c.qualifiedName", true},
{"hive_column as c select c.qualifiedName orderby hive_column.qualifiedName desc limit 5", 5, "c.qualifiedName", false},
{"from hive_db orderby hive_db.owner limit 3", 3, "owner", true},
{"hive_column select hive_column.name orderby hive_column.name ", 17, "hive_column.name", true},
{"hive_column select hive_column.name orderby hive_column.name limit 5", 5, "hive_column.name", true},
{"hive_column select hive_column.name orderby hive_column.name desc limit 5", 5, "hive_column.name", false},
{"hive_column select hive_column.qualifiedName orderby hive_column.qualifiedName ", 17, "hive_column.qualifiedName", true},
{"hive_column select hive_column.qualifiedName orderby hive_column.qualifiedName limit 5", 5, "hive_column.qualifiedName", true},
{"hive_column select hive_column.qualifiedName orderby hive_column.qualifiedName desc limit 5", 5, "hive_column.qualifiedName", false},
{"from hive_db orderby owner limit 3", 3, "owner", true},
{"hive_column select hive_column.name orderby name ", 17, "hive_column.name", true},
{"hive_column select hive_column.name orderby name limit 5", 5, "hive_column.name", true},
{"hive_column select hive_column.name orderby name desc limit 5", 5, "hive_column.name", false},
{"hive_column select hive_column.qualifiedName orderby qualifiedName ", 17, "hive_column.qualifiedName", true},
{"hive_column select hive_column.qualifiedName orderby qualifiedName limit 5", 5, "hive_column.qualifiedName", true},
{"hive_column select hive_column.qualifiedName orderby qualifiedName desc limit 5", 5, "hive_column.qualifiedName", false},
{"from hive_db orderby hive_db.owner limit 3", 3, "owner", true},
{"hive_db where hive_db.name=\"Reporting\" orderby 'owner'", 1, "owner", true},
@@ -300,15 +301,15 @@ public class DSLQueriesTest extends BasicTestSetup {
{"hive_table orderby hive_table.owner limit 8 offset 0", 8, "owner", true},
{"hive_table orderby hive_table.owner desc limit 8 offset 0", 8, "owner", false},
{"hive_column select hive_column.name orderby hive_column.name ", 17, "hive_column.name", true},
{"hive_column select hive_column.name orderby hive_column.name limit 5", 5, "hive_column.name", true},
{"hive_column select hive_column.name orderby hive_column.name desc limit 5", 5, "hive_column.name", false},
{"hive_column select hive_column.qualifiedName orderby hive_column.qualifiedName ", 17, "hive_column.qualifiedName", true},
{"hive_column select hive_column.qualifiedName orderby hive_column.qualifiedName limit 5", 5, "hive_column.qualifiedName", true},
{"hive_column select hive_column.qualifiedName orderby hive_column.qualifiedName desc limit 5", 5, "hive_column.qualifiedName", false},
{"hive_column select hive_column.name orderby hive_column.name limit 5 offset 2", 5, "hive_column.name", true},
{"hive_column select hive_column.qualifiedName orderby hive_column.qualifiedName limit 5 offset 2", 5, "hive_column.qualifiedName", true},
{"hive_column select name orderby hive_column.name", 17, "name", true},
{"hive_column select name orderby hive_column.name limit 5", 5, "name", true},
{"hive_column select name orderby hive_column.name desc", 17, "name", false},
{"hive_column select qualifiedName orderby hive_column.qualifiedName", 17, "qualifiedName", true},
{"hive_column select qualifiedName orderby hive_column.qualifiedName limit 5", 5, "qualifiedName", true},
{"hive_column select qualifiedName orderby hive_column.qualifiedName desc", 17, "qualifiedName", false},
{"hive_column where hive_column.name=\"customer_id\" orderby hive_column.name", 2, "name", true},
{"hive_column where hive_column.name=\"customer_id\" orderby hive_column.name limit 2", 2, "name", true},
@@ -368,7 +369,7 @@ public class DSLQueriesTest extends BasicTestSetup {
private void assertSearchResult(AtlasSearchResult searchResult, int expected) {
assertNotNull(searchResult);
if(expected == 0) {
assertNull(searchResult.getAttributes());
assertTrue(searchResult.getAttributes() == null || CollectionUtils.isEmpty(searchResult.getAttributes().getValues()));
assertNull(searchResult.getEntities());
} else if(searchResult.getEntities() != null) {
assertEquals(searchResult.getEntities().size(), expected);
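For illustration, a minimal sketch (plain Java plus Commons Collections, not the Atlas test itself) of the relaxed empty-result check above: an expected count of 0 now accepts either a missing attribute block or one whose value list is present but empty.

    // Sketch only: the method and sample data are illustrative, not Atlas APIs.
    import org.apache.commons.collections.CollectionUtils;
    import java.util.Collections;
    import java.util.List;

    public class EmptyResultCheckDemo {
        static void assertEmptyResult(List<?> attributeValues, List<?> entities) {
            boolean attributesEmpty = attributeValues == null || CollectionUtils.isEmpty(attributeValues);
            if (!attributesEmpty || entities != null) {
                throw new AssertionError("expected an empty search result");
            }
        }
        public static void main(String[] args) {
            assertEmptyResult(null, null);                    // no attributes returned at all
            assertEmptyResult(Collections.emptyList(), null); // attribute block present but empty
        }
    }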