Commit 5384a742 by Madhan Neethiraj

ATLAS-2229: fix for unit test failures in DSL tests; also fixed incorrect…

ATLAS-2229: fix for unit test failures in DSL tests; also fixed incorrect handling of orderby/groupby when limit/offset is present
parent 2a2b07e9
...@@ -344,7 +344,7 @@ public class AtlasJanusGraph implements AtlasGraph<AtlasJanusVertex, AtlasJanusE ...@@ -344,7 +344,7 @@ public class AtlasJanusGraph implements AtlasGraph<AtlasJanusVertex, AtlasJanusE
return result; return result;
} catch (ScriptException e) { } catch (ScriptException e) {
throw new AtlasBaseException(AtlasErrorCode.GREMLIN_SCRIPT_EXECUTION_FAILED, gremlinQuery); throw new AtlasBaseException(AtlasErrorCode.GREMLIN_SCRIPT_EXECUTION_FAILED, e, gremlinQuery);
} finally { } finally {
releaseGremlinScriptEngine(scriptEngine); releaseGremlinScriptEngine(scriptEngine);
} }
......
...@@ -281,8 +281,7 @@ public class QueryProcessor { ...@@ -281,8 +281,7 @@ public class QueryProcessor {
LOG.debug("addGroupBy(item={})", item); LOG.debug("addGroupBy(item={})", item);
} }
add(GremlinClause.GROUP); addGroupByClause(item);
addByClause(item, false);
hasGrpBy = true; hasGrpBy = true;
} }
...@@ -347,8 +346,7 @@ public class QueryProcessor { ...@@ -347,8 +346,7 @@ public class QueryProcessor {
LOG.debug("addOrderBy(name={}, isDesc={})", name, isDesc); LOG.debug("addOrderBy(name={}, isDesc={})", name, isDesc);
} }
add(GremlinClause.ORDER); addOrderByClause(name, isDesc);
addByClause(name, isDesc);
} }
private void updatePosition(GremlinClause clause) { private void updatePosition(GremlinClause clause) {
...@@ -407,22 +405,29 @@ public class QueryProcessor { ...@@ -407,22 +405,29 @@ public class QueryProcessor {
} }
} }
private void addByClause(String name, boolean descr) { private void addOrderByClause(String name, boolean descr) {
if (LOG.isDebugEnabled()) { if (LOG.isDebugEnabled()) {
LOG.debug("addByClause(name={})", name, descr); LOG.debug("addOrderByClause(name={})", name, descr);
} }
IdentifierHelper.Advice ia = getAdvice(name); IdentifierHelper.Advice ia = getAdvice(name);
add((!descr) ? GremlinClause.BY : GremlinClause.BY_DESC, ia.getQualifiedName()); add((!descr) ? GremlinClause.ORDER_BY : GremlinClause.ORDER_BY_DESC, ia.getQualifiedName());
}
private void addGroupByClause(String name) {
if (LOG.isDebugEnabled()) {
LOG.debug("addGroupByClause(name={})", name);
}
IdentifierHelper.Advice ia = getAdvice(name);
add(GremlinClause.GROUP_BY, ia.getQualifiedName());
} }
private enum GremlinClause { private enum GremlinClause {
AS("as('%s')"), AS("as('%s')"),
BY("by('%s')"),
BY_DESC("by('%s', decr)"),
DEDUP("dedup()"), DEDUP("dedup()"),
G("g"), G("g"),
GROUP("group()"), GROUP_BY("group().by('%')"),
HAS("has('%s', %s)"), HAS("has('%s', %s)"),
HAS_OPERATOR("has('%s', %s(%s))"), HAS_OPERATOR("has('%s', %s(%s))"),
HAS_PROPERTY("has('%s')"), HAS_PROPERTY("has('%s')"),
...@@ -436,7 +441,8 @@ public class QueryProcessor { ...@@ -436,7 +441,8 @@ public class QueryProcessor {
NESTED_START("__"), NESTED_START("__"),
NESTED_HAS_OPERATOR("has('%s', %s(%s))"), NESTED_HAS_OPERATOR("has('%s', %s(%s))"),
LIMIT("limit(%s)"), LIMIT("limit(%s)"),
ORDER("order()"), ORDER_BY("order().by('%s')"),
ORDER_BY_DESC("order().by('%s', decr)"),
OUT("out('%s')"), OUT("out('%s')"),
RANGE("range(%s, %s + %s)"), RANGE("range(%s, %s + %s)"),
SELECT("select('%s')"), SELECT("select('%s')"),
......
...@@ -148,6 +148,7 @@ public abstract class BasicTestSetup { ...@@ -148,6 +148,7 @@ public abstract class BasicTestSetup {
entities.addAll(salesFactColumns); entities.addAll(salesFactColumns);
AtlasEntity salesFact = table("sales_fact", "sales fact table", salesDB, sd, "Joe", "Managed", salesFactColumns, "Fact"); AtlasEntity salesFact = table("sales_fact", "sales fact table", salesDB, sd, "Joe", "Managed", salesFactColumns, "Fact");
salesFact.setAttribute("createTime", new Date(2018, 01, 01));
entities.add(salesFact); entities.add(salesFact);
List<AtlasEntity> logFactColumns = ImmutableList List<AtlasEntity> logFactColumns = ImmutableList
...@@ -179,7 +180,7 @@ public abstract class BasicTestSetup { ...@@ -179,7 +180,7 @@ public abstract class BasicTestSetup {
AtlasEntity circularLineageTable1 = table("table1", "", reportingDB, sd, "Vimal", "Managed", salesFactColumns, "Metric"); AtlasEntity circularLineageTable1 = table("table1", "", reportingDB, sd, "Vimal", "Managed", salesFactColumns, "Metric");
entities.add(circularLineageTable1); entities.add(circularLineageTable1);
AtlasEntity circularLineageTable2 = table("table2", "", reportingDB, sd, "Vimal", "Managed", salesFactColumns, "Metric"); AtlasEntity circularLineageTable2 = table("table2", "", reportingDB, sd, "Vimal 2", "Managed", salesFactColumns, "Metric");
entities.add(circularLineageTable2); entities.add(circularLineageTable2);
AtlasEntity circularLineage1Process = loadProcess("circularLineage1", "hive query for daily summary", "John ETL", ImmutableList.of(circularLineageTable1), AtlasEntity circularLineage1Process = loadProcess("circularLineage1", "hive query for daily summary", "John ETL", ImmutableList.of(circularLineageTable1),
...@@ -209,7 +210,7 @@ public abstract class BasicTestSetup { ...@@ -209,7 +210,7 @@ public abstract class BasicTestSetup {
entities.addAll(productDimColumns); entities.addAll(productDimColumns);
AtlasEntity productDim = AtlasEntity productDim =
table("product_dim", "product dimension table", salesDB, sd, "John Doe", "Managed", productDimColumns, table("product_dim", "product dimension table", salesDB, sd, "John Doe 2", "Managed", productDimColumns,
"Dimension"); "Dimension");
entities.add(productDim); entities.add(productDim);
...@@ -240,7 +241,7 @@ public abstract class BasicTestSetup { ...@@ -240,7 +241,7 @@ public abstract class BasicTestSetup {
entities.add(loadSalesMonthly); entities.add(loadSalesMonthly);
AtlasEntity loggingFactMonthly = AtlasEntity loggingFactMonthly =
table("logging_fact_monthly_mv", "logging fact monthly materialized view", logDB, sd, "Tim ETL", table("logging_fact_monthly_mv", "logging fact monthly materialized view", logDB, sd, "Tim ETL 2",
"Managed", logFactColumns, "Log Data"); "Managed", logFactColumns, "Log Data");
entities.add(loggingFactMonthly); entities.add(loggingFactMonthly);
...@@ -318,9 +319,12 @@ public abstract class BasicTestSetup { ...@@ -318,9 +319,12 @@ public abstract class BasicTestSetup {
protected AtlasEntity table(String name, String description, AtlasEntity db, AtlasEntity sd, String owner, String tableType, protected AtlasEntity table(String name, String description, AtlasEntity db, AtlasEntity sd, String owner, String tableType,
List<AtlasEntity> columns, String... traitNames) { List<AtlasEntity> columns, String... traitNames) {
String dbName = db.getAttribute(AtlasClient.NAME).toString();
String clusterName = db.getAttribute("clusterName").toString();
AtlasEntity table = new AtlasEntity(HIVE_TABLE_TYPE); AtlasEntity table = new AtlasEntity(HIVE_TABLE_TYPE);
table.setAttribute("name", name); table.setAttribute("name", name);
table.setAttribute(AtlasClient.REFERENCEABLE_ATTRIBUTE_NAME, "qualified:" + name); table.setAttribute(AtlasClient.REFERENCEABLE_ATTRIBUTE_NAME, dbName + "." + name);
table.setAttribute("description", description); table.setAttribute("description", description);
table.setAttribute("owner", owner); table.setAttribute("owner", owner);
table.setAttribute("tableType", tableType); table.setAttribute("tableType", tableType);
...@@ -335,6 +339,12 @@ public abstract class BasicTestSetup { ...@@ -335,6 +339,12 @@ public abstract class BasicTestSetup {
table.setAttribute("columns", getAtlasObjectIds(columns)); table.setAttribute("columns", getAtlasObjectIds(columns));
table.setClassifications(Stream.of(traitNames).map(AtlasClassification::new).collect(Collectors.toList())); table.setClassifications(Stream.of(traitNames).map(AtlasClassification::new).collect(Collectors.toList()));
sd.setAttribute(AtlasClient.REFERENCEABLE_ATTRIBUTE_NAME, dbName + "." + name + "@" + clusterName + "_storage");
for (AtlasEntity column : columns) {
column.setAttribute(AtlasClient.REFERENCEABLE_ATTRIBUTE_NAME, dbName + "." + name + "." + column.getAttribute(AtlasClient.NAME).toString() + "@" + clusterName);
}
return table; return table;
} }
......
Markdown is supported
0% or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment