Commit ad36c887 by Suma Shivaprasad

ATLAS-294 Select queries(ex: from DB select DB.name) response contains column…

ATLAS-294 Select queries(ex: from DB select DB.name) response contains column names as _col_x instead of the actual names requested in the query.(thiyag via sumasai)
parent eec811ea
...@@ -297,7 +297,7 @@ public class HiveHookIT { ...@@ -297,7 +297,7 @@ public class HiveHookIT {
String query = String.format( String query = String.format(
"%s as t where tableName = '%s', db where name = '%s' and clusterName = '%s'" + " select t", "%s as t where tableName = '%s', db where name = '%s' and clusterName = '%s'" + " select t",
HiveDataTypes.HIVE_TABLE.getName(), tableName.toLowerCase(), dbName.toLowerCase(), CLUSTER_NAME); HiveDataTypes.HIVE_TABLE.getName(), tableName.toLowerCase(), dbName.toLowerCase(), CLUSTER_NAME);
return assertEntityIsRegistered(query); return assertEntityIsRegistered(query, "t");
} }
private String assertDatabaseIsRegistered(String dbName) throws Exception { private String assertDatabaseIsRegistered(String dbName) throws Exception {
...@@ -322,10 +322,10 @@ public class HiveHookIT { ...@@ -322,10 +322,10 @@ public class HiveHookIT {
+ "db where name = '%s' and clusterName = '%s' select p", typeName, value, + "db where name = '%s' and clusterName = '%s' select p", typeName, value,
tableName.toLowerCase(), dbName.toLowerCase(), CLUSTER_NAME); tableName.toLowerCase(), dbName.toLowerCase(), CLUSTER_NAME);
assertEntityIsRegistered(dslQuery); assertEntityIsRegistered(dslQuery, "p");
} }
private String assertEntityIsRegistered(final String query) throws Exception { private String assertEntityIsRegistered(final String query, String... arg) throws Exception {
waitFor(2000, new Predicate() { waitFor(2000, new Predicate() {
@Override @Override
public boolean evaluate() throws Exception { public boolean evaluate() throws Exception {
...@@ -334,6 +334,8 @@ public class HiveHookIT { ...@@ -334,6 +334,8 @@ public class HiveHookIT {
} }
}); });
String column = (arg.length > 0) ? arg[0] : "_col_0";
JSONArray results = dgiCLient.search(query); JSONArray results = dgiCLient.search(query);
JSONObject row = results.getJSONObject(0); JSONObject row = results.getJSONObject(0);
if (row.has("__guid")) { if (row.has("__guid")) {
...@@ -341,7 +343,7 @@ public class HiveHookIT { ...@@ -341,7 +343,7 @@ public class HiveHookIT {
} else if (row.has("$id$")) { } else if (row.has("$id$")) {
return row.getJSONObject("$id$").getString("id"); return row.getJSONObject("$id$").getString("id");
} else { } else {
return row.getJSONObject("_col_0").getString("id"); return row.getJSONObject(column).getString("id");
} }
} }
......
...@@ -9,6 +9,7 @@ ATLAS-54 Rename configs in hive hook (shwethags) ...@@ -9,6 +9,7 @@ ATLAS-54 Rename configs in hive hook (shwethags)
ATLAS-3 Mixed Index creation fails with Date types (sumasai via shwethags) ATLAS-3 Mixed Index creation fails with Date types (sumasai via shwethags)
ALL CHANGES: ALL CHANGES:
ATLAS-294 Select queries(ex: from DB select DB.name) response contains column names as "_col_x" instead of the actual names requested in the query.(thiyag via sumasai)
ATLAS-297 KafkaNotificationTest.testSendReceiveMessage fails when atlas-server is running on the same machine (yhemanth via shwethags) ATLAS-297 KafkaNotificationTest.testSendReceiveMessage fails when atlas-server is running on the same machine (yhemanth via shwethags)
ATLAS-306 change javadoc generation from 'package' to 'site' phase(jspeidel via sumasai) ATLAS-306 change javadoc generation from 'package' to 'site' phase(jspeidel via sumasai)
ATLAS-289 updateEntity does not remove existing edge for multiplicity-one reference (dkantor via shwethags) ATLAS-289 updateEntity does not remove existing edge for multiplicity-one reference (dkantor via shwethags)
......
...@@ -676,13 +676,11 @@ object Expressions { ...@@ -676,13 +676,11 @@ object Expressions {
override def toString = s"$child where $condExpr" override def toString = s"$child where $condExpr"
} }
val GEN_COL_ALIAS_PREFIX = "_col"
case class SelectExpression(child: Expression, selectList: List[Expression]) extends Expression { case class SelectExpression(child: Expression, selectList: List[Expression]) extends Expression {
val children = List(child) ::: selectList val children = List(child) ::: selectList
lazy val selectListWithAlias = selectList.zipWithIndex map { lazy val selectListWithAlias = selectList.zipWithIndex map {
case (s: AliasExpression, _) => s case (s: AliasExpression, _) => s
case (x, i) => new AliasExpression(x, s"${GEN_COL_ALIAS_PREFIX}_$i") case (x, i) => new AliasExpression(x, s"${x}")
} }
lazy val dataType = { lazy val dataType = {
......
...@@ -78,7 +78,7 @@ trait ExpressionUtils { ...@@ -78,7 +78,7 @@ trait ExpressionUtils {
def select(input: Expression, s: List[(Expression, Option[String])]) = { def select(input: Expression, s: List[(Expression, Option[String])]) = {
val selList = s.map { t => val selList = s.map { t =>
t._2 match { t._2 match {
case None => t._1 case None => t._1.as(s"${t._1}")
case _ => t._1.as(t._2.get) case _ => t._1.as(t._2.get)
} }
} }
......
...@@ -313,12 +313,12 @@ class GremlinTest extends BaseGremlinTest { ...@@ -313,12 +313,12 @@ class GremlinTest extends BaseGremlinTest {
val r = QueryProcessor.evaluate(_class("DB").where(id("name").`=`(string("Reporting"))). val r = QueryProcessor.evaluate(_class("DB").where(id("name").`=`(string("Reporting"))).
select(id("name"), id("owner")), g, gp) select(id("name"), id("owner")), g, gp)
validateJson(r, """{ validateJson(r, """{
| "query": "DB where (name = \"Reporting\") as _src1 select _src1.name as _col_0, _src1.owner as _col_1", | "query": "DB where (name = \"Reporting\") as _src1 select _src1.name as _src1.name, _src1.owner as _src1.owner",
| "dataType": { | "dataType": {
| "typeName": "__tempQueryResultStruct1", | "typeName": "__tempQueryResultStruct1",
| "attributeDefinitions": [ | "attributeDefinitions": [
| { | {
| "name": "_col_0", | "name": "_src1.name",
| "dataTypeName": "string", | "dataTypeName": "string",
| "multiplicity": { | "multiplicity": {
| "lower": 0, | "lower": 0,
...@@ -331,7 +331,7 @@ class GremlinTest extends BaseGremlinTest { ...@@ -331,7 +331,7 @@ class GremlinTest extends BaseGremlinTest {
| "reverseAttributeName": null | "reverseAttributeName": null
| }, | },
| { | {
| "name": "_col_1", | "name": "_src1.owner",
| "dataTypeName": "string", | "dataTypeName": "string",
| "multiplicity": { | "multiplicity": {
| "lower": 0, | "lower": 0,
...@@ -348,8 +348,8 @@ class GremlinTest extends BaseGremlinTest { ...@@ -348,8 +348,8 @@ class GremlinTest extends BaseGremlinTest {
| "rows": [ | "rows": [
| { | {
| "$typeName$": "__tempQueryResultStruct1", | "$typeName$": "__tempQueryResultStruct1",
| "_col_1": "Jane BI", | "_src1.owner": "Jane BI",
| "_col_0": "Reporting" | "_src1.name": "Reporting"
| } | }
| ] | ]
|}""".stripMargin); |}""".stripMargin);
...@@ -777,7 +777,7 @@ class GremlinTest extends BaseGremlinTest { ...@@ -777,7 +777,7 @@ class GremlinTest extends BaseGremlinTest {
@Test def testArith { @Test def testArith {
val r = QueryProcessor.evaluate(_class("DB").where(id("name").`=`(string("Reporting"))). val r = QueryProcessor.evaluate(_class("DB").where(id("name").`=`(string("Reporting"))).
select(id("name"), id("createTime") + int(1)), g, gp) select(id("name"), id("createTime") + int(1)), g, gp)
validateJson(r, "{\n \"query\":\"DB where (name = \\\"Reporting\\\") as _src1 select _src1.name as _col_0, (_src1.createTime + 1) as _col_1\",\n \"dataType\":{\n \"typeName\":\"__tempQueryResultStruct3\",\n \"attributeDefinitions\":[\n {\n \"name\":\"_col_0\",\n \"dataTypeName\":\"string\",\n \"multiplicity\":{\n \"lower\":0,\n \"upper\":1,\n \"isUnique\":false\n },\n \"isComposite\":false,\n \"isUnique\":false,\n \"isIndexable\":true,\n \"reverseAttributeName\":null\n },\n {\n \"name\":\"_col_1\",\n \"dataTypeName\":\"int\",\n \"multiplicity\":{\n \"lower\":0,\n \"upper\":1,\n \"isUnique\":false\n },\n \"isComposite\":false,\n \"isUnique\":false,\n \"isIndexable\":true,\n \"reverseAttributeName\":null\n }\n ]\n },\n \"rows\":[\n {\n \"$typeName$\":\"__tempQueryResultStruct3\",\n \"_col_1\":1501,\n \"_col_0\":\"Reporting\"\n }\n ]\n}") validateJson(r, "{\n \"query\":\"DB where (name = \\\"Reporting\\\") as _src1 select _src1.name as _src1.name, (_src1.createTime + 1) as (_src1.createTime + 1)\",\n \"dataType\":{\n \"typeName\":\"__tempQueryResultStruct3\",\n \"attributeDefinitions\":[\n {\n \"name\":\"_src1.name\",\n \"dataTypeName\":\"string\",\n \"multiplicity\":{\n \"lower\":0,\n \"upper\":1,\n \"isUnique\":false\n },\n \"isComposite\":false,\n \"isUnique\":false,\n \"isIndexable\":true,\n \"reverseAttributeName\":null\n },\n {\n \"name\":\"(_src1.createTime + 1)\",\n \"dataTypeName\":\"int\",\n \"multiplicity\":{\n \"lower\":0,\n \"upper\":1,\n \"isUnique\":false\n },\n \"isComposite\":false,\n \"isUnique\":false,\n \"isIndexable\":true,\n \"reverseAttributeName\":null\n }\n ]\n },\n \"rows\":[\n {\n \"$typeName$\":\"__tempQueryResultStruct3\",\n \"(_src1.createTime + 1)\":1501,\n \"_src1.name\":\"Reporting\"\n }\n ]\n}")
} }
@Test def testComparisonLogical { @Test def testComparisonLogical {
...@@ -906,12 +906,12 @@ class GremlinTest extends BaseGremlinTest { ...@@ -906,12 +906,12 @@ class GremlinTest extends BaseGremlinTest {
" db where name = 'Reporting' and clusterName = 'test' select p").right.get " db where name = 'Reporting' and clusterName = 'test' select p").right.get
val r = QueryProcessor.evaluate(e, g, gp) val r = QueryProcessor.evaluate(e, g, gp)
validateJson(r, """{ validateJson(r, """{
| "query":"Partition as p where (values = [\"2015-01-01\"]) table where (name = \"sales_fact_daily_mv\") db where (name = \"Reporting\") and (clusterName = \"test\") as _src1 select p as _col_0", | "query":"Partition as p where (values = [\"2015-01-01\"]) table where (name = \"sales_fact_daily_mv\") db where (name = \"Reporting\") and (clusterName = \"test\") as _src1 select p as p",
| "dataType":{ | "dataType":{
| "typeName":"__tempQueryResultStruct2", | "typeName":"__tempQueryResultStruct2",
| "attributeDefinitions":[ | "attributeDefinitions":[
| { | {
| "name":"_col_0", | "name":"p",
| "dataTypeName":"Partition", | "dataTypeName":"Partition",
| "multiplicity":{ | "multiplicity":{
| "lower":0, | "lower":0,
...@@ -928,7 +928,7 @@ class GremlinTest extends BaseGremlinTest { ...@@ -928,7 +928,7 @@ class GremlinTest extends BaseGremlinTest {
| "rows":[ | "rows":[
| { | {
| "$typeName$":"__tempQueryResultStruct2", | "$typeName$":"__tempQueryResultStruct2",
| "_col_0":{ | "p":{
| "$typeName$":"Partition", | "$typeName$":"Partition",
| "version":0 | "version":0
| } | }
...@@ -945,12 +945,12 @@ class GremlinTest extends BaseGremlinTest { ...@@ -945,12 +945,12 @@ class GremlinTest extends BaseGremlinTest {
val r = QueryProcessor.evaluate(e, g, gp) val r = QueryProcessor.evaluate(e, g, gp)
validateJson(r, validateJson(r,
"""{ """{
| "query":"Partition as p where (values = [\"2015-01-01\"]) table where (name = \"sales_fact_daily_mv\") db where (name = \"Reporting\") and (clusterName = \"test\") as _src1 select p.values as _col_0", | "query":"Partition as p where (values = [\"2015-01-01\"]) table where (name = \"sales_fact_daily_mv\") db where (name = \"Reporting\") and (clusterName = \"test\") as _src1 select p.values as p.values",
| "dataType":{ | "dataType":{
| "typeName":"__tempQueryResultStruct2", | "typeName":"__tempQueryResultStruct2",
| "attributeDefinitions":[ | "attributeDefinitions":[
| { | {
| "name":"_col_0", | "name":"p.values",
| "dataTypeName":"array<string>", | "dataTypeName":"array<string>",
| "multiplicity":{ | "multiplicity":{
| "lower":0, | "lower":0,
...@@ -967,7 +967,7 @@ class GremlinTest extends BaseGremlinTest { ...@@ -967,7 +967,7 @@ class GremlinTest extends BaseGremlinTest {
| "rows":[ | "rows":[
| { | {
| "$typeName$":"__tempQueryResultStruct2", | "$typeName$":"__tempQueryResultStruct2",
| "_col_0":[ | "p.values":[
| "2015-01-01" | "2015-01-01"
| ] | ]
| } | }
......
Markdown is supported
0% or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment