Commit fad5c8e6 by gss2002

Commit fixes to HiveLineageInfo HiveHook and hivetypes.pom.xml to remove

bad dependency on calcite-0.9.2-incubating-snapshot, which is no longer usable; the Hive POM needs to be updated at Central for Hive 0.14 and Hive 1.0 to reference calcite-0.9.2-incubating. Removed where/groupby parsing for now, as its value is limited and it doesn't parse nested queries well. Handle createtable/createview and prepare to handle alters and inserts.
parent 2181803a
...@@ -56,26 +56,86 @@ ...@@ -56,26 +56,86 @@
<groupId>org.apache.hive</groupId> <groupId>org.apache.hive</groupId>
<artifactId>hive-metastore</artifactId> <artifactId>hive-metastore</artifactId>
<version>0.14.0</version> <version>0.14.0</version>
<exclusions>
<exclusion>
<groupId>org.apache.calcite</groupId>
<artifactId>calcite-core</artifactId>
</exclusion>
<exclusion>
<groupId>org.apache.calcite</groupId>
<artifactId>calcite-avatica</artifactId>
</exclusion>
</exclusions>
</dependency> </dependency>
<dependency> <dependency>
<groupId>org.apache.hive</groupId> <groupId>org.apache.hive</groupId>
<artifactId>hive-exec</artifactId> <artifactId>hive-exec</artifactId>
<version>0.14.0</version> <version>0.14.0</version>
<exclusions>
<exclusion>
<groupId>org.apache.calcite</groupId>
<artifactId>calcite-core</artifactId>
</exclusion>
<exclusion>
<groupId>org.apache.calcite</groupId>
<artifactId>calcite-avatica</artifactId>
</exclusion>
</exclusions>
</dependency> </dependency>
<dependency> <dependency>
<groupId>org.apache.hive</groupId> <groupId>org.apache.hive</groupId>
<artifactId>hive-common</artifactId> <artifactId>hive-common</artifactId>
<version>0.14.0</version> <version>0.14.0</version>
<exclusions>
<exclusion>
<groupId>org.apache.calcite</groupId>
<artifactId>calcite-core</artifactId>
</exclusion>
<exclusion>
<groupId>org.apache.calcite</groupId>
<artifactId>calcite-avatica</artifactId>
</exclusion>
</exclusions>
</dependency> </dependency>
<dependency> <dependency>
<groupId>org.apache.hive</groupId> <groupId>org.apache.hive</groupId>
<artifactId>hive-cli</artifactId> <artifactId>hive-cli</artifactId>
<version>0.14.0</version> <version>0.14.0</version>
<exclusions>
<exclusion>
<groupId>org.apache.calcite</groupId>
<artifactId>calcite-core</artifactId>
</exclusion>
<exclusion>
<groupId>org.apache.calcite</groupId>
<artifactId>calcite-avatica</artifactId>
</exclusion>
</exclusions>
</dependency> </dependency>
<dependency> <dependency>
<groupId>org.apache.hive</groupId> <groupId>org.apache.hive</groupId>
<artifactId>hive-jdbc</artifactId> <artifactId>hive-jdbc</artifactId>
<version>0.14.0</version> <version>0.14.0</version>
<exclusions>
<exclusion>
<groupId>org.apache.calcite</groupId>
<artifactId>calcite-core</artifactId>
</exclusion>
<exclusion>
<groupId>org.apache.calcite</groupId>
<artifactId>calcite-avatica</artifactId>
</exclusion>
</exclusions>
</dependency>
<dependency>
<groupId>org.apache.calcite</groupId>
<artifactId>calcite-avatica</artifactId>
<version>0.9.2-incubating</version>
</dependency>
<dependency>
<groupId>org.apache.calcite</groupId>
<artifactId>calcite-core</artifactId>
<version>0.9.2-incubating</version>
</dependency> </dependency>
<dependency> <dependency>
<groupId>com.google.code.gson</groupId> <groupId>com.google.code.gson</groupId>
......
...@@ -87,25 +87,29 @@ public class Hook implements ExecuteWithHookContext { ...@@ -87,25 +87,29 @@ public class Hook implements ExecuteWithHookContext {
executionEngine="mr"; executionEngine="mr";
} }
hiveId = sess.getSessionId(); hiveId = sess.getSessionId();
String defaultdb = null;
switch(hookContext.getHookType()) { switch(hookContext.getHookType()) {
case PRE_EXEC_HOOK: case PRE_EXEC_HOOK:
Set<ReadEntity> db = hookContext.getInputs(); Set<ReadEntity> db_pre = hookContext.getInputs();
for (Object o : db) { for (Object o : db_pre) {
LOG.debug("DB:Table="+o.toString()); LOG.debug("DB:Table="+o.toString());
defaultdb = o.toString().split("@")[0];
} }
currentTime = System.currentTimeMillis(); currentTime = System.currentTimeMillis();
HiveLineageInfo lep_pre = new HiveLineageInfo(); HiveLineageInfo lep_pre = new HiveLineageInfo();
lep_pre.getLineageInfo(query); lep_pre.getLineageInfo(query);
hlb=lep_pre.getHLBean(); hlb=lep_pre.getHLBean();
hlb.setDatabaseName(defaultdb);
hlb.setQueryEndTime(Long.toString(currentTime)); hlb.setQueryEndTime(Long.toString(currentTime));
hlb.setQueryId(queryId); hlb.setQueryId(queryId);
hlb.setQuery(query); hlb.setQuery(query);
hlb.setUser(user); hlb.setUser(user);
hlb.setHiveId(hiveId); hlb.setHiveId(hiveId);
hlb.setSuccess(false); hlb.setSuccess(false);
if (executionEngine != null ) {
if (executionEngine.equalsIgnoreCase("mr")) { if (executionEngine.equalsIgnoreCase("mr")) {
hlb.setExecutionEngine("mapreduce"); hlb.setExecutionEngine("mapreduce");
} }
...@@ -115,15 +119,24 @@ public class Hook implements ExecuteWithHookContext { ...@@ -115,15 +119,24 @@ public class Hook implements ExecuteWithHookContext {
if (executionEngine.equalsIgnoreCase("spark")) { if (executionEngine.equalsIgnoreCase("spark")) {
hlb.setExecutionEngine("spark"); hlb.setExecutionEngine("spark");
} }
} else {
hlb.setExecutionEngine("local");
}
hlb.setQueryStartTime(queryStartTime); hlb.setQueryStartTime(queryStartTime);
fireAndForget(hookContext.getConf(), hlb, queryId); fireAndForget(hookContext.getConf(), hlb, queryId);
break; break;
case POST_EXEC_HOOK: case POST_EXEC_HOOK:
Set<ReadEntity> db_post = hookContext.getInputs();
for (Object o : db_post) {
LOG.debug("DB:Table="+o.toString());
defaultdb = o.toString().split("@")[0];
}
currentTime = System.currentTimeMillis(); currentTime = System.currentTimeMillis();
HiveLineageInfo lep_post = new HiveLineageInfo(); HiveLineageInfo lep_post = new HiveLineageInfo();
lep_post.getLineageInfo(query); lep_post.getLineageInfo(query);
hlb=lep_post.getHLBean(); hlb=lep_post.getHLBean();
hlb.setDatabaseName(defaultdb);
hlb.setQueryEndTime(Long.toString(currentTime)); hlb.setQueryEndTime(Long.toString(currentTime));
hlb.setQueryId(queryId); hlb.setQueryId(queryId);
hlb.setQuery(query); hlb.setQuery(query);
...@@ -131,6 +144,7 @@ public class Hook implements ExecuteWithHookContext { ...@@ -131,6 +144,7 @@ public class Hook implements ExecuteWithHookContext {
hlb.setQueryStartTime(queryStartTime); hlb.setQueryStartTime(queryStartTime);
hlb.setSuccess(true); hlb.setSuccess(true);
hlb.setHiveId(hiveId); hlb.setHiveId(hiveId);
if (executionEngine != null ) {
if (executionEngine.equalsIgnoreCase("mr")) { if (executionEngine.equalsIgnoreCase("mr")) {
hlb.setExecutionEngine("mapreduce"); hlb.setExecutionEngine("mapreduce");
} }
...@@ -140,13 +154,22 @@ public class Hook implements ExecuteWithHookContext { ...@@ -140,13 +154,22 @@ public class Hook implements ExecuteWithHookContext {
if (executionEngine.equalsIgnoreCase("spark")) { if (executionEngine.equalsIgnoreCase("spark")) {
hlb.setExecutionEngine("spark"); hlb.setExecutionEngine("spark");
} }
} else {
hlb.setExecutionEngine("local");
}
fireAndForget(hookContext.getConf(), hlb, queryId); fireAndForget(hookContext.getConf(), hlb, queryId);
break; break;
case ON_FAILURE_HOOK: case ON_FAILURE_HOOK:
Set<ReadEntity> db_fail = hookContext.getInputs();
for (Object o : db_fail) {
LOG.debug("DB:Table="+o.toString());
defaultdb = o.toString().split("@")[0];
}
HiveLineageInfo lep_failed = new HiveLineageInfo(); HiveLineageInfo lep_failed = new HiveLineageInfo();
lep_failed.getLineageInfo(query); lep_failed.getLineageInfo(query);
hlb=lep_failed.getHLBean(); hlb=lep_failed.getHLBean();
hlb.setDatabaseName(defaultdb);
hlb.setQueryEndTime(Long.toString(currentTime)); hlb.setQueryEndTime(Long.toString(currentTime));
hlb.setQueryId(queryId); hlb.setQueryId(queryId);
hlb.setQuery(query); hlb.setQuery(query);
...@@ -155,6 +178,7 @@ public class Hook implements ExecuteWithHookContext { ...@@ -155,6 +178,7 @@ public class Hook implements ExecuteWithHookContext {
hlb.setSuccess(false); hlb.setSuccess(false);
hlb.setFailed(true); hlb.setFailed(true);
hlb.setHiveId(hiveId); hlb.setHiveId(hiveId);
if (executionEngine != null ) {
if (executionEngine.equalsIgnoreCase("mr")) { if (executionEngine.equalsIgnoreCase("mr")) {
hlb.setExecutionEngine("mapreduce"); hlb.setExecutionEngine("mapreduce");
} }
...@@ -164,6 +188,9 @@ public class Hook implements ExecuteWithHookContext { ...@@ -164,6 +188,9 @@ public class Hook implements ExecuteWithHookContext {
if (executionEngine.equalsIgnoreCase("spark")) { if (executionEngine.equalsIgnoreCase("spark")) {
hlb.setExecutionEngine("spark"); hlb.setExecutionEngine("spark");
} }
} else {
hlb.setExecutionEngine("local");
}
fireAndForget(hookContext.getConf(), hlb, queryId); fireAndForget(hookContext.getConf(), hlb, queryId);
break; break;
default: default:
......
Markdown is supported
0% or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment