Commit 42dee9f3 by a760104

Remove the com.aetna naming and rename the package to:

org.apache.hadoop.metadata.hive.*, so the hook is now org.apache.hadoop.metadata.hive.Hook for the pre/post/failure hooks.
parent 34cdef44
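For deployments, the rename means the hook class name changes wherever it is registered. Hive wires pre/post/failure hooks through the standard hive.exec.pre.hooks, hive.exec.post.hooks, and hive.exec.failure.hooks properties; a minimal sketch of pointing all three at the renamed class (the property names are standard Hive configuration keys, the class name comes from this commit):

```java
import org.apache.hadoop.hive.conf.HiveConf;

public class RegisterHook {
    public static void main(String[] args) {
        HiveConf conf = new HiveConf();
        // Point all three hook phases at the renamed class.
        // hive.exec.*.hooks are standard Hive configuration keys.
        conf.set("hive.exec.pre.hooks", "org.apache.hadoop.metadata.hive.Hook");
        conf.set("hive.exec.post.hooks", "org.apache.hadoop.metadata.hive.Hook");
        conf.set("hive.exec.failure.hooks", "org.apache.hadoop.metadata.hive.Hook");
        System.out.println(conf.get("hive.exec.post.hooks"));
    }
}
```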
-package com.aetna.hadoop.dgc.hive;
+package org.apache.hadoop.metadata.hive;
 import java.io.Serializable;
 import java.util.List;
......
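The only change in this first file is the package line; the java.io.Serializable and java.util.List imports suggest HiveLineageBean remains a plain serializable bean. A hedged sketch of the shape implied by those imports and by the nested types referenced in the next hunk (the field and accessor names here are illustrative assumptions, not the actual source):

```java
package org.apache.hadoop.metadata.hive;

import java.io.Serializable;
import java.util.List;

// Illustrative shape only: field names are assumptions based on the
// imports in this commit, not copied from the real HiveLineageBean.
public class HiveLineageBean implements Serializable {
    public static class SourceTables implements Serializable {
        private String tableName; // hypothetical field
        public String getTableName() { return tableName; }
        public void setTableName(String tableName) { this.tableName = tableName; }
    }

    private List<SourceTables> sourceTables;
    public List<SourceTables> getSourceTables() { return sourceTables; }
    public void setSourceTables(List<SourceTables> sourceTables) { this.sourceTables = sourceTables; }
}
```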
-package com.aetna.hadoop.dgc.hive;
+package org.apache.hadoop.metadata.hive;
 /**
  * Licensed to the Apache Software Foundation (ASF) under one
@@ -40,17 +40,17 @@ import org.apache.hadoop.hive.ql.parse.HiveParser;
 import org.apache.hadoop.hive.ql.parse.ParseDriver;
 import org.apache.hadoop.hive.ql.parse.ParseException;
 import org.apache.hadoop.hive.ql.parse.SemanticException;
+import org.apache.hadoop.metadata.hive.HiveLineageBean.CreateColumns;
+import org.apache.hadoop.metadata.hive.HiveLineageBean.GroupBy;
+import org.apache.hadoop.metadata.hive.HiveLineageBean.QueryColumns;
+import org.apache.hadoop.metadata.hive.HiveLineageBean.SourceTables;
+import org.apache.hadoop.metadata.hive.HiveLineageBean.WhereClause;
 import org.apache.log4j.ConsoleAppender;
 import org.apache.log4j.Level;
 import org.apache.log4j.LogManager;
 import org.apache.log4j.Logger;
 import org.apache.log4j.PatternLayout;
-import com.aetna.hadoop.dgc.hive.HiveLineageBean.CreateColumns;
-import com.aetna.hadoop.dgc.hive.HiveLineageBean.GroupBy;
-import com.aetna.hadoop.dgc.hive.HiveLineageBean.QueryColumns;
-import com.aetna.hadoop.dgc.hive.HiveLineageBean.SourceTables;
-import com.aetna.hadoop.dgc.hive.HiveLineageBean.WhereClause;
 import com.google.gson.Gson;
 /**
......
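This second file pulls in Hive's parser classes (HiveParser, ParseDriver, ParseException, SemanticException) alongside the renamed HiveLineageBean nested types, which indicates it parses the query into an AST and walks it to populate the bean. A minimal sketch of the parsing entry point those imports support (the query string is an example; error handling is trimmed):

```java
import org.apache.hadoop.hive.ql.parse.ASTNode;
import org.apache.hadoop.hive.ql.parse.ParseDriver;
import org.apache.hadoop.hive.ql.parse.ParseException;

public class ParseSketch {
    public static void main(String[] args) throws ParseException {
        ParseDriver pd = new ParseDriver();
        // Parse a sample query into Hive's AST; lineage extraction would
        // then walk this tree for source tables, columns, and clauses.
        ASTNode tree = pd.parse("SELECT a, b FROM t WHERE c = 1");
        System.out.println(tree.dump());
    }
}
```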
-package com.aetna.hadoop.dgc.hive;
+package org.apache.hadoop.metadata.hive;
 import com.google.common.util.concurrent.ThreadFactoryBuilder;
@@ -169,8 +169,8 @@ public class Hook implements ExecuteWithHookContext {
 public void fireAndForget(Configuration conf, HiveLineageBean hookData, String queryId) throws Exception {
     String postUri = "http://167.69.111.50:20810/HiveHookCollector/HookServlet";
-    if (conf.getTrimmed("aetna.hive.hook") != null) {
-        postUri = conf.getTrimmed("aetna.hive.hook");
+    if (conf.getTrimmed("hadoop.metadata.hive.hook.uri") != null) {
+        postUri = conf.getTrimmed("hadoop.metadata.hive.hook.uri");
     }
     Gson gson = new Gson();
     String gsonString = gson.toJson(hookData);
......
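The substantive change in Hook.java is the configuration key: the hard-coded collector URL can now be overridden through hadoop.metadata.hive.hook.uri instead of aetna.hive.hook. A standalone sketch of that resolution logic as the diff shows it (the default URL is the one in the source; the sample override and bean content are illustrative):

```java
import java.util.Collections;

import org.apache.hadoop.conf.Configuration;
import com.google.gson.Gson;

public class HookUriSketch {
    static String resolvePostUri(Configuration conf) {
        // Fall back to the hard-coded collector URL from the source
        // unless the new key is set.
        String postUri = "http://167.69.111.50:20810/HiveHookCollector/HookServlet";
        if (conf.getTrimmed("hadoop.metadata.hive.hook.uri") != null) {
            postUri = conf.getTrimmed("hadoop.metadata.hive.hook.uri");
        }
        return postUri;
    }

    public static void main(String[] args) {
        Configuration conf = new Configuration();
        conf.set("hadoop.metadata.hive.hook.uri", "http://example.com/collector"); // illustrative override
        System.out.println(resolvePostUri(conf));
        // The hook serializes the lineage bean to JSON with Gson before posting.
        System.out.println(new Gson().toJson(Collections.singletonMap("queryId", "q1")));
    }
}
```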