Commit 1ddf3137 by Shwetha GS

ATLAS-747 Hive CTAS entity registration fails because userName is null (shwethags)

parent 53bbebcb
...@@ -33,7 +33,6 @@ import org.apache.hadoop.fs.Path; ...@@ -33,7 +33,6 @@ import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hive.conf.HiveConf; import org.apache.hadoop.hive.conf.HiveConf;
import org.apache.hadoop.hive.metastore.TableType; import org.apache.hadoop.hive.metastore.TableType;
import org.apache.hadoop.hive.metastore.api.Database; import org.apache.hadoop.hive.metastore.api.Database;
import org.apache.hadoop.hive.metastore.api.FieldSchema;
import org.apache.hadoop.hive.ql.QueryPlan; import org.apache.hadoop.hive.ql.QueryPlan;
import org.apache.hadoop.hive.ql.exec.ExplainTask; import org.apache.hadoop.hive.ql.exec.ExplainTask;
import org.apache.hadoop.hive.ql.exec.Task; import org.apache.hadoop.hive.ql.exec.Task;
...@@ -262,7 +261,7 @@ public class HiveHook extends AtlasHook implements ExecuteWithHookContext { ...@@ -262,7 +261,7 @@ public class HiveHook extends AtlasHook implements ExecuteWithHookContext {
event.setJsonPlan(getQueryPlan(hookContext.getConf(), hookContext.getQueryPlan())); event.setJsonPlan(getQueryPlan(hookContext.getConf(), hookContext.getQueryPlan()));
event.setHookType(hookContext.getHookType()); event.setHookType(hookContext.getHookType());
event.setUgi(hookContext.getUgi()); event.setUgi(hookContext.getUgi());
event.setUser(hookContext.getUserName()); event.setUser(getUser(hookContext.getUserName()));
event.setOperation(OPERATION_MAP.get(hookContext.getOperationName())); event.setOperation(OPERATION_MAP.get(hookContext.getOperationName()));
event.setQueryId(hookContext.getQueryPlan().getQueryId()); event.setQueryId(hookContext.getQueryPlan().getQueryId());
event.setQueryStr(hookContext.getQueryPlan().getQueryStr()); event.setQueryStr(hookContext.getQueryPlan().getQueryStr());
...@@ -306,6 +305,7 @@ public class HiveHook extends AtlasHook implements ExecuteWithHookContext { ...@@ -306,6 +305,7 @@ public class HiveHook extends AtlasHook implements ExecuteWithHookContext {
break; break;
case CREATETABLE_AS_SELECT: case CREATETABLE_AS_SELECT:
case CREATEVIEW: case CREATEVIEW:
case ALTERVIEW_AS: case ALTERVIEW_AS:
case LOAD: case LOAD:
...@@ -619,7 +619,7 @@ public class HiveHook extends AtlasHook implements ExecuteWithHookContext { ...@@ -619,7 +619,7 @@ public class HiveHook extends AtlasHook implements ExecuteWithHookContext {
explain.initialize(hiveConf, queryPlan, null); explain.initialize(hiveConf, queryPlan, null);
List<Task<?>> rootTasks = queryPlan.getRootTasks(); List<Task<?>> rootTasks = queryPlan.getRootTasks();
return explain.getJSONPlan(null, null, rootTasks, queryPlan.getFetchTask(), true, false, false); return explain.getJSONPlan(null, null, rootTasks, queryPlan.getFetchTask(), true, false, false);
} catch (Exception e) { } catch (Throwable e) {
LOG.info("Failed to get queryplan", e); LOG.info("Failed to get queryplan", e);
return new JSONObject(); return new JSONObject();
} }
...@@ -627,8 +627,6 @@ public class HiveHook extends AtlasHook implements ExecuteWithHookContext { ...@@ -627,8 +627,6 @@ public class HiveHook extends AtlasHook implements ExecuteWithHookContext {
private boolean isSelectQuery(HiveEventContext event) { private boolean isSelectQuery(HiveEventContext event) {
if (event.getOperation() == HiveOperation.QUERY) { if (event.getOperation() == HiveOperation.QUERY) {
Set<WriteEntity> outputs = event.getOutputs();
//Select query has only one output //Select query has only one output
if (event.getOutputs().size() == 1) { if (event.getOutputs().size() == 1) {
WriteEntity output = event.getOutputs().iterator().next(); WriteEntity output = event.getOutputs().iterator().next();
......
...@@ -46,6 +46,7 @@ import org.apache.hadoop.hive.ql.hooks.Entity; ...@@ -46,6 +46,7 @@ import org.apache.hadoop.hive.ql.hooks.Entity;
import org.apache.hadoop.hive.ql.metadata.Table; import org.apache.hadoop.hive.ql.metadata.Table;
import org.apache.hadoop.hive.ql.processors.CommandProcessorResponse; import org.apache.hadoop.hive.ql.processors.CommandProcessorResponse;
import org.apache.hadoop.hive.ql.session.SessionState; import org.apache.hadoop.hive.ql.session.SessionState;
import org.apache.hadoop.security.UserGroupInformation;
import org.codehaus.jettison.json.JSONException; import org.codehaus.jettison.json.JSONException;
import org.codehaus.jettison.json.JSONObject; import org.codehaus.jettison.json.JSONObject;
import org.slf4j.Logger; import org.slf4j.Logger;
...@@ -88,7 +89,7 @@ public class HiveHookIT { ...@@ -88,7 +89,7 @@ public class HiveHookIT {
conf.set("fs.default.name", "file:///'"); conf.set("fs.default.name", "file:///'");
conf.setClassLoader(Thread.currentThread().getContextClassLoader()); conf.setClassLoader(Thread.currentThread().getContextClassLoader());
driver = new Driver(conf); driver = new Driver(conf);
ss = new SessionState(conf, System.getProperty("user.name")); ss = new SessionState(conf);
ss = SessionState.start(ss); ss = SessionState.start(ss);
SessionState.setCurrentSessionState(ss); SessionState.setCurrentSessionState(ss);
...@@ -238,6 +239,7 @@ public class HiveHookIT { ...@@ -238,6 +239,7 @@ public class HiveHookIT {
String tableId = assertTableIsRegistered(dbName, tableName); String tableId = assertTableIsRegistered(dbName, tableName);
Referenceable processReference = validateProcess(query, 1, 1); Referenceable processReference = validateProcess(query, 1, 1);
assertEquals(processReference.get("userName"), UserGroupInformation.getCurrentUser().getShortUserName());
verifyTimestamps(processReference, "startTime"); verifyTimestamps(processReference, "startTime");
verifyTimestamps(processReference, "endTime"); verifyTimestamps(processReference, "endTime");
......
...@@ -130,6 +130,10 @@ public abstract class AtlasHook { ...@@ -130,6 +130,10 @@ public abstract class AtlasHook {
return getUser(null, null); return getUser(null, null);
} }
public static String getUser(String userName) {
return getUser(userName, null);
}
/** /**
* Returns the user. Order of preference: * Returns the user. Order of preference:
* 1. Given userName * 1. Given userName
......
...@@ -18,6 +18,7 @@ ATLAS-409 Atlas will not import avro tables with schema read from a file (dosset ...@@ -18,6 +18,7 @@ ATLAS-409 Atlas will not import avro tables with schema read from a file (dosset
ATLAS-379 Create sqoop and falcon metadata addons (venkatnrangan,bvellanki,sowmyaramesh via shwethags) ATLAS-379 Create sqoop and falcon metadata addons (venkatnrangan,bvellanki,sowmyaramesh via shwethags)
ALL CHANGES: ALL CHANGES:
ATLAS-747 Hive CTAS entity registration fails because userName is null (shwethags)
ATLAS-759 HiveHookIT.testAlterTableChangeColumn is consistently failing on master (yhemanth) ATLAS-759 HiveHookIT.testAlterTableChangeColumn is consistently failing on master (yhemanth)
ATLAS-690 Read timed out exceptions when tables are imported into Atlas (yhemanth via shwethags) ATLAS-690 Read timed out exceptions when tables are imported into Atlas (yhemanth via shwethags)
ATLAS-585 NotificationHookConsumer creates new AtlasClient for every message (shwethags) ATLAS-585 NotificationHookConsumer creates new AtlasClient for every message (shwethags)
......
Markdown is supported
0% or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment