Commit 1ddf3137 by Shwetha GS

ATLAS-747 Hive CTAS entity registration fails because userName is null (shwethags)

parent 53bbebcb
@@ -33,7 +33,6 @@ import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hive.conf.HiveConf;
import org.apache.hadoop.hive.metastore.TableType;
import org.apache.hadoop.hive.metastore.api.Database;
import org.apache.hadoop.hive.metastore.api.FieldSchema;
import org.apache.hadoop.hive.ql.QueryPlan;
import org.apache.hadoop.hive.ql.exec.ExplainTask;
import org.apache.hadoop.hive.ql.exec.Task;
@@ -262,7 +261,7 @@ public class HiveHook extends AtlasHook implements ExecuteWithHookContext {
event.setJsonPlan(getQueryPlan(hookContext.getConf(), hookContext.getQueryPlan()));
event.setHookType(hookContext.getHookType());
event.setUgi(hookContext.getUgi());
event.setUser(hookContext.getUserName());
event.setUser(getUser(hookContext.getUserName()));
event.setOperation(OPERATION_MAP.get(hookContext.getOperationName()));
event.setQueryId(hookContext.getQueryPlan().getQueryId());
event.setQueryStr(hookContext.getQueryPlan().getQueryStr());
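Note (not part of the diff): for CTAS queries Hive's HookContext can return a null user name, which is what made entity registration fail. The change above routes the value through the static AtlasHook.getUser(String) helper so a non-null user is resolved. A minimal sketch of the intended behavior, assuming the fallback described in AtlasHook's javadoc:

    // Illustrative sketch only, not part of this commit.
    String hiveUser = hookContext.getUserName();   // may be null for CTAS
    event.setUser(getUser(hiveUser));              // AtlasHook.getUser falls back to the login user when null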
@@ -306,6 +305,7 @@ public class HiveHook extends AtlasHook implements ExecuteWithHookContext {
break;
case CREATETABLE_AS_SELECT:
case CREATEVIEW:
case ALTERVIEW_AS:
case LOAD:
@@ -619,7 +619,7 @@ public class HiveHook extends AtlasHook implements ExecuteWithHookContext {
explain.initialize(hiveConf, queryPlan, null);
List<Task<?>> rootTasks = queryPlan.getRootTasks();
return explain.getJSONPlan(null, null, rootTasks, queryPlan.getFetchTask(), true, false, false);
} catch (Exception e) {
} catch (Throwable e) {
LOG.info("Failed to get queryplan", e);
return new JSONObject();
}
@@ -627,8 +627,6 @@ public class HiveHook extends AtlasHook implements ExecuteWithHookContext {
private boolean isSelectQuery(HiveEventContext event) {
if (event.getOperation() == HiveOperation.QUERY) {
Set<WriteEntity> outputs = event.getOutputs();
//Select query has only one output
if (event.getOutputs().size() == 1) {
WriteEntity output = event.getOutputs().iterator().next();
@@ -46,6 +46,7 @@ import org.apache.hadoop.hive.ql.hooks.Entity;
import org.apache.hadoop.hive.ql.metadata.Table;
import org.apache.hadoop.hive.ql.processors.CommandProcessorResponse;
import org.apache.hadoop.hive.ql.session.SessionState;
import org.apache.hadoop.security.UserGroupInformation;
import org.codehaus.jettison.json.JSONException;
import org.codehaus.jettison.json.JSONObject;
import org.slf4j.Logger;
@@ -88,7 +89,7 @@ public class HiveHookIT {
conf.set("fs.default.name", "file:///'");
conf.setClassLoader(Thread.currentThread().getContextClassLoader());
driver = new Driver(conf);
ss = new SessionState(conf, System.getProperty("user.name"));
ss = new SessionState(conf);
ss = SessionState.start(ss);
SessionState.setCurrentSessionState(ss);
@@ -238,6 +239,7 @@ public class HiveHookIT {
String tableId = assertTableIsRegistered(dbName, tableName);
Referenceable processReference = validateProcess(query, 1, 1);
assertEquals(processReference.get("userName"), UserGroupInformation.getCurrentUser().getShortUserName());
verifyTimestamps(processReference, "startTime");
verifyTimestamps(processReference, "endTime");
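The new assertion above compares the registered process's userName attribute against the Hadoop login user. A small sketch of how that expected value is resolved in the test, assuming default (simple) Hadoop security:

    // Illustrative sketch only: UserGroupInformation resolves the current login user.
    String expectedUser = UserGroupInformation.getCurrentUser().getShortUserName();
    assertEquals(processReference.get("userName"), expectedUser);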
@@ -130,6 +130,10 @@ public abstract class AtlasHook {
return getUser(null, null);
}
public static String getUser(String userName) {
return getUser(userName, null);
}
/**
* Returns the user. Order of preference:
* 1. Given userName
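The new single-argument overload simply delegates to the existing two-argument getUser. The javadoc is truncated in this view; a plausible sketch of the fallback chain it describes, written here as an assumption rather than the exact Atlas implementation:

    // Sketch only; order of preference assumed from the javadoc above:
    // 1. the given userName, then the UGI short name, then the JVM login user.
    // (imports assumed: java.io.IOException, org.apache.hadoop.security.UserGroupInformation)
    public static String getUser(String userName, UserGroupInformation ugi) {
        if (userName != null && !userName.isEmpty()) {
            return userName;                              // explicit user name wins
        }
        if (ugi != null) {
            return ugi.getShortUserName();                // fall back to the supplied UGI
        }
        try {
            return UserGroupInformation.getCurrentUser().getShortUserName();
        } catch (IOException e) {
            return System.getProperty("user.name");       // last resort: JVM user
        }
    }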
@@ -18,6 +18,7 @@ ATLAS-409 Atlas will not import avro tables with schema read from a file (dosset
ATLAS-379 Create sqoop and falcon metadata addons (venkatnrangan,bvellanki,sowmyaramesh via shwethags)
ALL CHANGES:
ATLAS-747 Hive CTAS entity registration fails because userName is null (shwethags)
ATLAS-759 HiveHookIT.testAlterTableChangeColumn is consistently failing on master (yhemanth)
ATLAS-690 Read timed out exceptions when tables are imported into Atlas (yhemanth via shwethags)
ATLAS-585 NotificationHookConsumer creates new AtlasClient for every message (shwethags)