Commit dbe10615 by Shwetha GS

ATLAS-448 Hive IllegalArgumentException with Atlas hook enabled on SHOW…

ATLAS-448 Hive IllegalArgumentException with Atlas hook enabled on SHOW TRANSACTIONS AND SHOW COMPACTIONS (shwethags)
parent f3ac2c0f
@@ -49,7 +49,9 @@ import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;

 import java.util.ArrayList;
+import java.util.HashMap;
 import java.util.List;
+import java.util.Map;
 import java.util.Set;
 import java.util.concurrent.ExecutorService;
 import java.util.concurrent.LinkedBlockingQueue;
@@ -72,6 +74,8 @@ public class HiveHook extends AtlasHook implements ExecuteWithHookContext {
     public static final String HOOK_NUM_RETRIES = CONF_PREFIX + "numRetries";

+    private static final Map<String, HiveOperation> OPERATION_MAP = new HashMap<>();
+
     // wait time determines how long we wait before we exit the jvm on
     // shutdown. Pending requests after that will not be sent.
     private static final int WAIT_TIME = 3;
@@ -131,6 +135,8 @@ public class HiveHook extends AtlasHook implements ExecuteWithHookContext {
                     // shutdown client
                 }
             });
+
+            setupOperationMap();
         } catch (Exception e) {
             LOG.info("Attempting to send msg while shutdown in progress.", e);
         }
@@ -140,6 +146,13 @@ public class HiveHook extends AtlasHook implements ExecuteWithHookContext {
         LOG.info("Created Atlas Hook");
     }

+    private static void setupOperationMap() {
+        //Populate OPERATION_MAP - string to HiveOperation mapping
+        for (HiveOperation hiveOperation : HiveOperation.values()) {
+            OPERATION_MAP.put(hiveOperation.getOperationName(), hiveOperation);
+        }
+    }
+
     @Override
     protected String getNumberOfRetriesPropertyKey() {
         return HOOK_NUM_RETRIES;
@@ -156,7 +169,7 @@ public class HiveHook extends AtlasHook implements ExecuteWithHookContext {
         event.user = hookContext.getUserName() == null ? hookContext.getUgi().getUserName() : hookContext.getUserName();
         event.ugi = hookContext.getUgi();
-        event.operation = HiveOperation.valueOf(hookContext.getOperationName());
+        event.operation = OPERATION_MAP.get(hookContext.getOperationName());
         event.hookType = hookContext.getHookType();
         event.queryId = hookContext.getQueryPlan().getQueryId();
         event.queryStr = hookContext.getQueryPlan().getQueryStr();
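
Editor's note on the hunk above (not part of the commit): HookContext.getOperationName() reports the operation's display name (e.g. "SHOW COMPACTIONS"), while HiveOperation.valueOf() resolves enum constant names (e.g. SHOW_COMPACTIONS). For operations whose two names differ, the old lookup threw the IllegalArgumentException in the JIRA title; the new OPERATION_MAP is keyed by getOperationName(), so the lookup succeeds. A minimal sketch of the difference, assuming Hive's hive-exec jar on the classpath:

// Sketch only (not from the commit): reproduces the ATLAS-448 failure mode.
import org.apache.hadoop.hive.ql.plan.HiveOperation;

import java.util.HashMap;
import java.util.Map;

public class OperationLookupSketch {
    public static void main(String[] args) {
        // Build the same name -> enum map the hook now populates in setupOperationMap().
        Map<String, HiveOperation> operationMap = new HashMap<>();
        for (HiveOperation op : HiveOperation.values()) {
            operationMap.put(op.getOperationName(), op);
        }

        // HookContext.getOperationName() reports the display name, not the constant name.
        String name = "SHOW COMPACTIONS";

        System.out.println(operationMap.get(name)); // prints SHOW_COMPACTIONS

        // Old code path: the enum constant is SHOW_COMPACTIONS, so
        // valueOf("SHOW COMPACTIONS") throws IllegalArgumentException.
        try {
            HiveOperation.valueOf(name);
        } catch (IllegalArgumentException expected) {
            System.out.println("valueOf failed: " + expected.getMessage());
        }
    }
}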
......
@@ -20,11 +20,11 @@ package org.apache.atlas.hive.hook;

 import org.apache.atlas.ApplicationProperties;
 import org.apache.atlas.AtlasClient;
-import org.apache.atlas.utils.ParamChecker;
 import org.apache.atlas.hive.bridge.HiveMetaStoreBridge;
 import org.apache.atlas.hive.model.HiveDataModelGenerator;
 import org.apache.atlas.hive.model.HiveDataTypes;
 import org.apache.atlas.typesystem.Referenceable;
+import org.apache.atlas.utils.ParamChecker;
 import org.apache.commons.configuration.Configuration;
 import org.apache.commons.lang.RandomStringUtils;
 import org.apache.commons.lang.StringEscapeUtils;
@@ -32,6 +32,7 @@ import org.apache.commons.lang.StringUtils;
 import org.apache.hadoop.hive.conf.HiveConf;
 import org.apache.hadoop.hive.metastore.TableType;
 import org.apache.hadoop.hive.ql.Driver;
+import org.apache.hadoop.hive.ql.processors.CommandProcessorResponse;
 import org.apache.hadoop.hive.ql.session.SessionState;
 import org.codehaus.jettison.json.JSONArray;
 import org.codehaus.jettison.json.JSONObject;
@@ -43,6 +44,8 @@ import org.testng.annotations.Test;
 import java.io.File;
 import java.util.Map;

+import static org.testng.Assert.assertEquals;
+
 public class HiveHookIT {
     public static final Logger LOG = org.slf4j.LoggerFactory.getLogger(HiveHookIT.class);
@@ -67,8 +70,10 @@ public class HiveHookIT {
     }

     private void runCommand(String cmd) throws Exception {
+        LOG.debug("Running command '{}'", cmd);
         ss.setCommandType(null);
-        driver.run(cmd);
+        CommandProcessorResponse response = driver.run(cmd);
+        assertEquals(response.getResponseCode(), 0);
     }

     @Test
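
Aside (not from the commit): Driver.run() signals most failures through the returned CommandProcessorResponse rather than by throwing, so the old helper could silently pass a failed HiveQL statement. A sketch of an even stricter variant under the same assumptions as the test class (a `driver` field); getErrorMessage() and getSQLState() are existing CommandProcessorResponse accessors:

// Sketch only: surface Hive's error details when a statement fails,
// instead of asserting on the numeric response code alone.
private void runCommandStrict(String cmd) throws Exception {
    CommandProcessorResponse response = driver.run(cmd);
    if (response.getResponseCode() != 0) {
        throw new IllegalStateException(
                "Command failed [" + response.getSQLState() + "]: " + response.getErrorMessage());
    }
}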
@@ -149,17 +154,6 @@ public class HiveHookIT {
         assertDatabaseIsRegistered(DEFAULT_DB);
     }

-    @Test
-    public void testRenameTable() throws Exception {
-        String tableName = createTable();
-        String newTableName = tableName();
-        runCommand(String.format("alter table %s rename to %s", tableName, newTableName));
-
-        assertTableIsRegistered(DEFAULT_DB, newTableName);
-        assertTableIsNotRegistered(DEFAULT_DB, tableName);
-    }
-
     private String assertColumnIsRegistered(String colName) throws Exception {
         LOG.debug("Searching for column {}", colName);
         String query =
@@ -265,8 +259,8 @@ public class HiveHookIT {
         assertProcessIsRegistered(query);
     }

-    @Test(enabled = false)
-    public void testAlterTable() throws Exception {
+    @Test
+    public void testAlterTableRename() throws Exception {
         String tableName = createTable();
         String newName = tableName();
         String query = "alter table " + tableName + " rename to " + newName;
@@ -276,8 +270,8 @@ public class HiveHookIT {
         assertTableIsNotRegistered(DEFAULT_DB, tableName);
     }

-    @Test(enabled = false)
-    public void testAlterView() throws Exception {
+    @Test
+    public void testAlterViewRename() throws Exception {
         String tableName = createTable();
         String viewName = tableName();
         String newName = tableName();
@@ -396,6 +390,13 @@ public class HiveHookIT {
         Assert.assertTrue(vertices.has(table2Id));
     }

+    //For ATLAS-448
+    @Test
+    public void testNoopOperation() throws Exception {
+        runCommand("show compactions");
+        runCommand("show transactions");
+    }
+
     public interface Predicate {

         /**
......
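
Note on testNoopOperation above: it exercises the fix end to end. Before the change, the post-execution hook threw IllegalArgumentException on these two statements; Hive surfaces a failed post-execution hook as a query failure (non-zero response code), which the strengthened runCommand now turns into a test failure, so a regression here cannot pass silently.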
@@ -50,4 +50,14 @@
         <name>fs.pfile.impl</name>
         <value>org.apache.hadoop.fs.ProxyLocalFileSystem</value>
     </property>
+
+    <property>
+        <name>hive.in.test</name>
+        <value>true</value>
+    </property>
+
+    <property>
+        <name>hive.zookeeper.quorum</name>
+        <value>localhost:19026</value>
+    </property>
 </configuration>
\ No newline at end of file
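
Config note (editorial): hive.in.test=true switches Hive into its unit-test mode, and hive.zookeeper.quorum points the lock manager at the suite's local ZooKeeper on port 19026; these appear to be prerequisites for running the transaction-related SHOW commands against the embedded test setup.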
@@ -7,6 +7,7 @@ ATLAS-409 Atlas will not import avro tables with schema read from a file (dosset
 ATLAS-379 Create sqoop and falcon metadata addons (venkatnrangan,bvellanki,sowmyaramesh via shwethags)

 ALL CHANGES:
+ATLAS-448 Hive IllegalArgumentException with Atlas hook enabled on SHOW TRANSACTIONS AND SHOW COMPACTIONS (shwethags)
 ATLAS-181 Integrate storm topology metadata into Atlas (svenkat,yhemanth via shwethags)
 ATLAS-311 UI: Local storage for traits - caching [not cleared on refresh] To be cleared on time lapse for 1hr (Anilg via shwethags)
 ATLAS-106 Store createTimestamp and modified timestamp separately for an entity (dkantor via shwethags)
......