Commit 864cc5bc by Shwetha GS

ATLAS-54 Rename configs in hive hook (shwethags)

parent b273520e
@@ -56,11 +56,11 @@ import java.util.Set;
  */
 public class HiveMetaStoreBridge {
     private static final String DEFAULT_DGI_URL = "http://localhost:21000/";
-    public static final String HIVE_CLUSTER_NAME = "hive.cluster.name";
+    public static final String HIVE_CLUSTER_NAME = "atlas.cluster.name";
     public static final String DEFAULT_CLUSTER_NAME = "primary";
     private final String clusterName;
-    public static final String DGI_URL_PROPERTY = "hive.hook.dgi.url";
+    public static final String ATLAS_ENDPOINT = "atlas.rest.address";
     private static final Logger LOG = LoggerFactory.getLogger(HiveMetaStoreBridge.class);
@@ -78,7 +78,7 @@ public class HiveMetaStoreBridge {
     public HiveMetaStoreBridge(HiveConf hiveConf, String doAsUser, UserGroupInformation ugi) throws Exception {
         clusterName = hiveConf.get(HIVE_CLUSTER_NAME, DEFAULT_CLUSTER_NAME);
         hiveClient = Hive.get(hiveConf);
-        atlasClient = new AtlasClient(hiveConf.get(DGI_URL_PROPERTY, DEFAULT_DGI_URL), ugi, doAsUser);
+        atlasClient = new AtlasClient(hiveConf.get(ATLAS_ENDPOINT, DEFAULT_DGI_URL), ugi, doAsUser);
     }
     public AtlasClient getAtlasClient() {
...
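For orientation, a minimal sketch of wiring the renamed keys into the bridge, assuming HiveMetaStoreBridge and AtlasClient are on the classpath; the values are the documented defaults, and the UGI lookup is illustrative rather than part of this commit:
<verbatim>
import org.apache.hadoop.hive.conf.HiveConf;
import org.apache.hadoop.security.UserGroupInformation;

public class BridgeWiringSketch {
    public static HiveMetaStoreBridge newBridge() throws Exception {
        HiveConf hiveConf = new HiveConf();
        // the renamed keys, with their documented default values
        hiveConf.set(HiveMetaStoreBridge.HIVE_CLUSTER_NAME, "primary");             // atlas.cluster.name
        hiveConf.set(HiveMetaStoreBridge.ATLAS_ENDPOINT, "http://localhost:21000/"); // atlas.rest.address

        // doAsUser/ugi come from the caller's security context (illustrative)
        UserGroupInformation ugi = UserGroupInformation.getCurrentUser();
        return new HiveMetaStoreBridge(hiveConf, ugi.getShortUserName(), ugi);
    }
}
</verbatim>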
@@ -60,9 +60,12 @@ public class HiveHook implements ExecuteWithHookContext {
     private static final int WAIT_TIME = 3;
     private static ExecutorService executor;
-    private static final String MIN_THREADS = "hive.hook.dgi.minThreads";
-    private static final String MAX_THREADS = "hive.hook.dgi.maxThreads";
-    private static final String KEEP_ALIVE_TIME = "hive.hook.dgi.keepAliveTime";
+    public static final String CONF_PREFIX = "atlas.hook.hive.";
+
+    private static final String MIN_THREADS = CONF_PREFIX + "minThreads";
+    private static final String MAX_THREADS = CONF_PREFIX + "maxThreads";
+    private static final String KEEP_ALIVE_TIME = CONF_PREFIX + "keepAliveTime";
+    public static final String CONF_SYNC = CONF_PREFIX + "synchronous";
     private static final int minThreadsDefault = 5;
     private static final int maxThreadsDefault = 5;
@@ -131,7 +134,7 @@ public class HiveHook implements ExecuteWithHookContext {
         // clone to avoid concurrent access
         final HiveEvent event = new HiveEvent();
         final HiveConf conf = new HiveConf(hookContext.getConf());
-        boolean debug = conf.get("hive.hook.dgi.synchronous", "false").equals("true");
+        boolean debug = conf.get(CONF_SYNC, "false").equals("true");
         event.conf = conf;
         event.inputs = hookContext.getInputs();
...
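To make the renamed tuning keys concrete, here is a hedged sketch of how the hook's pool is plausibly sized from them; it is reconstructed from the constants and defaults visible in this hunk and is not the hook's verbatim code:
<verbatim>
import java.util.concurrent.LinkedBlockingQueue;
import java.util.concurrent.ThreadPoolExecutor;
import java.util.concurrent.TimeUnit;

// Inside the hook's initialization (sketch):
int minThreads = conf.getInt(MIN_THREADS, minThreadsDefault);  // atlas.hook.hive.minThreads, default 5
int maxThreads = conf.getInt(MAX_THREADS, maxThreadsDefault);  // atlas.hook.hive.maxThreads, default 5
long keepAlive = conf.getLong(KEEP_ALIVE_TIME, 10);            // atlas.hook.hive.keepAliveTime, in msecs

executor = new ThreadPoolExecutor(minThreads, maxThreads, keepAlive,
        TimeUnit.MILLISECONDS, new LinkedBlockingQueue<Runnable>());

// atlas.hook.hive.synchronous=true skips the pool and runs in the caller's thread
boolean synchronous = conf.get(CONF_SYNC, "false").equals("true");
</verbatim>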
@@ -25,11 +25,11 @@ hive conf directory:
    * Atlas endpoint - Add the following property with the Atlas endpoint for your set-up
 <verbatim>
 <property>
-  <name>hive.hook.dgi.url</name>
+  <name>atlas.rest.address</name>
   <value>http://localhost:21000/</value>
 </property>
 <property>
-  <name>hive.cluster.name</name>
+  <name>atlas.cluster.name</name>
   <value>primary</value>
 </property>
 </verbatim>
@@ -50,21 +50,21 @@ The hook submits the request to a thread pool executor to avoid blocking the command execution
    * Add the following properties in hive-site.xml with the Atlas endpoint for your set-up
 <verbatim>
 <property>
-  <name>hive.hook.dgi.url</name>
+  <name>atlas.rest.address</name>
   <value>http://localhost:21000/</value>
 </property>
 <property>
-  <name>hive.cluster.name</name>
+  <name>atlas.cluster.name</name>
   <value>primary</value>
 </property>
 </verbatim>
    * Add 'export HIVE_AUX_JARS_PATH=<dgi package>/hook/hive' in hive-env.sh
 The following properties in hive-site.xml control the thread pool details:
-   * hive.hook.dgi.minThreads - core number of threads. default 5
-   * hive.hook.dgi.maxThreads - maximum number of threads. default 5
-   * hive.hook.dgi.keepAliveTime - keep alive time in msecs. default 10
-   * hive.hook.dgi.synchronous - boolean, true to run the hook synchronously. default false
+   * atlas.hook.hive.minThreads - core number of threads. default 5
+   * atlas.hook.hive.maxThreads - maximum number of threads. default 5
+   * atlas.hook.hive.keepAliveTime - keep alive time in msecs. default 10
+   * atlas.hook.hive.synchronous - boolean, true to run the hook synchronously. default false
 ---++ Limitations
    * Since database, table and column names are case-insensitive in Hive, the corresponding names in entities are lowercase. So, any search APIs should use lowercase when querying on the entity names
...
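For reference, all of the renamed keys from this page gathered into a single hive-site.xml fragment; the values shown are the documented defaults:
<verbatim>
<property>
  <name>atlas.rest.address</name>
  <value>http://localhost:21000/</value>
</property>
<property>
  <name>atlas.cluster.name</name>
  <value>primary</value>
</property>
<property>
  <name>atlas.hook.hive.minThreads</name>
  <value>5</value>
</property>
<property>
  <name>atlas.hook.hive.maxThreads</name>
  <value>5</value>
</property>
<property>
  <name>atlas.hook.hive.keepAliveTime</name>
  <value>10</value>
</property>
<property>
  <name>atlas.hook.hive.synchronous</name>
  <value>false</value>
</property>
</verbatim>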
@@ -123,14 +123,6 @@ public class BaseSSLAndKerberosTest extends BaseSecurityTest {
     }
     protected HiveConf getHiveConf() {
-        HiveConf hiveConf = new HiveConf(this.getClass());
-        hiveConf.setVar(HiveConf.ConfVars.PREEXECHOOKS, "");
-        hiveConf.setVar(HiveConf.ConfVars.POSTEXECHOOKS, HiveHook.class.getName());
-        hiveConf.setBoolVar(HiveConf.ConfVars.HIVE_SUPPORT_CONCURRENCY, false);
-        hiveConf.setVar(HiveConf.ConfVars.METASTOREWAREHOUSE, System.getProperty("user.dir") + "/target/atlas");
-        hiveConf.set(HiveMetaStoreBridge.DGI_URL_PROPERTY, DGI_URL);
-        hiveConf.set("javax.jdo.option.ConnectionURL", "jdbc:derby:./target/metastore_db;create=true");
-        hiveConf.set("hive.hook.dgi.synchronous", "true");
-        return hiveConf;
+        return HiveHookIT.createHiveConf(DGI_URL);
     }
 }
@@ -54,7 +54,7 @@ public class HiveHookIT {
     @BeforeClass
     public void setUp() throws Exception {
         //Set-up hive session
-        HiveConf conf = getHiveConf();
+        HiveConf conf = createHiveConf();
         driver = new Driver(conf);
         ss = new SessionState(conf, System.getProperty("user.name"));
         ss = SessionState.start(ss);
@@ -63,15 +63,19 @@ public class HiveHookIT {
         dgiCLient = new AtlasClient(DGI_URL);
     }
-    private HiveConf getHiveConf() {
-        HiveConf hiveConf = new HiveConf(this.getClass());
+    public static HiveConf createHiveConf() {
+        return createHiveConf(DGI_URL);
+    }
+
+    public static HiveConf createHiveConf(String atlasEndpoint) {
+        HiveConf hiveConf = new HiveConf(HiveHookIT.class);
         hiveConf.setVar(HiveConf.ConfVars.PREEXECHOOKS, "");
         hiveConf.setVar(HiveConf.ConfVars.POSTEXECHOOKS, HiveHook.class.getName());
         hiveConf.setBoolVar(HiveConf.ConfVars.HIVE_SUPPORT_CONCURRENCY, false);
         hiveConf.setVar(HiveConf.ConfVars.METASTOREWAREHOUSE, System.getProperty("user.dir") + "/target/metastore");
-        hiveConf.set(HiveMetaStoreBridge.DGI_URL_PROPERTY, DGI_URL);
-        hiveConf.set("javax.jdo.option.ConnectionURL", "jdbc:derby:./target/metastore_db;create=true");
-        hiveConf.set("hive.hook.dgi.synchronous", "true");
+        hiveConf.set(HiveMetaStoreBridge.ATLAS_ENDPOINT, atlasEndpoint);
+        hiveConf.setVar(HiveConf.ConfVars.METASTORECONNECTURLKEY, "jdbc:derby:./target/metastore_db;create=true");
+        hiveConf.set(HiveHook.CONF_SYNC, "true");
         hiveConf.set(HiveMetaStoreBridge.HIVE_CLUSTER_NAME, CLUSTER_NAME);
         hiveConf.setBoolVar(HiveConf.ConfVars.HIVETESTMODE, true); //to not use hdfs
         hiveConf.setVar(HiveConf.ConfVars.HIVETESTMODEPREFIX, "");
...
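Since createHiveConf is now public and static, any test can build a hook-enabled HiveConf in one line; a brief usage sketch (the endpoint URL is illustrative):
<verbatim>
HiveConf conf = HiveHookIT.createHiveConf("http://localhost:21000/");
Driver driver = new Driver(conf);   // same set-up path as setUp() above
</verbatim>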
@@ -187,15 +187,7 @@ public class SSLHiveHookIT {
     }
     private HiveConf getHiveConf() {
-        HiveConf hiveConf = new HiveConf(this.getClass());
-        hiveConf.setVar(HiveConf.ConfVars.PREEXECHOOKS, "");
-        hiveConf.setVar(HiveConf.ConfVars.POSTEXECHOOKS, HiveHook.class.getName());
-        hiveConf.setBoolVar(HiveConf.ConfVars.HIVE_SUPPORT_CONCURRENCY, false);
-        hiveConf.setVar(HiveConf.ConfVars.METASTOREWAREHOUSE, System.getProperty("user.dir") + "/target/atlas");
-        hiveConf.set(HiveMetaStoreBridge.DGI_URL_PROPERTY, DGI_URL);
-        hiveConf.set("javax.jdo.option.ConnectionURL", "jdbc:derby:./target/metastore_db;create=true");
-        hiveConf.set("hive.hook.dgi.synchronous", "true");
-        return hiveConf;
+        return HiveHookIT.createHiveConf(DGI_URL);
     }
     private void runCommand(String cmd) throws Exception {
...
@@ -4,6 +4,7 @@ Apache Atlas Release Notes
 --trunk - unreleased
 INCOMPATIBLE CHANGES:
+ATLAS-54 Rename configs in hive hook (shwethags)
 ATLAS-3 Mixed Index creation fails with Date types (suma.shivaprasad via shwethags)
 ALL CHANGES:
...