Commit 038b6b31 by Suma Shivaprasad

ATLAS-1027 Atlas hooks should use properties from atlas-application.properties, instead of component's configuration (mneethiraj via sumasai)
parent 3f47408e
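
The change applies one pattern across the hooks: instead of reading Atlas settings such as the cluster name from the component's own configuration (HiveConf, the Storm topology conf, a Hadoop Configuration created in the Sqoop hook), each hook loads atlas-application.properties through ApplicationProperties and reads the value from the returned Commons Configuration. A minimal sketch of that pattern, assuming the key and default visible in the diffs below ("atlas.cluster.name", "primary"); the class itself is illustrative, not part of this commit:

import org.apache.atlas.ApplicationProperties;
import org.apache.commons.configuration.Configuration;

public class ClusterNameResolverSketch {
    // Key and default are assumptions drawn from the diffs below, not from this commit's code.
    private static final String CLUSTER_NAME_KEY = "atlas.cluster.name";
    private static final String DEFAULT_CLUSTER_NAME = "primary";

    public static String resolveClusterName() throws Exception {
        // Load atlas-application.properties from the classpath instead of the component's config.
        Configuration atlasProperties = ApplicationProperties.get();
        // Fall back to the default cluster name when the property is not set.
        return atlasProperties.getString(CLUSTER_NAME_KEY, DEFAULT_CLUSTER_NAME);
    }
}
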
@@ -89,7 +89,7 @@ public class FalconHookIT {
             return;
         }
-        HiveMetaStoreBridge hiveMetaStoreBridge = new HiveMetaStoreBridge(new HiveConf(), atlasClient);
+        HiveMetaStoreBridge hiveMetaStoreBridge = new HiveMetaStoreBridge(ApplicationProperties.get(), new HiveConf(), atlasClient);
         hiveMetaStoreBridge.registerHiveDataModel();
         FalconDataModelGenerator dataModelGenerator = new FalconDataModelGenerator();
...
@@ -42,11 +42,6 @@
   </property>
   <property>
-    <name>atlas.cluster.name</name>
-    <value>test</value>
-  </property>
-  <property>
     <name>fs.pfile.impl</name>
     <value>org.apache.hadoop.fs.ProxyLocalFileSystem</value>
   </property>
...
@@ -99,16 +99,16 @@ public class HiveMetaStoreBridge {
      * Construct a HiveMetaStoreBridge.
      * @param hiveConf {@link HiveConf} for Hive component in the cluster
      */
-    public HiveMetaStoreBridge(HiveConf hiveConf) throws Exception {
-        this(hiveConf.get(HIVE_CLUSTER_NAME, DEFAULT_CLUSTER_NAME), Hive.get(hiveConf), null);
+    public HiveMetaStoreBridge(Configuration atlasProperties, HiveConf hiveConf) throws Exception {
+        this(atlasProperties, hiveConf, null);
     }

     /**
      * Construct a HiveMetaStoreBridge.
      * @param hiveConf {@link HiveConf} for Hive component in the cluster
      */
-    public HiveMetaStoreBridge(HiveConf hiveConf, AtlasClient atlasClient) throws Exception {
-        this(hiveConf.get(HIVE_CLUSTER_NAME, DEFAULT_CLUSTER_NAME), Hive.get(hiveConf), atlasClient);
+    public HiveMetaStoreBridge(Configuration atlasProperties, HiveConf hiveConf, AtlasClient atlasClient) throws Exception {
+        this(atlasProperties.getString(HIVE_CLUSTER_NAME, DEFAULT_CLUSTER_NAME), Hive.get(hiveConf), atlasClient);
     }

     AtlasClient getAtlasClient() {
@@ -660,7 +660,7 @@ public class HiveMetaStoreBridge {
             failOnError = true;
         }

-        HiveMetaStoreBridge hiveMetaStoreBridge = new HiveMetaStoreBridge(new HiveConf(), atlasClient);
+        HiveMetaStoreBridge hiveMetaStoreBridge = new HiveMetaStoreBridge(atlasConf, new HiveConf(), atlasClient);
         hiveMetaStoreBridge.registerHiveDataModel();
         hiveMetaStoreBridge.importHiveMetadata(failOnError);
     }
...
@@ -200,7 +200,7 @@ public class HiveHook extends AtlasHook implements ExecuteWithHookContext {
         LOG.info("Entered Atlas hook for hook type {} operation {}", event.getHookType(), event.getOperation());

-        HiveMetaStoreBridge dgiBridge = new HiveMetaStoreBridge(hiveConf);
+        HiveMetaStoreBridge dgiBridge = new HiveMetaStoreBridge(atlasProperties, hiveConf);

         switch (event.getOperation()) {
         case CREATEDATABASE:
...
@@ -87,7 +87,7 @@ public class HiveHookIT {
     private static final Logger LOG = org.slf4j.LoggerFactory.getLogger(HiveHookIT.class);

     private static final String DGI_URL = "http://localhost:21000/";
-    private static final String CLUSTER_NAME = "test";
+    private static final String CLUSTER_NAME = "primary";
     public static final String DEFAULT_DB = "default";
     private static final String PART_FILE = "2015-01-01";
@@ -115,7 +115,7 @@ public class HiveHookIT {
         Configuration configuration = ApplicationProperties.get();
         atlasClient = new AtlasClient(configuration.getString(HiveMetaStoreBridge.ATLAS_ENDPOINT, DGI_URL));

-        hiveMetaStoreBridge = new HiveMetaStoreBridge(conf, atlasClient);
+        hiveMetaStoreBridge = new HiveMetaStoreBridge(configuration, conf, atlasClient);
         hiveMetaStoreBridge.registerHiveDataModel();
     }
@@ -1624,7 +1624,7 @@ public class HiveHookIT {
         verifyEntityProperties(entityType, entityName, expectedProps, false);

         if (entityType != Entity.Type.DATABASE) {
-            //Database unset properties doesnt work strangely - alter database %s unset DBPROPERTIES doesnt work
+            //Database unset properties doesnt work - alter database %s unset DBPROPERTIES doesnt work
             //Unset all the props
             StringBuilder sb = new StringBuilder("'");
             query = String.format(fmtQuery, entityName, UNSET_OP, Joiner.on("','").skipNulls().appendTo(sb, expectedProps.keySet()).append('\''));
@@ -1928,7 +1928,7 @@ public class HiveHookIT {
                 fail("Assertions failed. Failing after waiting for timeout " + timeout + " msecs", e);
             }
             LOG.debug("Waiting up to " + (mustEnd - System.currentTimeMillis()) + " msec as assertion failed", e);
-            Thread.sleep(400);
+            Thread.sleep(5000);
         }
     }
 }
...
@@ -57,11 +57,6 @@
   </property>
   <property>
-    <name>atlas.cluster.name</name>
-    <value>test</value>
-  </property>
-  <property>
     <name>fs.pfile.impl</name>
     <value>org.apache.hadoop.fs.ProxyLocalFileSystem</value>
   </property>
...
@@ -163,8 +163,7 @@ public class SqoopHook extends SqoopJobDataPublisher {
     @Override
     public void publish(SqoopJobDataPublisher.Data data) throws Exception {
         Configuration atlasProperties = ApplicationProperties.get();
-        org.apache.hadoop.conf.Configuration sqoopConf = new org.apache.hadoop.conf.Configuration();
-        String clusterName = sqoopConf.get(ATLAS_CLUSTER_NAME, DEFAULT_CLUSTER_NAME);
+        String clusterName = atlasProperties.getString(ATLAS_CLUSTER_NAME, DEFAULT_CLUSTER_NAME);

         Referenceable dbStoreRef = createDBStoreInstance(data);
         Referenceable dbRef = createHiveDatabaseInstance(clusterName, data.getHiveDB());
...
@@ -54,7 +54,7 @@ public class SqoopHookIT {
     private void registerDataModels(AtlasClient client) throws Exception {
         // Make sure hive model exists
-        HiveMetaStoreBridge hiveMetaStoreBridge = new HiveMetaStoreBridge(new HiveConf(), atlasClient);
+        HiveMetaStoreBridge hiveMetaStoreBridge = new HiveMetaStoreBridge(ApplicationProperties.get(), new HiveConf(), atlasClient);
         hiveMetaStoreBridge.registerHiveDataModel();

         SqoopDataModelGenerator dataModelGenerator = new SqoopDataModelGenerator();
...
@@ -42,11 +42,6 @@
   </property>
   <property>
-    <name>atlas.cluster.name</name>
-    <value>test</value>
-  </property>
-  <property>
     <name>fs.pfile.impl</name>
     <value>org.apache.hadoop.fs.ProxyLocalFileSystem</value>
   </property>
...
@@ -370,10 +370,6 @@ public class StormAtlasHook extends AtlasHook implements ISubmitterHook {
     }

     private String getClusterName(Map stormConf) {
-        String clusterName = AtlasConstants.DEFAULT_CLUSTER_NAME;
-        if (stormConf.containsKey(AtlasConstants.CLUSTER_NAME_KEY)) {
-            clusterName = (String)stormConf.get(AtlasConstants.CLUSTER_NAME_KEY);
-        }
-        return clusterName;
+        return atlasProperties.getString(AtlasConstants.CLUSTER_NAME_KEY, AtlasConstants.DEFAULT_CLUSTER_NAME);
     }
 }
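
In the Storm hook above, getClusterName() now reads from atlasProperties rather than from the topology's stormConf. atlasProperties is the Commons Configuration that the AtlasHook base class obtains via ApplicationProperties; a rough sketch of that initialization follows, with the field and class shape inferred from this diff rather than copied from AtlasHook:

import org.apache.atlas.ApplicationProperties;
import org.apache.commons.configuration.Configuration;

public abstract class AtlasHookSketch {
    // Assumed: the base hook exposes the loaded Atlas properties to subclasses such as StormAtlasHook.
    protected static Configuration atlasProperties;

    static {
        try {
            // atlas-application.properties is read once from the hook's classpath.
            atlasProperties = ApplicationProperties.get();
        } catch (Exception e) {
            throw new RuntimeException("Unable to load atlas-application.properties", e);
        }
    }
}
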
@@ -6,6 +6,7 @@ INCOMPATIBLE CHANGES:

 ALL CHANGES:
+ATLAS-1027 Atlas hooks should use properties from atlas-application.properties, instead of component's configuration (mneethiraj via sumasai)
 ATLAS-1030 Add instrumentation to measure performance: REST API (mneethiraj via sumasai)
 ATLAS-996 DSL queries with comparsions of many primitive types fail (jnhagelb via shwethags)
 ATLAS-971 UI not displaying results for this query - Eg: "hive_table as t where qualifiedName = 'default.input@cl1' select t" (kevalbhatt18 via shwethags)
...