Commit e0536224 by Hemanth Yamijala

ATLAS-542 Make qualifiedName and name consistent across all Datasets and Process (sumasai via yhemanth)
parent 0a44790e
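
In short: after this change, every DataSet and Process entity carries both a short, human-readable name and a unique qualifiedName (exposed as AtlasClient.REFERENCEABLE_ATTRIBUTE_NAME), where previously the qualified value was overloaded onto name. A minimal sketch of the new convention for a hive table entity, using the classes touched by this patch; the cluster/db/table values ("primary", "default", "sales_fact") are illustrative only:

    // Sketch of the attribute convention this commit introduces (values are hypothetical).
    // Assumes the Atlas typesystem API used throughout this patch:
    // org.apache.atlas.AtlasClient, org.apache.atlas.typesystem.Referenceable,
    // and the hive bridge classes.
    Referenceable tableRef = new Referenceable(HiveDataTypes.HIVE_TABLE.getName());

    // Short display name: just the lower-cased table name.
    tableRef.set(HiveDataModelGenerator.NAME, "sales_fact");

    // Unique qualified name: <dbName>.<tableName>@<clusterName>, used for de-dup and lookup.
    tableRef.set(AtlasClient.REFERENCEABLE_ATTRIBUTE_NAME,
            HiveMetaStoreBridge.getTableQualifiedName("primary", "default", "sales_fact"));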
@@ -277,8 +277,8 @@ public class FalconHook extends AtlasHook implements FalconEventPublisher {
         Referenceable tableRef = new Referenceable(HiveDataTypes.HIVE_TABLE.getName());
         tableRef.set(HiveDataModelGenerator.NAME,
-                HiveMetaStoreBridge.getTableQualifiedName(clusterName, dbName, tableName));
-        tableRef.set(HiveDataModelGenerator.TABLE_NAME, tableName);
+                tableName);
+        tableRef.set(AtlasClient.REFERENCEABLE_ATTRIBUTE_NAME, HiveMetaStoreBridge.getTableQualifiedName(clusterName, dbName, tableName));
         tableRef.set(HiveDataModelGenerator.DB, dbRef);
         entities.add(tableRef);
...
@@ -154,12 +154,12 @@ public class FalconHookIT {
         Id inId = (Id) ((List)processEntity.get("inputs")).get(0);
         Referenceable inEntity = atlasClient.getEntity(inId._getId());
-        assertEquals(inEntity.get("name"),
+        assertEquals(inEntity.get(AtlasClient.REFERENCEABLE_ATTRIBUTE_NAME),
                 HiveMetaStoreBridge.getTableQualifiedName(cluster.getName(), inDbName, inTableName));

         Id outId = (Id) ((List)processEntity.get("outputs")).get(0);
         Referenceable outEntity = atlasClient.getEntity(outId._getId());
-        assertEquals(outEntity.get("name"),
+        assertEquals(outEntity.get(AtlasClient.REFERENCEABLE_ATTRIBUTE_NAME),
                 HiveMetaStoreBridge.getTableQualifiedName(cluster.getName(), outDbName, outTableName));
     }
@@ -212,7 +212,7 @@ public class FalconHookIT {
         Id outId = (Id) ((List)processEntity.get("outputs")).get(0);
         Referenceable outEntity = atlasClient.getEntity(outId._getId());
-        assertEquals(outEntity.get("name"),
+        assertEquals(outEntity.get(AtlasClient.REFERENCEABLE_ATTRIBUTE_NAME),
                 HiveMetaStoreBridge.getTableQualifiedName(cluster.getName(), outDbName, outTableName));
     }
...
@@ -37,7 +37,7 @@ object FSDataModel extends App {
     val typesDef : TypesDef = types {
         // FS DataSet
-        _class(FSDataTypes.FS_PATH.toString, List("DataSet", AtlasClient.REFERENCEABLE_SUPER_TYPE)) {
+        _class(FSDataTypes.FS_PATH.toString) {
             //fully qualified path/URI to the filesystem path is stored in 'qualifiedName' and 'path'.
             "path" ~ (string, required, indexed)
             "createTime" ~ (date, optional, indexed)
@@ -63,7 +63,7 @@ object FSDataModel extends App {
         }
         //HDFS DataSet
-        _class(FSDataTypes.HDFS_PATH.toString, List(FSDataTypes.FS_PATH.toString)) {
+        _class(FSDataTypes.HDFS_PATH.toString, List("DataSet", FSDataTypes.FS_PATH.toString)) {
             //Making cluster optional since path is already unique containing the namenode URI
             AtlasConstants.CLUSTER_NAME_ATTRIBUTE ~ (string, optional, indexed)
             "numberOfReplicas" ~ (int, optional, indexed)
...
@@ -35,6 +35,7 @@ import org.apache.atlas.typesystem.persistence.Id;
 import org.apache.atlas.utils.AuthenticationUtil;
 import org.apache.commons.configuration.Configuration;
 import org.apache.commons.lang.RandomStringUtils;
+import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.hive.conf.HiveConf;
 import org.apache.hadoop.hive.metastore.api.Database;
 import org.apache.hadoop.hive.metastore.api.FieldSchema;
@@ -336,8 +337,8 @@ public class HiveMetaStoreBridge {
         }
         String tableQualifiedName = getTableQualifiedName(clusterName, hiveTable);
-        tableReference.set(HiveDataModelGenerator.NAME, tableQualifiedName);
-        tableReference.set(HiveDataModelGenerator.TABLE_NAME, hiveTable.getTableName().toLowerCase());
+        tableReference.set(AtlasClient.REFERENCEABLE_ATTRIBUTE_NAME, tableQualifiedName);
+        tableReference.set(HiveDataModelGenerator.NAME, hiveTable.getTableName().toLowerCase());
         tableReference.set(HiveDataModelGenerator.OWNER, hiveTable.getOwner());

         Date createDate = new Date();
@@ -485,10 +486,8 @@ public class HiveMetaStoreBridge {
     public Referenceable fillHDFSDataSet(String pathUri) {
         Referenceable ref = new Referenceable(FSDataTypes.HDFS_PATH().toString());
         ref.set("path", pathUri);
-        // Path path = new Path(pathUri);
-        // ref.set("name", path.getName());
-        //TODO - Fix after ATLAS-542 to shorter Name
-        ref.set(HiveDataModelGenerator.NAME, pathUri);
+        Path path = new Path(pathUri);
+        ref.set(AtlasClient.NAME, path.getName());
         ref.set(AtlasClient.REFERENCEABLE_ATTRIBUTE_NAME, pathUri);
         return ref;
     }
...
@@ -282,7 +282,7 @@ public class HiveHook extends AtlasHook implements ExecuteWithHookContext {
             messages.add(
                     new HookNotification.EntityDeleteRequest(event.getUser(),
                             HiveDataTypes.HIVE_TABLE.getName(),
-                            HiveDataModelGenerator.NAME,
+                            AtlasClient.REFERENCEABLE_ATTRIBUTE_NAME,
                             tblQualifiedName));
         }
@@ -403,18 +403,19 @@ public class HiveHook extends AtlasHook implements ExecuteWithHookContext {
     }

     private Referenceable replaceTableQFName(HiveMetaStoreBridge dgiBridge, HiveEventContext event, Table oldTable, Table newTable, final Referenceable tableEntity, final String oldTableQFName, final String newTableQFName) throws HiveException {
-        tableEntity.set(HiveDataModelGenerator.NAME, oldTableQFName);
-        tableEntity.set(HiveDataModelGenerator.TABLE_NAME, oldTable.getTableName().toLowerCase());
+        tableEntity.set(HiveDataModelGenerator.NAME, oldTable.getTableName().toLowerCase());
+        tableEntity.set(AtlasClient.REFERENCEABLE_ATTRIBUTE_NAME, oldTableQFName);

         //Replace table entity with new name
         final Referenceable newEntity = new Referenceable(HiveDataTypes.HIVE_TABLE.getName());
-        newEntity.set(HiveDataModelGenerator.NAME, newTableQFName);
-        newEntity.set(HiveDataModelGenerator.TABLE_NAME, newTable.getTableName().toLowerCase());
+        newEntity.set(HiveDataModelGenerator.NAME, newTable.getTableName().toLowerCase());
+        newEntity.set(AtlasClient.REFERENCEABLE_ATTRIBUTE_NAME, newTableQFName);

         ArrayList<String> alias_list = new ArrayList<>();
         alias_list.add(oldTable.getTableName().toLowerCase());
         newEntity.set(HiveDataModelGenerator.TABLE_ALIAS_LIST, alias_list);
         messages.add(new HookNotification.EntityPartialUpdateRequest(event.getUser(),
-                HiveDataTypes.HIVE_TABLE.getName(), HiveDataModelGenerator.NAME,
+                HiveDataTypes.HIVE_TABLE.getName(), AtlasClient.REFERENCEABLE_ATTRIBUTE_NAME,
                 oldTableQFName, newEntity));
         return newEntity;
@@ -724,7 +725,7 @@ public class HiveHook extends AtlasHook implements ExecuteWithHookContext {
         for (Referenceable input : refs) {
             //TODO - Change to qualifiedName later
             buffer.append(":");
-            String dataSetQlfdName = (String) input.get(AtlasClient.NAME);
+            String dataSetQlfdName = (String) input.get(AtlasClient.REFERENCEABLE_ATTRIBUTE_NAME);
             buffer.append(dataSetQlfdName.toLowerCase().replaceAll("/", ""));
         }
     }
...
@@ -244,8 +244,6 @@ public class HiveDataModelGenerator {
     private void createTableClass() throws AtlasException {
         AttributeDefinition[] attributeDefinitions = new AttributeDefinition[]{
-                new AttributeDefinition(TABLE_NAME, DataTypes.STRING_TYPE.getName(), Multiplicity.REQUIRED, false,
-                        null),
                 new AttributeDefinition(DB, HiveDataTypes.HIVE_DB.getName(), Multiplicity.REQUIRED, false, null),
                 new AttributeDefinition(OWNER, DataTypes.STRING_TYPE.getName(), Multiplicity.OPTIONAL, false, null),
                 new AttributeDefinition(CREATE_TIME, DataTypes.DATE_TYPE.getName(), Multiplicity.OPTIONAL, false,
...
@@ -198,8 +198,8 @@ public class HiveHookIT {
         Assert.assertEquals(tableRef.get(HiveDataModelGenerator.TABLE_TYPE_ATTR), TableType.MANAGED_TABLE.name());
         Assert.assertEquals(tableRef.get(HiveDataModelGenerator.COMMENT), "table comment");
         String entityName = HiveMetaStoreBridge.getTableQualifiedName(CLUSTER_NAME, DEFAULT_DB, tableName);
-        Assert.assertEquals(tableRef.get(NAME), entityName);
-        Assert.assertEquals(tableRef.get(NAME), "default." + tableName.toLowerCase() + "@" + CLUSTER_NAME);
+        Assert.assertEquals(tableRef.get(HiveDataModelGenerator.NAME), tableName.toLowerCase());
+        Assert.assertEquals(tableRef.get(AtlasClient.REFERENCEABLE_ATTRIBUTE_NAME), entityName);

         Table t = hiveMetaStoreBridge.hiveClient.getTable(DEFAULT_DB, tableName);
         long createTime = Long.parseLong(t.getMetadata().getProperty(hive_metastoreConstants.DDL_TIME)) * HiveMetaStoreBridge.MILLIS_CONVERT_FACTOR;
@@ -263,7 +263,7 @@ public class HiveHookIT {
         List<Id> tableRef = (List<Id>) processReference.get(attrName);
         for(int i = 0; i < expectedTableNames.length; i++) {
             Referenceable entity = atlasClient.getEntity(tableRef.get(i)._getId());
-            Assert.assertEquals(entity.get(AtlasClient.NAME), expectedTableNames[i]);
+            Assert.assertEquals(entity.get(AtlasClient.REFERENCEABLE_ATTRIBUTE_NAME), expectedTableNames[i]);
         }
     }
@@ -1009,8 +1009,7 @@ public class HiveHookIT {
         Referenceable hdfsPathRef = atlasClient.getEntity(hdfsPathId);
         Assert.assertEquals(hdfsPathRef.get("path"), testPathNormed);
-        Assert.assertEquals(hdfsPathRef.get(NAME), testPathNormed);
-        // Assert.assertEquals(hdfsPathRef.get("name"), new Path(testPath).getName());
+        Assert.assertEquals(hdfsPathRef.get(NAME), new Path(testPathNormed).getName());
         Assert.assertEquals(hdfsPathRef.get(AtlasClient.REFERENCEABLE_ATTRIBUTE_NAME), testPathNormed);
         return hdfsPathRef.getId()._getId();
@@ -1018,7 +1017,7 @@ public class HiveHookIT {
     private String assertHDFSPathIsRegistered(String path) throws Exception {
         LOG.debug("Searching for hdfs path {}", path);
-        return assertEntityIsRegistered(FSDataTypes.HDFS_PATH().toString(), NAME, path, null);
+        return assertEntityIsRegistered(FSDataTypes.HDFS_PATH().toString(), AtlasClient.REFERENCEABLE_ATTRIBUTE_NAME, path, null);
     }

     @Test
@@ -1403,7 +1402,7 @@ public class HiveHookIT {
         if (inputTblName != null) {
             Referenceable inputTableRef = new Referenceable(HiveDataTypes.HIVE_TABLE.name(), new HashMap<String, Object>() {{
-                put(NAME, inputTblName);
+                put(AtlasClient.REFERENCEABLE_ATTRIBUTE_NAME, inputTblName);
             }});
             inputs = new ArrayList<Referenceable>();
             inputs.add(inputTableRef);
@@ -1411,7 +1410,7 @@ public class HiveHookIT {
         List<Referenceable> outputs = null;
         if (outputTblName != null) {
             Referenceable outputTableRef = new Referenceable(HiveDataTypes.HIVE_TABLE.name(), new HashMap<String, Object>() {{
-                put(NAME, outputTblName);
+                put(AtlasClient.REFERENCEABLE_ATTRIBUTE_NAME, outputTblName);
             }});
             outputs = new ArrayList<Referenceable>();
@@ -1448,13 +1447,13 @@ public class HiveHookIT {
     private void assertTableIsNotRegistered(String dbName, String tableName, boolean isTemporaryTable) throws Exception {
         LOG.debug("Searching for table {}.{}", dbName, tableName);
         String tableQualifiedName = HiveMetaStoreBridge.getTableQualifiedName(CLUSTER_NAME, dbName, tableName, isTemporaryTable);
-        assertEntityIsNotRegistered(HiveDataTypes.HIVE_TABLE.getName(), AtlasClient.NAME, tableQualifiedName);
+        assertEntityIsNotRegistered(HiveDataTypes.HIVE_TABLE.getName(), AtlasClient.REFERENCEABLE_ATTRIBUTE_NAME, tableQualifiedName);
     }

     private void assertTableIsNotRegistered(String dbName, String tableName) throws Exception {
         LOG.debug("Searching for table {}.{}", dbName, tableName);
         String tableQualifiedName = HiveMetaStoreBridge.getTableQualifiedName(CLUSTER_NAME, dbName, tableName, false);
-        assertEntityIsNotRegistered(HiveDataTypes.HIVE_TABLE.getName(), AtlasClient.NAME, tableQualifiedName);
+        assertEntityIsNotRegistered(HiveDataTypes.HIVE_TABLE.getName(), AtlasClient.REFERENCEABLE_ATTRIBUTE_NAME, tableQualifiedName);
     }

     private void assertDBIsNotRegistered(String dbName) throws Exception {
@@ -1474,7 +1473,7 @@ public class HiveHookIT {
     private String assertTableIsRegistered(String dbName, String tableName, AssertPredicate assertPredicate, boolean isTemporary) throws Exception {
         LOG.debug("Searching for table {}.{}", dbName, tableName);
         String tableQualifiedName = HiveMetaStoreBridge.getTableQualifiedName(CLUSTER_NAME, dbName, tableName, isTemporary);
-        return assertEntityIsRegistered(HiveDataTypes.HIVE_TABLE.getName(), AtlasClient.NAME, tableQualifiedName,
+        return assertEntityIsRegistered(HiveDataTypes.HIVE_TABLE.getName(), AtlasClient.REFERENCEABLE_ATTRIBUTE_NAME, tableQualifiedName,
                 assertPredicate);
     }
...
@@ -72,9 +72,9 @@ public class SqoopHook extends SqoopJobDataPublisher {
     public Referenceable createHiveTableInstance(String clusterName, Referenceable dbRef,
                                                  String tableName, String dbName) throws Exception {
         Referenceable tableRef = new Referenceable(HiveDataTypes.HIVE_TABLE.getName());
-        tableRef.set(HiveDataModelGenerator.NAME,
+        tableRef.set(AtlasClient.REFERENCEABLE_ATTRIBUTE_NAME,
                 HiveMetaStoreBridge.getTableQualifiedName(clusterName, dbName, tableName));
-        tableRef.set(HiveDataModelGenerator.TABLE_NAME, tableName.toLowerCase());
+        tableRef.set(HiveDataModelGenerator.NAME, tableName.toLowerCase());
         tableRef.set(HiveDataModelGenerator.DB, dbRef);
         return tableRef;
     }
@@ -93,6 +93,7 @@ public class SqoopHook extends SqoopJobDataPublisher {
         String source = table != null ? table : query;
         String name = getSqoopDBStoreName(data);
         storeRef.set(SqoopDataModelGenerator.NAME, name);
+        storeRef.set(AtlasClient.REFERENCEABLE_ATTRIBUTE_NAME, name);
         storeRef.set(SqoopDataModelGenerator.DB_STORE_TYPE, data.getStoreType());
         storeRef.set(SqoopDataModelGenerator.DB_STORE_USAGE, usage);
         storeRef.set(SqoopDataModelGenerator.STORE_URI, data.getUrl());
...
@@ -106,7 +106,7 @@ public class SqoopHookIT {
     private String assertDBStoreIsRegistered(String storeName) throws Exception {
         LOG.debug("Searching for db store {}", storeName);
         String query = String.format(
-                "%s as t where name = '%s'" + " select t",
+                "%s as t where " + AtlasClient.REFERENCEABLE_ATTRIBUTE_NAME + " = '%s'" + " select t",
                 SqoopDataTypes.SQOOP_DBDATASTORE.getName(), storeName);
         return assertEntityIsRegistered(query);
     }
@@ -114,7 +114,7 @@ public class SqoopHookIT {
     private String assertHiveTableIsRegistered(String dbName, String tableName) throws Exception {
         LOG.debug("Searching for table {}.{}", dbName, tableName);
         String query = String.format(
-                "%s as t where tableName = '%s', db where name = '%s' and clusterName = '%s'" + " select t",
+                "%s as t where " + AtlasClient.NAME + " = '%s', db where " + AtlasClient.NAME + " = '%s' and clusterName = '%s'" + " select t",
                 HiveDataTypes.HIVE_TABLE.getName(), tableName.toLowerCase(), dbName.toLowerCase(), CLUSTER_NAME);
         return assertEntityIsRegistered(query);
     }
...
@@ -195,18 +195,19 @@ public class StormAtlasHook extends AtlasHook implements ISubmitterHook {
                 topologyOwner = ANONYMOUS_OWNER;
             }
             dataSetReferenceable.set("owner", topologyOwner);
-            dataSetReferenceable.set("name", getKafkaTopicQualifiedName(getClusterName(stormConf), topicName));
+            dataSetReferenceable.set(AtlasClient.REFERENCEABLE_ATTRIBUTE_NAME, getKafkaTopicQualifiedName(getClusterName(stormConf), topicName));
+            dataSetReferenceable.set(AtlasClient.NAME, topicName);
             break;

         case "HBaseBolt":
             dataSetReferenceable = new Referenceable(StormDataTypes.HBASE_TABLE.getName());
             final String hbaseTableName = config.get("HBaseBolt.tableName");
             dataSetReferenceable.set("uri", stormConf.get("hbase.rootdir"));
-            dataSetReferenceable.set("tableName", hbaseTableName);
+            dataSetReferenceable.set(AtlasClient.NAME, hbaseTableName);
             dataSetReferenceable.set("owner", stormConf.get("storm.kerberos.principal"));
             clusterName = extractComponentClusterName(HBaseConfiguration.create(), stormConf);
             //TODO - Hbase Namespace is hardcoded to 'default'. need to check how to get this or is it already part of tableName
-            dataSetReferenceable.set("name", getHbaseTableQualifiedName(clusterName, HBASE_NAMESPACE_DEFAULT,
+            dataSetReferenceable.set(AtlasClient.REFERENCEABLE_ATTRIBUTE_NAME, getHbaseTableQualifiedName(clusterName, HBASE_NAMESPACE_DEFAULT,
                     hbaseTableName));
             break;
@@ -220,10 +221,8 @@ public class StormAtlasHook extends AtlasHook implements ISubmitterHook {
             dataSetReferenceable.set(AtlasClient.REFERENCEABLE_ATTRIBUTE_NAME, hdfsPathStr);
             dataSetReferenceable.set("path", hdfsPathStr);
             dataSetReferenceable.set("owner", stormConf.get("hdfs.kerberos.principal"));
-            //Fix after ATLAS-542
-            // final Path hdfsPath = new Path(hdfsPathStr);
-            // dataSetReferenceable.set(AtlasClient.NAME, hdfsPath.getName());
-            dataSetReferenceable.set(AtlasClient.NAME, hdfsPathStr);
+            final Path hdfsPath = new Path(hdfsPathStr);
+            dataSetReferenceable.set(AtlasClient.NAME, hdfsPath.getName());
             break;

         case "HiveBolt":
@@ -240,9 +239,9 @@ public class StormAtlasHook extends AtlasHook implements ISubmitterHook {
             dataSetReferenceable = new Referenceable("hive_table");
             final String tableQualifiedName = HiveMetaStoreBridge.getTableQualifiedName(clusterName,
                     databaseName, hiveTableName);
-            dataSetReferenceable.set(HiveDataModelGenerator.NAME, tableQualifiedName);
+            dataSetReferenceable.set(HiveDataModelGenerator.NAME, hiveTableName);
             dataSetReferenceable.set(HiveDataModelGenerator.DB, dbReferenceable);
-            dataSetReferenceable.set(HiveDataModelGenerator.TABLE_NAME, hiveTableName);
+            dataSetReferenceable.set(AtlasClient.REFERENCEABLE_ATTRIBUTE_NAME, tableQualifiedName);
             break;

         default:
@@ -294,7 +293,7 @@ public class StormAtlasHook extends AtlasHook implements ISubmitterHook {
                                            SpoutSpec stormSpout) throws IllegalAccessException {
         Referenceable spoutReferenceable = new Referenceable(
                 StormDataTypes.STORM_SPOUT.getName(), "DataProducer");
-        spoutReferenceable.set("name", spoutName);
+        spoutReferenceable.set(AtlasClient.NAME, spoutName);

         Serializable instance = Utils.javaDeserialize(
                 stormSpout.get_spout_object().get_serialized_java(), Serializable.class);
@@ -319,7 +318,7 @@ public class StormAtlasHook extends AtlasHook implements ISubmitterHook {
         Referenceable boltReferenceable = new Referenceable(
                 StormDataTypes.STORM_BOLT.getName(), "DataProcessor");
-        boltReferenceable.set("name", boltName);
+        boltReferenceable.set(AtlasClient.NAME, boltName);

         Serializable instance = Utils.javaDeserialize(
                 stormBolt.get_bolt_object().get_serialized_java(), Serializable.class);
...
@@ -96,7 +96,6 @@ object StormDataModel extends App {
         // HBase Data Set
         _class(StormDataTypes.HBASE_TABLE.getName, List("DataSet")) {
-            "tableName" ~ (string, required, unique, indexed)
             "uri" ~ (string, required)
             "owner" ~ (string, required, indexed)
         }
...
@@ -14,16 +14,15 @@ hive_type(ClassType) - super types [] - attributes [name, type1, type2, fields]
 hive_storagedesc(ClassType) - super types [Referenceable] - attributes [cols, location, inputFormat, outputFormat, compressed, numBuckets, serdeInfo, bucketCols, sortCols, parameters, storedAsSubDirectories]
 hive_role(ClassType) - super types [] - attributes [roleName, createTime, ownerName]
 hive_column(ClassType) - super types [Referenceable] - attributes [name, type, comment]
-hive_table(ClassType) - super types [DataSet] - attributes [tableName, db, owner, createTime, lastAccessTime, comment, retention, sd, partitionKeys, columns, parameters, viewOriginalText, viewExpandedText, tableType, temporary]
+hive_table(ClassType) - super types [DataSet] - attributes [name, db, owner, createTime, lastAccessTime, comment, retention, sd, partitionKeys, columns, parameters, viewOriginalText, viewExpandedText, tableType, temporary]
 hive_partition(ClassType) - super types [Referenceable] - attributes [values, table, createTime, lastAccessTime, sd, columns, parameters]
 hive_process(ClassType) - super types [Process] - attributes [startTime, endTime, userName, operationType, queryText, queryPlan, queryId, queryGraph]
 </verbatim>

-The entities are created and de-duped using unique qualified name. They provide namespace and can be used for querying/lineage as well. Note that dbName and tableName should be in lower case. clusterName is explained below.
+The entities are created and de-duped using unique qualified name. They provide namespace and can be used for querying/lineage as well. Note that name, dbName and tableName should be in lower case. clusterName is explained below.
    * hive_db - attribute qualifiedName - <dbName>@<clusterName>
-   * hive_table - attribute name - <dbName>.<tableName>@<clusterName>
+   * hive_table - attribute qualifiedName - <dbName>.<name>@<clusterName>
    * hive_column - attribute qualifiedName - <dbName>.<tableName>.<columnName>@<clusterName>
-   * hive_partition - attribute qualifiedName - <dbName>.<tableName>.<partitionValues('-' separated)>@<clusterName>
    * hive_process - attribute name - <queryString> - trimmed query string in lower case
...
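The qualified-name formats documented in the hunk above can be reproduced with plain string formatting. A minimal sketch of the hive_table case, assuming a helper of this shape (the actual bridge code lives in HiveMetaStoreBridge.getTableQualifiedName; the method name and values here are illustrative):

    // Illustrative helper: builds <dbName>.<tableName>@<clusterName>, lower-casing
    // dbName and tableName as the documentation above requires.
    static String tableQualifiedName(String clusterName, String dbName, String tableName) {
        return String.format("%s.%s@%s", dbName.toLowerCase(), tableName.toLowerCase(), clusterName);
    }

    // tableQualifiedName("primary", "Default", "Sales_Fact") -> "default.sales_fact@primary"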
@@ -3,6 +3,7 @@ Apache Atlas Release Notes
 --trunk - unreleased
 INCOMPATIBLE CHANGES:
+ATLAS-542 Make qualifiedName and name consistent across all Datasets and Process (sumasai via yhemanth)
 ATLAS-716 Entity update/delete notifications (shwethags)
 ATLAS-619 Canonicalize hive queries (sumasai)
 ATLAS-497 Simple Authorization (saqeeb.s via yhemanth)
...
@@ -63,7 +63,7 @@ public class DataSetLineageService implements LineageService {
     private static final String DATASET_EXISTS_QUERY = AtlasClient.DATA_SET_SUPER_TYPE + " where __guid = '%s'";
     private static final String DATASET_NAME_EXISTS_QUERY =
-            AtlasClient.DATA_SET_SUPER_TYPE + " where name = '%s' and __state = 'ACTIVE'";
+            AtlasClient.DATA_SET_SUPER_TYPE + " where " + AtlasClient.REFERENCEABLE_ATTRIBUTE_NAME + "='%s' and __state = 'ACTIVE'";
     private static final Configuration propertiesConf;
...
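For illustration, assuming AtlasClient.DATA_SET_SUPER_TYPE resolves to "DataSet" and REFERENCEABLE_ATTRIBUTE_NAME to "qualifiedName", the lookup query above now expands roughly as follows (the qualified-name value is hypothetical):

    // Sketch: what DATASET_NAME_EXISTS_QUERY formats to after this change.
    String query = String.format(DATASET_NAME_EXISTS_QUERY, "default.sales_fact@primary");
    // -> "DataSet where qualifiedName='default.sales_fact@primary' and __state = 'ACTIVE'"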
@@ -165,28 +165,28 @@ public class DefaultMetadataService implements MetadataService, ActiveStateChang
     }

     private static final AttributeDefinition NAME_ATTRIBUTE =
-            TypesUtil.createUniqueRequiredAttrDef("name", DataTypes.STRING_TYPE);
+            TypesUtil.createRequiredAttrDef(AtlasClient.NAME, DataTypes.STRING_TYPE);
     private static final AttributeDefinition DESCRIPTION_ATTRIBUTE =
             TypesUtil.createOptionalAttrDef("description", DataTypes.STRING_TYPE);

     @InterfaceAudience.Private
     private void createSuperTypes() throws AtlasException {
+        HierarchicalTypeDefinition<ClassType> referenceableType = TypesUtil
+                .createClassTypeDef(AtlasClient.REFERENCEABLE_SUPER_TYPE, ImmutableSet.<String>of(),
+                        TypesUtil.createUniqueRequiredAttrDef(AtlasClient.REFERENCEABLE_ATTRIBUTE_NAME,
+                                DataTypes.STRING_TYPE));
+        createType(referenceableType);
+
         HierarchicalTypeDefinition<ClassType> infraType = TypesUtil
-                .createClassTypeDef(AtlasClient.INFRASTRUCTURE_SUPER_TYPE, ImmutableSet.<String>of(), NAME_ATTRIBUTE,
+                .createClassTypeDef(AtlasClient.INFRASTRUCTURE_SUPER_TYPE, ImmutableSet.<String>of(AtlasClient.REFERENCEABLE_SUPER_TYPE), NAME_ATTRIBUTE,
                         DESCRIPTION_ATTRIBUTE);
         createType(infraType);

         HierarchicalTypeDefinition<ClassType> datasetType = TypesUtil
-                .createClassTypeDef(AtlasClient.DATA_SET_SUPER_TYPE, ImmutableSet.<String>of(), NAME_ATTRIBUTE,
+                .createClassTypeDef(AtlasClient.DATA_SET_SUPER_TYPE, ImmutableSet.<String>of(AtlasClient.REFERENCEABLE_SUPER_TYPE), NAME_ATTRIBUTE,
                         DESCRIPTION_ATTRIBUTE);
         createType(datasetType);

-        HierarchicalTypeDefinition<ClassType> referenceableType = TypesUtil
-                .createClassTypeDef(AtlasClient.REFERENCEABLE_SUPER_TYPE, ImmutableSet.<String>of(),
-                        TypesUtil.createUniqueRequiredAttrDef(AtlasClient.REFERENCEABLE_ATTRIBUTE_NAME,
-                                DataTypes.STRING_TYPE));
-        createType(referenceableType);
-
         HierarchicalTypeDefinition<ClassType> processType = TypesUtil
                 .createClassTypeDef(AtlasClient.PROCESS_SUPER_TYPE, ImmutableSet.<String>of(AtlasClient.REFERENCEABLE_SUPER_TYPE),
                         TypesUtil.createRequiredAttrDef(AtlasClient.NAME, DataTypes.STRING_TYPE),
...
@@ -310,6 +310,7 @@ public class BaseRepositoryTest {
                      List<Referenceable> columns, String... traitNames) throws Exception {
         Referenceable referenceable = new Referenceable(HIVE_TABLE_TYPE, traitNames);
         referenceable.set("name", name);
+        referenceable.set(AtlasClient.REFERENCEABLE_ATTRIBUTE_NAME, name);
         referenceable.set("description", description);
         referenceable.set("owner", owner);
         referenceable.set("tableType", tableType);
@@ -332,7 +333,7 @@ public class BaseRepositoryTest {
             throws Exception {
         Referenceable referenceable = new Referenceable(HIVE_PROCESS_TYPE, traitNames);
         referenceable.set("name", name);
-        referenceable.set("qualifiedName", name);
+        referenceable.set(AtlasClient.REFERENCEABLE_ATTRIBUTE_NAME, name);
         referenceable.set("description", description);
         referenceable.set("user", user);
         referenceable.set("startTime", System.currentTimeMillis());
@@ -353,6 +354,7 @@ public class BaseRepositoryTest {
     Id view(String name, Id dbId, List<Id> inputTables, String... traitNames) throws Exception {
         Referenceable referenceable = new Referenceable(VIEW_TYPE, traitNames);
         referenceable.set("name", name);
+        referenceable.set(AtlasClient.REFERENCEABLE_ATTRIBUTE_NAME, name);
         referenceable.set("db", dbId);
         referenceable.set("inputTables", inputTables);
...
@@ -188,8 +188,7 @@ public class QuickStart {
                 attrDef("queryGraph", DataTypes.STRING_TYPE, Multiplicity.REQUIRED));

         HierarchicalTypeDefinition<ClassType> viewClsDef = TypesUtil
-                .createClassTypeDef(VIEW_TYPE, VIEW_TYPE, null,
-                        TypesUtil.createUniqueRequiredAttrDef("name", DataTypes.STRING_TYPE),
+                .createClassTypeDef(VIEW_TYPE, VIEW_TYPE, ImmutableSet.of("DataSet"),
                         new AttributeDefinition("db", DATABASE_TYPE, Multiplicity.REQUIRED, false, null),
                         new AttributeDefinition("inputTables", DataTypes.arrayTypeName(TABLE_TYPE),
                                 Multiplicity.COLLECTION, false, null));
@@ -358,6 +357,7 @@ public class QuickStart {
                      List<Referenceable> columns, String... traitNames) throws Exception {
         Referenceable referenceable = new Referenceable(TABLE_TYPE, traitNames);
         referenceable.set("name", name);
+        referenceable.set(AtlasClient.REFERENCEABLE_ATTRIBUTE_NAME, name);
         referenceable.set("description", description);
         referenceable.set("owner", owner);
         referenceable.set("tableType", tableType);
@@ -397,6 +397,7 @@ public class QuickStart {
     Id view(String name, Id dbId, List<Id> inputTables, String... traitNames) throws Exception {
         Referenceable referenceable = new Referenceable(VIEW_TYPE, traitNames);
         referenceable.set("name", name);
+        referenceable.set(AtlasClient.REFERENCEABLE_ATTRIBUTE_NAME, name);
         referenceable.set("db", dbId);
         referenceable.set(INPUT_TABLES_ATTRIBUTE, inputTables);
...
@@ -543,7 +543,7 @@ public class EntityResource {
     @Path("{guid}/traits")
     @Consumes({Servlets.JSON_MEDIA_TYPE, MediaType.APPLICATION_JSON})
     @Produces(Servlets.JSON_MEDIA_TYPE)
-    public Response addTrait(@Context HttpServletRequest request, @PathParam("guid") String guid) {
+    public Response addTrait(@Context HttpServletRequest request, @PathParam("guid") final String guid) {
         try {
             final String traitDefinition = Servlets.getRequestPayload(request);
             LOG.debug("Adding trait={} for entity={} ", traitDefinition, guid);
...
@@ -69,7 +69,7 @@ public class QuickStartIT extends BaseResourceIT {
     }

     private Referenceable getTable(String tableName) throws AtlasServiceException {
-        return serviceClient.getEntity(QuickStart.TABLE_TYPE, "name", tableName);
+        return serviceClient.getEntity(QuickStart.TABLE_TYPE, AtlasClient.REFERENCEABLE_ATTRIBUTE_NAME, tableName);
     }

     private void verifyTrait(Referenceable table) throws JSONException {
@@ -143,7 +143,7 @@ public class QuickStartIT extends BaseResourceIT {
     @Test
     public void testViewIsAdded() throws AtlasServiceException, JSONException {
-        Referenceable view = serviceClient.getEntity(QuickStart.VIEW_TYPE, AtlasClient.NAME, QuickStart.PRODUCT_DIM_VIEW);
+        Referenceable view = serviceClient.getEntity(QuickStart.VIEW_TYPE, AtlasClient.REFERENCEABLE_ATTRIBUTE_NAME, QuickStart.PRODUCT_DIM_VIEW);

         assertEquals(QuickStart.PRODUCT_DIM_VIEW, view.get(AtlasClient.NAME));
...
@@ -22,6 +22,7 @@ import com.google.common.collect.ImmutableSet;
 import com.google.inject.Inject;
 import com.sun.jersey.api.client.ClientResponse;
 import com.sun.jersey.api.client.WebResource;
+import org.apache.atlas.AtlasClient;
 import org.apache.atlas.notification.entity.EntityNotification;
 import org.apache.atlas.typesystem.IReferenceableInstance;
 import org.apache.atlas.typesystem.IStruct;
@@ -113,10 +114,9 @@ public class EntityNotificationIT extends BaseResourceIT {
         waitForNotification(notificationConsumer, MAX_WAIT_TIME,
                 newNotificationPredicate(EntityNotification.OperationType.ENTITY_CREATE, HIVE_TABLE_TYPE, guid));

-        final String property = "name";
-        final String name = (String) tableInstance.get(property);
-        serviceClient.deleteEntity(HIVE_TABLE_TYPE, property, name);
+        final String name = (String) tableInstance.get(AtlasClient.REFERENCEABLE_ATTRIBUTE_NAME);
+        serviceClient.deleteEntity(HIVE_TABLE_TYPE, AtlasClient.REFERENCEABLE_ATTRIBUTE_NAME, name);

         waitForNotification(notificationConsumer, MAX_WAIT_TIME,
                 newNotificationPredicate(EntityNotification.OperationType.ENTITY_DELETE, HIVE_TABLE_TYPE, guid));
...
@@ -236,6 +236,7 @@ public abstract class BaseResourceIT {
         Referenceable tableInstance =
                 new Referenceable(HIVE_TABLE_TYPE, "classification", "pii", "phi", "pci", "sox", "sec", "finance");
         tableInstance.set("name", tableName);
+        tableInstance.set(AtlasClient.REFERENCEABLE_ATTRIBUTE_NAME, tableName);
         tableInstance.set("db", databaseInstance);
         tableInstance.set("description", "bar table");
         tableInstance.set("lastAccessTime", "2014-07-11T08:00:00.000Z");
...
@@ -82,7 +82,7 @@ public class DataSetLineageJerseyResourceIT extends BaseResourceIT {
     @Test
     public void testInputsGraphForEntity() throws Exception {
-        String tableId = serviceClient.getEntity(HIVE_TABLE_TYPE, "name", salesMonthlyTable).getId()._getId();
+        String tableId = serviceClient.getEntity(HIVE_TABLE_TYPE, AtlasClient.REFERENCEABLE_ATTRIBUTE_NAME, salesMonthlyTable).getId()._getId();
         JSONObject results = serviceClient.getInputGraphForEntity(tableId);
         Assert.assertNotNull(results);
@@ -126,7 +126,7 @@ public class DataSetLineageJerseyResourceIT extends BaseResourceIT {
     @Test
     public void testOutputsGraphForEntity() throws Exception {
-        String tableId = serviceClient.getEntity(HIVE_TABLE_TYPE, "name", salesFactTable).getId()._getId();
+        String tableId = serviceClient.getEntity(HIVE_TABLE_TYPE, AtlasClient.REFERENCEABLE_ATTRIBUTE_NAME, salesFactTable).getId()._getId();
         JSONObject results = serviceClient.getOutputGraphForEntity(tableId);
         Assert.assertNotNull(results);
@@ -172,7 +172,7 @@ public class DataSetLineageJerseyResourceIT extends BaseResourceIT {
     @Test
     public void testSchemaForEntity() throws Exception {
-        String tableId = serviceClient.getEntity(HIVE_TABLE_TYPE, "name", salesFactTable).getId()._getId();
+        String tableId = serviceClient.getEntity(HIVE_TABLE_TYPE, AtlasClient.REFERENCEABLE_ATTRIBUTE_NAME, salesFactTable).getId()._getId();
         JSONObject results = serviceClient.getSchemaForEntity(tableId);
         Assert.assertNotNull(results);
@@ -271,6 +271,7 @@ public class DataSetLineageJerseyResourceIT extends BaseResourceIT {
                     String... traitNames) throws Exception {
         Referenceable referenceable = new Referenceable(HIVE_TABLE_TYPE, traitNames);
         referenceable.set("name", name);
+        referenceable.set(AtlasClient.REFERENCEABLE_ATTRIBUTE_NAME, name);
         referenceable.set("description", description);
         referenceable.set("owner", owner);
         referenceable.set("tableType", tableType);
@@ -288,7 +289,7 @@ public class DataSetLineageJerseyResourceIT extends BaseResourceIT {
                    String queryPlan, String queryId, String queryGraph, String... traitNames) throws Exception {
         Referenceable referenceable = new Referenceable(HIVE_PROCESS_TYPE, traitNames);
         referenceable.set("name", name);
-        referenceable.set("qualifiedName", name);
+        referenceable.set(AtlasClient.REFERENCEABLE_ATTRIBUTE_NAME, name);
         referenceable.set("user", user);
         referenceable.set("startTime", System.currentTimeMillis());
         referenceable.set("endTime", System.currentTimeMillis() + 10000);
...
@@ -205,6 +205,7 @@ public class EntityJerseyResourceIT extends BaseResourceIT {
         final String tableName = randomString();
         table.set("name", tableName);
         table.set("db", db);
+        table.set(AtlasClient.REFERENCEABLE_ATTRIBUTE_NAME, tableName);
         serviceClient.createEntity(table);

         results = serviceClient.searchByDSL(String.format("%s where name='%s'", DATABASE_TYPE, dbName));
@@ -727,7 +728,7 @@ public class EntityJerseyResourceIT extends BaseResourceIT {
         }});

         LOG.debug("Updating entity= " + tableUpdated);
-        entityResult = serviceClient.updateEntity(BaseResourceIT.HIVE_TABLE_TYPE, "name",
+        entityResult = serviceClient.updateEntity(BaseResourceIT.HIVE_TABLE_TYPE, AtlasClient.REFERENCEABLE_ATTRIBUTE_NAME,
                 (String) tableInstance.get("name"), tableUpdated);
         assertEquals(entityResult.getUpdateEntities().size(), 1);
         assertEquals(entityResult.getUpdateEntities().get(0), tableId._getId());
...