Commit e0536224 by Hemanth Yamijala

ATLAS-542 Make qualifiedName and name consistent across all Datasets and Process (sumasai via yhemanth)
parent 0a44790e
......@@ -277,8 +277,8 @@ public class FalconHook extends AtlasHook implements FalconEventPublisher {
Referenceable tableRef = new Referenceable(HiveDataTypes.HIVE_TABLE.getName());
-tableRef.set(HiveDataModelGenerator.NAME,
-        HiveMetaStoreBridge.getTableQualifiedName(clusterName, dbName, tableName));
-tableRef.set(HiveDataModelGenerator.TABLE_NAME, tableName);
+tableRef.set(HiveDataModelGenerator.NAME,
+        tableName);
+tableRef.set(AtlasClient.REFERENCEABLE_ATTRIBUTE_NAME, HiveMetaStoreBridge.getTableQualifiedName(clusterName, dbName, tableName));
tableRef.set(HiveDataModelGenerator.DB, dbRef);
entities.add(tableRef);
......
......@@ -154,12 +154,12 @@ public class FalconHookIT {
Id inId = (Id) ((List)processEntity.get("inputs")).get(0);
Referenceable inEntity = atlasClient.getEntity(inId._getId());
assertEquals(inEntity.get("name"),
assertEquals(inEntity.get(AtlasClient.REFERENCEABLE_ATTRIBUTE_NAME),
HiveMetaStoreBridge.getTableQualifiedName(cluster.getName(), inDbName, inTableName));
Id outId = (Id) ((List)processEntity.get("outputs")).get(0);
Referenceable outEntity = atlasClient.getEntity(outId._getId());
assertEquals(outEntity.get("name"),
assertEquals(outEntity.get(AtlasClient.REFERENCEABLE_ATTRIBUTE_NAME),
HiveMetaStoreBridge.getTableQualifiedName(cluster.getName(), outDbName, outTableName));
}
......@@ -212,7 +212,7 @@ public class FalconHookIT {
Id outId = (Id) ((List)processEntity.get("outputs")).get(0);
Referenceable outEntity = atlasClient.getEntity(outId._getId());
assertEquals(outEntity.get("name"),
assertEquals(outEntity.get(AtlasClient.REFERENCEABLE_ATTRIBUTE_NAME),
HiveMetaStoreBridge.getTableQualifiedName(cluster.getName(), outDbName, outTableName));
}
......
......@@ -37,7 +37,7 @@ object FSDataModel extends App {
val typesDef : TypesDef = types {
// FS DataSet
-_class(FSDataTypes.FS_PATH.toString, List("DataSet", AtlasClient.REFERENCEABLE_SUPER_TYPE)) {
+_class(FSDataTypes.FS_PATH.toString) {
//fully qualified path/URI to the filesystem path is stored in 'qualifiedName' and 'path'.
"path" ~ (string, required, indexed)
"createTime" ~ (date, optional, indexed)
......@@ -63,7 +63,7 @@ object FSDataModel extends App {
}
//HDFS DataSet
-_class(FSDataTypes.HDFS_PATH.toString, List(FSDataTypes.FS_PATH.toString)) {
+_class(FSDataTypes.HDFS_PATH.toString, List("DataSet", FSDataTypes.FS_PATH.toString)) {
//Making cluster optional since path is already unique containing the namenode URI
AtlasConstants.CLUSTER_NAME_ATTRIBUTE ~ (string, optional, indexed)
"numberOfReplicas" ~ (int, optional, indexed)
......
......@@ -35,6 +35,7 @@ import org.apache.atlas.typesystem.persistence.Id;
import org.apache.atlas.utils.AuthenticationUtil;
import org.apache.commons.configuration.Configuration;
import org.apache.commons.lang.RandomStringUtils;
+import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hive.conf.HiveConf;
import org.apache.hadoop.hive.metastore.api.Database;
import org.apache.hadoop.hive.metastore.api.FieldSchema;
......@@ -336,8 +337,8 @@ public class HiveMetaStoreBridge {
}
String tableQualifiedName = getTableQualifiedName(clusterName, hiveTable);
-tableReference.set(HiveDataModelGenerator.NAME, tableQualifiedName);
-tableReference.set(HiveDataModelGenerator.TABLE_NAME, hiveTable.getTableName().toLowerCase());
+tableReference.set(AtlasClient.REFERENCEABLE_ATTRIBUTE_NAME, tableQualifiedName);
+tableReference.set(HiveDataModelGenerator.NAME, hiveTable.getTableName().toLowerCase());
tableReference.set(HiveDataModelGenerator.OWNER, hiveTable.getOwner());
Date createDate = new Date();
......@@ -485,10 +486,8 @@ public class HiveMetaStoreBridge {
public Referenceable fillHDFSDataSet(String pathUri) {
Referenceable ref = new Referenceable(FSDataTypes.HDFS_PATH().toString());
ref.set("path", pathUri);
-// Path path = new Path(pathUri);
-// ref.set("name", path.getName());
-//TODO - Fix after ATLAS-542 to shorter Name
-ref.set(HiveDataModelGenerator.NAME, pathUri);
+Path path = new Path(pathUri);
+ref.set(AtlasClient.NAME, path.getName());
+ref.set(AtlasClient.REFERENCEABLE_ATTRIBUTE_NAME, pathUri);
return ref;
}
......
......@@ -282,7 +282,7 @@ public class HiveHook extends AtlasHook implements ExecuteWithHookContext {
messages.add(
new HookNotification.EntityDeleteRequest(event.getUser(),
HiveDataTypes.HIVE_TABLE.getName(),
-HiveDataModelGenerator.NAME,
+AtlasClient.REFERENCEABLE_ATTRIBUTE_NAME,
tblQualifiedName));
}
......@@ -403,18 +403,19 @@ public class HiveHook extends AtlasHook implements ExecuteWithHookContext {
}
private Referenceable replaceTableQFName(HiveMetaStoreBridge dgiBridge, HiveEventContext event, Table oldTable, Table newTable, final Referenceable tableEntity, final String oldTableQFName, final String newTableQFName) throws HiveException {
-tableEntity.set(HiveDataModelGenerator.NAME, oldTableQFName);
-tableEntity.set(HiveDataModelGenerator.TABLE_NAME, oldTable.getTableName().toLowerCase());
+tableEntity.set(HiveDataModelGenerator.NAME, oldTable.getTableName().toLowerCase());
+tableEntity.set(AtlasClient.REFERENCEABLE_ATTRIBUTE_NAME, oldTableQFName);
//Replace table entity with new name
final Referenceable newEntity = new Referenceable(HiveDataTypes.HIVE_TABLE.getName());
-newEntity.set(HiveDataModelGenerator.NAME, newTableQFName);
-newEntity.set(HiveDataModelGenerator.TABLE_NAME, newTable.getTableName().toLowerCase());
+newEntity.set(HiveDataModelGenerator.NAME, newTable.getTableName().toLowerCase());
+newEntity.set(AtlasClient.REFERENCEABLE_ATTRIBUTE_NAME, newTableQFName);
ArrayList<String> alias_list = new ArrayList<>();
alias_list.add(oldTable.getTableName().toLowerCase());
newEntity.set(HiveDataModelGenerator.TABLE_ALIAS_LIST, alias_list);
messages.add(new HookNotification.EntityPartialUpdateRequest(event.getUser(),
-HiveDataTypes.HIVE_TABLE.getName(), HiveDataModelGenerator.NAME,
+HiveDataTypes.HIVE_TABLE.getName(), AtlasClient.REFERENCEABLE_ATTRIBUTE_NAME,
oldTableQFName, newEntity));
return newEntity;
......@@ -724,7 +725,7 @@ public class HiveHook extends AtlasHook implements ExecuteWithHookContext {
for (Referenceable input : refs) {
//TODO - Change to qualifiedName later
buffer.append(":");
-String dataSetQlfdName = (String) input.get(AtlasClient.NAME);
+String dataSetQlfdName = (String) input.get(AtlasClient.REFERENCEABLE_ATTRIBUTE_NAME);
buffer.append(dataSetQlfdName.toLowerCase().replaceAll("/", ""));
}
}
......
......@@ -244,8 +244,6 @@ public class HiveDataModelGenerator {
private void createTableClass() throws AtlasException {
AttributeDefinition[] attributeDefinitions = new AttributeDefinition[]{
-new AttributeDefinition(TABLE_NAME, DataTypes.STRING_TYPE.getName(), Multiplicity.REQUIRED, false,
-        null),
new AttributeDefinition(DB, HiveDataTypes.HIVE_DB.getName(), Multiplicity.REQUIRED, false, null),
new AttributeDefinition(OWNER, DataTypes.STRING_TYPE.getName(), Multiplicity.OPTIONAL, false, null),
new AttributeDefinition(CREATE_TIME, DataTypes.DATE_TYPE.getName(), Multiplicity.OPTIONAL, false,
......
......@@ -198,8 +198,8 @@ public class HiveHookIT {
Assert.assertEquals(tableRef.get(HiveDataModelGenerator.TABLE_TYPE_ATTR), TableType.MANAGED_TABLE.name());
Assert.assertEquals(tableRef.get(HiveDataModelGenerator.COMMENT), "table comment");
String entityName = HiveMetaStoreBridge.getTableQualifiedName(CLUSTER_NAME, DEFAULT_DB, tableName);
-Assert.assertEquals(tableRef.get(NAME), entityName);
-Assert.assertEquals(tableRef.get(NAME), "default." + tableName.toLowerCase() + "@" + CLUSTER_NAME);
+Assert.assertEquals(tableRef.get(HiveDataModelGenerator.NAME), tableName.toLowerCase());
+Assert.assertEquals(tableRef.get(AtlasClient.REFERENCEABLE_ATTRIBUTE_NAME), entityName);
Table t = hiveMetaStoreBridge.hiveClient.getTable(DEFAULT_DB, tableName);
long createTime = Long.parseLong(t.getMetadata().getProperty(hive_metastoreConstants.DDL_TIME)) * HiveMetaStoreBridge.MILLIS_CONVERT_FACTOR;
......@@ -263,7 +263,7 @@ public class HiveHookIT {
List<Id> tableRef = (List<Id>) processReference.get(attrName);
for(int i = 0; i < expectedTableNames.length; i++) {
Referenceable entity = atlasClient.getEntity(tableRef.get(i)._getId());
-Assert.assertEquals(entity.get(AtlasClient.NAME), expectedTableNames[i]);
+Assert.assertEquals(entity.get(AtlasClient.REFERENCEABLE_ATTRIBUTE_NAME), expectedTableNames[i]);
}
}
......@@ -1009,8 +1009,7 @@ public class HiveHookIT {
Referenceable hdfsPathRef = atlasClient.getEntity(hdfsPathId);
Assert.assertEquals(hdfsPathRef.get("path"), testPathNormed);
-Assert.assertEquals(hdfsPathRef.get(NAME), testPathNormed);
-// Assert.assertEquals(hdfsPathRef.get("name"), new Path(testPath).getName());
+Assert.assertEquals(hdfsPathRef.get(NAME), new Path(testPathNormed).getName());
+Assert.assertEquals(hdfsPathRef.get(AtlasClient.REFERENCEABLE_ATTRIBUTE_NAME), testPathNormed);
return hdfsPathRef.getId()._getId();
......@@ -1018,7 +1017,7 @@ public class HiveHookIT {
private String assertHDFSPathIsRegistered(String path) throws Exception {
LOG.debug("Searching for hdfs path {}", path);
-return assertEntityIsRegistered(FSDataTypes.HDFS_PATH().toString(), NAME, path, null);
+return assertEntityIsRegistered(FSDataTypes.HDFS_PATH().toString(), AtlasClient.REFERENCEABLE_ATTRIBUTE_NAME, path, null);
}
@Test
......@@ -1403,7 +1402,7 @@ public class HiveHookIT {
if (inputTblName != null) {
Referenceable inputTableRef = new Referenceable(HiveDataTypes.HIVE_TABLE.name(), new HashMap<String, Object>() {{
-put(NAME, inputTblName);
+put(AtlasClient.REFERENCEABLE_ATTRIBUTE_NAME, inputTblName);
}});
inputs = new ArrayList<Referenceable>();
inputs.add(inputTableRef);
......@@ -1411,7 +1410,7 @@ public class HiveHookIT {
List<Referenceable> outputs = null;
if (outputTblName != null) {
Referenceable outputTableRef = new Referenceable(HiveDataTypes.HIVE_TABLE.name(), new HashMap<String, Object>() {{
-put(NAME, outputTblName);
+put(AtlasClient.REFERENCEABLE_ATTRIBUTE_NAME, outputTblName);
}});
outputs = new ArrayList<Referenceable>();
......@@ -1448,13 +1447,13 @@ public class HiveHookIT {
private void assertTableIsNotRegistered(String dbName, String tableName, boolean isTemporaryTable) throws Exception {
LOG.debug("Searching for table {}.{}", dbName, tableName);
String tableQualifiedName = HiveMetaStoreBridge.getTableQualifiedName(CLUSTER_NAME, dbName, tableName, isTemporaryTable);
-assertEntityIsNotRegistered(HiveDataTypes.HIVE_TABLE.getName(), AtlasClient.NAME, tableQualifiedName);
+assertEntityIsNotRegistered(HiveDataTypes.HIVE_TABLE.getName(), AtlasClient.REFERENCEABLE_ATTRIBUTE_NAME, tableQualifiedName);
}
private void assertTableIsNotRegistered(String dbName, String tableName) throws Exception {
LOG.debug("Searching for table {}.{}", dbName, tableName);
String tableQualifiedName = HiveMetaStoreBridge.getTableQualifiedName(CLUSTER_NAME, dbName, tableName, false);
-assertEntityIsNotRegistered(HiveDataTypes.HIVE_TABLE.getName(), AtlasClient.NAME, tableQualifiedName);
+assertEntityIsNotRegistered(HiveDataTypes.HIVE_TABLE.getName(), AtlasClient.REFERENCEABLE_ATTRIBUTE_NAME, tableQualifiedName);
}
private void assertDBIsNotRegistered(String dbName) throws Exception {
......@@ -1474,7 +1473,7 @@ public class HiveHookIT {
private String assertTableIsRegistered(String dbName, String tableName, AssertPredicate assertPredicate, boolean isTemporary) throws Exception {
LOG.debug("Searching for table {}.{}", dbName, tableName);
String tableQualifiedName = HiveMetaStoreBridge.getTableQualifiedName(CLUSTER_NAME, dbName, tableName, isTemporary);
-return assertEntityIsRegistered(HiveDataTypes.HIVE_TABLE.getName(), AtlasClient.NAME, tableQualifiedName,
+return assertEntityIsRegistered(HiveDataTypes.HIVE_TABLE.getName(), AtlasClient.REFERENCEABLE_ATTRIBUTE_NAME, tableQualifiedName,
assertPredicate);
}
......
......@@ -72,9 +72,9 @@ public class SqoopHook extends SqoopJobDataPublisher {
public Referenceable createHiveTableInstance(String clusterName, Referenceable dbRef,
String tableName, String dbName) throws Exception {
Referenceable tableRef = new Referenceable(HiveDataTypes.HIVE_TABLE.getName());
-tableRef.set(HiveDataModelGenerator.NAME,
+tableRef.set(AtlasClient.REFERENCEABLE_ATTRIBUTE_NAME,
HiveMetaStoreBridge.getTableQualifiedName(clusterName, dbName, tableName));
-tableRef.set(HiveDataModelGenerator.TABLE_NAME, tableName.toLowerCase());
+tableRef.set(HiveDataModelGenerator.NAME, tableName.toLowerCase());
tableRef.set(HiveDataModelGenerator.DB, dbRef);
return tableRef;
}
......@@ -93,6 +93,7 @@ public class SqoopHook extends SqoopJobDataPublisher {
String source = table != null ? table : query;
String name = getSqoopDBStoreName(data);
storeRef.set(SqoopDataModelGenerator.NAME, name);
+storeRef.set(AtlasClient.REFERENCEABLE_ATTRIBUTE_NAME, name);
storeRef.set(SqoopDataModelGenerator.DB_STORE_TYPE, data.getStoreType());
storeRef.set(SqoopDataModelGenerator.DB_STORE_USAGE, usage);
storeRef.set(SqoopDataModelGenerator.STORE_URI, data.getUrl());
......
......@@ -106,7 +106,7 @@ public class SqoopHookIT {
private String assertDBStoreIsRegistered(String storeName) throws Exception {
LOG.debug("Searching for db store {}", storeName);
String query = String.format(
"%s as t where name = '%s'" + " select t",
"%s as t where " + AtlasClient.REFERENCEABLE_ATTRIBUTE_NAME + " = '%s'" + " select t",
SqoopDataTypes.SQOOP_DBDATASTORE.getName(), storeName);
return assertEntityIsRegistered(query);
}
......@@ -114,7 +114,7 @@ public class SqoopHookIT {
private String assertHiveTableIsRegistered(String dbName, String tableName) throws Exception {
LOG.debug("Searching for table {}.{}", dbName, tableName);
String query = String.format(
"%s as t where tableName = '%s', db where name = '%s' and clusterName = '%s'" + " select t",
"%s as t where " + AtlasClient.NAME + " = '%s', db where " + AtlasClient.NAME + " = '%s' and clusterName = '%s'" + " select t",
HiveDataTypes.HIVE_TABLE.getName(), tableName.toLowerCase(), dbName.toLowerCase(), CLUSTER_NAME);
return assertEntityIsRegistered(query);
}
......
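For reference, with the constants expanded (the diff elsewhere in this commit shows AtlasClient.REFERENCEABLE_ATTRIBUTE_NAME resolving to "qualifiedName" and AtlasClient.NAME to "name"), the two test queries above format to DSL strings of roughly this shape; the angle-bracket placeholders stand for the test arguments, and the sqoop_dbdatastore type name is what SqoopDataTypes.SQOOP_DBDATASTORE.getName() is assumed to return:

    sqoop_dbdatastore as t where qualifiedName = '<storeName>' select t
    hive_table as t where name = '<tableName>', db where name = '<dbName>' and clusterName = '<clusterName>' select t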
......@@ -195,18 +195,19 @@ public class StormAtlasHook extends AtlasHook implements ISubmitterHook {
topologyOwner = ANONYMOUS_OWNER;
}
dataSetReferenceable.set("owner", topologyOwner);
dataSetReferenceable.set("name", getKafkaTopicQualifiedName(getClusterName(stormConf), topicName));
dataSetReferenceable.set(AtlasClient.REFERENCEABLE_ATTRIBUTE_NAME, getKafkaTopicQualifiedName(getClusterName(stormConf), topicName));
dataSetReferenceable.set(AtlasClient.NAME, topicName);
break;
case "HBaseBolt":
dataSetReferenceable = new Referenceable(StormDataTypes.HBASE_TABLE.getName());
final String hbaseTableName = config.get("HBaseBolt.tableName");
dataSetReferenceable.set("uri", stormConf.get("hbase.rootdir"));
dataSetReferenceable.set("tableName", hbaseTableName);
dataSetReferenceable.set(AtlasClient.NAME, hbaseTableName);
dataSetReferenceable.set("owner", stormConf.get("storm.kerberos.principal"));
clusterName = extractComponentClusterName(HBaseConfiguration.create(), stormConf);
//TODO - Hbase Namespace is hardcoded to 'default'. need to check how to get this or is it already part of tableName
dataSetReferenceable.set("name", getHbaseTableQualifiedName(clusterName, HBASE_NAMESPACE_DEFAULT,
dataSetReferenceable.set(AtlasClient.REFERENCEABLE_ATTRIBUTE_NAME, getHbaseTableQualifiedName(clusterName, HBASE_NAMESPACE_DEFAULT,
hbaseTableName));
break;
......@@ -220,10 +221,8 @@ public class StormAtlasHook extends AtlasHook implements ISubmitterHook {
dataSetReferenceable.set(AtlasClient.REFERENCEABLE_ATTRIBUTE_NAME, hdfsPathStr);
dataSetReferenceable.set("path", hdfsPathStr);
dataSetReferenceable.set("owner", stormConf.get("hdfs.kerberos.principal"));
-//Fix after ATLAS-542
-// final Path hdfsPath = new Path(hdfsPathStr);
-// dataSetReferenceable.set(AtlasClient.NAME, hdfsPath.getName());
-dataSetReferenceable.set(AtlasClient.NAME, hdfsPathStr);
+final Path hdfsPath = new Path(hdfsPathStr);
+dataSetReferenceable.set(AtlasClient.NAME, hdfsPath.getName());
break;
case "HiveBolt":
......@@ -240,9 +239,9 @@ public class StormAtlasHook extends AtlasHook implements ISubmitterHook {
dataSetReferenceable = new Referenceable("hive_table");
final String tableQualifiedName = HiveMetaStoreBridge.getTableQualifiedName(clusterName,
databaseName, hiveTableName);
-dataSetReferenceable.set(HiveDataModelGenerator.NAME, tableQualifiedName);
+dataSetReferenceable.set(HiveDataModelGenerator.NAME, hiveTableName);
dataSetReferenceable.set(HiveDataModelGenerator.DB, dbReferenceable);
-dataSetReferenceable.set(HiveDataModelGenerator.TABLE_NAME, hiveTableName);
+dataSetReferenceable.set(AtlasClient.REFERENCEABLE_ATTRIBUTE_NAME, tableQualifiedName);
break;
default:
......@@ -294,7 +293,7 @@ public class StormAtlasHook extends AtlasHook implements ISubmitterHook {
SpoutSpec stormSpout) throws IllegalAccessException {
Referenceable spoutReferenceable = new Referenceable(
StormDataTypes.STORM_SPOUT.getName(), "DataProducer");
spoutReferenceable.set("name", spoutName);
spoutReferenceable.set(AtlasClient.NAME, spoutName);
Serializable instance = Utils.javaDeserialize(
stormSpout.get_spout_object().get_serialized_java(), Serializable.class);
......@@ -319,7 +318,7 @@ public class StormAtlasHook extends AtlasHook implements ISubmitterHook {
Referenceable boltReferenceable = new Referenceable(
StormDataTypes.STORM_BOLT.getName(), "DataProcessor");
boltReferenceable.set("name", boltName);
boltReferenceable.set(AtlasClient.NAME, boltName);
Serializable instance = Utils.javaDeserialize(
stormBolt.get_bolt_object().get_serialized_java(), Serializable.class);
......
......@@ -96,7 +96,6 @@ object StormDataModel extends App {
// HBase Data Set
_class(StormDataTypes.HBASE_TABLE.getName, List("DataSet")) {
"tableName" ~ (string, required, unique, indexed)
"uri" ~ (string, required)
"owner" ~ (string, required, indexed)
}
......
......@@ -14,16 +14,15 @@ hive_type(ClassType) - super types [] - attributes [name, type1, type2, fields]
hive_storagedesc(ClassType) - super types [Referenceable] - attributes [cols, location, inputFormat, outputFormat, compressed, numBuckets, serdeInfo, bucketCols, sortCols, parameters, storedAsSubDirectories]
hive_role(ClassType) - super types [] - attributes [roleName, createTime, ownerName]
hive_column(ClassType) - super types [Referenceable] - attributes [name, type, comment]
-hive_table(ClassType) - super types [DataSet] - attributes [tableName, db, owner, createTime, lastAccessTime, comment, retention, sd, partitionKeys, columns, parameters, viewOriginalText, viewExpandedText, tableType, temporary]
+hive_table(ClassType) - super types [DataSet] - attributes [name, db, owner, createTime, lastAccessTime, comment, retention, sd, partitionKeys, columns, parameters, viewOriginalText, viewExpandedText, tableType, temporary]
hive_partition(ClassType) - super types [Referenceable] - attributes [values, table, createTime, lastAccessTime, sd, columns, parameters]
hive_process(ClassType) - super types [Process] - attributes [startTime, endTime, userName, operationType, queryText, queryPlan, queryId, queryGraph]
</verbatim>
-The entities are created and de-duped using unique qualified name. They provide namespace and can be used for querying/lineage as well. Note that dbName and tableName should be in lower case. clusterName is explained below.
+The entities are created and de-duped using unique qualified name. They provide namespace and can be used for querying/lineage as well. Note that name, dbName and tableName should be in lower case. clusterName is explained below.
* hive_db - attribute qualifiedName - <dbName>@<clusterName>
-   * hive_table - attribute name - <dbName>.<tableName>@<clusterName>
+   * hive_table - attribute qualifiedName - <dbName>.<name>@<clusterName>
* hive_column - attribute qualifiedName - <dbName>.<tableName>.<columnName>@<clusterName>
* hive_partition - attribute qualifiedName - <dbName>.<tableName>.<partitionValues('-' separated)>@<clusterName>
* hive_process - attribute name - <queryString> - trimmed query string in lower case
......
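To make the convention above concrete: a hive_table qualifiedName is assembled as <dbName>.<tableName>@<clusterName>, with the database and table parts lower-cased. A minimal, self-contained sketch of that composition (the QualifiedNames class below is hypothetical; in the patch this logic lives in HiveMetaStoreBridge.getTableQualifiedName):

    public class QualifiedNames {
        // <dbName>.<tableName>@<clusterName>, lower-cased per the note above
        static String tableQualifiedName(String clusterName, String dbName, String tableName) {
            return String.format("%s.%s@%s", dbName.toLowerCase(), tableName.toLowerCase(), clusterName);
        }

        public static void main(String[] args) {
            // prints "default.sales_fact@primary"
            System.out.println(tableQualifiedName("primary", "default", "sales_fact"));
        }
    }

Because this string is unique across clusters, Atlas can de-dupe entities on qualifiedName while the plain name stays a short, human-readable label.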
......@@ -3,6 +3,7 @@ Apache Atlas Release Notes
--trunk - unreleased
INCOMPATIBLE CHANGES:
+ATLAS-542 Make qualifiedName and name consistent across all Datasets and Process (sumasai via yhemanth)
ATLAS-716 Entity update/delete notifications (shwethags)
ATLAS-619 Canonicalize hive queries (sumasai)
ATLAS-497 Simple Authorization (saqeeb.s via yhemanth)
......
......@@ -63,7 +63,7 @@ public class DataSetLineageService implements LineageService {
private static final String DATASET_EXISTS_QUERY = AtlasClient.DATA_SET_SUPER_TYPE + " where __guid = '%s'";
private static final String DATASET_NAME_EXISTS_QUERY =
AtlasClient.DATA_SET_SUPER_TYPE + " where name = '%s' and __state = 'ACTIVE'";
AtlasClient.DATA_SET_SUPER_TYPE + " where " + AtlasClient.REFERENCEABLE_ATTRIBUTE_NAME + "='%s' and __state = 'ACTIVE'";
private static final Configuration propertiesConf;
......
......@@ -165,28 +165,28 @@ public class DefaultMetadataService implements MetadataService, ActiveStateChang
}
private static final AttributeDefinition NAME_ATTRIBUTE =
TypesUtil.createUniqueRequiredAttrDef("name", DataTypes.STRING_TYPE);
TypesUtil.createRequiredAttrDef(AtlasClient.NAME, DataTypes.STRING_TYPE);
private static final AttributeDefinition DESCRIPTION_ATTRIBUTE =
TypesUtil.createOptionalAttrDef("description", DataTypes.STRING_TYPE);
@InterfaceAudience.Private
private void createSuperTypes() throws AtlasException {
+HierarchicalTypeDefinition<ClassType> referenceableType = TypesUtil
+        .createClassTypeDef(AtlasClient.REFERENCEABLE_SUPER_TYPE, ImmutableSet.<String>of(),
+                TypesUtil.createUniqueRequiredAttrDef(AtlasClient.REFERENCEABLE_ATTRIBUTE_NAME,
+                        DataTypes.STRING_TYPE));
+createType(referenceableType);
HierarchicalTypeDefinition<ClassType> infraType = TypesUtil
-.createClassTypeDef(AtlasClient.INFRASTRUCTURE_SUPER_TYPE, ImmutableSet.<String>of(), NAME_ATTRIBUTE,
+.createClassTypeDef(AtlasClient.INFRASTRUCTURE_SUPER_TYPE, ImmutableSet.<String>of(AtlasClient.REFERENCEABLE_SUPER_TYPE), NAME_ATTRIBUTE,
DESCRIPTION_ATTRIBUTE);
createType(infraType);
HierarchicalTypeDefinition<ClassType> datasetType = TypesUtil
-.createClassTypeDef(AtlasClient.DATA_SET_SUPER_TYPE, ImmutableSet.<String>of(), NAME_ATTRIBUTE,
+.createClassTypeDef(AtlasClient.DATA_SET_SUPER_TYPE, ImmutableSet.<String>of(AtlasClient.REFERENCEABLE_SUPER_TYPE), NAME_ATTRIBUTE,
DESCRIPTION_ATTRIBUTE);
createType(datasetType);
-HierarchicalTypeDefinition<ClassType> referenceableType = TypesUtil
-        .createClassTypeDef(AtlasClient.REFERENCEABLE_SUPER_TYPE, ImmutableSet.<String>of(),
-                TypesUtil.createUniqueRequiredAttrDef(AtlasClient.REFERENCEABLE_ATTRIBUTE_NAME,
-                        DataTypes.STRING_TYPE));
-createType(referenceableType);
HierarchicalTypeDefinition<ClassType> processType = TypesUtil
.createClassTypeDef(AtlasClient.PROCESS_SUPER_TYPE, ImmutableSet.<String>of(AtlasClient.REFERENCEABLE_SUPER_TYPE),
TypesUtil.createRequiredAttrDef(AtlasClient.NAME, DataTypes.STRING_TYPE),
......
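The net effect of this reordering: Referenceable, which carries the unique qualifiedName attribute, is now registered first and added as a supertype of Infrastructure and DataSet (Process already extended it), while name drops to a plain required, non-unique attribute. A hedged sketch of what any hook now sets on a DataSet entity, using only the constants and types visible in this diff (the attribute values are invented for illustration):

    import org.apache.atlas.AtlasClient;
    import org.apache.atlas.typesystem.Referenceable;

    public class DataSetAttributesSketch {
        public static void main(String[] args) {
            Referenceable table = new Referenceable("hive_table");
            // short display name: required on every DataSet, no longer unique
            table.set(AtlasClient.NAME, "sales_fact");
            // qualifiedName: inherited from Referenceable, unique, drives de-duplication
            table.set(AtlasClient.REFERENCEABLE_ATTRIBUTE_NAME, "default.sales_fact@primary");
            System.out.println(table);
        }
    }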
......@@ -310,6 +310,7 @@ public class BaseRepositoryTest {
List<Referenceable> columns, String... traitNames) throws Exception {
Referenceable referenceable = new Referenceable(HIVE_TABLE_TYPE, traitNames);
referenceable.set("name", name);
+referenceable.set(AtlasClient.REFERENCEABLE_ATTRIBUTE_NAME, name);
referenceable.set("description", description);
referenceable.set("owner", owner);
referenceable.set("tableType", tableType);
......@@ -332,7 +333,7 @@ public class BaseRepositoryTest {
throws Exception {
Referenceable referenceable = new Referenceable(HIVE_PROCESS_TYPE, traitNames);
referenceable.set("name", name);
referenceable.set("qualifiedName", name);
referenceable.set(AtlasClient.REFERENCEABLE_ATTRIBUTE_NAME, name);
referenceable.set("description", description);
referenceable.set("user", user);
referenceable.set("startTime", System.currentTimeMillis());
......@@ -353,6 +354,7 @@ public class BaseRepositoryTest {
Id view(String name, Id dbId, List<Id> inputTables, String... traitNames) throws Exception {
Referenceable referenceable = new Referenceable(VIEW_TYPE, traitNames);
referenceable.set("name", name);
+referenceable.set(AtlasClient.REFERENCEABLE_ATTRIBUTE_NAME, name);
referenceable.set("db", dbId);
referenceable.set("inputTables", inputTables);
......
......@@ -188,8 +188,7 @@ public class QuickStart {
attrDef("queryGraph", DataTypes.STRING_TYPE, Multiplicity.REQUIRED));
HierarchicalTypeDefinition<ClassType> viewClsDef = TypesUtil
-.createClassTypeDef(VIEW_TYPE, VIEW_TYPE, null,
-        TypesUtil.createUniqueRequiredAttrDef("name", DataTypes.STRING_TYPE),
+.createClassTypeDef(VIEW_TYPE, VIEW_TYPE, ImmutableSet.of("DataSet"),
new AttributeDefinition("db", DATABASE_TYPE, Multiplicity.REQUIRED, false, null),
new AttributeDefinition("inputTables", DataTypes.arrayTypeName(TABLE_TYPE),
Multiplicity.COLLECTION, false, null));
......@@ -358,6 +357,7 @@ public class QuickStart {
List<Referenceable> columns, String... traitNames) throws Exception {
Referenceable referenceable = new Referenceable(TABLE_TYPE, traitNames);
referenceable.set("name", name);
+referenceable.set(AtlasClient.REFERENCEABLE_ATTRIBUTE_NAME, name);
referenceable.set("description", description);
referenceable.set("owner", owner);
referenceable.set("tableType", tableType);
......@@ -397,6 +397,7 @@ public class QuickStart {
Id view(String name, Id dbId, List<Id> inputTables, String... traitNames) throws Exception {
Referenceable referenceable = new Referenceable(VIEW_TYPE, traitNames);
referenceable.set("name", name);
+referenceable.set(AtlasClient.REFERENCEABLE_ATTRIBUTE_NAME, name);
referenceable.set("db", dbId);
referenceable.set(INPUT_TABLES_ATTRIBUTE, inputTables);
......
......@@ -543,7 +543,7 @@ public class EntityResource {
@Path("{guid}/traits")
@Consumes({Servlets.JSON_MEDIA_TYPE, MediaType.APPLICATION_JSON})
@Produces(Servlets.JSON_MEDIA_TYPE)
public Response addTrait(@Context HttpServletRequest request, @PathParam("guid") String guid) {
public Response addTrait(@Context HttpServletRequest request, @PathParam("guid") final String guid) {
try {
final String traitDefinition = Servlets.getRequestPayload(request);
LOG.debug("Adding trait={} for entity={} ", traitDefinition, guid);
......
......@@ -69,7 +69,7 @@ public class QuickStartIT extends BaseResourceIT {
}
private Referenceable getTable(String tableName) throws AtlasServiceException {
return serviceClient.getEntity(QuickStart.TABLE_TYPE, "name", tableName);
return serviceClient.getEntity(QuickStart.TABLE_TYPE, AtlasClient.REFERENCEABLE_ATTRIBUTE_NAME, tableName);
}
private void verifyTrait(Referenceable table) throws JSONException {
......@@ -143,7 +143,7 @@ public class QuickStartIT extends BaseResourceIT {
@Test
public void testViewIsAdded() throws AtlasServiceException, JSONException {
-Referenceable view = serviceClient.getEntity(QuickStart.VIEW_TYPE, AtlasClient.NAME, QuickStart.PRODUCT_DIM_VIEW);
+Referenceable view = serviceClient.getEntity(QuickStart.VIEW_TYPE, AtlasClient.REFERENCEABLE_ATTRIBUTE_NAME, QuickStart.PRODUCT_DIM_VIEW);
assertEquals(QuickStart.PRODUCT_DIM_VIEW, view.get(AtlasClient.NAME));
......
......@@ -22,6 +22,7 @@ import com.google.common.collect.ImmutableSet;
import com.google.inject.Inject;
import com.sun.jersey.api.client.ClientResponse;
import com.sun.jersey.api.client.WebResource;
+import org.apache.atlas.AtlasClient;
import org.apache.atlas.notification.entity.EntityNotification;
import org.apache.atlas.typesystem.IReferenceableInstance;
import org.apache.atlas.typesystem.IStruct;
......@@ -113,10 +114,9 @@ public class EntityNotificationIT extends BaseResourceIT {
waitForNotification(notificationConsumer, MAX_WAIT_TIME,
newNotificationPredicate(EntityNotification.OperationType.ENTITY_CREATE, HIVE_TABLE_TYPE, guid));
final String property = "name";
final String name = (String) tableInstance.get(property);
final String name = (String) tableInstance.get(AtlasClient.REFERENCEABLE_ATTRIBUTE_NAME);
serviceClient.deleteEntity(HIVE_TABLE_TYPE, property, name);
serviceClient.deleteEntity(HIVE_TABLE_TYPE, AtlasClient.REFERENCEABLE_ATTRIBUTE_NAME, name);
waitForNotification(notificationConsumer, MAX_WAIT_TIME,
newNotificationPredicate(EntityNotification.OperationType.ENTITY_DELETE, HIVE_TABLE_TYPE, guid));
......
......@@ -236,6 +236,7 @@ public abstract class BaseResourceIT {
Referenceable tableInstance =
new Referenceable(HIVE_TABLE_TYPE, "classification", "pii", "phi", "pci", "sox", "sec", "finance");
tableInstance.set("name", tableName);
+tableInstance.set(AtlasClient.REFERENCEABLE_ATTRIBUTE_NAME, tableName);
tableInstance.set("db", databaseInstance);
tableInstance.set("description", "bar table");
tableInstance.set("lastAccessTime", "2014-07-11T08:00:00.000Z");
......
......@@ -82,7 +82,7 @@ public class DataSetLineageJerseyResourceIT extends BaseResourceIT {
@Test
public void testInputsGraphForEntity() throws Exception {
String tableId = serviceClient.getEntity(HIVE_TABLE_TYPE, "name", salesMonthlyTable).getId()._getId();
String tableId = serviceClient.getEntity(HIVE_TABLE_TYPE, AtlasClient.REFERENCEABLE_ATTRIBUTE_NAME, salesMonthlyTable).getId()._getId();
JSONObject results = serviceClient.getInputGraphForEntity(tableId);
Assert.assertNotNull(results);
......@@ -126,7 +126,7 @@ public class DataSetLineageJerseyResourceIT extends BaseResourceIT {
@Test
public void testOutputsGraphForEntity() throws Exception {
String tableId = serviceClient.getEntity(HIVE_TABLE_TYPE, "name", salesFactTable).getId()._getId();
String tableId = serviceClient.getEntity(HIVE_TABLE_TYPE, AtlasClient.REFERENCEABLE_ATTRIBUTE_NAME, salesFactTable).getId()._getId();
JSONObject results = serviceClient.getOutputGraphForEntity(tableId);
Assert.assertNotNull(results);
......@@ -172,7 +172,7 @@ public class DataSetLineageJerseyResourceIT extends BaseResourceIT {
@Test
public void testSchemaForEntity() throws Exception {
String tableId = serviceClient.getEntity(HIVE_TABLE_TYPE, "name", salesFactTable).getId()._getId();
String tableId = serviceClient.getEntity(HIVE_TABLE_TYPE, AtlasClient.REFERENCEABLE_ATTRIBUTE_NAME, salesFactTable).getId()._getId();
JSONObject results = serviceClient.getSchemaForEntity(tableId);
Assert.assertNotNull(results);
......@@ -271,6 +271,7 @@ public class DataSetLineageJerseyResourceIT extends BaseResourceIT {
String... traitNames) throws Exception {
Referenceable referenceable = new Referenceable(HIVE_TABLE_TYPE, traitNames);
referenceable.set("name", name);
+referenceable.set(AtlasClient.REFERENCEABLE_ATTRIBUTE_NAME, name);
referenceable.set("description", description);
referenceable.set("owner", owner);
referenceable.set("tableType", tableType);
......@@ -288,7 +289,7 @@ public class DataSetLineageJerseyResourceIT extends BaseResourceIT {
String queryPlan, String queryId, String queryGraph, String... traitNames) throws Exception {
Referenceable referenceable = new Referenceable(HIVE_PROCESS_TYPE, traitNames);
referenceable.set("name", name);
referenceable.set("qualifiedName", name);
referenceable.set(AtlasClient.REFERENCEABLE_ATTRIBUTE_NAME, name);
referenceable.set("user", user);
referenceable.set("startTime", System.currentTimeMillis());
referenceable.set("endTime", System.currentTimeMillis() + 10000);
......
......@@ -205,6 +205,7 @@ public class EntityJerseyResourceIT extends BaseResourceIT {
final String tableName = randomString();
table.set("name", tableName);
table.set("db", db);
+table.set(AtlasClient.REFERENCEABLE_ATTRIBUTE_NAME, tableName);
serviceClient.createEntity(table);
results = serviceClient.searchByDSL(String.format("%s where name='%s'", DATABASE_TYPE, dbName));
......@@ -727,7 +728,7 @@ public class EntityJerseyResourceIT extends BaseResourceIT {
}});
LOG.debug("Updating entity= " + tableUpdated);
entityResult = serviceClient.updateEntity(BaseResourceIT.HIVE_TABLE_TYPE, "name",
entityResult = serviceClient.updateEntity(BaseResourceIT.HIVE_TABLE_TYPE, AtlasClient.REFERENCEABLE_ATTRIBUTE_NAME,
(String) tableInstance.get("name"), tableUpdated);
assertEquals(entityResult.getUpdateEntities().size(), 1);
assertEquals(entityResult.getUpdateEntities().get(0), tableId._getId());
......