Commit fef50cee by Shwetha GS

IDE Java code reformat

parent 22624786
@@ -61,14 +61,16 @@ public class FalconImporter {
this.repository = repo;
}
private Entity getEntity(FalconClient client, EntityType type, String name) throws FalconCLIException, JAXBException {
private Entity getEntity(FalconClient client, EntityType type, String name)
throws FalconCLIException, JAXBException {
String entityStr = client.getDefinition(type.name(), name);
return (Entity) type.getUnmarshaller().unmarshal(new StringReader(entityStr));
}
public void importClusters() throws MetadataException {
try {
EntityList clusters = client.getEntityList(EntityType.CLUSTER.name(), null, null, null, null, null, null, null);
EntityList clusters =
client.getEntityList(EntityType.CLUSTER.name(), null, null, null, null, null, null, null);
for (EntityList.EntityElement element : clusters.getElements()) {
Cluster cluster = (Cluster) getEntity(client, EntityType.CLUSTER, element.name);
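For context, getEntity above fetches an entity definition as an XML string and unmarshals it with JAXB. A minimal, self-contained sketch of that unmarshalling pattern, using a hypothetical Cluster class rather than Falcon's:

import java.io.StringReader;
import javax.xml.bind.JAXBContext;
import javax.xml.bind.annotation.XmlRootElement;

public class UnmarshalSketch {

    @XmlRootElement(name = "cluster")
    public static class Cluster {       // hypothetical entity class, not Falcon's
        public String name;
    }

    public static void main(String[] args) throws Exception {
        String xml = "<cluster><name>c1</name></cluster>";
        JAXBContext ctx = JAXBContext.newInstance(Cluster.class);
        // unmarshal the definition string into a typed object, as getEntity does
        Cluster cluster = (Cluster) ctx.createUnmarshaller().unmarshal(new StringReader(xml));
        System.out.println(cluster.name);   // prints c1
    }
}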
@@ -80,7 +82,8 @@ public class FalconImporter {
acl.set("owner", cluster.getACL().getOwner());
acl.set("group", cluster.getACL().getGroup());
acl.set("permission", cluster.getACL().getPermission());
StructType aclType = typeSystem.getDataType(StructType.class, FalconTypeSystem.DefinedTypes.ACL.name());
StructType aclType =
typeSystem.getDataType(StructType.class, FalconTypeSystem.DefinedTypes.ACL.name());
clusterRef.set("acl", aclType.convert(acl, Multiplicity.REQUIRED));
}
@@ -88,7 +91,8 @@ public class FalconImporter {
String[] parts = cluster.getTags().split(",");
List<ITypedInstance> tags = new ArrayList<>();
for (String part : parts) {
TraitType tagType = typeSystem.getDataType(TraitType.class, FalconTypeSystem.DefinedTypes.TAG.name());
TraitType tagType =
typeSystem.getDataType(TraitType.class, FalconTypeSystem.DefinedTypes.TAG.name());
String[] kv = part.trim().split("=");
Struct tag = new Struct(FalconTypeSystem.DefinedTypes.TAG.name());
tag.set("name", kv[0]);
@@ -106,10 +110,12 @@ public class FalconImporter {
List<ITypedInstance> locations = new ArrayList<>();
for (Location loc : cluster.getLocations().getLocations()) {
Struct location = new Struct(FalconTypeSystem.DefinedTypes.CLUSTER_LOCATION.name());
EnumType locationType = typeSystem.getDataType(EnumType.class, FalconTypeSystem.DefinedTypes.CLUSTER_LOCATION_TYPE.name());
EnumType locationType = typeSystem.getDataType(EnumType.class,
FalconTypeSystem.DefinedTypes.CLUSTER_LOCATION_TYPE.name());
location.set("type", locationType.fromValue(loc.getName().toUpperCase()));
location.set("path", loc.getPath());
StructType type = typeSystem.getDataType(StructType.class, FalconTypeSystem.DefinedTypes.CLUSTER_LOCATION.name());
StructType type = typeSystem
.getDataType(StructType.class, FalconTypeSystem.DefinedTypes.CLUSTER_LOCATION.name());
locations.add(type.convert(location, Multiplicity.REQUIRED));
}
clusterRef.set("locations", locations);
@@ -122,7 +128,8 @@ public class FalconImporter {
interfaceStruct.set("type", interfaceFld.getType().name());
interfaceStruct.set("endpoint", interfaceFld.getEndpoint());
interfaceStruct.set("version", interfaceFld.getVersion());
StructType type = typeSystem.getDataType(StructType.class, FalconTypeSystem.DefinedTypes.CLUSTER_INTERFACE.name());
StructType type = typeSystem
.getDataType(StructType.class, FalconTypeSystem.DefinedTypes.CLUSTER_INTERFACE.name());
interfaces.add(type.convert(interfaceStruct, Multiplicity.REQUIRED));
}
clusterRef.set("interfaces", interfaces);
@@ -54,7 +54,7 @@ public class FalconTypeSystem {
public static FalconTypeSystem getInstance() throws MetadataException {
if (INSTANCE == null) {
synchronized(LOG) {
synchronized (LOG) {
if (INSTANCE == null) {
INSTANCE = new FalconTypeSystem();
}
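The change above only normalizes spacing in synchronized (LOG); the surrounding code is the double-checked locking idiom. As a general sketch, not this project's code: the idiom is only safe under the Java memory model when the instance field is volatile, and a dedicated lock object (or the class itself) is the more conventional monitor than a logger:

public class SingletonSketch {
    // volatile is what makes double-checked locking safe under the Java memory model
    private static volatile SingletonSketch instance;

    public static SingletonSketch getInstance() {
        if (instance == null) {                      // first check, no lock taken
            synchronized (SingletonSketch.class) {   // the commit synchronizes on LOG instead
                if (instance == null) {              // second check, under the lock
                    instance = new SingletonSketch();
                }
            }
        }
        return instance;
    }
}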
@@ -73,12 +73,16 @@ public class FalconTypeSystem {
new AttributeDefinition("name", DataTypes.STRING_TYPE.getName(), Multiplicity.REQUIRED, false, null),
new AttributeDefinition("acl", DefinedTypes.ACL.name(), Multiplicity.OPTIONAL, false, null),
new AttributeDefinition("tags", DefinedTypes.TAG.name(), Multiplicity.COLLECTION, false, null),
new AttributeDefinition("locations", TYPE_SYSTEM.defineMapType(DataTypes.STRING_TYPE, DataTypes.STRING_TYPE).getName(), Multiplicity.COLLECTION, false, null),
new AttributeDefinition("interfaces", DefinedTypes.CLUSTER_INTERFACE.name(), Multiplicity.COLLECTION, false, null),
new AttributeDefinition("properties", TYPE_SYSTEM.defineMapType(DataTypes.STRING_TYPE, DataTypes.STRING_TYPE).getName(), Multiplicity.OPTIONAL, false, null),
};
new AttributeDefinition("locations",
TYPE_SYSTEM.defineMapType(DataTypes.STRING_TYPE, DataTypes.STRING_TYPE).getName(),
Multiplicity.COLLECTION, false, null),
new AttributeDefinition("interfaces", DefinedTypes.CLUSTER_INTERFACE.name(), Multiplicity.COLLECTION,
false, null), new AttributeDefinition("properties",
TYPE_SYSTEM.defineMapType(DataTypes.STRING_TYPE, DataTypes.STRING_TYPE).getName(),
Multiplicity.OPTIONAL, false, null),};
HierarchicalTypeDefinition<ClassType> cluster =
new HierarchicalTypeDefinition<>(ClassType.class, DefinedTypes.CLUSTER.name(), ImmutableList.<String>of(), attributeDefinitions);
new HierarchicalTypeDefinition<>(ClassType.class, DefinedTypes.CLUSTER.name(),
ImmutableList.<String>of(), attributeDefinitions);
LOG.debug("Created definition for " + DefinedTypes.CLUSTER.name());
return cluster;
}
@@ -86,57 +90,52 @@ public class FalconTypeSystem {
private HierarchicalTypeDefinition<TraitType> defineTags() {
AttributeDefinition[] attributeDefinitions = new AttributeDefinition[]{
new AttributeDefinition("name", DataTypes.STRING_TYPE.getName(), Multiplicity.REQUIRED, false, null),
new AttributeDefinition("value", DataTypes.STRING_TYPE.getName(), Multiplicity.REQUIRED, false, null)
};
new AttributeDefinition("value", DataTypes.STRING_TYPE.getName(), Multiplicity.REQUIRED, false, null)};
HierarchicalTypeDefinition<TraitType> traitType = new HierarchicalTypeDefinition<>(TraitType.class, DefinedTypes.TAG.name(), ImmutableList.<String>of(), attributeDefinitions);
HierarchicalTypeDefinition<TraitType> traitType =
new HierarchicalTypeDefinition<>(TraitType.class, DefinedTypes.TAG.name(), ImmutableList.<String>of(),
attributeDefinitions);
LOG.debug("Created definition for " + DefinedTypes.TAG.name());
traitTypeDefinitions.add(traitType);
return traitType;
}
private StructTypeDefinition defineClusterLocation() throws MetadataException {
EnumValue values[] = {
new EnumValue("WORKING", 1),
new EnumValue("STAGING", 2),
new EnumValue("TEMP", 3),
};
EnumValue values[] = {new EnumValue("WORKING", 1), new EnumValue("STAGING", 2), new EnumValue("TEMP", 3),};
LOG.debug("Created definition for " + DefinedTypes.CLUSTER_LOCATION_TYPE.name());
EnumTypeDefinition locationType = new EnumTypeDefinition(DefinedTypes.CLUSTER_LOCATION_TYPE.name(), values);
TYPE_SYSTEM.defineEnumType(locationType);
AttributeDefinition[] attributeDefinitions = new AttributeDefinition[]{
new AttributeDefinition("type", DefinedTypes.CLUSTER_LOCATION_TYPE.name(), Multiplicity.REQUIRED, false, null),
new AttributeDefinition("path", DataTypes.STRING_TYPE.getName(), Multiplicity.REQUIRED, false, null),
};
new AttributeDefinition("type", DefinedTypes.CLUSTER_LOCATION_TYPE.name(), Multiplicity.REQUIRED, false,
null),
new AttributeDefinition("path", DataTypes.STRING_TYPE.getName(), Multiplicity.REQUIRED, false, null),};
LOG.debug("Created definition for " + DefinedTypes.CLUSTER_LOCATION.name());
StructTypeDefinition location = new StructTypeDefinition(DefinedTypes.CLUSTER_LOCATION.name(), attributeDefinitions);
StructTypeDefinition location =
new StructTypeDefinition(DefinedTypes.CLUSTER_LOCATION.name(), attributeDefinitions);
structTypeDefinitions.add(location);
return location;
}
private StructTypeDefinition defineClusterInterface() throws MetadataException {
EnumValue values[] = {
new EnumValue("READONLY", 1),
new EnumValue("WRITE", 2),
new EnumValue("EXECUTE", 3),
new EnumValue("WORKFLOW", 4),
new EnumValue("MESSAGING", 5),
new EnumValue("REGISTRY", 6),
};
EnumValue values[] = {new EnumValue("READONLY", 1), new EnumValue("WRITE", 2), new EnumValue("EXECUTE", 3),
new EnumValue("WORKFLOW", 4), new EnumValue("MESSAGING", 5), new EnumValue("REGISTRY", 6),};
LOG.debug("Created definition for " + DefinedTypes.CLUSTER_INTERFACE_TYPE.name());
EnumTypeDefinition interfaceType = new EnumTypeDefinition(DefinedTypes.CLUSTER_INTERFACE_TYPE.name(), values);
TYPE_SYSTEM.defineEnumType(interfaceType);
AttributeDefinition[] attributeDefinitions = new AttributeDefinition[]{
new AttributeDefinition("type", DefinedTypes.CLUSTER_INTERFACE_TYPE.name(), Multiplicity.REQUIRED, false, null),
new AttributeDefinition("endpoint", DataTypes.STRING_TYPE.getName(), Multiplicity.REQUIRED, false, null),
new AttributeDefinition("version", DataTypes.STRING_TYPE.getName(), Multiplicity.REQUIRED, false, null),
};
new AttributeDefinition("type", DefinedTypes.CLUSTER_INTERFACE_TYPE.name(), Multiplicity.REQUIRED,
false, null),
new AttributeDefinition("endpoint", DataTypes.STRING_TYPE.getName(), Multiplicity.REQUIRED, false,
null),
new AttributeDefinition("version", DataTypes.STRING_TYPE.getName(), Multiplicity.REQUIRED, false,
null),};
LOG.debug("Created definition for " + DefinedTypes.CLUSTER_INTERFACE.name());
StructTypeDefinition interfaceEntity = new StructTypeDefinition(DefinedTypes.CLUSTER_INTERFACE.name(), attributeDefinitions);
StructTypeDefinition interfaceEntity =
new StructTypeDefinition(DefinedTypes.CLUSTER_INTERFACE.name(), attributeDefinitions);
structTypeDefinitions.add(interfaceEntity);
return interfaceEntity;
}
@@ -154,13 +153,10 @@ public class FalconTypeSystem {
private StructTypeDefinition defineACL() {
AttributeDefinition[] attributeDefinitions = new AttributeDefinition[]{
new AttributeDefinition("owner", DataTypes.STRING_TYPE.getName(),
Multiplicity.REQUIRED, false, null),
new AttributeDefinition("group", DataTypes.STRING_TYPE.getName(),
Multiplicity.REQUIRED, false, null),
new AttributeDefinition("permission", DataTypes.STRING_TYPE.getName(),
Multiplicity.OPTIONAL, false, null),
};
new AttributeDefinition("owner", DataTypes.STRING_TYPE.getName(), Multiplicity.REQUIRED, false, null),
new AttributeDefinition("group", DataTypes.STRING_TYPE.getName(), Multiplicity.REQUIRED, false, null),
new AttributeDefinition("permission", DataTypes.STRING_TYPE.getName(), Multiplicity.OPTIONAL, false,
null),};
LOG.debug("Created definition for " + DefinedTypes.ACL.name());
StructTypeDefinition acl = new StructTypeDefinition(DefinedTypes.ACL.name(), attributeDefinitions);
structTypeDefinitions.add(acl);
@@ -48,11 +48,12 @@ public class FalconImporterTest {
FalconTypeSystem.getInstance();
FalconImporter importer = new FalconImporter(client, repo);
when(client.getEntityList(EntityType.CLUSTER.name(), null, null, null, null, null, null,
null)).thenReturn(getEntityList());
when(client.getEntityList(EntityType.CLUSTER.name(), null, null, null, null, null, null, null))
.thenReturn(getEntityList());
//TODO Set other fields in cluster
when(client.getDefinition(anyString(), anyString())).thenReturn(getCluster());
when(repo.createEntity(any(IReferenceableInstance.class), anyString())).thenReturn(UUID.randomUUID().toString());
when(repo.createEntity(any(IReferenceableInstance.class), anyString()))
.thenReturn(UUID.randomUUID().toString());
importer.importClusters();
}
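The test above drives FalconImporter against Mockito mocks of the client and repository. A minimal sketch of the same when(...).thenReturn(...) stubbing pattern, with a hypothetical Greeter interface in place of FalconClient:

import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.when;

public class StubbingSketch {

    interface Greeter {                                 // hypothetical collaborator
        String greet(String name);
    }

    public static void main(String[] args) {
        Greeter greeter = mock(Greeter.class);          // no real implementation needed
        when(greeter.greet("world")).thenReturn("hi");  // stub the expected interaction
        System.out.println(greeter.greet("world"));     // prints hi
    }
}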
@@ -29,7 +29,9 @@ public class FalconTypeSystemTest {
@Test
public void testTypeSystem() throws MetadataException {
FalconTypeSystem.getInstance();
Assert.assertNotNull(TypeSystem.getInstance().getDataType(ClassType.class, FalconTypeSystem.DefinedTypes.CLUSTER.name()));
Assert.assertNotNull(TypeSystem.getInstance().getDataType(TraitType.class, FalconTypeSystem.DefinedTypes.TAG.name()));
Assert.assertNotNull(
TypeSystem.getInstance().getDataType(ClassType.class, FalconTypeSystem.DefinedTypes.CLUSTER.name()));
Assert.assertNotNull(
TypeSystem.getInstance().getDataType(TraitType.class, FalconTypeSystem.DefinedTypes.TAG.name()));
}
}
@@ -159,9 +159,8 @@ public class HiveMetaStoreBridge {
LOG.debug("Getting reference for database {}", databaseName);
String typeName = HiveDataTypes.HIVE_DB.getName();
String dslQuery = String.format("%s where %s = '%s' and %s = '%s'", typeName,
HiveDataModelGenerator.NAME, databaseName.toLowerCase(), HiveDataModelGenerator.CLUSTER_NAME,
clusterName);
String dslQuery = String.format("%s where %s = '%s' and %s = '%s'", typeName, HiveDataModelGenerator.NAME,
databaseName.toLowerCase(), HiveDataModelGenerator.CLUSTER_NAME, clusterName);
return getEntityReferenceFromDSL(typeName, dslQuery);
}
@@ -170,11 +169,12 @@ public class HiveMetaStoreBridge {
String typeName = HiveDataTypes.HIVE_PROCESS.getName();
//todo enable DSL
// String dslQuery = String.format("%s where queryText = \"%s\"", typeName, queryStr);
// return getEntityReferenceFromDSL(typeName, dslQuery);
// String dslQuery = String.format("%s where queryText = \"%s\"", typeName, queryStr);
// return getEntityReferenceFromDSL(typeName, dslQuery);
String gremlinQuery = String.format("g.V.has('__typeName', '%s').has('%s.queryText', \"%s\").toList()",
typeName, typeName, StringEscapeUtils.escapeJava(queryStr));
String gremlinQuery =
String.format("g.V.has('__typeName', '%s').has('%s.queryText', \"%s\").toList()", typeName, typeName,
StringEscapeUtils.escapeJava(queryStr));
return getEntityReferenceFromGremlin(typeName, gremlinQuery);
}
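The builder above runs queryStr through StringEscapeUtils.escapeJava before splicing it into the Gremlin script, so embedded quotes and newlines become escape sequences and the script remains a single valid string literal. A small sketch with hypothetical query text and type name:

import org.apache.commons.lang.StringEscapeUtils;

public class EscapeSketch {
    public static void main(String[] args) {
        String queryStr = "select \"id\"\nfrom t";      // hypothetical query text
        String escaped = StringEscapeUtils.escapeJava(queryStr);
        // the quote and newline are now the two-character sequences \" and \n
        String gremlinQuery = String.format(
                "g.V.has('__typeName', '%s').has('%s.queryText', \"%s\").toList()",
                "hive_process", "hive_process", escaped);  // hypothetical type name
        System.out.println(gremlinQuery);
    }
}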
@@ -216,9 +216,8 @@ public class HiveMetaStoreBridge {
return getEntityReferenceFromDSL(typeName, dslQuery);
}
private Referenceable getEntityReferenceFromGremlin(String typeName, String gremlinQuery) throws
AtlasServiceException,
JSONException {
private Referenceable getEntityReferenceFromGremlin(String typeName, String gremlinQuery)
throws AtlasServiceException, JSONException {
AtlasClient client = getAtlasClient();
JSONObject response = client.searchByGremlin(gremlinQuery);
JSONArray results = response.getJSONArray(AtlasClient.RESULTS);
@@ -236,7 +235,8 @@ public class HiveMetaStoreBridge {
//todo replace gremlin with DSL
// String dslQuery = String.format("%s as p where values = %s, tableName where name = '%s', "
// + "dbName where name = '%s' and clusterName = '%s' select p", typeName, valuesStr, tableName,
// + "dbName where name = '%s' and clusterName = '%s' select p", typeName, valuesStr,
// tableName,
// dbName, clusterName);
String datasetType = AtlasClient.DATA_SET_SUPER_TYPE;
@@ -373,9 +373,8 @@ public class HiveMetaStoreBridge {
return partRef;
}
private void importIndexes(String db, String table,
Referenceable dbReferenceable,
Referenceable tableReferenceable) throws Exception {
private void importIndexes(String db, String table, Referenceable dbReferenceable, Referenceable tableReferenceable)
throws Exception {
List<Index> indexes = hiveClient.getIndexes(db, table, Short.MAX_VALUE);
if (indexes.size() > 0) {
for (Index index : indexes) {
@@ -385,9 +384,8 @@ public class HiveMetaStoreBridge {
}
//todo should be idempotent
private void importIndex(Index index,
Referenceable dbReferenceable,
Referenceable tableReferenceable) throws Exception {
private void importIndex(Index index, Referenceable dbReferenceable, Referenceable tableReferenceable)
throws Exception {
LOG.info("Importing index {} for {}.{}", index.getIndexName(), dbReferenceable, tableReferenceable);
Referenceable indexRef = new Referenceable(HiveDataTypes.HIVE_INDEX.getName());
@@ -411,7 +409,8 @@ public class HiveMetaStoreBridge {
createInstance(indexRef);
}
private Referenceable fillStorageDescStruct(StorageDescriptor storageDesc, List<Referenceable> colList) throws Exception {
private Referenceable fillStorageDescStruct(StorageDescriptor storageDesc, List<Referenceable> colList)
throws Exception {
LOG.debug("Filling storage descriptor information for " + storageDesc);
Referenceable sdReferenceable = new Referenceable(HiveDataTypes.HIVE_STORAGEDESC.getName());
@@ -429,7 +428,8 @@ public class HiveMetaStoreBridge {
sdReferenceable.set("serdeInfo", serdeInfoStruct);
sdReferenceable.set(HiveDataModelGenerator.STORAGE_NUM_BUCKETS, storageDesc.getNumBuckets());
sdReferenceable.set(HiveDataModelGenerator.STORAGE_IS_STORED_AS_SUB_DIRS, storageDesc.isStoredAsSubDirectories());
sdReferenceable
.set(HiveDataModelGenerator.STORAGE_IS_STORED_AS_SUB_DIRS, storageDesc.isStoredAsSubDirectories());
//Use the passed column list if not null, ex: use same references for table and SD
List<FieldSchema> columns = storageDesc.getCols();
@@ -469,8 +469,7 @@ public class HiveMetaStoreBridge {
return createInstance(sdReferenceable);
}
private List<Referenceable> getColumns(List<FieldSchema> schemaList) throws Exception
{
private List<Referenceable> getColumns(List<FieldSchema> schemaList) throws Exception {
List<Referenceable> colList = new ArrayList<>();
for (FieldSchema fs : schemaList) {
LOG.debug("Processing field " + fs);
@@ -489,7 +488,7 @@ public class HiveMetaStoreBridge {
AtlasClient dgiClient = getAtlasClient();
//Register hive data model if its not already registered
if (dgiClient.getType(HiveDataTypes.HIVE_PROCESS.getName()) == null ) {
if (dgiClient.getType(HiveDataTypes.HIVE_PROCESS.getName()) == null) {
LOG.info("Registering Hive data model");
dgiClient.createType(dataModelGenerator.getModelAsJson());
} else {
@@ -348,7 +348,7 @@ public class HiveHook implements ExecuteWithHookContext {
explain.initialize(event.conf, event.queryPlan, null);
List<Task<?>> rootTasks = event.queryPlan.getRootTasks();
return explain.getJSONPlan(null, null, rootTasks, event.queryPlan.getFetchTask(), true, false, false);
} catch(Exception e) {
} catch (Exception e) {
LOG.warn("Failed to get queryplan", e);
return new JSONObject();
}
@@ -19,8 +19,8 @@
package org.apache.atlas.hive.model;
import com.google.common.collect.ImmutableList;
import org.apache.atlas.AtlasException;
import org.apache.atlas.AtlasClient;
import org.apache.atlas.AtlasException;
import org.apache.atlas.typesystem.TypesDef;
import org.apache.atlas.typesystem.json.TypesSerialization;
import org.apache.atlas.typesystem.types.AttributeDefinition;
@@ -102,12 +102,8 @@ public class HiveDataModelGenerator {
}
public TypesDef getTypesDef() {
return TypeUtils.getTypesDef(
getEnumTypeDefinitions(),
getStructTypeDefinitions(),
getTraitTypeDefinitions(),
getClassTypeDefinitions()
);
return TypeUtils.getTypesDef(getEnumTypeDefinitions(), getStructTypeDefinitions(), getTraitTypeDefinitions(),
getClassTypeDefinitions());
}
public String getDataModelAsJSON() {
@@ -131,108 +127,82 @@ public class HiveDataModelGenerator {
}
private void createHiveObjectTypeEnum() throws AtlasException {
EnumValue values[] = {
new EnumValue("GLOBAL", 1),
new EnumValue("DATABASE", 2),
new EnumValue("TABLE", 3),
new EnumValue("PARTITION", 4),
new EnumValue("COLUMN", 5),
};
EnumTypeDefinition definition = new EnumTypeDefinition(
HiveDataTypes.HIVE_OBJECT_TYPE.getName(), values);
EnumValue values[] = {new EnumValue("GLOBAL", 1), new EnumValue("DATABASE", 2), new EnumValue("TABLE", 3),
new EnumValue("PARTITION", 4), new EnumValue("COLUMN", 5),};
EnumTypeDefinition definition = new EnumTypeDefinition(HiveDataTypes.HIVE_OBJECT_TYPE.getName(), values);
enumTypeDefinitionMap.put(HiveDataTypes.HIVE_OBJECT_TYPE.getName(), definition);
LOG.debug("Created definition for " + HiveDataTypes.HIVE_OBJECT_TYPE.getName());
}
private void createHivePrincipalTypeEnum() throws AtlasException {
EnumValue values[] = {
new EnumValue("USER", 1),
new EnumValue("ROLE", 2),
new EnumValue("GROUP", 3),
};
EnumValue values[] = {new EnumValue("USER", 1), new EnumValue("ROLE", 2), new EnumValue("GROUP", 3),};
EnumTypeDefinition definition = new EnumTypeDefinition(
HiveDataTypes.HIVE_PRINCIPAL_TYPE.getName(), values);
EnumTypeDefinition definition = new EnumTypeDefinition(HiveDataTypes.HIVE_PRINCIPAL_TYPE.getName(), values);
enumTypeDefinitionMap.put(HiveDataTypes.HIVE_PRINCIPAL_TYPE.getName(), definition);
LOG.debug("Created definition for " + HiveDataTypes.HIVE_PRINCIPAL_TYPE.getName());
}
private void createResourceTypeEnum() throws AtlasException {
EnumValue values[] = {
new EnumValue("JAR", 1),
new EnumValue("FILE", 2),
new EnumValue("ARCHIVE", 3),
};
EnumTypeDefinition definition = new EnumTypeDefinition(
HiveDataTypes.HIVE_RESOURCE_TYPE.getName(), values);
EnumValue values[] = {new EnumValue("JAR", 1), new EnumValue("FILE", 2), new EnumValue("ARCHIVE", 3),};
EnumTypeDefinition definition = new EnumTypeDefinition(HiveDataTypes.HIVE_RESOURCE_TYPE.getName(), values);
enumTypeDefinitionMap.put(HiveDataTypes.HIVE_RESOURCE_TYPE.getName(), definition);
LOG.debug("Created definition for " + HiveDataTypes.HIVE_RESOURCE_TYPE.getName());
}
private void createSerDeStruct() throws AtlasException {
AttributeDefinition[] attributeDefinitions = new AttributeDefinition[]{
new AttributeDefinition(NAME, DataTypes.STRING_TYPE.getName(),
Multiplicity.OPTIONAL, false, null),
new AttributeDefinition("serializationLib", DataTypes.STRING_TYPE.getName(),
Multiplicity.OPTIONAL, false, null),
new AttributeDefinition("parameters", STRING_MAP_TYPE.getName(),
Multiplicity.OPTIONAL, false, null),
};
StructTypeDefinition definition = new StructTypeDefinition(
HiveDataTypes.HIVE_SERDE.getName(), attributeDefinitions);
new AttributeDefinition(NAME, DataTypes.STRING_TYPE.getName(), Multiplicity.OPTIONAL, false, null),
new AttributeDefinition("serializationLib", DataTypes.STRING_TYPE.getName(), Multiplicity.OPTIONAL,
false, null),
new AttributeDefinition("parameters", STRING_MAP_TYPE.getName(), Multiplicity.OPTIONAL, false, null),};
StructTypeDefinition definition =
new StructTypeDefinition(HiveDataTypes.HIVE_SERDE.getName(), attributeDefinitions);
structTypeDefinitionMap.put(HiveDataTypes.HIVE_SERDE.getName(), definition);
LOG.debug("Created definition for " + HiveDataTypes.HIVE_SERDE.getName());
}
private void createOrderStruct() throws AtlasException {
AttributeDefinition[] attributeDefinitions = new AttributeDefinition[]{
new AttributeDefinition("col", DataTypes.STRING_TYPE.getName(),
Multiplicity.REQUIRED, false, null),
new AttributeDefinition("order", DataTypes.INT_TYPE.getName(),
Multiplicity.REQUIRED, false, null),
};
new AttributeDefinition("col", DataTypes.STRING_TYPE.getName(), Multiplicity.REQUIRED, false, null),
new AttributeDefinition("order", DataTypes.INT_TYPE.getName(), Multiplicity.REQUIRED, false, null),};
StructTypeDefinition definition = new StructTypeDefinition(
HiveDataTypes.HIVE_ORDER.getName(), attributeDefinitions);
StructTypeDefinition definition =
new StructTypeDefinition(HiveDataTypes.HIVE_ORDER.getName(), attributeDefinitions);
structTypeDefinitionMap.put(HiveDataTypes.HIVE_ORDER.getName(), definition);
LOG.debug("Created definition for " + HiveDataTypes.HIVE_ORDER.getName());
}
private void createStorageDescClass() throws AtlasException {
AttributeDefinition[] attributeDefinitions = new AttributeDefinition[]{
new AttributeDefinition("cols",
String.format("array<%s>", HiveDataTypes.HIVE_COLUMN.getName()),
new AttributeDefinition("cols", String.format("array<%s>", HiveDataTypes.HIVE_COLUMN.getName()),
Multiplicity.COLLECTION, false, null),
new AttributeDefinition("location", DataTypes.STRING_TYPE.getName(),
Multiplicity.OPTIONAL, false, null),
new AttributeDefinition("inputFormat", DataTypes.STRING_TYPE.getName(),
Multiplicity.OPTIONAL, false, null),
new AttributeDefinition("outputFormat", DataTypes.STRING_TYPE.getName(),
Multiplicity.OPTIONAL, false, null),
new AttributeDefinition("compressed", DataTypes.BOOLEAN_TYPE.getName(),
Multiplicity.REQUIRED, false, null),
new AttributeDefinition(STORAGE_NUM_BUCKETS, DataTypes.INT_TYPE.getName(),
Multiplicity.OPTIONAL, false, null),
new AttributeDefinition("serdeInfo", HiveDataTypes.HIVE_SERDE.getName(),
Multiplicity.OPTIONAL, false, null),
new AttributeDefinition("bucketCols",
String.format("array<%s>", DataTypes.STRING_TYPE.getName()),
Multiplicity.OPTIONAL, false, null),
new AttributeDefinition("sortCols",
String.format("array<%s>", HiveDataTypes.HIVE_ORDER.getName()),
Multiplicity.OPTIONAL, false, null),
new AttributeDefinition("parameters", STRING_MAP_TYPE.getName(),
Multiplicity.OPTIONAL, false, null),
new AttributeDefinition("location", DataTypes.STRING_TYPE.getName(), Multiplicity.OPTIONAL, false,
null),
new AttributeDefinition("inputFormat", DataTypes.STRING_TYPE.getName(), Multiplicity.OPTIONAL, false,
null),
new AttributeDefinition("outputFormat", DataTypes.STRING_TYPE.getName(), Multiplicity.OPTIONAL, false,
null),
new AttributeDefinition("compressed", DataTypes.BOOLEAN_TYPE.getName(), Multiplicity.REQUIRED, false,
null),
new AttributeDefinition(STORAGE_NUM_BUCKETS, DataTypes.INT_TYPE.getName(), Multiplicity.OPTIONAL, false,
null),
new AttributeDefinition("serdeInfo", HiveDataTypes.HIVE_SERDE.getName(), Multiplicity.OPTIONAL, false,
null),
new AttributeDefinition("bucketCols", String.format("array<%s>", DataTypes.STRING_TYPE.getName()),
Multiplicity.OPTIONAL, false, null),
new AttributeDefinition("sortCols", String.format("array<%s>", HiveDataTypes.HIVE_ORDER.getName()),
Multiplicity.OPTIONAL, false, null),
new AttributeDefinition("parameters", STRING_MAP_TYPE.getName(), Multiplicity.OPTIONAL, false, null),
//new AttributeDefinition("skewedInfo", DefinedTypes.HIVE_SKEWEDINFO.getName(),
// Multiplicity.OPTIONAL, false, null),
new AttributeDefinition(STORAGE_IS_STORED_AS_SUB_DIRS, DataTypes.BOOLEAN_TYPE.getName(),
Multiplicity.OPTIONAL, false, null),
};
Multiplicity.OPTIONAL, false, null),};
HierarchicalTypeDefinition<ClassType> definition = new HierarchicalTypeDefinition<>(
ClassType.class, HiveDataTypes.HIVE_STORAGEDESC.getName(), null, attributeDefinitions);
HierarchicalTypeDefinition<ClassType> definition =
new HierarchicalTypeDefinition<>(ClassType.class, HiveDataTypes.HIVE_STORAGEDESC.getName(), null,
attributeDefinitions);
classTypeDefinitions.put(HiveDataTypes.HIVE_STORAGEDESC.getName(), definition);
LOG.debug("Created definition for " + HiveDataTypes.HIVE_STORAGEDESC.getName());
}
@@ -243,54 +213,45 @@ public class HiveDataModelGenerator {
AttributeDefinition[] attributeDefinitions = new AttributeDefinition[]{
new AttributeDefinition("resourceType", HiveDataTypes.HIVE_RESOURCE_TYPE.getName(),
Multiplicity.REQUIRED, false, null),
new AttributeDefinition("uri", DataTypes.STRING_TYPE.getName(),
Multiplicity.REQUIRED, false, null),
};
StructTypeDefinition definition = new StructTypeDefinition(
HiveDataTypes.HIVE_RESOURCEURI.getName(), attributeDefinitions);
new AttributeDefinition("uri", DataTypes.STRING_TYPE.getName(), Multiplicity.REQUIRED, false, null),};
StructTypeDefinition definition =
new StructTypeDefinition(HiveDataTypes.HIVE_RESOURCEURI.getName(), attributeDefinitions);
structTypeDefinitionMap.put(HiveDataTypes.HIVE_RESOURCEURI.getName(), definition);
LOG.debug("Created definition for " + HiveDataTypes.HIVE_RESOURCEURI.getName());
}
private void createDBClass() throws AtlasException {
AttributeDefinition[] attributeDefinitions = new AttributeDefinition[]{
new AttributeDefinition(NAME, DataTypes.STRING_TYPE.getName(),
Multiplicity.REQUIRED, false, null),
new AttributeDefinition(CLUSTER_NAME, DataTypes.STRING_TYPE.getName(),
Multiplicity.REQUIRED, false, null),
new AttributeDefinition("description", DataTypes.STRING_TYPE.getName(),
Multiplicity.OPTIONAL, false, null),
new AttributeDefinition("locationUri", DataTypes.STRING_TYPE.getName(),
Multiplicity.REQUIRED, false, null),
new AttributeDefinition("parameters", STRING_MAP_TYPE.getName(),
Multiplicity.OPTIONAL, false, null),
new AttributeDefinition("ownerName", DataTypes.STRING_TYPE.getName(),
Multiplicity.OPTIONAL, false, null),
new AttributeDefinition("ownerType", HiveDataTypes.HIVE_PRINCIPAL_TYPE.getName(),
Multiplicity.OPTIONAL, false, null),
};
new AttributeDefinition(NAME, DataTypes.STRING_TYPE.getName(), Multiplicity.REQUIRED, false, null),
new AttributeDefinition(CLUSTER_NAME, DataTypes.STRING_TYPE.getName(), Multiplicity.REQUIRED, false,
null),
new AttributeDefinition("description", DataTypes.STRING_TYPE.getName(), Multiplicity.OPTIONAL, false,
null),
new AttributeDefinition("locationUri", DataTypes.STRING_TYPE.getName(), Multiplicity.REQUIRED, false,
null),
new AttributeDefinition("parameters", STRING_MAP_TYPE.getName(), Multiplicity.OPTIONAL, false, null),
new AttributeDefinition("ownerName", DataTypes.STRING_TYPE.getName(), Multiplicity.OPTIONAL, false,
null),
new AttributeDefinition("ownerType", HiveDataTypes.HIVE_PRINCIPAL_TYPE.getName(), Multiplicity.OPTIONAL,
false, null),};
HierarchicalTypeDefinition<ClassType> definition =
new HierarchicalTypeDefinition<>(ClassType.class, HiveDataTypes.HIVE_DB.getName(),
null, attributeDefinitions);
new HierarchicalTypeDefinition<>(ClassType.class, HiveDataTypes.HIVE_DB.getName(), null,
attributeDefinitions);
classTypeDefinitions.put(HiveDataTypes.HIVE_DB.getName(), definition);
LOG.debug("Created definition for " + HiveDataTypes.HIVE_DB.getName());
}
private void createTypeClass() throws AtlasException {
AttributeDefinition[] attributeDefinitions = new AttributeDefinition[]{
new AttributeDefinition(NAME, DataTypes.STRING_TYPE.getName(),
Multiplicity.REQUIRED, false, null),
new AttributeDefinition("type1", DataTypes.STRING_TYPE.getName(),
Multiplicity.OPTIONAL, false, null),
new AttributeDefinition("type2", DataTypes.STRING_TYPE.getName(),
Multiplicity.OPTIONAL, false, null),
new AttributeDefinition("fields", String.format("array<%s>",
HiveDataTypes.HIVE_COLUMN.getName()), Multiplicity.OPTIONAL, false, null),
};
new AttributeDefinition(NAME, DataTypes.STRING_TYPE.getName(), Multiplicity.REQUIRED, false, null),
new AttributeDefinition("type1", DataTypes.STRING_TYPE.getName(), Multiplicity.OPTIONAL, false, null),
new AttributeDefinition("type2", DataTypes.STRING_TYPE.getName(), Multiplicity.OPTIONAL, false, null),
new AttributeDefinition("fields", String.format("array<%s>", HiveDataTypes.HIVE_COLUMN.getName()),
Multiplicity.OPTIONAL, false, null),};
HierarchicalTypeDefinition<ClassType> definition =
new HierarchicalTypeDefinition<>(ClassType.class, HiveDataTypes.HIVE_TYPE.getName(),
null, attributeDefinitions);
new HierarchicalTypeDefinition<>(ClassType.class, HiveDataTypes.HIVE_TYPE.getName(), null,
attributeDefinitions);
classTypeDefinitions.put(HiveDataTypes.HIVE_TYPE.getName(), definition);
LOG.debug("Created definition for " + HiveDataTypes.HIVE_TYPE.getName());
@@ -298,17 +259,12 @@ public class HiveDataModelGenerator {
private void createColumnClass() throws AtlasException {
AttributeDefinition[] attributeDefinitions = new AttributeDefinition[]{
new AttributeDefinition(NAME, DataTypes.STRING_TYPE.getName(),
Multiplicity.REQUIRED, false, null),
new AttributeDefinition("type", DataTypes.STRING_TYPE.getName(),
Multiplicity.REQUIRED, false, null),
new AttributeDefinition(COMMENT, DataTypes.STRING_TYPE.getName(),
Multiplicity.OPTIONAL, false, null),
};
new AttributeDefinition(NAME, DataTypes.STRING_TYPE.getName(), Multiplicity.REQUIRED, false, null),
new AttributeDefinition("type", DataTypes.STRING_TYPE.getName(), Multiplicity.REQUIRED, false, null),
new AttributeDefinition(COMMENT, DataTypes.STRING_TYPE.getName(), Multiplicity.OPTIONAL, false, null),};
HierarchicalTypeDefinition<ClassType> definition =
new HierarchicalTypeDefinition<>(
ClassType.class, HiveDataTypes.HIVE_COLUMN.getName(),
null, attributeDefinitions);
new HierarchicalTypeDefinition<>(ClassType.class, HiveDataTypes.HIVE_COLUMN.getName(), null,
attributeDefinitions);
classTypeDefinitions.put(HiveDataTypes.HIVE_COLUMN.getName(), definition);
LOG.debug("Created definition for " + HiveDataTypes.HIVE_COLUMN.getName());
}
@@ -317,62 +273,50 @@ public class HiveDataModelGenerator {
AttributeDefinition[] attributeDefinitions = new AttributeDefinition[]{
new AttributeDefinition("values", DataTypes.arrayTypeName(DataTypes.STRING_TYPE.getName()),
Multiplicity.OPTIONAL, false, null),
new AttributeDefinition(TABLE, HiveDataTypes.HIVE_TABLE.getName(),
Multiplicity.REQUIRED, false, null),
new AttributeDefinition("createTime", DataTypes.LONG_TYPE.getName(),
Multiplicity.OPTIONAL, false, null),
new AttributeDefinition("lastAccessTime", DataTypes.LONG_TYPE.getName(),
Multiplicity.OPTIONAL, false, null),
new AttributeDefinition("sd", HiveDataTypes.HIVE_STORAGEDESC.getName(),
Multiplicity.REQUIRED, false, null),
new AttributeDefinition("columns",
DataTypes.arrayTypeName(HiveDataTypes.HIVE_COLUMN.getName()),
new AttributeDefinition(TABLE, HiveDataTypes.HIVE_TABLE.getName(), Multiplicity.REQUIRED, false, null),
new AttributeDefinition("createTime", DataTypes.LONG_TYPE.getName(), Multiplicity.OPTIONAL, false,
null),
new AttributeDefinition("lastAccessTime", DataTypes.LONG_TYPE.getName(), Multiplicity.OPTIONAL, false,
null),
new AttributeDefinition("sd", HiveDataTypes.HIVE_STORAGEDESC.getName(), Multiplicity.REQUIRED, false,
null),
new AttributeDefinition("columns", DataTypes.arrayTypeName(HiveDataTypes.HIVE_COLUMN.getName()),
Multiplicity.OPTIONAL, true, null),
new AttributeDefinition("parameters", STRING_MAP_TYPE.getName(),
Multiplicity.OPTIONAL, false, null),
};
new AttributeDefinition("parameters", STRING_MAP_TYPE.getName(), Multiplicity.OPTIONAL, false, null),};
HierarchicalTypeDefinition<ClassType> definition =
new HierarchicalTypeDefinition<>(ClassType.class,
HiveDataTypes.HIVE_PARTITION.getName(), null, attributeDefinitions);
new HierarchicalTypeDefinition<>(ClassType.class, HiveDataTypes.HIVE_PARTITION.getName(), null,
attributeDefinitions);
classTypeDefinitions.put(HiveDataTypes.HIVE_PARTITION.getName(), definition);
LOG.debug("Created definition for " + HiveDataTypes.HIVE_PARTITION.getName());
}
private void createTableClass() throws AtlasException {
AttributeDefinition[] attributeDefinitions = new AttributeDefinition[]{
new AttributeDefinition(TABLE_NAME, DataTypes.STRING_TYPE.getName(),
Multiplicity.REQUIRED, false, null),
new AttributeDefinition(DB, HiveDataTypes.HIVE_DB.getName(),
Multiplicity.REQUIRED, false, null),
new AttributeDefinition("owner", DataTypes.STRING_TYPE.getName(),
Multiplicity.OPTIONAL, false, null),
new AttributeDefinition("createTime", DataTypes.LONG_TYPE.getName(),
Multiplicity.OPTIONAL, false, null),
new AttributeDefinition("lastAccessTime", DataTypes.LONG_TYPE.getName(),
Multiplicity.OPTIONAL, false, null),
new AttributeDefinition(COMMENT, DataTypes.STRING_TYPE.getName(),
Multiplicity.OPTIONAL, false, null),
new AttributeDefinition("retention", DataTypes.INT_TYPE.getName(),
Multiplicity.OPTIONAL, false, null),
new AttributeDefinition("sd", HiveDataTypes.HIVE_STORAGEDESC.getName(),
Multiplicity.OPTIONAL, false, null),
new AttributeDefinition("partitionKeys",
DataTypes.arrayTypeName(HiveDataTypes.HIVE_COLUMN.getName()),
Multiplicity.OPTIONAL, false, null),
new AttributeDefinition("columns",
DataTypes.arrayTypeName(HiveDataTypes.HIVE_COLUMN.getName()),
new AttributeDefinition(TABLE_NAME, DataTypes.STRING_TYPE.getName(), Multiplicity.REQUIRED, false,
null),
new AttributeDefinition(DB, HiveDataTypes.HIVE_DB.getName(), Multiplicity.REQUIRED, false, null),
new AttributeDefinition("owner", DataTypes.STRING_TYPE.getName(), Multiplicity.OPTIONAL, false, null),
new AttributeDefinition("createTime", DataTypes.LONG_TYPE.getName(), Multiplicity.OPTIONAL, false,
null),
new AttributeDefinition("lastAccessTime", DataTypes.LONG_TYPE.getName(), Multiplicity.OPTIONAL, false,
null),
new AttributeDefinition(COMMENT, DataTypes.STRING_TYPE.getName(), Multiplicity.OPTIONAL, false, null),
new AttributeDefinition("retention", DataTypes.INT_TYPE.getName(), Multiplicity.OPTIONAL, false, null),
new AttributeDefinition("sd", HiveDataTypes.HIVE_STORAGEDESC.getName(), Multiplicity.OPTIONAL, false,
null),
new AttributeDefinition("partitionKeys", DataTypes.arrayTypeName(HiveDataTypes.HIVE_COLUMN.getName()),
Multiplicity.OPTIONAL, false, null),
new AttributeDefinition("columns", DataTypes.arrayTypeName(HiveDataTypes.HIVE_COLUMN.getName()),
Multiplicity.OPTIONAL, true, null),
new AttributeDefinition("parameters", STRING_MAP_TYPE.getName(),
Multiplicity.OPTIONAL, false, null),
new AttributeDefinition("viewOriginalText", DataTypes.STRING_TYPE.getName(),
Multiplicity.OPTIONAL, false, null),
new AttributeDefinition("viewExpandedText", DataTypes.STRING_TYPE.getName(),
Multiplicity.OPTIONAL, false, null),
new AttributeDefinition("tableType", DataTypes.STRING_TYPE.getName(),
Multiplicity.OPTIONAL, false, null),
new AttributeDefinition("temporary", DataTypes.BOOLEAN_TYPE.getName(),
Multiplicity.OPTIONAL, false, null),
};
new AttributeDefinition("parameters", STRING_MAP_TYPE.getName(), Multiplicity.OPTIONAL, false, null),
new AttributeDefinition("viewOriginalText", DataTypes.STRING_TYPE.getName(), Multiplicity.OPTIONAL,
false, null),
new AttributeDefinition("viewExpandedText", DataTypes.STRING_TYPE.getName(), Multiplicity.OPTIONAL,
false, null),
new AttributeDefinition("tableType", DataTypes.STRING_TYPE.getName(), Multiplicity.OPTIONAL, false,
null),
new AttributeDefinition("temporary", DataTypes.BOOLEAN_TYPE.getName(), Multiplicity.OPTIONAL, false,
null),};
HierarchicalTypeDefinition<ClassType> definition =
new HierarchicalTypeDefinition<>(ClassType.class, HiveDataTypes.HIVE_TABLE.getName(),
ImmutableList.of("DataSet"), attributeDefinitions);
@@ -382,27 +326,23 @@ public class HiveDataModelGenerator {
private void createIndexClass() throws AtlasException {
AttributeDefinition[] attributeDefinitions = new AttributeDefinition[]{
new AttributeDefinition(NAME, DataTypes.STRING_TYPE.getName(),
Multiplicity.REQUIRED, false, null),
new AttributeDefinition("indexHandlerClass", DataTypes.STRING_TYPE.getName(),
Multiplicity.REQUIRED, false, null),
new AttributeDefinition(DB, HiveDataTypes.HIVE_DB.getName(),
Multiplicity.REQUIRED, false, null),
new AttributeDefinition("createTime", DataTypes.LONG_TYPE.getName(),
Multiplicity.OPTIONAL, false, null),
new AttributeDefinition("lastAccessTime", DataTypes.LONG_TYPE.getName(),
Multiplicity.OPTIONAL, false, null),
new AttributeDefinition("origTable", HiveDataTypes.HIVE_TABLE.getName(),
Multiplicity.REQUIRED, false, null),
new AttributeDefinition("indexTable", HiveDataTypes.HIVE_TABLE.getName(),
Multiplicity.OPTIONAL, false, null),
new AttributeDefinition("sd", HiveDataTypes.HIVE_STORAGEDESC.getName(),
Multiplicity.REQUIRED, false, null),
new AttributeDefinition("parameters", STRING_MAP_TYPE.getName(),
Multiplicity.OPTIONAL, false, null),
new AttributeDefinition("deferredRebuild", DataTypes.BOOLEAN_TYPE.getName(),
Multiplicity.OPTIONAL, false, null),
};
new AttributeDefinition(NAME, DataTypes.STRING_TYPE.getName(), Multiplicity.REQUIRED, false, null),
new AttributeDefinition("indexHandlerClass", DataTypes.STRING_TYPE.getName(), Multiplicity.REQUIRED,
false, null),
new AttributeDefinition(DB, HiveDataTypes.HIVE_DB.getName(), Multiplicity.REQUIRED, false, null),
new AttributeDefinition("createTime", DataTypes.LONG_TYPE.getName(), Multiplicity.OPTIONAL, false,
null),
new AttributeDefinition("lastAccessTime", DataTypes.LONG_TYPE.getName(), Multiplicity.OPTIONAL, false,
null),
new AttributeDefinition("origTable", HiveDataTypes.HIVE_TABLE.getName(), Multiplicity.REQUIRED, false,
null),
new AttributeDefinition("indexTable", HiveDataTypes.HIVE_TABLE.getName(), Multiplicity.OPTIONAL, false,
null),
new AttributeDefinition("sd", HiveDataTypes.HIVE_STORAGEDESC.getName(), Multiplicity.REQUIRED, false,
null),
new AttributeDefinition("parameters", STRING_MAP_TYPE.getName(), Multiplicity.OPTIONAL, false, null),
new AttributeDefinition("deferredRebuild", DataTypes.BOOLEAN_TYPE.getName(), Multiplicity.OPTIONAL,
false, null),};
HierarchicalTypeDefinition<ClassType> definition =
new HierarchicalTypeDefinition<>(ClassType.class, HiveDataTypes.HIVE_INDEX.getName(),
@@ -413,15 +353,15 @@ public class HiveDataModelGenerator {
private void createRoleClass() throws AtlasException {
AttributeDefinition[] attributeDefinitions = new AttributeDefinition[]{
new AttributeDefinition("roleName", DataTypes.STRING_TYPE.getName(),
Multiplicity.REQUIRED, false, null),
new AttributeDefinition("createTime", DataTypes.LONG_TYPE.getName(),
Multiplicity.REQUIRED, false, null),
new AttributeDefinition("ownerName", DataTypes.STRING_TYPE.getName(),
Multiplicity.REQUIRED, false, null),
};
HierarchicalTypeDefinition<ClassType> definition = new HierarchicalTypeDefinition<>(
ClassType.class, HiveDataTypes.HIVE_ROLE.getName(), null, attributeDefinitions);
new AttributeDefinition("roleName", DataTypes.STRING_TYPE.getName(), Multiplicity.REQUIRED, false,
null),
new AttributeDefinition("createTime", DataTypes.LONG_TYPE.getName(), Multiplicity.REQUIRED, false,
null),
new AttributeDefinition("ownerName", DataTypes.STRING_TYPE.getName(), Multiplicity.REQUIRED, false,
null),};
HierarchicalTypeDefinition<ClassType> definition =
new HierarchicalTypeDefinition<>(ClassType.class, HiveDataTypes.HIVE_ROLE.getName(), null,
attributeDefinitions);
classTypeDefinitions.put(HiveDataTypes.HIVE_ROLE.getName(), definition);
LOG.debug("Created definition for " + HiveDataTypes.HIVE_ROLE.getName());
@@ -429,24 +369,20 @@ public class HiveDataModelGenerator {
private void createProcessClass() throws AtlasException {
AttributeDefinition[] attributeDefinitions = new AttributeDefinition[]{
new AttributeDefinition("startTime", DataTypes.LONG_TYPE.getName(),
Multiplicity.REQUIRED, false, null),
new AttributeDefinition("endTime", DataTypes.LONG_TYPE.getName(),
Multiplicity.REQUIRED, false, null),
new AttributeDefinition("userName", DataTypes.STRING_TYPE.getName(),
Multiplicity.REQUIRED, false, null),
new AttributeDefinition("queryText", DataTypes.STRING_TYPE.getName(),
Multiplicity.REQUIRED, false, null),
new AttributeDefinition("queryPlan", DataTypes.STRING_TYPE.getName(),
Multiplicity.REQUIRED, false, null),
new AttributeDefinition("queryId", DataTypes.STRING_TYPE.getName(),
Multiplicity.REQUIRED, false, null),
new AttributeDefinition("queryGraph", DataTypes.STRING_TYPE.getName(),
Multiplicity.OPTIONAL, false, null),
};
new AttributeDefinition("startTime", DataTypes.LONG_TYPE.getName(), Multiplicity.REQUIRED, false, null),
new AttributeDefinition("endTime", DataTypes.LONG_TYPE.getName(), Multiplicity.REQUIRED, false, null),
new AttributeDefinition("userName", DataTypes.STRING_TYPE.getName(), Multiplicity.REQUIRED, false,
null),
new AttributeDefinition("queryText", DataTypes.STRING_TYPE.getName(), Multiplicity.REQUIRED, false,
null),
new AttributeDefinition("queryPlan", DataTypes.STRING_TYPE.getName(), Multiplicity.REQUIRED, false,
null),
new AttributeDefinition("queryId", DataTypes.STRING_TYPE.getName(), Multiplicity.REQUIRED, false, null),
new AttributeDefinition("queryGraph", DataTypes.STRING_TYPE.getName(), Multiplicity.OPTIONAL, false,
null),};
HierarchicalTypeDefinition<ClassType> definition = new HierarchicalTypeDefinition<>(
ClassType.class, HiveDataTypes.HIVE_PROCESS.getName(),
HierarchicalTypeDefinition<ClassType> definition =
new HierarchicalTypeDefinition<>(ClassType.class, HiveDataTypes.HIVE_PROCESS.getName(),
ImmutableList.of(AtlasClient.PROCESS_SUPER_TYPE), attributeDefinitions);
classTypeDefinitions.put(HiveDataTypes.HIVE_PROCESS.getName(), definition);
LOG.debug("Created definition for " + HiveDataTypes.HIVE_PROCESS.getName());
@@ -70,27 +70,22 @@ public class BaseSSLAndKerberosTest extends BaseSecurityTest {
file.delete();
conf.set(CredentialProviderFactory.CREDENTIAL_PROVIDER_PATH, providerUrl);
CredentialProvider provider =
CredentialProviderFactory.getProviders(conf).get(0);
CredentialProvider provider = CredentialProviderFactory.getProviders(conf).get(0);
// create new aliases
try {
char[] storepass = {'k', 'e', 'y', 'p', 'a', 's', 's'};
provider.createCredentialEntry(
KEYSTORE_PASSWORD_KEY, storepass);
provider.createCredentialEntry(KEYSTORE_PASSWORD_KEY, storepass);
char[] trustpass = {'k', 'e', 'y', 'p', 'a', 's', 's'};
provider.createCredentialEntry(
TRUSTSTORE_PASSWORD_KEY, trustpass);
provider.createCredentialEntry(TRUSTSTORE_PASSWORD_KEY, trustpass);
char[] trustpass2 = {'k', 'e', 'y', 'p', 'a', 's', 's'};
provider.createCredentialEntry(
"ssl.client.truststore.password", trustpass2);
provider.createCredentialEntry("ssl.client.truststore.password", trustpass2);
char[] certpass = {'k', 'e', 'y', 'p', 'a', 's', 's'};
provider.createCredentialEntry(
SERVER_CERT_PASSWORD_KEY, certpass);
provider.createCredentialEntry(SERVER_CERT_PASSWORD_KEY, certpass);
// write out so that it can be found in checks
provider.flush();
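The block above stores keystore and truststore passwords through Hadoop's credential provider API instead of plain-text configuration. A condensed sketch of the same flow, assuming a local JCEKS provider path:

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.security.alias.CredentialProvider;
import org.apache.hadoop.security.alias.CredentialProviderFactory;

public class CredentialSketch {
    public static void main(String[] args) throws Exception {
        Configuration conf = new Configuration();
        // hypothetical provider location; jceks://file points at a local keystore file
        conf.set(CredentialProviderFactory.CREDENTIAL_PROVIDER_PATH,
                "jceks://file/tmp/creds.jceks");
        CredentialProvider provider = CredentialProviderFactory.getProviders(conf).get(0);
        provider.createCredentialEntry("keystore.password", "keypass".toCharArray());
        provider.flush();   // persist the entry so later lookups can find it
    }
}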
@@ -132,8 +127,7 @@ public class BaseSSLAndKerberosTest extends BaseSecurityTest {
hiveConf.setVar(HiveConf.ConfVars.PREEXECHOOKS, "");
hiveConf.setVar(HiveConf.ConfVars.POSTEXECHOOKS, HiveHook.class.getName());
hiveConf.setBoolVar(HiveConf.ConfVars.HIVE_SUPPORT_CONCURRENCY, false);
hiveConf.setVar(HiveConf.ConfVars.METASTOREWAREHOUSE,
System.getProperty("user.dir") + "/target/atlas");
hiveConf.setVar(HiveConf.ConfVars.METASTOREWAREHOUSE, System.getProperty("user.dir") + "/target/atlas");
hiveConf.set(HiveMetaStoreBridge.DGI_URL_PROPERTY, DGI_URL);
hiveConf.set("javax.jdo.option.ConnectionURL", "jdbc:derby:./target/metastore_db;create=true");
hiveConf.set("hive.hook.dgi.synchronous", "true");
@@ -121,8 +121,8 @@ public class HiveHookIT {
private String createTable(boolean partition) throws Exception {
String tableName = tableName();
runCommand("create table " + tableName + "(id int, name string) comment 'table comment' "
+ (partition ? " partitioned by(dt string)" : ""));
runCommand("create table " + tableName + "(id int, name string) comment 'table comment' " + (partition ?
" partitioned by(dt string)" : ""));
return tableName;
}
@@ -146,7 +146,7 @@ public class HiveHookIT {
final Id sdId = (Id) tableRef.get("sd");
Referenceable sdRef = dgiCLient.getEntity(sdId.id);
Assert.assertEquals(sdRef.get(HiveDataModelGenerator.STORAGE_IS_STORED_AS_SUB_DIRS),false);
Assert.assertEquals(sdRef.get(HiveDataModelGenerator.STORAGE_IS_STORED_AS_SUB_DIRS), false);
//Create table where database doesn't exist, will create database instance as well
assertDatabaseIsRegistered(DEFAULT_DB);
@@ -154,7 +154,8 @@ public class HiveHookIT {
private String assertColumnIsRegistered(String colName) throws Exception {
LOG.debug("Searching for column {}", colName);
String query = String.format("%s where name = '%s'", HiveDataTypes.HIVE_COLUMN.getName(), colName.toLowerCase());
String query =
String.format("%s where name = '%s'", HiveDataTypes.HIVE_COLUMN.getName(), colName.toLowerCase());
return assertEntityIsRegistered(query, true);
}
@@ -196,8 +197,9 @@ public class HiveHookIT {
public void testInsert() throws Exception {
String tableName = createTable();
String insertTableName = createTable();
String query = "insert into " + insertTableName + " partition(dt = '2015-01-01') select id, name from "
+ tableName + " where dt = '2015-01-01'";
String query =
"insert into " + insertTableName + " partition(dt = '2015-01-01') select id, name from " + tableName
+ " where dt = '2015-01-01'";
runCommand(query);
assertProcessIsRegistered(query);
@@ -278,13 +280,14 @@ public class HiveHookIT {
}
private void assertProcessIsRegistered(String queryStr) throws Exception {
// String dslQuery = String.format("%s where queryText = \"%s\"", HiveDataTypes.HIVE_PROCESS.getName(),
// normalize(queryStr));
// assertEntityIsRegistered(dslQuery, true);
// String dslQuery = String.format("%s where queryText = \"%s\"", HiveDataTypes.HIVE_PROCESS.getName(),
// normalize(queryStr));
// assertEntityIsRegistered(dslQuery, true);
//todo replace with DSL
String typeName = HiveDataTypes.HIVE_PROCESS.getName();
String gremlinQuery = String.format("g.V.has('__typeName', '%s').has('%s.queryText', \"%s\").toList()",
typeName, typeName, normalize(queryStr));
String gremlinQuery =
String.format("g.V.has('__typeName', '%s').has('%s.queryText', \"%s\").toList()", typeName, typeName,
normalize(queryStr));
JSONObject response = dgiCLient.searchByGremlin(gremlinQuery);
JSONArray results = response.getJSONArray(AtlasClient.RESULTS);
Assert.assertEquals(results.length(), 1);
@@ -307,9 +310,9 @@ public class HiveHookIT {
private String assertTableIsRegistered(String dbName, String tableName, boolean registered) throws Exception {
LOG.debug("Searching for table {}.{}", dbName, tableName);
String query = String.format("%s as t where tableName = '%s', db where name = '%s' and clusterName = '%s'"
+ " select t", HiveDataTypes.HIVE_TABLE.getName(), tableName.toLowerCase(), dbName.toLowerCase(),
CLUSTER_NAME);
String query = String.format(
"%s as t where tableName = '%s', db where name = '%s' and clusterName = '%s'" + " select t",
HiveDataTypes.HIVE_TABLE.getName(), tableName.toLowerCase(), dbName.toLowerCase(), CLUSTER_NAME);
return assertEntityIsRegistered(query, registered);
}
@@ -336,7 +339,7 @@ public class HiveHookIT {
Assert.assertEquals(results.length(), 1);
}
private String assertEntityIsRegistered(String dslQuery, boolean registered) throws Exception{
private String assertEntityIsRegistered(String dslQuery, boolean registered) throws Exception {
JSONArray results = dgiCLient.searchByDSL(dslQuery);
if (registered) {
Assert.assertEquals(results.length(), 1);
@@ -92,7 +92,8 @@ public class NegativeSSLAndKerberosHiveHookIT extends BaseSSLAndKerberosTest {
configuration.setProperty(KEYSTORE_FILE_KEY, "../../webapp/target/atlas.keystore");
configuration.setProperty(CERT_STORES_CREDENTIAL_PROVIDER_PATH, providerUrl);
configuration.setProperty("atlas.http.authentication.type", "kerberos");
configuration.setProperty(SSLFactory.SSL_HOSTNAME_VERIFIER_KEY, SSLHostnameVerifier.DEFAULT_AND_LOCALHOST.toString());
configuration.setProperty(SSLFactory.SSL_HOSTNAME_VERIFIER_KEY,
SSLHostnameVerifier.DEFAULT_AND_LOCALHOST.toString());
configuration.save(new FileWriter(persistDir + File.separator + "client.properties"));
@@ -18,8 +18,8 @@
package org.apache.atlas.hive.hook;
import org.apache.atlas.AtlasException;
import org.apache.atlas.AtlasClient;
import org.apache.atlas.AtlasException;
import org.apache.atlas.PropertiesUtil;
import org.apache.atlas.hive.model.HiveDataTypes;
import org.apache.atlas.security.SecurityProperties;
@@ -107,7 +108,8 @@ public class SSLAndKerberosHiveHookIT extends BaseSSLAndKerberosTest {
configuration.setProperty(KEYSTORE_FILE_KEY, "../../webapp/target/atlas.keystore");
configuration.setProperty(CERT_STORES_CREDENTIAL_PROVIDER_PATH, providerUrl);
configuration.setProperty("atlas.http.authentication.type", "kerberos");
configuration.setProperty(SSLFactory.SSL_HOSTNAME_VERIFIER_KEY, SSLHostnameVerifier.DEFAULT_AND_LOCALHOST.toString());
configuration.setProperty(SSLFactory.SSL_HOSTNAME_VERIFIER_KEY,
SSLHostnameVerifier.DEFAULT_AND_LOCALHOST.toString());
configuration.save(new FileWriter(persistDir + File.separator + "client.properties"));
@@ -215,7 +216,8 @@ public class SSLAndKerberosHiveHookIT extends BaseSSLAndKerberosTest {
assertInstanceIsRegistered(HiveDataTypes.HIVE_DB.getName(), "name", dbName);
}
private void assertInstanceIsRegistered(final String typeName, final String colName, final String colValue) throws Exception {
private void assertInstanceIsRegistered(final String typeName, final String colName, final String colValue)
throws Exception {
Subject.doAs(subject, new PrivilegedExceptionAction<Object>() {
@Override
public Object run() throws Exception {
@@ -18,8 +18,8 @@
package org.apache.atlas.hive.hook;
import org.apache.atlas.AtlasException;
import org.apache.atlas.AtlasClient;
import org.apache.atlas.AtlasException;
import org.apache.atlas.hive.bridge.HiveMetaStoreBridge;
import org.apache.atlas.hive.model.HiveDataTypes;
import org.apache.atlas.security.SecurityProperties;
@@ -73,7 +73,9 @@ public class SSLHiveHookIT {
super(port, path);
}
public Server getServer () { return server; }
public Server getServer() {
return server;
}
@Override
public PropertiesConfiguration getConfiguration() {
@@ -113,7 +115,8 @@ public class SSLHiveHookIT {
configuration.setProperty(TRUSTSTORE_FILE_KEY, "../../webapp/target/atlas.keystore");
configuration.setProperty(KEYSTORE_FILE_KEY, "../../webapp/target/atlas.keystore");
configuration.setProperty(CERT_STORES_CREDENTIAL_PROVIDER_PATH, providerUrl);
configuration.setProperty(SSLFactory.SSL_HOSTNAME_VERIFIER_KEY, SSLHostnameVerifier.DEFAULT_AND_LOCALHOST.toString());
configuration.setProperty(SSLFactory.SSL_HOSTNAME_VERIFIER_KEY,
SSLHostnameVerifier.DEFAULT_AND_LOCALHOST.toString());
configuration.save(new FileWriter(persistDir + File.separator + "client.properties"));
@@ -153,27 +156,22 @@ public class SSLHiveHookIT {
file.delete();
conf.set(CredentialProviderFactory.CREDENTIAL_PROVIDER_PATH, providerUrl);
CredentialProvider provider =
CredentialProviderFactory.getProviders(conf).get(0);
CredentialProvider provider = CredentialProviderFactory.getProviders(conf).get(0);
// create new aliases
try {
char[] storepass = {'k', 'e', 'y', 'p', 'a', 's', 's'};
provider.createCredentialEntry(
KEYSTORE_PASSWORD_KEY, storepass);
provider.createCredentialEntry(KEYSTORE_PASSWORD_KEY, storepass);
char[] trustpass = {'k', 'e', 'y', 'p', 'a', 's', 's'};
provider.createCredentialEntry(
TRUSTSTORE_PASSWORD_KEY, trustpass);
provider.createCredentialEntry(TRUSTSTORE_PASSWORD_KEY, trustpass);
char[] trustpass2 = {'k', 'e', 'y', 'p', 'a', 's', 's'};
provider.createCredentialEntry(
"ssl.client.truststore.password", trustpass2);
provider.createCredentialEntry("ssl.client.truststore.password", trustpass2);
char[] certpass = {'k', 'e', 'y', 'p', 'a', 's', 's'};
provider.createCredentialEntry(
SERVER_CERT_PASSWORD_KEY, certpass);
provider.createCredentialEntry(SERVER_CERT_PASSWORD_KEY, certpass);
// write out so that it can be found in checks
provider.flush();
@@ -217,7 +215,7 @@ public class SSLHiveHookIT {
assertInstanceIsRegistered(HiveDataTypes.HIVE_DB.getName(), "name", dbName);
}
private void assertInstanceIsRegistered(String typeName, String colName, String colValue) throws Exception{
private void assertInstanceIsRegistered(String typeName, String colName, String colValue) throws Exception {
JSONArray results = dgiCLient.rawSearch(typeName, colName, colValue);
Assert.assertEquals(results.length(), 1);
}
@@ -41,19 +41,15 @@ import java.util.Map.Entry;
public abstract class ABridge implements IBridge {
protected static final Logger LOG = BridgeManager.LOG;
protected ArrayList<Class<? extends AEntityBean>> typeBeanClasses
= new ArrayList<Class<? extends AEntityBean>>();
protected ArrayList<Class<? extends AEntityBean>> typeBeanClasses = new ArrayList<Class<? extends AEntityBean>>();
MetadataRepository repo;
protected ABridge(MetadataRepository repo) {
this.repo = repo;
}
protected HierarchicalTypeDefinition<ClassType> createClassTypeDef(String name,
ImmutableList<String>
superTypes,
AttributeDefinition...
attrDefs) {
protected HierarchicalTypeDefinition<ClassType> createClassTypeDef(String name, ImmutableList<String> superTypes,
AttributeDefinition... attrDefs) {
return new HierarchicalTypeDefinition(ClassType.class, name, superTypes, attrDefs);
}
@@ -67,8 +63,7 @@ public abstract class ABridge implements IBridge {
// turn into a HiveLineageBean
try {
Class<AEntityBean> c = getTypeBeanInListByName(ref.getTypeName());
return this.convertFromITypedReferenceable(ref,
getTypeBeanInListByName(ref.getTypeName()));
return this.convertFromITypedReferenceable(ref, getTypeBeanInListByName(ref.getTypeName()));
} catch (BridgeException | InstantiationException | IllegalAccessException |
IllegalArgumentException | InvocationTargetException | NoSuchMethodException |
SecurityException e) {
@@ -80,8 +75,7 @@ public abstract class ABridge implements IBridge {
public String create(AEntityBean bean) throws MetadataException {
ClassType type = TypeSystem.getInstance()
.getDataType(ClassType.class, bean.getClass().getSimpleName());
ClassType type = TypeSystem.getInstance().getDataType(ClassType.class, bean.getClass().getSimpleName());
ITypedReferenceableInstance refBean = null;
try {
refBean = type.convert(this.convertToReferencable(bean), Multiplicity.REQUIRED);
@@ -140,10 +134,10 @@ public abstract class ABridge implements IBridge {
return selfAware;
}
protected final <T extends AEntityBean> T convertFromITypedReferenceable(
ITypedReferenceableInstance instance, Class<? extends AEntityBean> c)
throws InstantiationException, IllegalAccessException, IllegalArgumentException,
InvocationTargetException, NoSuchMethodException, SecurityException, BridgeException {
protected final <T extends AEntityBean> T convertFromITypedReferenceable(ITypedReferenceableInstance instance,
Class<? extends AEntityBean> c)
throws InstantiationException, IllegalAccessException, IllegalArgumentException, InvocationTargetException,
NoSuchMethodException, SecurityException, BridgeException {
if (!instance.getTypeName().equals(c.getSimpleName())) {
throw new BridgeException("ReferenceableInstance type not the same as bean");
}
......@@ -151,10 +145,8 @@ public abstract class ABridge implements IBridge {
for (Entry<String, AttributeInfo> e : instance.fieldMapping().fields.entrySet()) {
try {
String convertedName = e.getKey().substring(0, 1).toUpperCase() +
e.getKey().substring(1);
this.getClass().getMethod("set" + convertedName,
Class.forName(e.getValue().dataType().getName()))
String convertedName = e.getKey().substring(0, 1).toUpperCase() + e.getKey().substring(1);
this.getClass().getMethod("set" + convertedName, Class.forName(e.getValue().dataType().getName()))
.invoke(this, instance.get(e.getKey()));
} catch (MetadataException | ClassNotFoundException e1) {
// TODO Auto-generated catch block
......
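The loop above derives a setter name from each field key and invokes it reflectively; note it invokes the setter on this (the bridge), which only works when the bridge itself exposes those setters. A stand-alone sketch of the naming-plus-invoke pattern against a hypothetical bean:

import java.lang.reflect.Method;

public class ReflectionSetterSketch {
    static class PersonBean {
        private String name;
        public void setName(String name) { this.name = name; }
        public String getName() { return name; }
    }

    public static void main(String[] args) throws Exception {
        PersonBean bean = new PersonBean();
        String field = "name";
        // same capitalization rule as the hunk above: "name" -> "setName"
        String setter = "set" + field.substring(0, 1).toUpperCase() + field.substring(1);
        Method m = PersonBean.class.getMethod(setter, String.class);
        m.invoke(bean, "Alice");
        System.out.println(bean.getName()); // Alice
    }
}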
......@@ -50,13 +50,12 @@ public class BridgeManager {
@Inject
BridgeManager(MetadataRepository rs)
throws ConfigurationException, ClassNotFoundException, InstantiationException,
IllegalAccessException, IllegalArgumentException, InvocationTargetException,
NoSuchMethodException, SecurityException {
throws ConfigurationException, ClassNotFoundException, InstantiationException, IllegalAccessException,
IllegalArgumentException, InvocationTargetException, NoSuchMethodException, SecurityException {
this.ts = TypeSystem.getInstance();
this.rs = rs;
if (System.getProperty("bridgeManager.propsFile") != null &&
System.getProperty("bridgeManager.propsFile").length() != 0) {
if (System.getProperty("bridgeManager.propsFile") != null
&& System.getProperty("bridgeManager.propsFile").length() != 0) {
setActiveBridges(System.getProperty("bridgeManager.propsFile"));
} else {
setActiveBridges(bridgeFileDefault);
......@@ -73,8 +72,7 @@ public class BridgeManager {
}
public final static HierarchicalTypeDefinition<ClassType>
convertEntityBeanToClassTypeDefinition(
public final static HierarchicalTypeDefinition<ClassType> convertEntityBeanToClassTypeDefinition(
Class<? extends AEntityBean> class1) {
ArrayList<AttributeDefinition> attDefAL = new ArrayList<AttributeDefinition>();
for (Field f : class1.getFields()) {
......@@ -87,18 +85,16 @@ public class BridgeManager {
}
}
HierarchicalTypeDefinition<ClassType> typeDef = new HierarchicalTypeDefinition<>(
ClassType.class, class1.getSimpleName(),
null, (AttributeDefinition[]) attDefAL.toArray(new AttributeDefinition[0]));
HierarchicalTypeDefinition<ClassType> typeDef =
new HierarchicalTypeDefinition<>(ClassType.class, class1.getSimpleName(), null,
(AttributeDefinition[]) attDefAL.toArray(new AttributeDefinition[0]));
return typeDef;
}
public final static AttributeDefinition convertFieldtoAttributeDefiniton(Field f)
throws MetadataException {
public final static AttributeDefinition convertFieldtoAttributeDefiniton(Field f) throws MetadataException {
return new AttributeDefinition(f.getName(), f.getType().getSimpleName(),
Multiplicity.REQUIRED, false, null);
return new AttributeDefinition(f.getName(), f.getType().getSimpleName(), Multiplicity.REQUIRED, false, null);
}
public ArrayList<ABridge> getActiveBridges() {
......@@ -116,8 +112,7 @@ public class BridgeManager {
try {
BridgeManager.LOG.info("Loading : Active Bridge List");
config.load(bridgePropFileName);
String[] activeBridgeList = ((String) config.getProperty("BridgeManager.activeBridges"))
.split(",");
String[] activeBridgeList = ((String) config.getProperty("BridgeManager.activeBridges")).split(",");
BridgeManager.LOG.info("Loaded : Active Bridge List");
BridgeManager.LOG.info("First Loaded :" + activeBridgeList[0]);
......@@ -125,8 +120,7 @@ public class BridgeManager {
Class<?> bridgeCls = (Class<?>) Class.forName(s);
if (ABridge.class.isAssignableFrom(bridgeCls)) {
System.out.println(s + " is able to be instaciated");
aBList.add((ABridge) bridgeCls.getConstructor(MetadataRepository.class)
.newInstance(rs));
aBList.add((ABridge) bridgeCls.getConstructor(MetadataRepository.class).newInstance(rs));
}
}
......
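setActiveBridges parses a comma-separated class list from the properties file and instantiates each entry through its MetadataRepository constructor. A minimal stand-alone sketch of the same load-check-instantiate pattern; every name below is a stand-in:

import java.util.ArrayList;
import java.util.List;

public class ReflectiveLoaderSketch {
    public interface Bridge {}
    public static class Repo {}
    public static class HiveBridge implements Bridge {
        public HiveBridge(Repo r) {}
    }

    public static void main(String[] args) throws Exception {
        Repo repo = new Repo();
        // would come from BridgeManager.activeBridges in the real properties file
        String activeBridges = "ReflectiveLoaderSketch$HiveBridge";
        List<Bridge> bridges = new ArrayList<>();
        for (String name : activeBridges.split(",")) {
            Class<?> bridgeCls = Class.forName(name.trim());
            if (Bridge.class.isAssignableFrom(bridgeCls)) {  // same guard as above
                bridges.add((Bridge) bridgeCls.getConstructor(Repo.class).newInstance(repo));
            }
        }
        System.out.println(bridges.size()); // 1
    }
}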
......@@ -40,44 +40,39 @@ public class BridgeTypeBootstrapper {
private boolean isSetup = false;
@Inject
BridgeTypeBootstrapper(Map<Class, IBridge> bridges)
throws MetadataException {
BridgeTypeBootstrapper(Map<Class, IBridge> bridges) throws MetadataException {
this.bridges = bridges;
}
public final static HierarchicalTypeDefinition<ClassType>
convertEntityBeanToClassTypeDefinition(
public final static HierarchicalTypeDefinition<ClassType> convertEntityBeanToClassTypeDefinition(
Class<? extends AEntityBean> class1) {
ArrayList<AttributeDefinition> attDefAL = new ArrayList<AttributeDefinition>();
for (Field f : class1.getFields()) {
try {
attDefAL.add(BridgeTypeBootstrapper.convertFieldtoAttributeDefiniton(f));
} catch (MetadataException e) {
BridgeManager.LOG.error("Class " + class1.getName()
+ " cannot be converted to TypeDefinition");
BridgeManager.LOG.error("Class " + class1.getName() + " cannot be converted to TypeDefinition");
e.printStackTrace();
}
}
HierarchicalTypeDefinition<ClassType> typeDef = new HierarchicalTypeDefinition<>(
ClassType.class, class1.getSimpleName(), null,
(AttributeDefinition[]) attDefAL
.toArray(new AttributeDefinition[0]));
HierarchicalTypeDefinition<ClassType> typeDef =
new HierarchicalTypeDefinition<>(ClassType.class, class1.getSimpleName(), null,
(AttributeDefinition[]) attDefAL.toArray(new AttributeDefinition[0]));
return typeDef;
}
public final static AttributeDefinition convertFieldtoAttributeDefiniton(
Field f) throws MetadataException {
public final static AttributeDefinition convertFieldtoAttributeDefiniton(Field f) throws MetadataException {
return new AttributeDefinition(f.getName(),
f.getType().getSimpleName().toLowerCase(), Multiplicity.REQUIRED, false, null);
return new AttributeDefinition(f.getName(), f.getType().getSimpleName().toLowerCase(), Multiplicity.REQUIRED,
false, null);
}
public synchronized boolean bootstrap() throws MetadataException {
if (isSetup)
if (isSetup) {
return false;
else {
} else {
LOG.info("Bootstrapping types");
_bootstrap();
isSetup = true;
......@@ -94,12 +89,10 @@ public class BridgeTypeBootstrapper {
}
}
private final boolean loadTypes(IBridge bridge, TypeSystem ts)
throws MetadataException {
private final boolean loadTypes(IBridge bridge, TypeSystem ts) throws MetadataException {
for (Class<? extends AEntityBean> clazz : bridge.getTypeBeanClasses()) {
LOG.info("Registering %s", clazz.getSimpleName());
ts.defineClassType(BridgeTypeBootstrapper
.convertEntityBeanToClassTypeDefinition(clazz));
ts.defineClassType(BridgeTypeBootstrapper.convertEntityBeanToClassTypeDefinition(clazz));
}
return false;
}
......
......@@ -79,8 +79,7 @@ public class HiveMetaImporter {
public static boolean databasesImport() throws MetaException, RepositoryException {
ClassType classType = null;
try {
classType = TypeSystem.getInstance()
.getDataType(ClassType.class, HiveStructureBridge.DB_CLASS_TYPE);
classType = TypeSystem.getInstance().getDataType(ClassType.class, HiveStructureBridge.DB_CLASS_TYPE);
} catch (MetadataException e1) {
e1.printStackTrace();
}
......@@ -118,8 +117,7 @@ public class HiveMetaImporter {
public static boolean tablesImport(String dbName) throws MetaException, RepositoryException {
ClassType classType = null;
try {
classType = TypeSystem.getInstance()
.getDataType(ClassType.class, HiveStructureBridge.TB_CLASS_TYPE);
classType = TypeSystem.getInstance().getDataType(ClassType.class, HiveStructureBridge.TB_CLASS_TYPE);
} catch (MetadataException e1) {
e1.printStackTrace();
}
......@@ -129,8 +127,7 @@ public class HiveMetaImporter {
return true;
}
public static boolean tableImport(String dbName, String tbName)
throws MetaException, RepositoryException {
public static boolean tableImport(String dbName, String tbName) throws MetaException, RepositoryException {
try {
Table tb = msc.getTable(dbName, tbName);
Referenceable tbRef = new Referenceable(HiveStructureBridge.TB_CLASS_TYPE);
......@@ -157,12 +154,10 @@ public class HiveMetaImporter {
return true;
}
public static boolean fieldsImport(String dbName, String tbName)
throws MetaException, RepositoryException {
public static boolean fieldsImport(String dbName, String tbName) throws MetaException, RepositoryException {
ClassType classType = null;
try {
classType = TypeSystem.getInstance()
.getDataType(ClassType.class, HiveStructureBridge.FD_CLASS_TYPE);
classType = TypeSystem.getInstance().getDataType(ClassType.class, HiveStructureBridge.FD_CLASS_TYPE);
} catch (MetadataException e1) {
e1.printStackTrace();
}
......@@ -190,8 +185,7 @@ public class HiveMetaImporter {
return true;
}
public static boolean fieldImport(String dbName, String tbName, String fdName)
throws MetaException {
public static boolean fieldImport(String dbName, String tbName, String fdName) throws MetaException {
try {
for (FieldSchema fs : msc.getFields(dbName, tbName)) {
if (fs.getName().equals(fdName)) {
......
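The importer methods walk the metastore through HiveMetaStoreClient. A hedged sketch of the same walk, assuming a reachable metastore; the thrift URI is illustrative:

import org.apache.hadoop.hive.conf.HiveConf;
import org.apache.hadoop.hive.metastore.HiveMetaStoreClient;
import org.apache.hadoop.hive.metastore.api.FieldSchema;

public class MetastoreWalkSketch {
    public static void main(String[] args) throws Exception {
        HiveConf conf = new HiveConf();
        conf.setVar(HiveConf.ConfVars.METASTOREURIS, "thrift://localhost:9083");
        HiveMetaStoreClient msc = new HiveMetaStoreClient(conf);
        for (String db : msc.getAllDatabases()) {                 // databasesImport scope
            for (String table : msc.getAllTables(db)) {           // tablesImport scope
                for (FieldSchema fs : msc.getFields(db, table)) { // fieldsImport scope
                    System.out.println(db + "." + table + "." + fs.getName());
                }
            }
        }
        msc.close();
    }
}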
......@@ -45,60 +45,50 @@ public class HiveStructureBridge extends ABridge {
}
public boolean defineBridgeTypes(TypeSystem ts) {
ArrayList<HierarchicalTypeDefinition<?>> al
= new ArrayList<HierarchicalTypeDefinition<?>>();
ArrayList<HierarchicalTypeDefinition<?>> al = new ArrayList<HierarchicalTypeDefinition<?>>();
// TODO
//convert to helper methods
// Add to arrayList
try {
HierarchicalTypeDefinition<ClassType> databaseClassTypeDef
= new HierarchicalTypeDefinition<ClassType>("ClassType", DB_CLASS_TYPE, null,
HierarchicalTypeDefinition<ClassType> databaseClassTypeDef =
new HierarchicalTypeDefinition<ClassType>("ClassType", DB_CLASS_TYPE, null,
new AttributeDefinition[]{
new AttributeDefinition("DESC", "STRING_TYPE", Multiplicity.OPTIONAL,
new AttributeDefinition("DESC", "STRING_TYPE", Multiplicity.OPTIONAL, false, null),
new AttributeDefinition("DB_LOCATION_URI", "STRING_TYPE", Multiplicity.REQUIRED,
false, null),
new AttributeDefinition("DB_LOCATION_URI", "STRING_TYPE",
Multiplicity.REQUIRED, false, null),
new AttributeDefinition("NAME", "STRING_TYPE", Multiplicity.REQUIRED,
false, null),
new AttributeDefinition("OWNER_TYPE", "STRING_TYPE",
Multiplicity.OPTIONAL, false, null),
new AttributeDefinition("OWNER_NAME", "STRING_TYPE",
Multiplicity.OPTIONAL, false, null)
}
);
HierarchicalTypeDefinition<ClassType> tableClassTypeDef
= new HierarchicalTypeDefinition<ClassType>("ClassType", TB_CLASS_TYPE, null,
new AttributeDefinition("NAME", "STRING_TYPE", Multiplicity.REQUIRED, false, null),
new AttributeDefinition("OWNER_TYPE", "STRING_TYPE", Multiplicity.OPTIONAL, false,
null),
new AttributeDefinition("OWNER_NAME", "STRING_TYPE", Multiplicity.OPTIONAL, false,
null)});
HierarchicalTypeDefinition<ClassType> tableClassTypeDef =
new HierarchicalTypeDefinition<ClassType>("ClassType", TB_CLASS_TYPE, null,
new AttributeDefinition[]{
new AttributeDefinition("CREATE_TIME", "LONG_TYPE",
Multiplicity.REQUIRED, false, null),
new AttributeDefinition("LAST_ACCESS_TIME", "LONG_TYPE",
Multiplicity.REQUIRED, false, null),
new AttributeDefinition("OWNER", "STRING_TYPE", Multiplicity.REQUIRED,
new AttributeDefinition("CREATE_TIME", "LONG_TYPE", Multiplicity.REQUIRED, false,
null),
new AttributeDefinition("LAST_ACCESS_TIME", "LONG_TYPE", Multiplicity.REQUIRED,
false, null),
new AttributeDefinition("TBL_NAME", "STRING_TYPE",
Multiplicity.REQUIRED, false, null),
new AttributeDefinition("TBL_TYPE", "STRING_TYPE",
Multiplicity.REQUIRED, false, null),
new AttributeDefinition("VIEW_EXPANDED_TEXT", "STRING_TYPE",
Multiplicity.OPTIONAL, false, null),
new AttributeDefinition("VIEW_ORIGINAL_TEXT", "STRING_TYPE",
Multiplicity.OPTIONAL, false, null)
}
);
new AttributeDefinition("OWNER", "STRING_TYPE", Multiplicity.REQUIRED, false, null),
new AttributeDefinition("TBL_NAME", "STRING_TYPE", Multiplicity.REQUIRED, false,
null),
new AttributeDefinition("TBL_TYPE", "STRING_TYPE", Multiplicity.REQUIRED, false,
null),
new AttributeDefinition("VIEW_EXPANDED_TEXT", "STRING_TYPE", Multiplicity.OPTIONAL,
false, null),
new AttributeDefinition("VIEW_ORIGINAL_TEXT", "STRING_TYPE", Multiplicity.OPTIONAL,
false, null)});
HierarchicalTypeDefinition<ClassType> columnClassTypeDef
= new HierarchicalTypeDefinition<ClassType>("ClassType", FD_CLASS_TYPE, null,
HierarchicalTypeDefinition<ClassType> columnClassTypeDef =
new HierarchicalTypeDefinition<ClassType>("ClassType", FD_CLASS_TYPE, null,
new AttributeDefinition[]{
new AttributeDefinition("COMMENT", "STRING_TYPE", Multiplicity.OPTIONAL,
false, null),
new AttributeDefinition("COLUMN_NAME", "STRING_TYPE",
Multiplicity.REQUIRED, false, null),
new AttributeDefinition("TYPE_NAME", "STRING_TYPE",
Multiplicity.REQUIRED, false, null)
}
);
new AttributeDefinition("COMMENT", "STRING_TYPE", Multiplicity.OPTIONAL, false,
null),
new AttributeDefinition("COLUMN_NAME", "STRING_TYPE", Multiplicity.REQUIRED, false,
null),
new AttributeDefinition("TYPE_NAME", "STRING_TYPE", Multiplicity.REQUIRED, false,
null)});
} catch (ClassNotFoundException e) {
e.printStackTrace();
......@@ -108,8 +98,7 @@ public class HiveStructureBridge extends ABridge {
try {
ts.defineClassType(htd);
} catch (MetadataException e) {
System.out.println(
htd.hierarchicalMetaTypeName + "could not be added to the type system");
System.out.println(htd.hierarchicalMetaTypeName + " could not be added to the type system");
e.printStackTrace();
}
}
......
......@@ -33,8 +33,7 @@ import java.util.ArrayList;
import java.util.List;
public class BridgeModule extends AbstractModule {
public static final Logger LOG = LoggerFactory
.getLogger(BridgeModule.class);
public static final Logger LOG = LoggerFactory.getLogger(BridgeModule.class);
@Override
protected void configure() {
......@@ -44,11 +43,9 @@ public class BridgeModule extends AbstractModule {
bind(BridgeTypeBootstrapper.class).in(Scopes.SINGLETON);
// Load the configured bridge classes and add them to the map binder
MapBinder<Class, IBridge> mapbinder = MapBinder.newMapBinder(binder(),
Class.class, IBridge.class);
MapBinder<Class, IBridge> mapbinder = MapBinder.newMapBinder(binder(), Class.class, IBridge.class);
String propsURI = System.getProperty("bridgeManager.propsFile",
"bridge-manager.properties");
String propsURI = System.getProperty("bridgeManager.propsFile", "bridge-manager.properties");
List<Class<? extends IBridge>> bridges = getBridgeClasses(propsURI);
for (Class<? extends IBridge> bridgeClass : bridges) {
......@@ -59,8 +56,7 @@ public class BridgeModule extends AbstractModule {
/*
* Get the bridge classes from the configuration file
*/
private List<Class<? extends IBridge>> getBridgeClasses(
String bridgePropFileName) {
private List<Class<? extends IBridge>> getBridgeClasses(String bridgePropFileName) {
List<Class<? extends IBridge>> aBList = new ArrayList<Class<? extends IBridge>>();
PropertiesConfiguration config = new PropertiesConfiguration();
......@@ -68,13 +64,11 @@ public class BridgeModule extends AbstractModule {
try {
LOG.info("Loading : Active Bridge List");
config.load(bridgePropFileName);
String[] activeBridgeList = ((String) config
.getProperty("BridgeManager.activeBridges")).split(",");
String[] activeBridgeList = ((String) config.getProperty("BridgeManager.activeBridges")).split(",");
LOG.info("Loaded : Active Bridge List");
for (String s : activeBridgeList) {
Class<? extends IBridge> bridgeCls = (Class<? extends IBridge>) Class
.forName(s);
Class<? extends IBridge> bridgeCls = (Class<? extends IBridge>) Class.forName(s);
aBList.add(bridgeCls);
}
......
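BridgeModule's MapBinder lets each discovered bridge class be registered into an injectable Map<Class, IBridge>. A minimal Guice sketch of that binding style; the module and bridge classes here are hypothetical stand-ins:

import com.google.inject.AbstractModule;
import com.google.inject.Guice;
import com.google.inject.Injector;
import com.google.inject.Key;
import com.google.inject.TypeLiteral;
import com.google.inject.multibindings.MapBinder;
import java.util.Map;

public class MapBinderSketch {
    public interface IBridge {}
    public static class DemoBridge implements IBridge {}

    static class DemoModule extends AbstractModule {
        @Override
        protected void configure() {
            MapBinder<Class, IBridge> mapbinder =
                    MapBinder.newMapBinder(binder(), Class.class, IBridge.class);
            mapbinder.addBinding(DemoBridge.class).to(DemoBridge.class);
        }
    }

    public static void main(String[] args) {
        Injector injector = Guice.createInjector(new DemoModule());
        Map<Class, IBridge> bridges =
                injector.getInstance(Key.get(new TypeLiteral<Map<Class, IBridge>>() {}));
        System.out.println(bridges.size()); // 1
    }
}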
......@@ -38,8 +38,7 @@ public class BridgeManagerTest {
BridgeManager bm = new BridgeManager(repo);
System.out.println(bm.getActiveBridges().size());
Assert.assertEquals(bm.activeBridges.get(0).getClass().getSimpleName(),
"HiveLineageBridge");
Assert.assertEquals(bm.activeBridges.get(0).getClass().getSimpleName(), "HiveLineageBridge");
}
@Test
......
......@@ -50,8 +50,7 @@ public class TestHiveLineageBridge {
String oneId;
private HiveLineage loadHiveLineageBean(String path) throws IOException {
return new Gson().fromJson(new InputStreamReader(this.getClass().getResourceAsStream(path)),
HiveLineage.class);
return new Gson().fromJson(new InputStreamReader(this.getClass().getResourceAsStream(path)), HiveLineage.class);
}
@BeforeClass
......
......@@ -86,7 +86,8 @@ public class AtlasClient {
try {
clientConfig = getClientProperties();
if (clientConfig.getBoolean(TLS_ENABLED, false)) {
// create an SSL properties configuration if one doesn't exist. SSLFactory expects a file, so forced to create a
// create an SSL properties configuration if one doesn't exist. SSLFactory expects a file, so forced
// to create a
// configuration object, persist it, then subsequently pass in an empty configuration to SSLFactory
SecureClientUtils.persistSSLClientConfiguration(clientConfig);
}
......@@ -246,12 +247,12 @@ public class AtlasClient {
* @return result json object
* @throws AtlasServiceException
*/
public JSONArray rawSearch(String typeName, String attributeName, Object attributeValue) throws
AtlasServiceException {
// String gremlinQuery = String.format(
// "g.V.has(\"typeName\",\"%s\").and(_().has(\"%s.%s\", T.eq, \"%s\")).toList()",
// typeName, typeName, attributeName, attributeValue);
// return searchByGremlin(gremlinQuery);
public JSONArray rawSearch(String typeName, String attributeName, Object attributeValue)
throws AtlasServiceException {
// String gremlinQuery = String.format(
// "g.V.has(\"typeName\",\"%s\").and(_().has(\"%s.%s\", T.eq, \"%s\")).toList()",
// typeName, typeName, attributeName, attributeValue);
// return searchByGremlin(gremlinQuery);
String dslQuery = String.format("%s where %s = \"%s\"", typeName, attributeName, attributeValue);
return searchByDSL(dslQuery);
}
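rawSearch now reduces to a DSL query of the form typeName where attribute = "value". A hypothetical call; the endpoint and values are illustrative, and the single-argument AtlasClient constructor is assumed:

// issues the DSL query: hive_table where name = "customers"
AtlasClient client = new AtlasClient("http://localhost:21000");
JSONArray results = client.rawSearch("hive_table", "name", "customers");
System.out.println(results.length());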
......@@ -341,13 +342,11 @@ public class AtlasClient {
private JSONObject callAPIWithResource(API api, WebResource resource, Object requestObject)
throws AtlasServiceException {
ClientResponse clientResponse = resource
.accept(JSON_MEDIA_TYPE)
.type(JSON_MEDIA_TYPE)
ClientResponse clientResponse = resource.accept(JSON_MEDIA_TYPE).type(JSON_MEDIA_TYPE)
.method(api.getMethod(), ClientResponse.class, requestObject);
Response.Status expectedStatus = HttpMethod.POST.equals(api.getMethod())
? Response.Status.CREATED : Response.Status.OK;
Response.Status expectedStatus =
HttpMethod.POST.equals(api.getMethod()) ? Response.Status.CREATED : Response.Status.OK;
if (clientResponse.getStatus() == expectedStatus.getStatusCode()) {
String responseAsString = clientResponse.getEntity(String.class);
try {
......@@ -360,8 +359,7 @@ public class AtlasClient {
throw new AtlasServiceException(api, clientResponse);
}
private JSONObject callAPI(API api, Object requestObject,
String... pathParams) throws AtlasServiceException {
private JSONObject callAPI(API api, Object requestObject, String... pathParams) throws AtlasServiceException {
WebResource resource = getResource(api, pathParams);
return callAPIWithResource(api, resource, requestObject);
}
......
......@@ -62,9 +62,7 @@ public class SecureClientUtils {
public static URLConnectionClientHandler getClientConnectionHandler(DefaultClientConfig config,
PropertiesConfiguration clientConfig) {
config.getProperties().put(
URLConnectionClientHandler.PROPERTY_HTTP_URL_CONNECTION_SET_METHOD_WORKAROUND,
true);
config.getProperties().put(URLConnectionClientHandler.PROPERTY_HTTP_URL_CONNECTION_SET_METHOD_WORKAROUND, true);
Configuration conf = new Configuration(false);
conf.addResource(conf.get(SSLFactory.SSL_CLIENT_CONF_KEY, "ssl-client.xml"));
String authType = "simple";
......@@ -95,11 +93,9 @@ public class SecureClientUtils {
return new URLConnectionClientHandler(httpURLConnectionFactory);
}
private final static ConnectionConfigurator DEFAULT_TIMEOUT_CONN_CONFIGURATOR =
new ConnectionConfigurator() {
private final static ConnectionConfigurator DEFAULT_TIMEOUT_CONN_CONFIGURATOR = new ConnectionConfigurator() {
@Override
public HttpURLConnection configure(HttpURLConnection conn)
throws IOException {
public HttpURLConnection configure(HttpURLConnection conn) throws IOException {
setTimeouts(conn, DEFAULT_SOCKET_TIMEOUT);
return conn;
}
......@@ -109,14 +105,13 @@ public class SecureClientUtils {
try {
return newSslConnConfigurator(DEFAULT_SOCKET_TIMEOUT, conf);
} catch (Exception e) {
LOG.debug("Cannot load customized ssl related configuration. " +
"Fallback to system-generic settings.", e);
LOG.debug("Cannot load customized ssl related configuration. " + "Fallback to system-generic settings.", e);
return DEFAULT_TIMEOUT_CONN_CONFIGURATOR;
}
}
private static ConnectionConfigurator newSslConnConfigurator(final int timeout,
Configuration conf) throws IOException, GeneralSecurityException {
private static ConnectionConfigurator newSslConnConfigurator(final int timeout, Configuration conf)
throws IOException, GeneralSecurityException {
final SSLFactory factory;
final SSLSocketFactory sf;
final HostnameVerifier hv;
......@@ -128,8 +123,7 @@ public class SecureClientUtils {
return new ConnectionConfigurator() {
@Override
public HttpURLConnection configure(HttpURLConnection conn)
throws IOException {
public HttpURLConnection configure(HttpURLConnection conn) throws IOException {
if (conn instanceof HttpsURLConnection) {
HttpsURLConnection c = (HttpsURLConnection) conn;
c.setSSLSocketFactory(sf);
......@@ -168,7 +162,8 @@ public class SecureClientUtils {
return new File(sslDir, SecurityProperties.SSL_CLIENT_PROPERTIES);
}
public static void persistSSLClientConfiguration(PropertiesConfiguration clientConfig) throws AtlasException, IOException {
public static void persistSSLClientConfiguration(PropertiesConfiguration clientConfig)
throws AtlasException, IOException {
//trust settings
Configuration configuration = new Configuration(false);
File sslClientFile = getSSLClientFile();
......
......@@ -36,18 +36,10 @@ import java.util.Properties;
*
*/
public class BaseSecurityTest {
private static final String JAAS_ENTRY =
"%s { \n"
+ " %s required\n"
private static final String JAAS_ENTRY = "%s { \n" + " %s required\n"
// kerberos module
+ " keyTab=\"%s\"\n"
+ " debug=true\n"
+ " principal=\"%s\"\n"
+ " useKeyTab=true\n"
+ " useTicketCache=false\n"
+ " doNotPrompt=true\n"
+ " storeKey=true;\n"
+ "}; \n";
+ " keyTab=\"%s\"\n" + " debug=true\n" + " principal=\"%s\"\n" + " useKeyTab=true\n"
+ " useTicketCache=false\n" + " doNotPrompt=true\n" + " storeKey=true;\n" + "}; \n";
protected MiniKdc kdc;
protected String getWarPath() {
......@@ -56,8 +48,8 @@ public class BaseSecurityTest {
}
protected void generateTestProperties(Properties props) throws ConfigurationException, IOException {
PropertiesConfiguration config = new PropertiesConfiguration(System.getProperty("user.dir") +
"/../src/conf/application.properties");
PropertiesConfiguration config =
new PropertiesConfiguration(System.getProperty("user.dir") + "/../src/conf/application.properties");
for (String propName : props.stringPropertyNames()) {
config.setProperty(propName, props.getProperty(propName));
}
......@@ -88,20 +80,11 @@ public class BaseSecurityTest {
return kdcWorkDir;
}
public String createJAASEntry(
String context,
String principal,
File keytab) {
public String createJAASEntry(String context, String principal, File keytab) {
String keytabpath = keytab.getAbsolutePath();
// fix up for windows; no-op on unix
keytabpath = keytabpath.replace('\\', '/');
return String.format(
Locale.ENGLISH,
JAAS_ENTRY,
context,
getKerberosAuthModuleForJVM(),
keytabpath,
principal);
return String.format(Locale.ENGLISH, JAAS_ENTRY, context, getKerberosAuthModuleForJVM(), keytabpath, principal);
}
protected String getKerberosAuthModuleForJVM() {
......@@ -119,10 +102,7 @@ public class BaseSecurityTest {
protected File createKeytab(MiniKdc kdc, File kdcWorkDir, String principal, String filename) throws Exception {
File keytab = new File(kdcWorkDir, filename);
kdc.createPrincipal(keytab,
principal,
principal + "/localhost",
principal + "/127.0.0.1");
kdc.createPrincipal(keytab, principal, principal + "/localhost", principal + "/127.0.0.1");
return keytab;
}
}
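For reference, createJAASEntry("Server", "atlas", new File("/tmp/atlas.keytab")) would render the JAAS_ENTRY template roughly as follows; the login module name varies by JVM vendor (see getKerberosAuthModuleForJVM):

Server {
    com.sun.security.auth.module.Krb5LoginModule required
    keyTab="/tmp/atlas.keytab"
    debug=true
    principal="atlas"
    useKeyTab=true
    useTicketCache=false
    doNotPrompt=true
    storeKey=true;
};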
......@@ -97,7 +97,7 @@
<MaxPermGen>512m</MaxPermGen>
<SnapshotsId>apache.snapshots.repo</SnapshotsId>
<SnapshotsName>Apache Snapshot Repository</SnapshotsName>
<SnapshotsUrl> https://repository.apache.org/content/groups/snapshots</SnapshotsUrl>
<SnapshotsUrl>https://repository.apache.org/content/groups/snapshots</SnapshotsUrl>
<StagingId>apache-staging</StagingId>
<StagingName>Apache Release Distribution Repository</StagingName>
<StagingUrl>https://repository.apache.org/content/groups/staging</StagingUrl>
......@@ -110,20 +110,28 @@
<profile>
<id>Windows</id>
<activation>
<os><family>windows</family></os>
<os>
<family>windows</family>
</os>
</activation>
<properties>
<python.path.l>${project.basedir}\src\bin;${project.basedir}\src\test\python\scripts;${project.basedir}\src\test\mock</python.path.l>
<python.path.l>
${project.basedir}\src\bin;${project.basedir}\src\test\python\scripts;${project.basedir}\src\test\mock
</python.path.l>
</properties>
</profile>
<profile>
<id>Linux</id>
<activation>
<os><family>!windows</family></os>
<os>
<family>!windows</family>
</os>
</activation>
<properties>
<python.path.l>${project.basedir}/src/bin:${project.basedir}/src/test/mock:${project.basedir}/src/test/python/scripts</python.path.l>
<python.path.l>
${project.basedir}/src/bin:${project.basedir}/src/test/mock:${project.basedir}/src/test/python/scripts
</python.path.l>
</properties>
</profile>
</profiles>
......@@ -913,7 +921,8 @@
<redirectTestOutputToFile>true</redirectTestOutputToFile>
<argLine>-Djava.awt.headless=true -Dproject.version=${project.version}
-Dhadoop.tmp.dir=${project.build.directory}/tmp-hadoop-${user.name}
-Xmx1024m -XX:MaxPermSize=512m</argLine>
-Xmx1024m -XX:MaxPermSize=512m
</argLine>
</configuration>
<dependencies>
<dependency>
......
......@@ -22,5 +22,7 @@ import java.lang.annotation.Retention;
import java.lang.annotation.RetentionPolicy;
import java.lang.annotation.Target;
@Retention(RetentionPolicy.RUNTIME) @Target(ElementType.METHOD)
public @interface GraphTransaction {}
@Retention(RetentionPolicy.RUNTIME)
@Target(ElementType.METHOD)
public @interface GraphTransaction {
}
......@@ -42,7 +42,7 @@ public class GraphTransactionInterceptor implements MethodInterceptor {
titanGraph.commit();
LOG.debug("graph commit");
return response;
} catch (Throwable t){
} catch (Throwable t) {
titanGraph.rollback();
LOG.error("graph rollback due to exception ", t);
throw t;
......
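The annotation pairs with GraphTransactionInterceptor through Guice AOP; methods tagged @GraphTransaction get a commit on success and a rollback on any Throwable, as the hunk above shows. A hedged sketch of the wiring, as it would sit in a module's configure() (interceptor construction simplified; the real module may inject the graph into it):

// Illustrative wiring only; uses com.google.inject.matcher.Matchers.
bindInterceptor(Matchers.any(),
        Matchers.annotatedWith(GraphTransaction.class),
        new GraphTransactionInterceptor());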
......@@ -18,8 +18,6 @@
package org.apache.atlas;
import com.google.inject.Provider;
import com.google.inject.TypeLiteral;
import com.google.inject.matcher.Matchers;
import com.google.inject.multibindings.Multibinder;
import com.google.inject.throwingproviders.ThrowingProviderBinder;
......@@ -28,7 +26,6 @@ import org.aopalliance.intercept.MethodInterceptor;
import org.apache.atlas.discovery.DiscoveryService;
import org.apache.atlas.discovery.HiveLineageService;
import org.apache.atlas.discovery.LineageService;
import org.apache.atlas.discovery.SearchIndexer;
import org.apache.atlas.discovery.graph.GraphBackedDiscoveryService;
import org.apache.atlas.listener.TypesChangeListener;
import org.apache.atlas.repository.MetadataRepository;
......@@ -49,9 +46,7 @@ public class RepositoryMetadataModule extends com.google.inject.AbstractModule {
@Override
protected void configure() {
// special wiring for Titan Graph
ThrowingProviderBinder.create(binder())
.bind(GraphProvider.class, TitanGraph.class)
.to(TitanGraphProvider.class)
ThrowingProviderBinder.create(binder()).bind(GraphProvider.class, TitanGraph.class).to(TitanGraphProvider.class)
.asEagerSingleton();
// allow for dynamic binding of the metadata repo & graph service
......@@ -62,7 +57,8 @@ public class RepositoryMetadataModule extends com.google.inject.AbstractModule {
// bind the ITypeStore interface to an implementation
bind(ITypeStore.class).to(GraphBackedTypeStore.class).asEagerSingleton();
Multibinder<TypesChangeListener> typesChangeListenerBinder = Multibinder.newSetBinder(binder(), TypesChangeListener.class);
Multibinder<TypesChangeListener> typesChangeListenerBinder =
Multibinder.newSetBinder(binder(), TypesChangeListener.class);
typesChangeListenerBinder.addBinding().to(GraphBackedSearchIndexer.class);
// bind the MetadataService interface to an implementation
......
......@@ -19,8 +19,8 @@
package org.apache.atlas.discovery;
import com.thinkaurelius.titan.core.TitanGraph;
import org.apache.atlas.GraphTransaction;
import org.apache.atlas.AtlasException;
import org.apache.atlas.GraphTransaction;
import org.apache.atlas.ParamChecker;
import org.apache.atlas.PropertiesUtil;
import org.apache.atlas.discovery.graph.DefaultGraphPersistenceStrategy;
......@@ -65,20 +65,14 @@ public class HiveLineageService implements LineageService {
// todo - externalize this using type system - dog food
try {
PropertiesConfiguration conf = PropertiesUtil.getApplicationProperties();
HIVE_TABLE_TYPE_NAME =
conf.getString("atlas.lineage.hive.table.type.name", "DataSet");
HIVE_PROCESS_TYPE_NAME =
conf.getString("atlas.lineage.hive.process.type.name", "Process");
HIVE_PROCESS_INPUT_ATTRIBUTE_NAME =
conf.getString("atlas.lineage.hive.process.inputs.name", "inputs");
HIVE_PROCESS_OUTPUT_ATTRIBUTE_NAME =
conf.getString("atlas.lineage.hive.process.outputs.name", "outputs");
HIVE_TABLE_SCHEMA_QUERY = conf.getString(
"atlas.lineage.hive.table.schema.query",
"hive_table where name=\"%s\", columns");
HIVE_TABLE_EXISTS_QUERY = conf.getString(
"atlas.lineage.hive.table.exists.query",
HIVE_TABLE_TYPE_NAME = conf.getString("atlas.lineage.hive.table.type.name", "DataSet");
HIVE_PROCESS_TYPE_NAME = conf.getString("atlas.lineage.hive.process.type.name", "Process");
HIVE_PROCESS_INPUT_ATTRIBUTE_NAME = conf.getString("atlas.lineage.hive.process.inputs.name", "inputs");
HIVE_PROCESS_OUTPUT_ATTRIBUTE_NAME = conf.getString("atlas.lineage.hive.process.outputs.name", "outputs");
HIVE_TABLE_SCHEMA_QUERY =
conf.getString("atlas.lineage.hive.table.schema.query", "hive_table where name=\"%s\", columns");
HIVE_TABLE_EXISTS_QUERY = conf.getString("atlas.lineage.hive.table.exists.query",
"from " + HIVE_TABLE_TYPE_NAME + " where name=\"%s\"");
} catch (AtlasException e) {
throw new RuntimeException(e);
......@@ -91,8 +85,7 @@ public class HiveLineageService implements LineageService {
private final GraphBackedDiscoveryService discoveryService;
@Inject
HiveLineageService(GraphProvider<TitanGraph> graphProvider,
MetadataRepository metadataRepository,
HiveLineageService(GraphProvider<TitanGraph> graphProvider, MetadataRepository metadataRepository,
GraphBackedDiscoveryService discoveryService) throws DiscoveryException {
this.titanGraph = graphProvider.get();
this.graphPersistenceStrategy = new DefaultGraphPersistenceStrategy(metadataRepository);
......@@ -112,14 +105,13 @@ public class HiveLineageService implements LineageService {
ParamChecker.notEmpty(tableName, "table name cannot be null");
validateTableExists(tableName);
HiveWhereUsedQuery outputsQuery = new HiveWhereUsedQuery(
HIVE_TABLE_TYPE_NAME, tableName, HIVE_PROCESS_TYPE_NAME,
HIVE_PROCESS_INPUT_ATTRIBUTE_NAME, HIVE_PROCESS_OUTPUT_ATTRIBUTE_NAME,
Option.empty(), SELECT_ATTRIBUTES, true,
graphPersistenceStrategy, titanGraph);
HiveWhereUsedQuery outputsQuery =
new HiveWhereUsedQuery(HIVE_TABLE_TYPE_NAME, tableName, HIVE_PROCESS_TYPE_NAME,
HIVE_PROCESS_INPUT_ATTRIBUTE_NAME, HIVE_PROCESS_OUTPUT_ATTRIBUTE_NAME, Option.empty(),
SELECT_ATTRIBUTES, true, graphPersistenceStrategy, titanGraph);
Expressions.Expression expression = outputsQuery.expr();
LOG.debug("Expression is [" + expression.toString() +"]");
LOG.debug("Expression is [" + expression.toString() + "]");
try {
return discoveryService.evaluate(expression).toJson();
} catch (Exception e) { // unable to catch ExpressionException
......@@ -140,11 +132,10 @@ public class HiveLineageService implements LineageService {
ParamChecker.notEmpty(tableName, "table name cannot be null");
validateTableExists(tableName);
HiveWhereUsedQuery outputsQuery = new HiveWhereUsedQuery(
HIVE_TABLE_TYPE_NAME, tableName, HIVE_PROCESS_TYPE_NAME,
HIVE_PROCESS_INPUT_ATTRIBUTE_NAME, HIVE_PROCESS_OUTPUT_ATTRIBUTE_NAME,
Option.empty(), SELECT_ATTRIBUTES, true,
graphPersistenceStrategy, titanGraph);
HiveWhereUsedQuery outputsQuery =
new HiveWhereUsedQuery(HIVE_TABLE_TYPE_NAME, tableName, HIVE_PROCESS_TYPE_NAME,
HIVE_PROCESS_INPUT_ATTRIBUTE_NAME, HIVE_PROCESS_OUTPUT_ATTRIBUTE_NAME, Option.empty(),
SELECT_ATTRIBUTES, true, graphPersistenceStrategy, titanGraph);
return outputsQuery.graph().toInstanceJson();
}
......@@ -161,14 +152,12 @@ public class HiveLineageService implements LineageService {
ParamChecker.notEmpty(tableName, "table name cannot be null");
validateTableExists(tableName);
HiveLineageQuery inputsQuery = new HiveLineageQuery(
HIVE_TABLE_TYPE_NAME, tableName, HIVE_PROCESS_TYPE_NAME,
HIVE_PROCESS_INPUT_ATTRIBUTE_NAME, HIVE_PROCESS_OUTPUT_ATTRIBUTE_NAME,
Option.empty(), SELECT_ATTRIBUTES, true,
graphPersistenceStrategy, titanGraph);
HiveLineageQuery inputsQuery = new HiveLineageQuery(HIVE_TABLE_TYPE_NAME, tableName, HIVE_PROCESS_TYPE_NAME,
HIVE_PROCESS_INPUT_ATTRIBUTE_NAME, HIVE_PROCESS_OUTPUT_ATTRIBUTE_NAME, Option.empty(),
SELECT_ATTRIBUTES, true, graphPersistenceStrategy, titanGraph);
Expressions.Expression expression = inputsQuery.expr();
LOG.debug("Expression is [" + expression.toString() +"]");
LOG.debug("Expression is [" + expression.toString() + "]");
try {
return discoveryService.evaluate(expression).toJson();
} catch (Exception e) { // unable to catch ExpressionException
......@@ -189,11 +178,9 @@ public class HiveLineageService implements LineageService {
ParamChecker.notEmpty(tableName, "table name cannot be null");
validateTableExists(tableName);
HiveLineageQuery inputsQuery = new HiveLineageQuery(
HIVE_TABLE_TYPE_NAME, tableName, HIVE_PROCESS_TYPE_NAME,
HIVE_PROCESS_INPUT_ATTRIBUTE_NAME, HIVE_PROCESS_OUTPUT_ATTRIBUTE_NAME,
Option.empty(), SELECT_ATTRIBUTES, true,
graphPersistenceStrategy, titanGraph);
HiveLineageQuery inputsQuery = new HiveLineageQuery(HIVE_TABLE_TYPE_NAME, tableName, HIVE_PROCESS_TYPE_NAME,
HIVE_PROCESS_INPUT_ATTRIBUTE_NAME, HIVE_PROCESS_OUTPUT_ATTRIBUTE_NAME, Option.empty(),
SELECT_ATTRIBUTES, true, graphPersistenceStrategy, titanGraph);
return inputsQuery.graph().toInstanceJson();
}
......
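The lineage queries are configuration-driven; for reference, the in-code defaults visible in the hunk above amount to these application.properties entries (the exists query composes with the configured table type name, so its effective default is from DataSet where name="%s"):

atlas.lineage.hive.table.type.name=DataSet
atlas.lineage.hive.process.type.name=Process
atlas.lineage.hive.process.inputs.name=inputs
atlas.lineage.hive.process.outputs.name=outputs
atlas.lineage.hive.table.schema.query=hive_table where name="%s", columns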
......@@ -19,7 +19,6 @@
package org.apache.atlas.discovery;
import org.apache.atlas.listener.TypesChangeListener;
import org.apache.atlas.repository.IndexException;
/**
......
......@@ -47,8 +47,7 @@ import java.util.List;
*/
public class DefaultGraphPersistenceStrategy implements GraphPersistenceStrategies {
private static final Logger LOG = LoggerFactory
.getLogger(DefaultGraphPersistenceStrategy.class);
private static final Logger LOG = LoggerFactory.getLogger(DefaultGraphPersistenceStrategy.class);
private final GraphBackedMetadataRepository metadataRepository;
......@@ -124,14 +123,12 @@ public class DefaultGraphPersistenceStrategy implements GraphPersistenceStrategi
TypeSystem.IdType idType = TypeSystem.getInstance().getIdType();
if (dataType.getName().equals(idType.getName())) {
structInstance.set(idType.typeNameAttrName(),
structVertex.getProperty(typeAttributeName()));
structInstance.set(idType.idAttrName(),
structVertex.getProperty(idAttributeName()));
structInstance.set(idType.typeNameAttrName(), structVertex.getProperty(typeAttributeName()));
structInstance.set(idType.idAttrName(), structVertex.getProperty(idAttributeName()));
} else {
metadataRepository.getGraphToInstanceMapper().mapVertexToInstance(
structVertex, structInstance, structType.fieldMapping().fields);
metadataRepository.getGraphToInstanceMapper()
.mapVertexToInstance(structVertex, structInstance, structType.fieldMapping().fields);
}
return dataType.convert(structInstance, Multiplicity.OPTIONAL);
......@@ -143,21 +140,19 @@ public class DefaultGraphPersistenceStrategy implements GraphPersistenceStrategi
// trait. for now just loading the trait struct.
// metadataRepository.getGraphToInstanceMapper().mapVertexToTraitInstance(
// traitVertex, dataType.getName(), , traitType, traitInstance);
metadataRepository.getGraphToInstanceMapper().mapVertexToInstance(
traitVertex, traitInstance, traitType.fieldMapping().fields);
metadataRepository.getGraphToInstanceMapper()
.mapVertexToInstance(traitVertex, traitInstance, traitType.fieldMapping().fields);
break;
case CLASS:
TitanVertex classVertex = (TitanVertex) value;
ITypedReferenceableInstance classInstance =
metadataRepository.getGraphToInstanceMapper().mapGraphToTypedInstance(
classVertex.<String>getProperty(Constants.GUID_PROPERTY_KEY),
ITypedReferenceableInstance classInstance = metadataRepository.getGraphToInstanceMapper()
.mapGraphToTypedInstance(classVertex.<String>getProperty(Constants.GUID_PROPERTY_KEY),
classVertex);
return dataType.convert(classInstance, Multiplicity.OPTIONAL);
default:
throw new UnsupportedOperationException(
"Load for type " + dataType + "is not supported");
throw new UnsupportedOperationException("Load for type " + dataType + " is not supported");
}
} catch (AtlasException e) {
LOG.error("error while constructing an instance", e);
......@@ -168,9 +163,8 @@ public class DefaultGraphPersistenceStrategy implements GraphPersistenceStrategi
@Override
public String edgeLabel(TypeUtils.FieldInfo fInfo) {
return fInfo.reverseDataType() == null
? edgeLabel(fInfo.dataType(), fInfo.attrInfo())
: edgeLabel(fInfo.reverseDataType(), fInfo.attrInfo());
return fInfo.reverseDataType() == null ? edgeLabel(fInfo.dataType(), fInfo.attrInfo()) :
edgeLabel(fInfo.reverseDataType(), fInfo.attrInfo());
}
@Override
......@@ -184,13 +178,19 @@ public class DefaultGraphPersistenceStrategy implements GraphPersistenceStrategi
}
@Override
public String instanceToTraitEdgeDirection() { return "out"; }
public String instanceToTraitEdgeDirection() {
return "out";
}
@Override
public String traitToInstanceEdgeDirection() { return "in"; }
public String traitToInstanceEdgeDirection() {
return "in";
}
@Override
public String idAttributeName() { return metadataRepository.getIdAttributeName(); }
public String idAttributeName() {
return metadataRepository.getIdAttributeName();
}
@Override
public scala.collection.Seq<String> typeTestExpression(String typeName, IntSequence intSeq) {
......
......@@ -23,8 +23,8 @@ import com.thinkaurelius.titan.core.TitanIndexQuery;
import com.thinkaurelius.titan.core.TitanProperty;
import com.thinkaurelius.titan.core.TitanVertex;
import com.tinkerpop.blueprints.Vertex;
import org.apache.atlas.GraphTransaction;
import org.apache.atlas.AtlasClient;
import org.apache.atlas.GraphTransaction;
import org.apache.atlas.discovery.DiscoveryException;
import org.apache.atlas.discovery.DiscoveryService;
import org.apache.atlas.query.Expressions;
......@@ -71,8 +71,8 @@ public class GraphBackedDiscoveryService implements DiscoveryService {
public final static String SCORE = "score";
@Inject
GraphBackedDiscoveryService(GraphProvider<TitanGraph> graphProvider,
MetadataRepository metadataRepository) throws DiscoveryException {
GraphBackedDiscoveryService(GraphProvider<TitanGraph> graphProvider, MetadataRepository metadataRepository)
throws DiscoveryException {
this.titanGraph = graphProvider.get();
this.graphPersistenceStrategy = new DefaultGraphPersistenceStrategy(metadataRepository);
}
......@@ -143,8 +143,7 @@ public class GraphBackedDiscoveryService implements DiscoveryService {
public GremlinQueryResult evaluate(Expressions.Expression expression) {
Expressions.Expression validatedExpression = QueryProcessor.validate(expression);
GremlinQuery gremlinQuery =
new GremlinTranslator(validatedExpression, graphPersistenceStrategy).translate();
GremlinQuery gremlinQuery = new GremlinTranslator(validatedExpression, graphPersistenceStrategy).translate();
LOG.debug("Query = {}", validatedExpression);
LOG.debug("Expression Tree = {}", validatedExpression.treeString());
LOG.debug("Gremlin Query = {}", gremlinQuery.queryStr());
......@@ -162,8 +161,7 @@ public class GraphBackedDiscoveryService implements DiscoveryService {
*/
@Override
@GraphTransaction
public List<Map<String, String>> searchByGremlin(String gremlinQuery)
throws DiscoveryException {
public List<Map<String, String>> searchByGremlin(String gremlinQuery) throws DiscoveryException {
LOG.info("Executing gremlin query={}", gremlinQuery);
ScriptEngineManager manager = new ScriptEngineManager();
ScriptEngine engine = manager.getEngineByName("gremlin-groovy");
......@@ -189,8 +187,7 @@ public class GraphBackedDiscoveryService implements DiscoveryService {
Map<String, String> oRow = new HashMap<>();
if (r instanceof Map) {
@SuppressWarnings("unchecked")
Map<Object, Object> iRow = (Map) r;
@SuppressWarnings("unchecked") Map<Object, Object> iRow = (Map) r;
for (Map.Entry e : iRow.entrySet()) {
Object k = e.getKey();
Object v = e.getValue();
......
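searchByGremlin evaluates the query through the JSR-223 gremlin-groovy engine with the graph bound into the script scope. A stand-alone sketch of that evaluation path; the script here is a stand-in, and gremlin-groovy must be on the classpath:

import javax.script.Bindings;
import javax.script.ScriptEngine;
import javax.script.ScriptEngineManager;

public class GremlinEngineSketch {
    public static void main(String[] args) throws Exception {
        ScriptEngineManager manager = new ScriptEngineManager();
        ScriptEngine engine = manager.getEngineByName("gremlin-groovy");
        if (engine == null) {
            throw new IllegalStateException("gremlin-groovy engine not on classpath");
        }
        Bindings bindings = engine.createBindings();
        bindings.put("g", null); // a real TitanGraph instance would be bound here
        Object result = engine.eval("1 + 1", bindings); // stand-in for a Gremlin query
        System.out.println(result); // 2
    }
}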
......@@ -37,8 +37,7 @@ public class IndexException extends AtlasException {
super(cause);
}
public IndexException(String message, Throwable cause, boolean enableSuppression,
boolean writableStackTrace) {
public IndexException(String message, Throwable cause, boolean enableSuppression, boolean writableStackTrace) {
super(message, cause, enableSuppression, writableStackTrace);
}
......
......@@ -123,6 +123,7 @@ public interface MetadataRepository {
// Trait management functions
/**
* Gets the list of trait names for a given entity represented by a guid.
*
......@@ -139,8 +140,7 @@ public interface MetadataRepository {
* @param traitInstance trait instance that needs to be added to entity
* @throws RepositoryException
*/
void addTrait(String guid,
ITypedStruct traitInstance) throws RepositoryException;
void addTrait(String guid, ITypedStruct traitInstance) throws RepositoryException;
/**
* Deletes a given trait from an existing entity represented by a guid.
......@@ -149,8 +149,7 @@ public interface MetadataRepository {
* @param traitNameToBeDeleted name of the trait
* @throws RepositoryException
*/
void deleteTrait(String guid,
String traitNameToBeDeleted) throws RepositoryException;
void deleteTrait(String guid, String traitNameToBeDeleted) throws RepositoryException;
/**
* Adds the property to the entity that corresponds to the GUID
......
......@@ -40,8 +40,7 @@ public class RepositoryException extends AtlasException {
super(cause);
}
public RepositoryException(String message, Throwable cause, boolean enableSuppression,
boolean writableStackTrace) {
public RepositoryException(String message, Throwable cause, boolean enableSuppression, boolean writableStackTrace) {
super(message, cause, enableSuppression, writableStackTrace);
}
}
......@@ -26,8 +26,8 @@ import com.tinkerpop.blueprints.Direction;
import com.tinkerpop.blueprints.Edge;
import com.tinkerpop.blueprints.GraphQuery;
import com.tinkerpop.blueprints.Vertex;
import org.apache.atlas.GraphTransaction;
import org.apache.atlas.AtlasException;
import org.apache.atlas.GraphTransaction;
import org.apache.atlas.repository.Constants;
import org.apache.atlas.repository.EntityNotFoundException;
import org.apache.atlas.repository.MetadataRepository;
......@@ -73,25 +73,21 @@ import java.util.concurrent.atomic.AtomicInteger;
@Singleton
public class GraphBackedMetadataRepository implements MetadataRepository {
private static final Logger LOG =
LoggerFactory.getLogger(GraphBackedMetadataRepository.class);
private static final Logger LOG = LoggerFactory.getLogger(GraphBackedMetadataRepository.class);
private static final String FULL_TEXT_DELIMITER = " ";
private static final String EDGE_LABEL_PREFIX = "__";
private final AtomicInteger ID_SEQ = new AtomicInteger(0);
private final TypedInstanceToGraphMapper instanceToGraphMapper
= new TypedInstanceToGraphMapper();
private final GraphToTypedInstanceMapper graphToInstanceMapper
= new GraphToTypedInstanceMapper();
private final TypedInstanceToGraphMapper instanceToGraphMapper = new TypedInstanceToGraphMapper();
private final GraphToTypedInstanceMapper graphToInstanceMapper = new GraphToTypedInstanceMapper();
private final TypeSystem typeSystem;
private final TitanGraph titanGraph;
@Inject
public GraphBackedMetadataRepository(
GraphProvider<TitanGraph> graphProvider) throws AtlasException {
public GraphBackedMetadataRepository(GraphProvider<TitanGraph> graphProvider) throws AtlasException {
this.typeSystem = TypeSystem.getInstance();
this.titanGraph = graphProvider.get();
......@@ -126,8 +122,7 @@ public class GraphBackedMetadataRepository implements MetadataRepository {
}
@Override
public String getFieldNameInVertex(IDataType<?> dataType,
AttributeInfo aInfo) throws AtlasException {
public String getFieldNameInVertex(IDataType<?> dataType, AttributeInfo aInfo) throws AtlasException {
return getQualifiedName(dataType, aInfo.name);
}
......@@ -140,8 +135,7 @@ public class GraphBackedMetadataRepository implements MetadataRepository {
return EDGE_LABEL_PREFIX + typeName + "." + attrName;
}
public String getEdgeLabel(ITypedInstance typedInstance,
AttributeInfo aInfo) throws AtlasException {
public String getEdgeLabel(ITypedInstance typedInstance, AttributeInfo aInfo) throws AtlasException {
IDataType dataType = typeSystem.getDataType(IDataType.class, typedInstance.getTypeName());
return getEdgeLabel(dataType, aInfo);
}
......@@ -177,8 +171,7 @@ public class GraphBackedMetadataRepository implements MetadataRepository {
Vertex instanceVertex = GraphHelper.findVertexByGUID(titanGraph, guid);
if (instanceVertex == null) {
LOG.debug("Could not find a vertex for guid={}", guid);
throw new EntityNotFoundException(
"Could not find an entity in the repository for guid: " + guid);
throw new EntityNotFoundException("Could not find an entity in the repository for guid: " + guid);
}
return instanceVertex;
......@@ -188,8 +181,7 @@ public class GraphBackedMetadataRepository implements MetadataRepository {
@GraphTransaction
public List<String> getEntityList(String entityType) throws RepositoryException {
LOG.info("Retrieving entity list for type={}", entityType);
GraphQuery query = titanGraph.query()
.has(Constants.ENTITY_TYPE_PROPERTY_KEY, entityType);
GraphQuery query = titanGraph.query().has(Constants.ENTITY_TYPE_PROPERTY_KEY, entityType);
Iterator<Vertex> results = query.vertices().iterator();
if (!results.hasNext()) {
return Collections.emptyList();
......@@ -221,8 +213,7 @@ public class GraphBackedMetadataRepository implements MetadataRepository {
public List<String> getTraitNames(Vertex entityVertex) {
ArrayList<String> traits = new ArrayList<>();
for (TitanProperty property : ((TitanVertex) entityVertex)
.getProperties(Constants.TRAIT_NAMES_PROPERTY_KEY)) {
for (TitanProperty property : ((TitanVertex) entityVertex).getProperties(Constants.TRAIT_NAMES_PROPERTY_KEY)) {
traits.add((String) property.getValue());
}
......@@ -238,8 +229,7 @@ public class GraphBackedMetadataRepository implements MetadataRepository {
*/
@Override
@GraphTransaction
public void addTrait(String guid,
ITypedStruct traitInstance) throws RepositoryException {
public void addTrait(String guid, ITypedStruct traitInstance) throws RepositoryException {
Preconditions.checkNotNull(traitInstance, "Trait instance cannot be null");
final String traitName = traitInstance.getTypeName();
LOG.info("Adding a new trait={} for entity={}", traitName, guid);
......@@ -249,13 +239,12 @@ public class GraphBackedMetadataRepository implements MetadataRepository {
// add the trait instance as a new vertex
final String typeName = getTypeName(instanceVertex);
instanceToGraphMapper.mapTraitInstanceToVertex(
traitInstance, getIdFromVertex(typeName, instanceVertex),
typeName, instanceVertex, Collections.<Id, Vertex>emptyMap());
instanceToGraphMapper
.mapTraitInstanceToVertex(traitInstance, getIdFromVertex(typeName, instanceVertex), typeName,
instanceVertex, Collections.<Id, Vertex>emptyMap());
// update the traits in entity once adding trait instance is successful
((TitanVertex) instanceVertex)
.addProperty(Constants.TRAIT_NAMES_PROPERTY_KEY, traitName);
((TitanVertex) instanceVertex).addProperty(Constants.TRAIT_NAMES_PROPERTY_KEY, traitName);
} catch (RepositoryException e) {
throw e;
......@@ -273,22 +262,20 @@ public class GraphBackedMetadataRepository implements MetadataRepository {
*/
@Override
@GraphTransaction
public void deleteTrait(String guid, String traitNameToBeDeleted)
throws RepositoryException {
public void deleteTrait(String guid, String traitNameToBeDeleted) throws RepositoryException {
LOG.info("Deleting trait={} from entity={}", traitNameToBeDeleted, guid);
try {
Vertex instanceVertex = getVertexForGUID(guid);
List<String> traitNames = getTraitNames(instanceVertex);
if (!traitNames.contains(traitNameToBeDeleted)) {
throw new EntityNotFoundException("Could not find trait=" + traitNameToBeDeleted
+ " in the repository for entity: " + guid);
throw new EntityNotFoundException(
"Could not find trait=" + traitNameToBeDeleted + " in the repository for entity: " + guid);
}
final String entityTypeName = getTypeName(instanceVertex);
String relationshipLabel = getEdgeLabel(entityTypeName, traitNameToBeDeleted);
Iterator<Edge> results = instanceVertex.getEdges(
Direction.OUT, relationshipLabel).iterator();
Iterator<Edge> results = instanceVertex.getEdges(Direction.OUT, relationshipLabel).iterator();
if (results.hasNext()) { // there should only be one edge for this label
final Edge traitEdge = results.next();
final Vertex traitVertex = traitEdge.getVertex(Direction.IN);
......@@ -317,15 +304,13 @@ public class GraphBackedMetadataRepository implements MetadataRepository {
// add it back again
for (String traitName : traitNames) {
((TitanVertex) instanceVertex).addProperty(
Constants.TRAIT_NAMES_PROPERTY_KEY, traitName);
((TitanVertex) instanceVertex).addProperty(Constants.TRAIT_NAMES_PROPERTY_KEY, traitName);
}
}
@Override
@GraphTransaction
public void updateEntity(String guid,
String property, String value) throws RepositoryException {
public void updateEntity(String guid, String property, String value) throws RepositoryException {
LOG.info("Adding property {} for entity guid {}", property, guid);
try {
......@@ -336,8 +321,7 @@ public class GraphBackedMetadataRepository implements MetadataRepository {
ClassType type = typeSystem.getDataType(ClassType.class, typeName);
AttributeInfo attributeInfo = type.fieldMapping.fields.get(property);
if (attributeInfo == null) {
throw new AtlasException("Invalid property "
+ property + " for entity " + typeName);
throw new AtlasException("Invalid property " + property + " for entity " + typeName);
}
DataTypes.TypeCategory attrTypeCategory = attributeInfo.dataType().getTypeCategory();
......@@ -351,9 +335,9 @@ public class GraphBackedMetadataRepository implements MetadataRepository {
throw new RepositoryException("Update of " + attrTypeCategory + " is not supported");
}
instanceToGraphMapper.mapAttributesToVertex(getIdFromVertex(typeName, instanceVertex),
instance, instanceVertex, new HashMap<Id, Vertex>(),
attributeInfo, attributeInfo.dataType());
instanceToGraphMapper
.mapAttributesToVertex(getIdFromVertex(typeName, instanceVertex), instance, instanceVertex,
new HashMap<Id, Vertex>(), attributeInfo, attributeInfo.dataType());
} catch (RepositoryException e) {
throw e;
} catch (Exception e) {
......@@ -362,10 +346,8 @@ public class GraphBackedMetadataRepository implements MetadataRepository {
}
public Id getIdFromVertex(String dataTypeName, Vertex vertex) {
return new Id(
vertex.<String>getProperty(Constants.GUID_PROPERTY_KEY),
vertex.<Integer>getProperty(Constants.VERSION_PROPERTY_KEY),
dataTypeName);
return new Id(vertex.<String>getProperty(Constants.GUID_PROPERTY_KEY),
vertex.<Integer>getProperty(Constants.VERSION_PROPERTY_KEY), dataTypeName);
}
String getTypeName(Vertex instanceVertex) {
......@@ -373,17 +355,13 @@ public class GraphBackedMetadataRepository implements MetadataRepository {
}
String getQualifiedName(ITypedInstance typedInstance,
AttributeInfo attributeInfo) throws AtlasException {
IDataType dataType = typeSystem.getDataType(
IDataType.class, typedInstance.getTypeName());
String getQualifiedName(ITypedInstance typedInstance, AttributeInfo attributeInfo) throws AtlasException {
IDataType dataType = typeSystem.getDataType(IDataType.class, typedInstance.getTypeName());
return getQualifiedName(dataType, attributeInfo.name);
}
String getQualifiedName(IDataType dataType,
String attributeName) throws AtlasException {
return dataType.getTypeCategory() == DataTypes.TypeCategory.STRUCT
? dataType.getName() + "." + attributeName
String getQualifiedName(IDataType dataType, String attributeName) throws AtlasException {
return dataType.getTypeCategory() == DataTypes.TypeCategory.STRUCT ? dataType.getName() + "." + attributeName
// else class or trait
: ((HierarchicalType) dataType).getQualifiedName(attributeName);
}
......@@ -422,8 +400,8 @@ public class GraphBackedMetadataRepository implements MetadataRepository {
if (ref != null) {
if (idToInstanceMap.containsKey(id)) { // Oops
throw new RepositoryException(String.format(
"Unexpected internal error: Id %s processed again", id));
throw new RepositoryException(
String.format("Unexpected internal error: Id %s processed again", id));
}
idToInstanceMap.put(id, ref);
......@@ -432,8 +410,7 @@ public class GraphBackedMetadataRepository implements MetadataRepository {
}
}
private void createVerticesForClassTypes(
List<ITypedReferenceableInstance> newInstances) throws AtlasException {
private void createVerticesForClassTypes(List<ITypedReferenceableInstance> newInstances) throws AtlasException {
for (ITypedReferenceableInstance typedInstance : newInstances) {
final Id id = typedInstance.getId();
if (!idToVertexMap.containsKey(id)) {
......@@ -441,11 +418,9 @@ public class GraphBackedMetadataRepository implements MetadataRepository {
if (id.isAssigned()) { // has a GUID
instanceVertex = GraphHelper.findVertexByGUID(titanGraph, id.id);
} else {
ClassType classType = typeSystem.getDataType(
ClassType.class, typedInstance.getTypeName());
instanceVertex = GraphHelper.createVertexWithIdentity(
titanGraph, typedInstance,
classType.getAllSuperTypeNames());
ClassType classType = typeSystem.getDataType(ClassType.class, typedInstance.getTypeName());
instanceVertex = GraphHelper
.createVertexWithIdentity(titanGraph, typedInstance, classType.getAllSuperTypeNames());
}
idToVertexMap.put(id, instanceVertex);
......@@ -456,8 +431,7 @@ public class GraphBackedMetadataRepository implements MetadataRepository {
private final class TypedInstanceToGraphMapper {
private String mapTypedInstanceToGraph(IReferenceableInstance typedInstance)
throws AtlasException {
private String mapTypedInstanceToGraph(IReferenceableInstance typedInstance) throws AtlasException {
EntityProcessor entityProcessor = new EntityProcessor();
try {
......@@ -475,8 +449,7 @@ public class GraphBackedMetadataRepository implements MetadataRepository {
}
private void addFullTextProperty(EntityProcessor entityProcessor,
List<ITypedReferenceableInstance> newTypedInstances)
throws AtlasException {
List<ITypedReferenceableInstance> newTypedInstances) throws AtlasException {
for (ITypedReferenceableInstance typedInstance : newTypedInstances) { // Traverse
Id id = typedInstance.getId();
......@@ -486,35 +459,31 @@ public class GraphBackedMetadataRepository implements MetadataRepository {
}
}
private String getFullTextForVertex(Vertex instanceVertex,
boolean followReferences) throws AtlasException {
private String getFullTextForVertex(Vertex instanceVertex, boolean followReferences) throws AtlasException {
String guid = instanceVertex.getProperty(Constants.GUID_PROPERTY_KEY);
ITypedReferenceableInstance typedReference =
graphToInstanceMapper.mapGraphToTypedInstance(guid, instanceVertex);
String fullText = getFullTextForInstance(typedReference, followReferences);
StringBuilder fullTextBuilder = new StringBuilder(
typedReference.getTypeName()).append(FULL_TEXT_DELIMITER).append(fullText);
StringBuilder fullTextBuilder =
new StringBuilder(typedReference.getTypeName()).append(FULL_TEXT_DELIMITER).append(fullText);
List<String> traits = typedReference.getTraits();
for (String traitName : traits) {
String traitText = getFullTextForInstance(
(ITypedInstance) typedReference.getTrait(traitName), false);
fullTextBuilder.append(FULL_TEXT_DELIMITER)
.append(traitName)
.append(FULL_TEXT_DELIMITER)
String traitText = getFullTextForInstance((ITypedInstance) typedReference.getTrait(traitName), false);
fullTextBuilder.append(FULL_TEXT_DELIMITER).append(traitName).append(FULL_TEXT_DELIMITER)
.append(traitText);
}
return fullTextBuilder.toString();
}
private String getFullTextForAttribute(IDataType type, Object value,
boolean followReferences) throws AtlasException {
private String getFullTextForAttribute(IDataType type, Object value, boolean followReferences)
throws AtlasException {
switch (type.getTypeCategory()) {
case PRIMITIVE:
return String.valueOf(value);
case ENUM:
return ((EnumValue)value).value;
return ((EnumValue) value).value;
case ARRAY:
StringBuilder fullText = new StringBuilder();
......@@ -523,9 +492,10 @@ public class GraphBackedMetadataRepository implements MetadataRepository {
for (Object element : list) {
String elemFullText = getFullTextForAttribute(elemType, element, false);
if (StringUtils.isNotEmpty(elemFullText))
if (StringUtils.isNotEmpty(elemFullText)) {
fullText = fullText.append(FULL_TEXT_DELIMITER).append(elemFullText);
}
}
return fullText.toString();
case MAP:
......@@ -568,8 +538,8 @@ public class GraphBackedMetadataRepository implements MetadataRepository {
return null;
}
private String getFullTextForInstance(ITypedInstance typedInstance,
boolean followReferences) throws AtlasException {
private String getFullTextForInstance(ITypedInstance typedInstance, boolean followReferences)
throws AtlasException {
StringBuilder fullText = new StringBuilder();
for (AttributeInfo attributeInfo : typedInstance.fieldMapping().fields.values()) {
Object attrValue = typedInstance.get(attributeInfo.name);
......@@ -577,11 +547,11 @@ public class GraphBackedMetadataRepository implements MetadataRepository {
continue;
}
String attrFullText = getFullTextForAttribute(
attributeInfo.dataType(), attrValue, followReferences);
String attrFullText = getFullTextForAttribute(attributeInfo.dataType(), attrValue, followReferences);
if (StringUtils.isNotEmpty(attrFullText)) {
fullText = fullText.append(FULL_TEXT_DELIMITER).append(attributeInfo.name)
.append(FULL_TEXT_DELIMITER).append(attrFullText);
fullText =
fullText.append(FULL_TEXT_DELIMITER).append(attributeInfo.name).append(FULL_TEXT_DELIMITER)
.append(attrFullText);
}
}
return fullText.toString();
......@@ -599,10 +569,8 @@ public class GraphBackedMetadataRepository implements MetadataRepository {
for (IReferenceableInstance transientInstance : entityProcessor.idToInstanceMap.values()) {
LOG.debug("Discovered instance {}", transientInstance.getTypeName());
try {
ClassType cT = typeSystem.getDataType(
ClassType.class, transientInstance.getTypeName());
ITypedReferenceableInstance newInstance = cT.convert(
transientInstance, Multiplicity.REQUIRED);
ClassType cT = typeSystem.getDataType(ClassType.class, transientInstance.getTypeName());
ITypedReferenceableInstance newInstance = cT.convert(transientInstance, Multiplicity.REQUIRED);
newTypedInstances.add(newInstance);
// Now replace old references with new Ids
......@@ -611,18 +579,15 @@ public class GraphBackedMetadataRepository implements MetadataRepository {
} catch (AtlasException me) {
throw new RepositoryException(
String.format("Failed to create Instance(id = %s",
transientInstance.getId()), me);
String.format("Failed to create Instance(id = %s", transientInstance.getId()), me);
}
}
return newTypedInstances;
}
private String addDiscoveredInstances(IReferenceableInstance entity,
EntityProcessor entityProcessor,
List<ITypedReferenceableInstance> newTypedInstances)
throws AtlasException {
private String addDiscoveredInstances(IReferenceableInstance entity, EntityProcessor entityProcessor,
List<ITypedReferenceableInstance> newTypedInstances) throws AtlasException {
String typedInstanceGUID = null;
for (ITypedReferenceableInstance typedInstance : newTypedInstances) { // Traverse over newInstances
......@@ -636,22 +601,19 @@ public class GraphBackedMetadataRepository implements MetadataRepository {
Vertex instanceVertex = entityProcessor.idToVertexMap.get(id);
// add the attributes for the instance
ClassType classType = typeSystem.getDataType(
ClassType.class, typedInstance.getTypeName());
ClassType classType = typeSystem.getDataType(ClassType.class, typedInstance.getTypeName());
final Map<String, AttributeInfo> fields = classType.fieldMapping().fields;
mapInstanceToVertex(
id, typedInstance, instanceVertex, fields, entityProcessor.idToVertexMap);
mapInstanceToVertex(id, typedInstance, instanceVertex, fields, entityProcessor.idToVertexMap);
for (String traitName : typedInstance.getTraits()) {
LOG.debug("mapping trait {}", traitName);
((TitanVertex) instanceVertex)
.addProperty(Constants.TRAIT_NAMES_PROPERTY_KEY, traitName);
((TitanVertex) instanceVertex).addProperty(Constants.TRAIT_NAMES_PROPERTY_KEY, traitName);
ITypedStruct traitInstance = (ITypedStruct) typedInstance.getTrait(traitName);
// add the attributes for the trait instance
mapTraitInstanceToVertex(traitInstance, typedInstance,
instanceVertex, entityProcessor.idToVertexMap);
mapTraitInstanceToVertex(traitInstance, typedInstance, instanceVertex,
entityProcessor.idToVertexMap);
}
if (typedInstance.getId() == entity.getId()) { // save the guid for return
......@@ -663,22 +625,17 @@ public class GraphBackedMetadataRepository implements MetadataRepository {
}
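
Persistence here is deliberately two-phase: createVerticesForClassTypes allocates one vertex per new instance up front, and addDiscoveredInstances then maps attributes, traits, and references through entityProcessor.idToVertexMap, so references between instances created in the same call resolve regardless of order. A sketch of the two-pass idea, with Blueprints' TinkerGraph standing in for Titan and made-up ids and labels:

    import com.tinkerpop.blueprints.Vertex;
    import com.tinkerpop.blueprints.impls.tg.TinkerGraph;

    import java.util.HashMap;
    import java.util.Map;

    // Sketch of the two-pass mapping: pass 1 creates a vertex per instance so
    // pass 2 can wire edges in any order through the id -> vertex map.
    // TinkerGraph, the ids and the edge label are illustrative stand-ins.
    public class TwoPassSketch {
        public static void main(String[] args) {
            TinkerGraph graph = new TinkerGraph();
            Map<String, Vertex> idToVertexMap = new HashMap<>();

            // pass 1: vertices only
            for (String id : new String[]{"table-1", "db-1"}) {
                idToVertexMap.put(id, graph.addVertex(null));
            }

            // pass 2: edges can reference any vertex from pass 1
            graph.addEdge(null, idToVertexMap.get("table-1"), idToVertexMap.get("db-1"), "hive_table.db");
            System.out.println(idToVertexMap.size() + " vertices created, reference wired as edge");
            graph.shutdown();
        }
    }
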
private void mapInstanceToVertex(Id id, ITypedInstance typedInstance, Vertex instanceVertex,
Map<String, AttributeInfo> fields,
Map<Id, Vertex> idToVertexMap) throws AtlasException {
LOG.debug("Mapping instance {} of {} to vertex {}",
typedInstance, typedInstance.getTypeName(), instanceVertex);
Map<String, AttributeInfo> fields, Map<Id, Vertex> idToVertexMap) throws AtlasException {
LOG.debug("Mapping instance {} of {} to vertex {}", typedInstance, typedInstance.getTypeName(),
instanceVertex);
for (AttributeInfo attributeInfo : fields.values()) {
final IDataType dataType = attributeInfo.dataType();
mapAttributesToVertex(id, typedInstance, instanceVertex,
idToVertexMap, attributeInfo, dataType);
mapAttributesToVertex(id, typedInstance, instanceVertex, idToVertexMap, attributeInfo, dataType);
}
}
private void mapAttributesToVertex(Id id, ITypedInstance typedInstance,
Vertex instanceVertex,
Map<Id, Vertex> idToVertexMap,
AttributeInfo attributeInfo,
IDataType dataType) throws AtlasException {
private void mapAttributesToVertex(Id id, ITypedInstance typedInstance, Vertex instanceVertex,
Map<Id, Vertex> idToVertexMap, AttributeInfo attributeInfo, IDataType dataType) throws AtlasException {
Object attrValue = typedInstance.get(attributeInfo.name);
LOG.debug("mapping attribute {} = {}", attributeInfo.name, attrValue);
final String propertyName = getQualifiedName(typedInstance, attributeInfo);
......@@ -694,27 +651,25 @@ public class GraphBackedMetadataRepository implements MetadataRepository {
case ENUM:
//handles both int and string for enum
EnumValue enumValue = (EnumValue) dataType.convert(typedInstance.get(attributeInfo.name),
Multiplicity.REQUIRED);
EnumValue enumValue =
(EnumValue) dataType.convert(typedInstance.get(attributeInfo.name), Multiplicity.REQUIRED);
addProperty(instanceVertex, propertyName, enumValue.value);
break;
case ARRAY:
mapArrayCollectionToVertex(
id, typedInstance, instanceVertex, attributeInfo, idToVertexMap);
mapArrayCollectionToVertex(id, typedInstance, instanceVertex, attributeInfo, idToVertexMap);
break;
case MAP:
mapMapCollectionToVertex(
id, typedInstance, instanceVertex, attributeInfo, idToVertexMap);
mapMapCollectionToVertex(id, typedInstance, instanceVertex, attributeInfo, idToVertexMap);
break;
case STRUCT:
Vertex structInstanceVertex = mapStructInstanceToVertex(id,
(ITypedStruct) typedInstance.get(attributeInfo.name), attributeInfo, idToVertexMap);
Vertex structInstanceVertex =
mapStructInstanceToVertex(id, (ITypedStruct) typedInstance.get(attributeInfo.name),
attributeInfo, idToVertexMap);
// add an edge to the newly created vertex from the parent
GraphHelper.addEdge(
titanGraph, instanceVertex, structInstanceVertex, edgeLabel);
GraphHelper.addEdge(titanGraph, instanceVertex, structInstanceVertex, edgeLabel);
break;
case TRAIT:
......@@ -723,23 +678,18 @@ public class GraphBackedMetadataRepository implements MetadataRepository {
case CLASS:
Id referenceId = (Id) typedInstance.get(attributeInfo.name);
mapClassReferenceAsEdge(
instanceVertex, idToVertexMap, edgeLabel, referenceId);
mapClassReferenceAsEdge(instanceVertex, idToVertexMap, edgeLabel, referenceId);
break;
default:
throw new IllegalArgumentException("Unknown type category: "
+ dataType.getTypeCategory());
throw new IllegalArgumentException("Unknown type category: " + dataType.getTypeCategory());
}
}
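
The switch above is the storage contract this reformat leaves intact: primitives and enums become vertex properties, arrays and maps are handled per entry, and structs and class references become edges to separate vertices. A stand-in sketch of that dispatch (the enum below is illustrative, not Atlas' DataTypes.TypeCategory, which also has a TRAIT category handled elsewhere):

    // Stand-in sketch of the type-category dispatch; this enum is illustrative,
    // not Atlas' DataTypes.TypeCategory.
    public class CategoryDispatchSketch {
        enum TypeCategory { PRIMITIVE, ENUM, ARRAY, MAP, STRUCT, CLASS }

        static String storageFor(TypeCategory category) {
            switch (category) {
                case PRIMITIVE:
                case ENUM:
                    return "vertex property";
                case ARRAY:
                case MAP:
                    return "per-entry property or edge, plus a list/key property";
                case STRUCT:
                case CLASS:
                    return "edge to a separate vertex";
                default:
                    throw new IllegalArgumentException("Unknown type category: " + category);
            }
        }

        public static void main(String[] args) {
            for (TypeCategory c : TypeCategory.values()) {
                System.out.println(c + " -> " + storageFor(c));
            }
        }
    }
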
private void mapArrayCollectionToVertex(Id id, ITypedInstance typedInstance,
Vertex instanceVertex,
AttributeInfo attributeInfo,
Map<Id, Vertex> idToVertexMap)
throws AtlasException {
LOG.debug("Mapping instance {} to vertex {} for name {}",
typedInstance.getTypeName(), instanceVertex, attributeInfo.name);
private void mapArrayCollectionToVertex(Id id, ITypedInstance typedInstance, Vertex instanceVertex,
AttributeInfo attributeInfo, Map<Id, Vertex> idToVertexMap) throws AtlasException {
LOG.debug("Mapping instance {} to vertex {} for name {}", typedInstance.getTypeName(), instanceVertex,
attributeInfo.name);
List list = (List) typedInstance.get(attributeInfo.name);
if (list == null || list.isEmpty()) {
return;
......@@ -750,8 +700,9 @@ public class GraphBackedMetadataRepository implements MetadataRepository {
List<String> values = new ArrayList<>(list.size());
for (int index = 0; index < list.size(); index++) {
String entryId = mapCollectionEntryToVertex(id, instanceVertex,
attributeInfo, idToVertexMap, elementType, list.get(index), propertyName);
String entryId =
mapCollectionEntryToVertex(id, instanceVertex, attributeInfo, idToVertexMap, elementType,
list.get(index), propertyName);
values.add(entryId);
}
......@@ -759,15 +710,11 @@ public class GraphBackedMetadataRepository implements MetadataRepository {
addProperty(instanceVertex, propertyName, values);
}
private void mapMapCollectionToVertex(Id id, ITypedInstance typedInstance,
Vertex instanceVertex,
AttributeInfo attributeInfo,
Map<Id, Vertex> idToVertexMap)
throws AtlasException {
LOG.debug("Mapping instance {} to vertex {} for name {}",
typedInstance.getTypeName(), instanceVertex, attributeInfo.name);
@SuppressWarnings("unchecked")
Map<Object, Object> collection =
private void mapMapCollectionToVertex(Id id, ITypedInstance typedInstance, Vertex instanceVertex,
AttributeInfo attributeInfo, Map<Id, Vertex> idToVertexMap) throws AtlasException {
LOG.debug("Mapping instance {} to vertex {} for name {}", typedInstance.getTypeName(), instanceVertex,
attributeInfo.name);
@SuppressWarnings("unchecked") Map<Object, Object> collection =
(Map<Object, Object>) typedInstance.get(attributeInfo.name);
if (collection == null || collection.isEmpty()) {
return;
......@@ -777,8 +724,8 @@ public class GraphBackedMetadataRepository implements MetadataRepository {
IDataType elementType = ((DataTypes.MapType) attributeInfo.dataType()).getValueType();
for (Map.Entry entry : collection.entrySet()) {
String myPropertyName = propertyName + "." + entry.getKey().toString();
String value = mapCollectionEntryToVertex(id, instanceVertex, attributeInfo,
idToVertexMap, elementType, entry.getValue(), myPropertyName);
String value = mapCollectionEntryToVertex(id, instanceVertex, attributeInfo, idToVertexMap, elementType,
entry.getValue(), myPropertyName);
addProperty(instanceVertex, myPropertyName, value);
}
......@@ -786,11 +733,9 @@ public class GraphBackedMetadataRepository implements MetadataRepository {
addProperty(instanceVertex, propertyName, new ArrayList(collection.keySet()));
}
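
Note the flattening convention: each map entry is written under "<property>.<key>" and the key set under "<property>" itself, which is exactly what mapVertexToMapInstance reads back later in this diff. A sketch with a plain Map standing in for the vertex property store:

    import java.util.ArrayList;
    import java.util.LinkedHashMap;
    import java.util.Map;

    // Sketch of the map flattening: "<property>.<key>" per entry, plus the key
    // set under "<property>" so the map can be rebuilt on read. A plain Map
    // stands in for the vertex property store; names are illustrative.
    public class MapFlattenSketch {
        public static void main(String[] args) {
            Map<String, Object> vertexProps = new LinkedHashMap<>();
            String propertyName = "hive_table.parameters";

            Map<String, String> attrValue = new LinkedHashMap<>();
            attrValue.put("format", "orc");
            attrValue.put("owner", "etl");

            for (Map.Entry<String, String> entry : attrValue.entrySet()) {
                vertexProps.put(propertyName + "." + entry.getKey(), entry.getValue());
            }
            vertexProps.put(propertyName, new ArrayList<String>(attrValue.keySet()));

            System.out.println(vertexProps);
        }
    }
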
private String mapCollectionEntryToVertex(Id id, Vertex instanceVertex,
AttributeInfo attributeInfo,
Map<Id, Vertex> idToVertexMap,
IDataType elementType, Object value,
String propertyName) throws AtlasException {
private String mapCollectionEntryToVertex(Id id, Vertex instanceVertex, AttributeInfo attributeInfo,
Map<Id, Vertex> idToVertexMap, IDataType elementType, Object value, String propertyName)
throws AtlasException {
final String edgeLabel = EDGE_LABEL_PREFIX + propertyName;
switch (elementType.getTypeCategory()) {
case PRIMITIVE:
......@@ -804,27 +749,24 @@ public class GraphBackedMetadataRepository implements MetadataRepository {
return null;
case STRUCT:
Vertex structInstanceVertex = mapStructInstanceToVertex(id,
(ITypedStruct) value, attributeInfo, idToVertexMap);
Vertex structInstanceVertex =
mapStructInstanceToVertex(id, (ITypedStruct) value, attributeInfo, idToVertexMap);
// add an edge to the newly created vertex from the parent
Edge structElementEdge = GraphHelper.addEdge(
titanGraph, instanceVertex, structInstanceVertex, edgeLabel);
Edge structElementEdge =
GraphHelper.addEdge(titanGraph, instanceVertex, structInstanceVertex, edgeLabel);
return structElementEdge.getId().toString();
case CLASS:
Id referenceId = (Id) value;
return mapClassReferenceAsEdge(
instanceVertex, idToVertexMap, edgeLabel, referenceId);
return mapClassReferenceAsEdge(instanceVertex, idToVertexMap, edgeLabel, referenceId);
default:
throw new IllegalArgumentException("Unknown type category: "
+ elementType.getTypeCategory());
throw new IllegalArgumentException("Unknown type category: " + elementType.getTypeCategory());
}
}
private String mapClassReferenceAsEdge(Vertex instanceVertex,
Map<Id, Vertex> idToVertexMap,
String propertyKey, Id id) throws AtlasException {
private String mapClassReferenceAsEdge(Vertex instanceVertex, Map<Id, Vertex> idToVertexMap, String propertyKey,
Id id) throws AtlasException {
if (id != null) {
Vertex referenceVertex;
if (id.isAssigned()) {
......@@ -835,8 +777,7 @@ public class GraphBackedMetadataRepository implements MetadataRepository {
if (referenceVertex != null) {
// add an edge to the class vertex from the instance
Edge edge = GraphHelper.addEdge(
titanGraph, instanceVertex, referenceVertex, propertyKey);
Edge edge = GraphHelper.addEdge(titanGraph, instanceVertex, referenceVertex, propertyKey);
return String.valueOf(edge.getId());
}
}
......@@ -844,43 +785,36 @@ public class GraphBackedMetadataRepository implements MetadataRepository {
return null;
}
private Vertex mapStructInstanceToVertex(Id id, ITypedStruct structInstance,
AttributeInfo attributeInfo,
Map<Id, Vertex> idToVertexMap)
throws AtlasException {
private Vertex mapStructInstanceToVertex(Id id, ITypedStruct structInstance, AttributeInfo attributeInfo,
Map<Id, Vertex> idToVertexMap) throws AtlasException {
// add a new vertex for the struct or trait instance
Vertex structInstanceVertex = GraphHelper.createVertexWithoutIdentity(
titanGraph, structInstance.getTypeName(), id,
Vertex structInstanceVertex = GraphHelper
.createVertexWithoutIdentity(titanGraph, structInstance.getTypeName(), id,
Collections.<String>emptySet()); // no super types for struct type
LOG.debug("created vertex {} for struct {} value {}", structInstanceVertex, attributeInfo.name,
structInstance);
// map all the attributes to this newly created vertex
mapInstanceToVertex(id, structInstance, structInstanceVertex,
structInstance.fieldMapping().fields, idToVertexMap);
mapInstanceToVertex(id, structInstance, structInstanceVertex, structInstance.fieldMapping().fields,
idToVertexMap);
return structInstanceVertex;
}
private void mapTraitInstanceToVertex(ITypedStruct traitInstance,
ITypedReferenceableInstance typedInstance,
Vertex parentInstanceVertex,
Map<Id, Vertex> idToVertexMap)
throws AtlasException {
private void mapTraitInstanceToVertex(ITypedStruct traitInstance, ITypedReferenceableInstance typedInstance,
Vertex parentInstanceVertex, Map<Id, Vertex> idToVertexMap) throws AtlasException {
// add a new vertex for the struct or trait instance
mapTraitInstanceToVertex(traitInstance, typedInstance.getId(),
typedInstance.getTypeName(), parentInstanceVertex, idToVertexMap);
mapTraitInstanceToVertex(traitInstance, typedInstance.getId(), typedInstance.getTypeName(),
parentInstanceVertex, idToVertexMap);
}
private void mapTraitInstanceToVertex(ITypedStruct traitInstance,
Id typedInstanceId, String typedInstanceTypeName,
Vertex parentInstanceVertex,
Map<Id, Vertex> idToVertexMap)
private void mapTraitInstanceToVertex(ITypedStruct traitInstance, Id typedInstanceId,
String typedInstanceTypeName, Vertex parentInstanceVertex, Map<Id, Vertex> idToVertexMap)
throws AtlasException {
// add a new vertex for the struct or trait instance
final String traitName = traitInstance.getTypeName();
Vertex traitInstanceVertex = GraphHelper.createVertexWithoutIdentity(
titanGraph, traitInstance.getTypeName(), typedInstanceId,
Vertex traitInstanceVertex = GraphHelper
.createVertexWithoutIdentity(titanGraph, traitInstance.getTypeName(), typedInstanceId,
typeSystem.getDataType(TraitType.class, traitName).getAllSuperTypeNames());
LOG.debug("created vertex {} for trait {}", traitInstanceVertex, traitName);
......@@ -890,12 +824,10 @@ public class GraphBackedMetadataRepository implements MetadataRepository {
// add an edge to the newly created vertex from the parent
String relationshipLabel = getEdgeLabel(typedInstanceTypeName, traitName);
GraphHelper.addEdge(
titanGraph, parentInstanceVertex, traitInstanceVertex, relationshipLabel);
GraphHelper.addEdge(titanGraph, parentInstanceVertex, traitInstanceVertex, relationshipLabel);
}
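
A trait is persisted as its own vertex, linked from the entity by an edge whose label getEdgeLabel derives from the entity type name and the trait name; since getEdgeLabel's body is not part of this diff, the "<typeName>.<traitName>" label below is an assumption. Sketch with TinkerGraph:

    import com.tinkerpop.blueprints.Vertex;
    import com.tinkerpop.blueprints.impls.tg.TinkerGraph;

    // Sketch of the trait wiring: a separate trait vertex linked by an edge.
    // The "<typeName>.<traitName>" label is an assumption; getEdgeLabel's body
    // is not shown in this diff. TinkerGraph stands in for Titan.
    public class TraitEdgeSketch {
        public static void main(String[] args) {
            TinkerGraph graph = new TinkerGraph();
            Vertex entity = graph.addVertex(null);
            Vertex trait = graph.addVertex(null);

            String relationshipLabel = "hive_table" + "." + "PII"; // assumed convention
            graph.addEdge(null, entity, trait, relationshipLabel);
            System.out.println("trait linked with label " + relationshipLabel);
            graph.shutdown();
        }
    }
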
private void mapPrimitiveToVertex(ITypedInstance typedInstance,
Vertex instanceVertex,
private void mapPrimitiveToVertex(ITypedInstance typedInstance, Vertex instanceVertex,
AttributeInfo attributeInfo) throws AtlasException {
Object attrValue = typedInstance.get(attributeInfo.name);
if (attrValue == null) {
......@@ -938,22 +870,19 @@ public class GraphBackedMetadataRepository implements MetadataRepository {
public final class GraphToTypedInstanceMapper {
public ITypedReferenceableInstance mapGraphToTypedInstance(String guid,
Vertex instanceVertex)
public ITypedReferenceableInstance mapGraphToTypedInstance(String guid, Vertex instanceVertex)
throws AtlasException {
LOG.debug("Mapping graph root vertex {} to typed instance for guid {}",
instanceVertex, guid);
LOG.debug("Mapping graph root vertex {} to typed instance for guid {}", instanceVertex, guid);
String typeName = instanceVertex.getProperty(Constants.ENTITY_TYPE_PROPERTY_KEY);
List<String> traits = getTraitNames(instanceVertex);
Id id = new Id(guid,
instanceVertex.<Integer>getProperty(Constants.VERSION_PROPERTY_KEY), typeName);
Id id = new Id(guid, instanceVertex.<Integer>getProperty(Constants.VERSION_PROPERTY_KEY), typeName);
LOG.debug("Created id {} for instance type {}", id, typeName);
ClassType classType = typeSystem.getDataType(ClassType.class, typeName);
ITypedReferenceableInstance typedInstance = classType.createInstance(
id, traits.toArray(new String[traits.size()]));
ITypedReferenceableInstance typedInstance =
classType.createInstance(id, traits.toArray(new String[traits.size()]));
mapVertexToInstance(instanceVertex, typedInstance, classType.fieldMapping().fields);
mapVertexToInstanceTraits(instanceVertex, typedInstance, traits);
......@@ -961,23 +890,20 @@ public class GraphBackedMetadataRepository implements MetadataRepository {
return typedInstance;
}
private void mapVertexToInstanceTraits(Vertex instanceVertex,
ITypedReferenceableInstance typedInstance,
private void mapVertexToInstanceTraits(Vertex instanceVertex, ITypedReferenceableInstance typedInstance,
List<String> traits) throws AtlasException {
for (String traitName : traits) {
LOG.debug("mapping trait {} to instance", traitName);
TraitType traitType = typeSystem.getDataType(TraitType.class, traitName);
mapVertexToTraitInstance(
instanceVertex, typedInstance, traitName, traitType);
mapVertexToTraitInstance(instanceVertex, typedInstance, traitName, traitType);
}
}
public void mapVertexToInstance(Vertex instanceVertex, ITypedInstance typedInstance,
Map<String, AttributeInfo> fields)
throws AtlasException {
Map<String, AttributeInfo> fields) throws AtlasException {
LOG.debug("Mapping vertex {} to instance {} for fields",
instanceVertex, typedInstance.getTypeName(), fields);
LOG.debug("Mapping vertex {} to instance {} for fields", instanceVertex, typedInstance.getTypeName(),
fields);
for (AttributeInfo attributeInfo : fields.values()) {
mapVertexToAttribute(instanceVertex, typedInstance, attributeInfo);
}
......@@ -1000,18 +926,17 @@ public class GraphBackedMetadataRepository implements MetadataRepository {
return;
}
typedInstance.set(attributeInfo.name, dataType.convert(instanceVertex.<String>getProperty
(vertexPropertyName), Multiplicity.REQUIRED));
typedInstance.set(attributeInfo.name,
dataType.convert(instanceVertex.<String>getProperty(vertexPropertyName),
Multiplicity.REQUIRED));
break;
case ARRAY:
mapVertexToArrayInstance(
instanceVertex, typedInstance, attributeInfo, vertexPropertyName);
mapVertexToArrayInstance(instanceVertex, typedInstance, attributeInfo, vertexPropertyName);
break;
case MAP:
mapVertexToMapInstance(
instanceVertex, typedInstance, attributeInfo, vertexPropertyName);
mapVertexToMapInstance(instanceVertex, typedInstance, attributeInfo, vertexPropertyName);
break;
case STRUCT:
......@@ -1024,8 +949,8 @@ public class GraphBackedMetadataRepository implements MetadataRepository {
case CLASS:
String relationshipLabel = getEdgeLabel(typedInstance, attributeInfo);
Object idOrInstance = mapClassReferenceToVertex(instanceVertex,
attributeInfo, relationshipLabel, attributeInfo.dataType());
Object idOrInstance = mapClassReferenceToVertex(instanceVertex, attributeInfo, relationshipLabel,
attributeInfo.dataType());
typedInstance.set(attributeInfo.name, idOrInstance);
break;
......@@ -1034,25 +959,21 @@ public class GraphBackedMetadataRepository implements MetadataRepository {
}
}
private Object mapClassReferenceToVertex(Vertex instanceVertex,
AttributeInfo attributeInfo,
String relationshipLabel,
IDataType dataType) throws AtlasException {
private Object mapClassReferenceToVertex(Vertex instanceVertex, AttributeInfo attributeInfo,
String relationshipLabel, IDataType dataType) throws AtlasException {
LOG.debug("Finding edge for {} -> label {} ", instanceVertex, relationshipLabel);
Iterator<Edge> results = instanceVertex.getEdges(
Direction.OUT, relationshipLabel).iterator();
Iterator<Edge> results = instanceVertex.getEdges(Direction.OUT, relationshipLabel).iterator();
if (results.hasNext()) {
final Vertex referenceVertex = results.next().getVertex(Direction.IN);
if (referenceVertex != null) {
final String guid = referenceVertex.getProperty(Constants.GUID_PROPERTY_KEY);
LOG.debug("Found vertex {} for label {} with guid {}",
referenceVertex, relationshipLabel, guid);
LOG.debug("Found vertex {} for label {} with guid {}", referenceVertex, relationshipLabel, guid);
if (attributeInfo.isComposite) {
LOG.debug("Found composite, mapping vertex to instance");
return mapGraphToTypedInstance(guid, referenceVertex);
} else {
Id referenceId = new Id(guid,
referenceVertex.<Integer>getProperty( Constants.VERSION_PROPERTY_KEY),
Id referenceId =
new Id(guid, referenceVertex.<Integer>getProperty(Constants.VERSION_PROPERTY_KEY),
dataType.getName());
LOG.debug("Found non-composite, adding id {} ", referenceId);
return referenceId;
......@@ -1065,8 +986,7 @@ public class GraphBackedMetadataRepository implements MetadataRepository {
@SuppressWarnings("unchecked")
private void mapVertexToArrayInstance(Vertex instanceVertex, ITypedInstance typedInstance,
AttributeInfo attributeInfo,
String propertyName) throws AtlasException {
AttributeInfo attributeInfo, String propertyName) throws AtlasException {
LOG.debug("mapping vertex {} to array {}", instanceVertex, attributeInfo.name);
List list = instanceVertex.getProperty(propertyName);
if (list == null || list.size() == 0) {
......@@ -1084,10 +1004,8 @@ public class GraphBackedMetadataRepository implements MetadataRepository {
typedInstance.set(attributeInfo.name, values);
}
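
Arrays round-trip through a single list property: the write side stores one entry value (or edge id) per element plus the whole list under the qualified property name, and the read side walks that list to rebuild the typed values. A sketch with a Map standing in for the vertex:

    import java.util.ArrayList;
    import java.util.Arrays;
    import java.util.HashMap;
    import java.util.List;
    import java.util.Map;

    // Sketch of the array round-trip: write one entry value (or edge id) per
    // element and the list itself under the qualified property name; read back
    // by walking the stored list. A Map stands in for the vertex.
    public class ArrayRoundTripSketch {
        public static void main(String[] args) {
            Map<String, Object> vertexProps = new HashMap<>();
            String propertyName = "hive_table.columnNames"; // illustrative qualified name

            // write side (mapArrayCollectionToVertex)
            vertexProps.put(propertyName, Arrays.asList("id", "name", "ts"));

            // read side (mapVertexToArrayInstance)
            @SuppressWarnings("unchecked")
            List<String> stored = (List<String>) vertexProps.get(propertyName);
            System.out.println(new ArrayList<String>(stored));
        }
    }
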
private Object mapVertexToCollectionEntry(Vertex instanceVertex,
AttributeInfo attributeInfo,
IDataType elementType, Object value, String propertyName)
throws AtlasException {
private Object mapVertexToCollectionEntry(Vertex instanceVertex, AttributeInfo attributeInfo,
IDataType elementType, Object value, String propertyName) throws AtlasException {
String edgeLabel = EDGE_LABEL_PREFIX + propertyName;
switch (elementType.getTypeCategory()) {
case PRIMITIVE:
......@@ -1101,12 +1019,11 @@ public class GraphBackedMetadataRepository implements MetadataRepository {
break;
case STRUCT:
return getStructInstanceFromVertex(instanceVertex,
elementType, attributeInfo.name, edgeLabel, (String) value);
return getStructInstanceFromVertex(instanceVertex, elementType, attributeInfo.name, edgeLabel,
(String) value);
case CLASS:
return mapClassReferenceToVertex(
instanceVertex, attributeInfo, edgeLabel, elementType, (String) value);
return mapClassReferenceToVertex(instanceVertex, attributeInfo, edgeLabel, elementType, (String) value);
default:
break;
......@@ -1117,8 +1034,7 @@ public class GraphBackedMetadataRepository implements MetadataRepository {
@SuppressWarnings("unchecked")
private void mapVertexToMapInstance(Vertex instanceVertex, ITypedInstance typedInstance,
AttributeInfo attributeInfo,
final String propertyName) throws AtlasException {
AttributeInfo attributeInfo, final String propertyName) throws AtlasException {
LOG.debug("mapping vertex {} to array {}", instanceVertex, attributeInfo.name);
List<String> keys = instanceVertex.getProperty(propertyName);
if (keys == null || keys.size() == 0) {
......@@ -1131,33 +1047,28 @@ public class GraphBackedMetadataRepository implements MetadataRepository {
for (String key : keys) {
String keyPropertyName = propertyName + "." + key;
Object keyValue = instanceVertex.getProperty(keyPropertyName);
values.put(key, mapVertexToCollectionEntry(instanceVertex, attributeInfo,
valueType, keyValue, propertyName));
values.put(key,
mapVertexToCollectionEntry(instanceVertex, attributeInfo, valueType, keyValue, propertyName));
}
typedInstance.set(attributeInfo.name, values);
}
private ITypedStruct getStructInstanceFromVertex(Vertex instanceVertex,
IDataType elemType,
String attributeName, String relationshipLabel,
String edgeId) throws AtlasException {
private ITypedStruct getStructInstanceFromVertex(Vertex instanceVertex, IDataType elemType,
String attributeName, String relationshipLabel, String edgeId) throws AtlasException {
LOG.debug("Finding edge for {} -> label {} ", instanceVertex, relationshipLabel);
for (Edge edge : instanceVertex.getEdges(Direction.OUT, relationshipLabel)) {
if (edgeId.equals(String.valueOf(edge.getId()))) {
Vertex structInstanceVertex = edge.getVertex(Direction.IN);
LOG.debug("mapping vertex {} to struct {}", structInstanceVertex,
attributeName);
LOG.debug("mapping vertex {} to struct {}", structInstanceVertex, attributeName);
if (structInstanceVertex != null) {
LOG.debug("Found struct instance vertex {}, mapping to instance {} ",
structInstanceVertex, elemType.getName());
StructType structType = typeSystem
.getDataType(StructType.class, elemType.getName());
LOG.debug("Found struct instance vertex {}, mapping to instance {} ", structInstanceVertex,
elemType.getName());
StructType structType = typeSystem.getDataType(StructType.class, elemType.getName());
ITypedStruct structInstance = structType.createInstance();
mapVertexToInstance(structInstanceVertex, structInstance,
structType.fieldMapping().fields);
mapVertexToInstance(structInstanceVertex, structInstance, structType.fieldMapping().fields);
return structInstance;
}
......@@ -1168,26 +1079,22 @@ public class GraphBackedMetadataRepository implements MetadataRepository {
return null;
}
private Object mapClassReferenceToVertex(Vertex instanceVertex,
AttributeInfo attributeInfo,
String relationshipLabel,
IDataType dataType,
String edgeId) throws AtlasException {
private Object mapClassReferenceToVertex(Vertex instanceVertex, AttributeInfo attributeInfo,
String relationshipLabel, IDataType dataType, String edgeId) throws AtlasException {
LOG.debug("Finding edge for {} -> label {} ", instanceVertex, relationshipLabel);
for (Edge edge : instanceVertex.getEdges(Direction.OUT, relationshipLabel)) {
if (edgeId.equals(String.valueOf(edge.getId()))) {
final Vertex referenceVertex = edge.getVertex(Direction.IN);
if (referenceVertex != null) {
final String guid = referenceVertex
.getProperty(Constants.GUID_PROPERTY_KEY);
LOG.debug("Found vertex {} for label {} with guid {}",
referenceVertex, relationshipLabel, guid);
final String guid = referenceVertex.getProperty(Constants.GUID_PROPERTY_KEY);
LOG.debug("Found vertex {} for label {} with guid {}", referenceVertex, relationshipLabel,
guid);
if (attributeInfo.isComposite) {
LOG.debug("Found composite, mapping vertex to instance");
return mapGraphToTypedInstance(guid, referenceVertex);
} else {
Id referenceId = new Id(guid,
referenceVertex.<Integer>getProperty(Constants.VERSION_PROPERTY_KEY),
Id referenceId =
new Id(guid, referenceVertex.<Integer>getProperty(Constants.VERSION_PROPERTY_KEY),
dataType.getName());
LOG.debug("Found non-composite, adding id {} ", referenceId);
return referenceId;
......@@ -1201,13 +1108,10 @@ public class GraphBackedMetadataRepository implements MetadataRepository {
return null;
}
private void mapVertexToStructInstance(Vertex instanceVertex,
ITypedInstance typedInstance,
AttributeInfo attributeInfo)
throws AtlasException {
private void mapVertexToStructInstance(Vertex instanceVertex, ITypedInstance typedInstance,
AttributeInfo attributeInfo) throws AtlasException {
LOG.debug("mapping vertex {} to struct {}", instanceVertex, attributeInfo.name);
StructType structType = typeSystem.getDataType(
StructType.class, attributeInfo.dataType().getName());
StructType structType = typeSystem.getDataType(StructType.class, attributeInfo.dataType().getName());
ITypedStruct structInstance = structType.createInstance();
typedInstance.set(attributeInfo.name, structInstance);
......@@ -1216,44 +1120,37 @@ public class GraphBackedMetadataRepository implements MetadataRepository {
for (Edge edge : instanceVertex.getEdges(Direction.OUT, relationshipLabel)) {
final Vertex structInstanceVertex = edge.getVertex(Direction.IN);
if (structInstanceVertex != null) {
LOG.debug("Found struct instance vertex {}, mapping to instance {} ",
structInstanceVertex, structInstance.getTypeName());
mapVertexToInstance(structInstanceVertex, structInstance,
structType.fieldMapping().fields);
LOG.debug("Found struct instance vertex {}, mapping to instance {} ", structInstanceVertex,
structInstance.getTypeName());
mapVertexToInstance(structInstanceVertex, structInstance, structType.fieldMapping().fields);
break;
}
}
}
private void mapVertexToTraitInstance(Vertex instanceVertex,
ITypedReferenceableInstance typedInstance,
String traitName,
TraitType traitType) throws AtlasException {
private void mapVertexToTraitInstance(Vertex instanceVertex, ITypedReferenceableInstance typedInstance,
String traitName, TraitType traitType) throws AtlasException {
ITypedStruct traitInstance = (ITypedStruct) typedInstance.getTrait(traitName);
mapVertexToTraitInstance(instanceVertex, typedInstance.getTypeName(),
traitName, traitType, traitInstance);
mapVertexToTraitInstance(instanceVertex, typedInstance.getTypeName(), traitName, traitType, traitInstance);
}
private void mapVertexToTraitInstance(Vertex instanceVertex, String typedInstanceTypeName,
String traitName, TraitType traitType,
ITypedStruct traitInstance) throws AtlasException {
private void mapVertexToTraitInstance(Vertex instanceVertex, String typedInstanceTypeName, String traitName,
TraitType traitType, ITypedStruct traitInstance) throws AtlasException {
String relationshipLabel = getEdgeLabel(typedInstanceTypeName, traitName);
LOG.debug("Finding edge for {} -> label {} ", instanceVertex, relationshipLabel);
for (Edge edge : instanceVertex.getEdges(Direction.OUT, relationshipLabel)) {
final Vertex traitInstanceVertex = edge.getVertex(Direction.IN);
if (traitInstanceVertex != null) {
LOG.debug("Found trait instance vertex {}, mapping to instance {} ",
traitInstanceVertex, traitInstance.getTypeName());
mapVertexToInstance(traitInstanceVertex, traitInstance,
traitType.fieldMapping().fields);
LOG.debug("Found trait instance vertex {}, mapping to instance {} ", traitInstanceVertex,
traitInstance.getTypeName());
mapVertexToInstance(traitInstanceVertex, traitInstance, traitType.fieldMapping().fields);
break;
}
}
}
private void mapVertexToPrimitive(Vertex instanceVertex,
ITypedInstance typedInstance,
private void mapVertexToPrimitive(Vertex instanceVertex, ITypedInstance typedInstance,
AttributeInfo attributeInfo) throws AtlasException {
LOG.debug("Adding primitive {} from vertex {}", attributeInfo, instanceVertex);
final String vertexPropertyName = getQualifiedName(typedInstance, attributeInfo);
......@@ -1262,38 +1159,28 @@ public class GraphBackedMetadataRepository implements MetadataRepository {
}
if (attributeInfo.dataType() == DataTypes.STRING_TYPE) {
typedInstance.setString(attributeInfo.name,
instanceVertex.<String>getProperty(vertexPropertyName));
typedInstance.setString(attributeInfo.name, instanceVertex.<String>getProperty(vertexPropertyName));
} else if (attributeInfo.dataType() == DataTypes.SHORT_TYPE) {
typedInstance.setShort(attributeInfo.name,
instanceVertex.<Short>getProperty(vertexPropertyName));
typedInstance.setShort(attributeInfo.name, instanceVertex.<Short>getProperty(vertexPropertyName));
} else if (attributeInfo.dataType() == DataTypes.INT_TYPE) {
typedInstance.setInt(attributeInfo.name,
instanceVertex.<Integer>getProperty(vertexPropertyName));
typedInstance.setInt(attributeInfo.name, instanceVertex.<Integer>getProperty(vertexPropertyName));
} else if (attributeInfo.dataType() == DataTypes.BIGINTEGER_TYPE) {
typedInstance.setBigInt(attributeInfo.name,
instanceVertex.<BigInteger>getProperty(vertexPropertyName));
typedInstance.setBigInt(attributeInfo.name, instanceVertex.<BigInteger>getProperty(vertexPropertyName));
} else if (attributeInfo.dataType() == DataTypes.BOOLEAN_TYPE) {
typedInstance.setBoolean(attributeInfo.name,
instanceVertex.<Boolean>getProperty(vertexPropertyName));
typedInstance.setBoolean(attributeInfo.name, instanceVertex.<Boolean>getProperty(vertexPropertyName));
} else if (attributeInfo.dataType() == DataTypes.BYTE_TYPE) {
typedInstance.setByte(attributeInfo.name,
instanceVertex.<Byte>getProperty(vertexPropertyName));
typedInstance.setByte(attributeInfo.name, instanceVertex.<Byte>getProperty(vertexPropertyName));
} else if (attributeInfo.dataType() == DataTypes.LONG_TYPE) {
typedInstance.setLong(attributeInfo.name,
instanceVertex.<Long>getProperty(vertexPropertyName));
typedInstance.setLong(attributeInfo.name, instanceVertex.<Long>getProperty(vertexPropertyName));
} else if (attributeInfo.dataType() == DataTypes.FLOAT_TYPE) {
typedInstance.setFloat(attributeInfo.name,
instanceVertex.<Float>getProperty(vertexPropertyName));
typedInstance.setFloat(attributeInfo.name, instanceVertex.<Float>getProperty(vertexPropertyName));
} else if (attributeInfo.dataType() == DataTypes.DOUBLE_TYPE) {
typedInstance.setDouble(attributeInfo.name,
instanceVertex.<Double>getProperty(vertexPropertyName));
typedInstance.setDouble(attributeInfo.name, instanceVertex.<Double>getProperty(vertexPropertyName));
} else if (attributeInfo.dataType() == DataTypes.BIGDECIMAL_TYPE) {
typedInstance.setBigDecimal(attributeInfo.name,
instanceVertex.<BigDecimal>getProperty(vertexPropertyName));
typedInstance
.setBigDecimal(attributeInfo.name, instanceVertex.<BigDecimal>getProperty(vertexPropertyName));
} else if (attributeInfo.dataType() == DataTypes.DATE_TYPE) {
typedInstance.setDate(attributeInfo.name,
instanceVertex.<Date>getProperty(vertexPropertyName));
typedInstance.setDate(attributeInfo.name, instanceVertex.<Date>getProperty(vertexPropertyName));
}
}
}
......
......@@ -18,7 +18,6 @@
package org.apache.atlas.repository.graph;
import com.google.inject.Provider;
import com.thinkaurelius.titan.core.Cardinality;
import com.thinkaurelius.titan.core.PropertyKey;
import com.thinkaurelius.titan.core.TitanGraph;
......@@ -29,7 +28,6 @@ import com.tinkerpop.blueprints.Edge;
import com.tinkerpop.blueprints.Vertex;
import org.apache.atlas.AtlasException;
import org.apache.atlas.discovery.SearchIndexer;
import org.apache.atlas.listener.TypesChangeListener;
import org.apache.atlas.repository.Constants;
import org.apache.atlas.repository.IndexCreationException;
import org.apache.atlas.repository.IndexException;
......@@ -62,8 +60,7 @@ public class GraphBackedSearchIndexer implements SearchIndexer {
private TitanManagement management;
@Inject
public GraphBackedSearchIndexer(GraphProvider<TitanGraph> graphProvider)
throws RepositoryException {
public GraphBackedSearchIndexer(GraphProvider<TitanGraph> graphProvider) throws RepositoryException {
this.titanGraph = graphProvider.get();
......@@ -85,27 +82,24 @@ public class GraphBackedSearchIndexer implements SearchIndexer {
/* This is called only once, which is the first time Atlas types are made indexable. */
LOG.info("Indexes do not exist, Creating indexes for titanGraph.");
management.buildIndex(Constants.VERTEX_INDEX, Vertex.class)
.buildMixedIndex(Constants.BACKING_INDEX);
management.buildIndex(Constants.EDGE_INDEX, Edge.class)
.buildMixedIndex(Constants.BACKING_INDEX);
management.buildIndex(Constants.VERTEX_INDEX, Vertex.class).buildMixedIndex(Constants.BACKING_INDEX);
management.buildIndex(Constants.EDGE_INDEX, Edge.class).buildMixedIndex(Constants.BACKING_INDEX);
// create a composite index for guid as it is unique
createCompositeIndex(Constants.GUID_INDEX,
Constants.GUID_PROPERTY_KEY, String.class, true, Cardinality.SINGLE);
createCompositeIndex(Constants.GUID_INDEX, Constants.GUID_PROPERTY_KEY, String.class, true, Cardinality.SINGLE);
// create a composite and mixed index for type since it can be combined with other keys
createCompositeAndMixedIndex(Constants.ENTITY_TYPE_INDEX,
Constants.ENTITY_TYPE_PROPERTY_KEY, String.class, false, Cardinality.SINGLE);
createCompositeAndMixedIndex(Constants.ENTITY_TYPE_INDEX, Constants.ENTITY_TYPE_PROPERTY_KEY, String.class,
false, Cardinality.SINGLE);
// create a composite and mixed index for super types since it can be combined with other keys
createCompositeAndMixedIndex(Constants.SUPER_TYPES_INDEX,
Constants.SUPER_TYPES_PROPERTY_KEY, String.class, false, Cardinality.SET);
createCompositeAndMixedIndex(Constants.SUPER_TYPES_INDEX, Constants.SUPER_TYPES_PROPERTY_KEY, String.class,
false, Cardinality.SET);
// create a composite and mixed index for traitNames since it can be combined with other
// keys. Traits must be a set and not a list.
createCompositeAndMixedIndex(Constants.TRAIT_NAMES_INDEX,
Constants.TRAIT_NAMES_PROPERTY_KEY, String.class, false, Cardinality.SET);
createCompositeAndMixedIndex(Constants.TRAIT_NAMES_INDEX, Constants.TRAIT_NAMES_PROPERTY_KEY, String.class,
false, Cardinality.SET);
// Index for full text search
createFullTextIndex();
......@@ -132,8 +126,8 @@ public class GraphBackedSearchIndexer implements SearchIndexer {
private void createTypeStoreIndexes() {
//Create unique index on typeName
createCompositeIndex(Constants.TYPENAME_PROPERTY_KEY, Constants.TYPENAME_PROPERTY_KEY, String.class,
true, Cardinality.SINGLE);
createCompositeIndex(Constants.TYPENAME_PROPERTY_KEY, Constants.TYPENAME_PROPERTY_KEY, String.class, true,
Cardinality.SINGLE);
//create index on vertex type
createCompositeIndex(Constants.VERTEX_TYPE_PROPERTY_KEY, Constants.VERTEX_TYPE_PROPERTY_KEY, String.class,
......@@ -150,7 +144,7 @@ public class GraphBackedSearchIndexer implements SearchIndexer {
@Override
public void onAdd(Collection<? extends IDataType> dataTypes) throws AtlasException {
for(IDataType dataType : dataTypes) {
for (IDataType dataType : dataTypes) {
LOG.info("Creating indexes for type name={}, definition={}", dataType.getName(), dataType.getClass());
try {
addIndexForType(dataType);
......@@ -285,27 +279,21 @@ public class GraphBackedSearchIndexer implements SearchIndexer {
}
*/
private void createCompositeAndMixedIndex(String indexName,
String propertyName, Class propertyClass,
private void createCompositeAndMixedIndex(String indexName, String propertyName, Class propertyClass,
boolean isUnique, Cardinality cardinality) {
createCompositeIndex(indexName, propertyName, propertyClass, isUnique, cardinality);
createVertexMixedIndex(propertyName, propertyClass);
}
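
The split above matters operationally: composite indexes are served by the storage backend itself and can enforce uniqueness (as for guid), while mixed indexes delegate to the configured ES/Solr backing index for richer predicates. A sketch of the composite half against an in-memory Titan 0.5.x graph; the mixed half is omitted because it needs an external index backend configured:

    import com.thinkaurelius.titan.core.Cardinality;
    import com.thinkaurelius.titan.core.PropertyKey;
    import com.thinkaurelius.titan.core.TitanFactory;
    import com.thinkaurelius.titan.core.TitanGraph;
    import com.thinkaurelius.titan.core.schema.TitanManagement;
    import com.tinkerpop.blueprints.Vertex;

    // Sketch of composite-index creation on an in-memory Titan 0.5.x graph,
    // mirroring the unique guid index above. Index and property names are
    // illustrative stand-ins for the Constants used in the real code.
    public class CompositeIndexSketch {
        public static void main(String[] args) {
            TitanGraph graph = TitanFactory.build().set("storage.backend", "inmemory").open();
            TitanManagement management = graph.getManagementSystem();

            PropertyKey guid = management.makePropertyKey("__guid").dataType(String.class)
                    .cardinality(Cardinality.SINGLE).make();
            management.buildIndex("guid_index", Vertex.class).addKey(guid).unique().buildCompositeIndex();
            management.commit();

            graph.shutdown();
        }
    }
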
private PropertyKey createCompositeIndex(String indexName,
String propertyName, Class propertyClass,
private PropertyKey createCompositeIndex(String indexName, String propertyName, Class propertyClass,
boolean isUnique, Cardinality cardinality) {
PropertyKey propertyKey = management.getPropertyKey(propertyName);
if (propertyKey == null) {
propertyKey = management
.makePropertyKey(propertyName)
.dataType(propertyClass)
.cardinality(cardinality)
.make();
propertyKey =
management.makePropertyKey(propertyName).dataType(propertyClass).cardinality(cardinality).make();
TitanManagement.IndexBuilder indexBuilder = management
.buildIndex(indexName, Vertex.class)
.addKey(propertyKey);
TitanManagement.IndexBuilder indexBuilder =
management.buildIndex(indexName, Vertex.class).addKey(propertyKey);
if (isUnique) {
indexBuilder = indexBuilder.unique();
......@@ -323,13 +311,11 @@ public class GraphBackedSearchIndexer implements SearchIndexer {
PropertyKey propertyKey = management.getPropertyKey(propertyName);
if (propertyKey == null) {
// ignore cardinality as only single-valued property keys can be indexed on vertices
propertyKey = management
.makePropertyKey(propertyName)
.dataType(propertyClass)
.make();
propertyKey = management.makePropertyKey(propertyName).dataType(propertyClass).make();
if (!checkIfMixedIndexApplicable(propertyClass)) {
LOG.debug("Creating composite index for property {} of type {} ", propertyName, propertyClass.getName());
LOG.debug("Creating composite index for property {} of type {} ", propertyName,
propertyClass.getName());
//Use standard index as backing index only supports string, int and geo types
management.buildIndex(propertyName, Vertex.class).addKey(propertyKey).buildCompositeIndex();
LOG.debug("Created composite index for property {} of type {} ", propertyName, propertyClass.getName());
......@@ -348,7 +334,8 @@ public class GraphBackedSearchIndexer implements SearchIndexer {
private boolean checkIfMixedIndexApplicable(Class propertyClass) {
//TODO - Check why date types are failing in ES/Solr
if (propertyClass == Boolean.class || propertyClass == BigDecimal.class || propertyClass == BigInteger.class || propertyClass == Date.class) {
if (propertyClass == Boolean.class || propertyClass == BigDecimal.class || propertyClass == BigInteger.class
|| propertyClass == Date.class) {
return false;
}
return true;
......@@ -358,8 +345,8 @@ public class GraphBackedSearchIndexer implements SearchIndexer {
try {
management.commit();
} catch (Exception e) {
LOG.error("Index commit failed" , e);
throw new IndexException("Index commit failed " , e);
LOG.error("Index commit failed", e);
throw new IndexException("Index commit failed ", e);
}
}
......@@ -367,8 +354,8 @@ public class GraphBackedSearchIndexer implements SearchIndexer {
try {
management.rollback();
} catch (Exception e) {
LOG.error("Index rollback failed " , e);
throw new IndexException("Index rollback failed " , e);
LOG.error("Index rollback failed ", e);
throw new IndexException("Index rollback failed ", e);
}
}
......
......@@ -45,11 +45,10 @@ public final class GraphHelper {
private GraphHelper() {
}
public static Vertex createVertexWithIdentity(Graph graph,
ITypedReferenceableInstance typedInstance,
public static Vertex createVertexWithIdentity(Graph graph, ITypedReferenceableInstance typedInstance,
Set<String> superTypeNames) {
final Vertex vertexWithIdentity = createVertexWithoutIdentity(
graph, typedInstance.getTypeName(), typedInstance.getId(), superTypeNames);
final Vertex vertexWithIdentity =
createVertexWithoutIdentity(graph, typedInstance.getTypeName(), typedInstance.getId(), superTypeNames);
// add identity
final String guid = UUID.randomUUID().toString();
......@@ -58,9 +57,7 @@ public final class GraphHelper {
return vertexWithIdentity;
}
public static Vertex createVertexWithoutIdentity(Graph graph,
String typeName,
Id typedInstanceId,
public static Vertex createVertexWithoutIdentity(Graph graph, String typeName, Id typedInstanceId,
Set<String> superTypeNames) {
final Vertex vertexWithoutIdentity = graph.addVertex(null);
......@@ -69,34 +66,28 @@ public final class GraphHelper {
// add super types
for (String superTypeName : superTypeNames) {
((TitanVertex) vertexWithoutIdentity).addProperty(
Constants.SUPER_TYPES_PROPERTY_KEY, superTypeName);
((TitanVertex) vertexWithoutIdentity).addProperty(Constants.SUPER_TYPES_PROPERTY_KEY, superTypeName);
}
// add version information
vertexWithoutIdentity.setProperty(Constants.VERSION_PROPERTY_KEY, typedInstanceId.version);
// add timestamp information
vertexWithoutIdentity.setProperty(
Constants.TIMESTAMP_PROPERTY_KEY, System.currentTimeMillis());
vertexWithoutIdentity.setProperty(Constants.TIMESTAMP_PROPERTY_KEY, System.currentTimeMillis());
return vertexWithoutIdentity;
}
public static Edge addEdge(TitanGraph titanGraph, Vertex fromVertex, Vertex toVertex,
String edgeLabel) {
LOG.debug("Adding edge for {} -> label {} -> {}",
fromVertex, edgeLabel, toVertex);
public static Edge addEdge(TitanGraph titanGraph, Vertex fromVertex, Vertex toVertex, String edgeLabel) {
LOG.debug("Adding edge for {} -> label {} -> {}", fromVertex, edgeLabel, toVertex);
return titanGraph.addEdge(null, fromVertex, toVertex, edgeLabel);
}
public static Vertex findVertexByGUID(TitanGraph titanGraph,
String value) {
public static Vertex findVertexByGUID(TitanGraph titanGraph, String value) {
LOG.debug("Finding vertex for key={}, value={}", Constants.GUID_PROPERTY_KEY, value);
GraphQuery query = titanGraph.query()
.has(Constants.GUID_PROPERTY_KEY, value);
GraphQuery query = titanGraph.query().has(Constants.GUID_PROPERTY_KEY, value);
Iterator<Vertex> results = query.vertices().iterator();
// returning one since guid should be unique
return results.hasNext() ? results.next() : null;
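
Identity and lookup are both guid-driven: createVertexWithIdentity attaches a fresh UUID as the guid property, and findVertexByGUID finds it again with a plain Blueprints GraphQuery, returning at most one vertex. A runnable sketch with TinkerGraph, where "__guid" stands in for Constants.GUID_PROPERTY_KEY:

    import com.tinkerpop.blueprints.GraphQuery;
    import com.tinkerpop.blueprints.Vertex;
    import com.tinkerpop.blueprints.impls.tg.TinkerGraph;

    import java.util.Iterator;
    import java.util.UUID;

    // Sketch of guid-based identity and lookup; TinkerGraph stands in for Titan
    // and "__guid" for the real GUID property key constant.
    public class GuidSketch {
        public static void main(String[] args) {
            TinkerGraph graph = new TinkerGraph();
            Vertex created = graph.addVertex(null);
            String guid = UUID.randomUUID().toString();
            created.setProperty("__guid", guid);

            GraphQuery query = graph.query().has("__guid", guid);
            Iterator<Vertex> results = query.vertices().iterator();
            // at most one result, since the guid should be unique
            System.out.println(results.hasNext() ? results.next() : "not found");
            graph.shutdown();
        }
    }
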
......@@ -105,20 +96,15 @@ public final class GraphHelper {
public static String vertexString(final Vertex vertex) {
StringBuilder properties = new StringBuilder();
for (String propertyKey : vertex.getPropertyKeys()) {
properties.append(propertyKey)
.append("=").append(vertex.getProperty(propertyKey))
.append(", ");
properties.append(propertyKey).append("=").append(vertex.getProperty(propertyKey)).append(", ");
}
return "v[" + vertex.getId() + "], Properties[" + properties + "]";
}
public static String edgeString(final Edge edge) {
return "e[" + edge.getLabel() + "], ["
+ edge.getVertex(Direction.OUT)
+ " -> " + edge.getLabel() + " -> "
+ edge.getVertex(Direction.IN)
+ "]";
return "e[" + edge.getLabel() + "], [" + edge.getVertex(Direction.OUT) + " -> " + edge.getLabel() + " -> "
+ edge.getVertex(Direction.IN) + "]";
}
/*
......
......@@ -75,8 +75,7 @@ public class AttributeStores {
} else if (i.dataType() == DataTypes.STRING_TYPE) {
return new StringStore(i);
} else {
throw new RepositoryException(
String.format("Unknown datatype %s", i.dataType()));
throw new RepositoryException(String.format("Unknown datatype %s", i.dataType()));
}
case ENUM:
return new IntAttributeStore(i);
......@@ -89,8 +88,7 @@ public class AttributeStores {
case CLASS:
return new IdStore(i);
default:
throw new RepositoryException(
String.format("Unknown Category for datatype %s", i.dataType()));
throw new RepositoryException(String.format("Unknown Category for datatype %s", i.dataType()));
}
}
......@@ -113,8 +111,7 @@ public class AttributeStores {
return nullList.get(pos);
}
void storeHiddenVals(int pos, IConstructableType type, StructInstance instance)
throws RepositoryException {
void storeHiddenVals(int pos, IConstructableType type, StructInstance instance) throws RepositoryException {
List<String> attrNames = type.getNames(attrInfo);
Map<String, Object> m = hiddenVals.get(pos);
if (m == null) {
......@@ -134,8 +131,7 @@ public class AttributeStores {
}
}
void loadHiddenVals(int pos, IConstructableType type, StructInstance instance)
throws RepositoryException {
void loadHiddenVals(int pos, IConstructableType type, StructInstance instance) throws RepositoryException {
List<String> attrNames = type.getNames(attrInfo);
Map<String, Object> m = hiddenVals.get(pos);
for (int i = 2; i < attrNames.size(); i++) {
......@@ -153,8 +149,7 @@ public class AttributeStores {
}
@Override
public void store(int pos, IConstructableType type, StructInstance instance)
throws RepositoryException {
public void store(int pos, IConstructableType type, StructInstance instance) throws RepositoryException {
List<String> attrNames = type.getNames(attrInfo);
String attrName = attrNames.get(0);
int nullPos = instance.fieldMapping().fieldNullPos.get(attrName);
......@@ -175,8 +170,7 @@ public class AttributeStores {
}
@Override
public void load(int pos, IConstructableType type, StructInstance instance)
throws RepositoryException {
public void load(int pos, IConstructableType type, StructInstance instance) throws RepositoryException {
List<String> attrNames = type.getNames(attrInfo);
String attrName = attrNames.get(0);
int nullPos = instance.fieldMapping().fieldNullPos.get(attrName);
......@@ -197,20 +191,17 @@ public class AttributeStores {
/*
* store the value from colPos in instance into the list.
*/
protected abstract void store(StructInstance instance, int colPos, int pos)
throws RepositoryException;
protected abstract void store(StructInstance instance, int colPos, int pos) throws RepositoryException;
/*
* load the value from pos in list into colPos in instance.
*/
protected abstract void load(StructInstance instance, int colPos, int pos)
throws RepositoryException;
protected abstract void load(StructInstance instance, int colPos, int pos) throws RepositoryException;
/*
* store the value from colPos in map as attrName
*/
protected abstract void store(StructInstance instance, int colPos, String attrName,
Map<String, Object> m);
protected abstract void store(StructInstance instance, int colPos, String attrName, Map<String, Object> m);
/*
* load the val into colPos in instance.
......@@ -219,8 +210,7 @@ public class AttributeStores {
}
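
These stores are column-oriented: each attribute keeps its own typed, position-indexed list (plus a null mask and a hidden-value map), rather than one object per instance, which is why every concrete store only implements store/load at a position. An illustrative sketch of the pattern, not the Atlas classes:

    import java.util.ArrayList;

    // Illustrative sketch of the column-store pattern behind these attribute
    // stores: one typed, position-indexed list per attribute rather than one
    // object per instance. Not the Atlas classes.
    public class ColumnStoreSketch {
        static class BooleanStore {
            private final ArrayList<Boolean> list = new ArrayList<>();

            void store(int pos, boolean value) {
                while (list.size() <= pos) {
                    list.add(null); // grow the column; null doubles as the null mask
                }
                list.set(pos, value);
            }

            boolean load(int pos) {
                return list.get(pos);
            }
        }

        public static void main(String[] args) {
            BooleanStore store = new BooleanStore();
            store.store(3, true);
            System.out.println(store.load(3)); // true
        }
    }
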
static abstract class PrimitiveAttributeStore extends AbstractAttributeStore
implements IAttributeStore {
static abstract class PrimitiveAttributeStore extends AbstractAttributeStore implements IAttributeStore {
public PrimitiveAttributeStore(AttributeInfo attrInfo) {
......@@ -246,8 +236,7 @@ public class AttributeStores {
instance.bools[colPos] = list.get(pos);
}
protected void store(StructInstance instance, int colPos, String attrName,
Map<String, Object> m) {
protected void store(StructInstance instance, int colPos, String attrName, Map<String, Object> m) {
m.put(attrName, instance.bools[colPos]);
}
......@@ -279,8 +268,7 @@ public class AttributeStores {
instance.bytes[colPos] = list.get(pos);
}
protected void store(StructInstance instance, int colPos, String attrName,
Map<String, Object> m) {
protected void store(StructInstance instance, int colPos, String attrName, Map<String, Object> m) {
m.put(attrName, instance.bytes[colPos]);
}
......@@ -312,8 +300,7 @@ public class AttributeStores {
instance.shorts[colPos] = list.get(pos);
}
protected void store(StructInstance instance, int colPos, String attrName,
Map<String, Object> m) {
protected void store(StructInstance instance, int colPos, String attrName, Map<String, Object> m) {
m.put(attrName, instance.shorts[colPos]);
}
......@@ -345,8 +332,7 @@ public class AttributeStores {
instance.ints[colPos] = list.get(pos);
}
protected void store(StructInstance instance, int colPos, String attrName,
Map<String, Object> m) {
protected void store(StructInstance instance, int colPos, String attrName, Map<String, Object> m) {
m.put(attrName, instance.ints[colPos]);
}
......@@ -378,8 +364,7 @@ public class AttributeStores {
instance.longs[colPos] = list.get(pos);
}
protected void store(StructInstance instance, int colPos, String attrName,
Map<String, Object> m) {
protected void store(StructInstance instance, int colPos, String attrName, Map<String, Object> m) {
m.put(attrName, instance.longs[colPos]);
}
......@@ -411,8 +396,7 @@ public class AttributeStores {
instance.floats[colPos] = list.get(pos);
}
protected void store(StructInstance instance, int colPos, String attrName,
Map<String, Object> m) {
protected void store(StructInstance instance, int colPos, String attrName, Map<String, Object> m) {
m.put(attrName, instance.floats[colPos]);
}
......@@ -444,8 +428,7 @@ public class AttributeStores {
instance.doubles[colPos] = list.get(pos);
}
protected void store(StructInstance instance, int colPos, String attrName,
Map<String, Object> m) {
protected void store(StructInstance instance, int colPos, String attrName, Map<String, Object> m) {
m.put(attrName, instance.doubles[colPos]);
}
......@@ -492,8 +475,7 @@ public class AttributeStores {
instance.bigIntegers[colPos] = list.get(pos);
}
protected void store(StructInstance instance, int colPos, String attrName,
Map<String, Object> m) {
protected void store(StructInstance instance, int colPos, String attrName, Map<String, Object> m) {
m.put(attrName, instance.bigIntegers[colPos]);
}
......@@ -517,8 +499,7 @@ public class AttributeStores {
instance.bigDecimals[colPos] = list.get(pos);
}
protected void store(StructInstance instance, int colPos, String attrName,
Map<String, Object> m) {
protected void store(StructInstance instance, int colPos, String attrName, Map<String, Object> m) {
m.put(attrName, instance.bigDecimals[colPos]);
}
......@@ -542,8 +523,7 @@ public class AttributeStores {
instance.dates[colPos] = list.get(pos);
}
protected void store(StructInstance instance, int colPos, String attrName,
Map<String, Object> m) {
protected void store(StructInstance instance, int colPos, String attrName, Map<String, Object> m) {
m.put(attrName, instance.dates[colPos]);
}
......@@ -567,8 +547,7 @@ public class AttributeStores {
instance.strings[colPos] = list.get(pos);
}
protected void store(StructInstance instance, int colPos, String attrName,
Map<String, Object> m) {
protected void store(StructInstance instance, int colPos, String attrName, Map<String, Object> m) {
m.put(attrName, instance.strings[colPos]);
}
......@@ -592,8 +571,7 @@ public class AttributeStores {
instance.ids[colPos] = list.get(pos);
}
protected void store(StructInstance instance, int colPos, String attrName,
Map<String, Object> m) {
protected void store(StructInstance instance, int colPos, String attrName, Map<String, Object> m) {
m.put(attrName, instance.ids[colPos]);
}
......@@ -617,8 +595,7 @@ public class AttributeStores {
instance.arrays[colPos] = list.get(pos);
}
protected void store(StructInstance instance, int colPos, String attrName,
Map<String, Object> m) {
protected void store(StructInstance instance, int colPos, String attrName, Map<String, Object> m) {
m.put(attrName, instance.arrays[colPos]);
}
......@@ -642,8 +619,7 @@ public class AttributeStores {
instance.maps[colPos] = list.get(pos);
}
protected void store(StructInstance instance, int colPos, String attrName,
Map<String, Object> m) {
protected void store(StructInstance instance, int colPos, String attrName, Map<String, Object> m) {
m.put(attrName, instance.maps[colPos]);
}
......
......@@ -32,8 +32,7 @@ public class ClassStore extends HierarchicalTypeStore {
final ArrayList<ImmutableList<String>> traitNamesStore;
final ClassType classType;
public ClassStore(MemRepository repository, ClassType hierarchicalType)
throws RepositoryException {
public ClassStore(MemRepository repository, ClassType hierarchicalType) throws RepositoryException {
super(repository, hierarchicalType);
classType = hierarchicalType;
traitNamesStore = new ArrayList<ImmutableList<String>>();
......@@ -64,8 +63,7 @@ public class ClassStore extends HierarchicalTypeStore {
String typeName = typeNameList.get(pos);
if (typeName != hierarchicalType.getName()) {
throw new RepositoryException(
String.format("Invalid Id (incorrect typeName, type is %s) : %s",
typeName, id));
String.format("Invalid Id (incorrect typeName, type is %s) : %s", typeName, id));
}
return true;
......
......@@ -60,13 +60,11 @@ public abstract class HierarchicalTypeStore {
*/
ReentrantReadWriteLock lock;
HierarchicalTypeStore(MemRepository repository, HierarchicalType hierarchicalType)
throws RepositoryException {
HierarchicalTypeStore(MemRepository repository, HierarchicalType hierarchicalType) throws RepositoryException {
this.hierarchicalType = (IConstructableType) hierarchicalType;
this.repository = repository;
ImmutableMap.Builder<AttributeInfo, IAttributeStore> b
= new ImmutableBiMap.Builder<AttributeInfo,
IAttributeStore>();
ImmutableMap.Builder<AttributeInfo, IAttributeStore> b =
new ImmutableBiMap.Builder<AttributeInfo, IAttributeStore>();
typeNameList = Lists.newArrayList((String) null);
ImmutableList<AttributeInfo> l = hierarchicalType.immediateAttrs;
for (AttributeInfo i : l) {
......@@ -74,8 +72,7 @@ public abstract class HierarchicalTypeStore {
}
attrStores = b.build();
ImmutableList.Builder<HierarchicalTypeStore> b1
= new ImmutableList.Builder<HierarchicalTypeStore>();
ImmutableList.Builder<HierarchicalTypeStore> b1 = new ImmutableList.Builder<HierarchicalTypeStore>();
Set<String> allSuperTypeNames = hierarchicalType.getAllSuperTypeNames();
for (String s : allSuperTypeNames) {
b1.add(repository.getStore(s));
......
......@@ -29,8 +29,7 @@ public interface IAttributeStore {
* @param instance
* @throws RepositoryException
*/
void store(int pos, IConstructableType type, StructInstance instance)
throws RepositoryException;
void store(int pos, IConstructableType type, StructInstance instance) throws RepositoryException;
/**
* load the Instance with the value from position 'pos' for the attribute.
......
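The store/load pair above is the whole column-store contract: store copies the value for one attribute out of a StructInstance into row 'pos' of a per-attribute column, and load copies it back. A minimal self-contained sketch of that contract, using a hypothetical ArrayList-backed column rather than the real AttributeStores classes:

// Hypothetical stand-in for one attribute column; not an Atlas class.
class StringColumnSketch {
    private final java.util.ArrayList<String> column = new java.util.ArrayList<>();

    // store: write the value for row 'pos', growing the column as needed
    void store(int pos, String value) {
        while (column.size() <= pos) {
            column.add(null);
        }
        column.set(pos, value);
    }

    // load: read back the value previously stored at row 'pos'
    String load(int pos) {
        return column.get(pos);
    }
}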
......@@ -129,9 +129,8 @@ public class MemRepository implements IRepository {
*/
for (Id oldId : discoverInstances.idToNewIdMap.keySet()) {
if (!discoverInstances.idToInstanceMap.containsKey(oldId)) {
throw new RepositoryException(String.format("Invalid Object Graph: " +
"Encountered an unassignedId %s that is not associated with an Instance",
oldId));
throw new RepositoryException(String.format("Invalid Object Graph: "
+ "Encountered an unassignedId %s that is not associated with an Instance", oldId));
}
}
......@@ -140,18 +139,14 @@ public class MemRepository implements IRepository {
* - create a ITypedReferenceableInstance.
* replace any old References ( ids or object references) with new Ids.
*/
List<ITypedReferenceableInstance> newInstances
= new ArrayList<ITypedReferenceableInstance>();
List<ITypedReferenceableInstance> newInstances = new ArrayList<ITypedReferenceableInstance>();
ITypedReferenceableInstance retInstance = null;
Set<ClassType> classTypes = new TreeSet<ClassType>();
Set<TraitType> traitTypes = new TreeSet<TraitType>();
for (IReferenceableInstance transientInstance : discoverInstances.idToInstanceMap
.values()) {
for (IReferenceableInstance transientInstance : discoverInstances.idToInstanceMap.values()) {
try {
ClassType cT = typeSystem
.getDataType(ClassType.class, transientInstance.getTypeName());
ITypedReferenceableInstance newInstance = cT
.convert(transientInstance, Multiplicity.REQUIRED);
ClassType cT = typeSystem.getDataType(ClassType.class, transientInstance.getTypeName());
ITypedReferenceableInstance newInstance = cT.convert(transientInstance, Multiplicity.REQUIRED);
newInstances.add(newInstance);
classTypes.add(cT);
......@@ -172,8 +167,7 @@ public class MemRepository implements IRepository {
} catch (AtlasException me) {
throw new RepositoryException(
String.format("Failed to create Instance(id = %s",
transientInstance.getId()), me);
String.format("Failed to create Instance(id = %s", transientInstance.getId()), me);
}
}
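The loop above is where transient instances become persistent ones: each discovered instance's type name is resolved to its ClassType, and convert(..., Multiplicity.REQUIRED) produces the ITypedReferenceableInstance that the repository stores. A hedged caller-side sketch of that step, assuming a registered "Person" class type and the imports already present in this file:

// Sketch only: the "Person" type and its attribute values are assumptions.
Referenceable person = new Referenceable("Person");
person.set("name", "Jane");
ClassType personType = typeSystem.getDataType(ClassType.class, person.getTypeName());
ITypedReferenceableInstance typed = personType.convert(person, Multiplicity.REQUIRED);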
......@@ -237,8 +231,7 @@ public class MemRepository implements IRepository {
return retInstance;
}
public ITypedReferenceableInstance update(ITypedReferenceableInstance i)
throws RepositoryException {
public ITypedReferenceableInstance update(ITypedReferenceableInstance i) throws RepositoryException {
throw new RepositoryException("not implemented");
}
......@@ -267,8 +260,7 @@ public class MemRepository implements IRepository {
* - load instance traits
* - add to GraphWalker
*/
ITypedReferenceableInstance getDuringWalk(Id id, ObjectGraphWalker walker)
throws RepositoryException {
ITypedReferenceableInstance getDuringWalk(Id id, ObjectGraphWalker walker) throws RepositoryException {
ClassStore cS = getClassStore(id.getTypeName());
if (cS == null) {
throw new RepositoryException(String.format("Unknown Class %s", id.getTypeName()));
......
......@@ -67,22 +67,18 @@ public class ReplaceIdWithInstance implements ObjectGraphWalker.NodeProcessor {
convertToInstances((ImmutableCollection) nd.value, nd.aInfo.multiplicity, aT));
} else if (nd.aInfo.dataType().getTypeCategory() == DataTypes.TypeCategory.MAP) {
DataTypes.MapType mT = (DataTypes.MapType) nd.aInfo.dataType();
nd.instance.set(nd.attributeName,
convertToInstances((ImmutableMap) nd.value, nd.aInfo.multiplicity, mT));
nd.instance.set(nd.attributeName, convertToInstances((ImmutableMap) nd.value, nd.aInfo.multiplicity, mT));
}
}
ImmutableCollection<?> convertToInstances(ImmutableCollection<?> val,
Multiplicity m, DataTypes.ArrayType arrType)
ImmutableCollection<?> convertToInstances(ImmutableCollection<?> val, Multiplicity m, DataTypes.ArrayType arrType)
throws AtlasException {
if (val == null ||
arrType.getElemType().getTypeCategory() != DataTypes.TypeCategory.CLASS) {
if (val == null || arrType.getElemType().getTypeCategory() != DataTypes.TypeCategory.CLASS) {
return val;
}
ImmutableCollection.Builder b = m.isUnique ? ImmutableSet.builder()
: ImmutableList.builder();
ImmutableCollection.Builder b = m.isUnique ? ImmutableSet.builder() : ImmutableList.builder();
Iterator it = val.iterator();
while (it.hasNext()) {
Object elem = it.next();
......@@ -97,13 +93,11 @@ public class ReplaceIdWithInstance implements ObjectGraphWalker.NodeProcessor {
return b.build();
}
ImmutableMap<?, ?> convertToInstances(ImmutableMap val, Multiplicity m,
DataTypes.MapType mapType)
ImmutableMap<?, ?> convertToInstances(ImmutableMap val, Multiplicity m, DataTypes.MapType mapType)
throws AtlasException {
if (val == null ||
(mapType.getKeyType().getTypeCategory() != DataTypes.TypeCategory.CLASS &&
mapType.getValueType().getTypeCategory() != DataTypes.TypeCategory.CLASS)) {
if (val == null || (mapType.getKeyType().getTypeCategory() != DataTypes.TypeCategory.CLASS
&& mapType.getValueType().getTypeCategory() != DataTypes.TypeCategory.CLASS)) {
return val;
}
ImmutableMap.Builder b = ImmutableMap.builder();
......
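The builder choice in convertToInstances above follows the attribute's multiplicity: a unique collection is rebuilt through an ImmutableSet builder, which silently drops duplicates, while everything else keeps order and duplicates in an ImmutableList builder. A self-contained Guava sketch of just that choice, using the same raw builder type as the code above:

import com.google.common.collect.ImmutableCollection;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableSet;

public class BuilderChoiceSketch {
    @SuppressWarnings("unchecked")
    static ImmutableCollection<?> rebuild(Iterable<?> values, boolean isUnique) {
        // unique multiplicity -> set semantics, otherwise list semantics
        ImmutableCollection.Builder b = isUnique ? ImmutableSet.builder() : ImmutableList.builder();
        for (Object v : values) {
            b.add(v); // the real walker swaps Ids for their instances here
        }
        return b.build();
    }
}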
......@@ -65,8 +65,7 @@ public class StructStore extends AttributeStores.AbstractAttributeStore implemen
}
@Override
protected void store(StructInstance instance, int colPos, String attrName,
Map<String, Object> m) {
protected void store(StructInstance instance, int colPos, String attrName, Map<String, Object> m) {
m.put(attrName, instance.structs[colPos]);
}
......
......@@ -29,8 +29,7 @@ public class TraitStore extends HierarchicalTypeStore {
final ArrayList<String> classNameStore;
public TraitStore(MemRepository repository, TraitType hierarchicalType)
throws RepositoryException {
public TraitStore(MemRepository repository, TraitType hierarchicalType) throws RepositoryException {
super(repository, hierarchicalType);
classNameStore = new ArrayList<>();
}
......
......@@ -25,8 +25,8 @@ import com.thinkaurelius.titan.core.TitanGraph;
import com.tinkerpop.blueprints.Direction;
import com.tinkerpop.blueprints.Edge;
import com.tinkerpop.blueprints.Vertex;
import org.apache.atlas.GraphTransaction;
import org.apache.atlas.AtlasException;
import org.apache.atlas.GraphTransaction;
import org.apache.atlas.repository.Constants;
import org.apache.atlas.repository.graph.GraphProvider;
import org.apache.atlas.typesystem.TypesDef;
......@@ -81,7 +81,7 @@ public class GraphBackedTypeStore implements ITypeStore {
LOG.debug("Processing {}.{} in type store", dataType.getTypeCategory(), dataType.getName());
switch (dataType.getTypeCategory()) {
case ENUM:
storeInGraph((EnumType)dataType);
storeInGraph((EnumType) dataType);
break;
case STRUCT:
......@@ -93,8 +93,8 @@ public class GraphBackedTypeStore implements ITypeStore {
case TRAIT:
case CLASS:
HierarchicalType type = (HierarchicalType) dataType;
storeInGraph(typeSystem, dataType.getTypeCategory(), dataType.getName(),
type.immediateAttrs, type.superTypes);
storeInGraph(typeSystem, dataType.getTypeCategory(), dataType.getName(), type.immediateAttrs,
type.superTypes);
break;
default: //Ignore primitive/collection types as they are covered under references
......@@ -132,8 +132,7 @@ public class GraphBackedTypeStore implements ITypeStore {
}
private void storeInGraph(TypeSystem typeSystem, DataTypes.TypeCategory category, String typeName,
ImmutableList<AttributeInfo> attributes, ImmutableList<String> superTypes) throws
AtlasException {
ImmutableList<AttributeInfo> attributes, ImmutableList<String> superTypes) throws AtlasException {
Vertex vertex = createVertex(category, typeName);
List<String> attrNames = new ArrayList<>();
if (attributes != null) {
......@@ -161,8 +160,8 @@ public class GraphBackedTypeStore implements ITypeStore {
}
//Add edges for complex attributes
private void addReferencesForAttribute(TypeSystem typeSystem, Vertex vertex, AttributeInfo attribute) throws
AtlasException {
private void addReferencesForAttribute(TypeSystem typeSystem, Vertex vertex, AttributeInfo attribute)
throws AtlasException {
ImmutableList<String> coreTypes = typeSystem.getCoreTypes();
List<IDataType> attrDataTypes = new ArrayList<>();
IDataType attrDataType = attribute.dataType();
......@@ -193,7 +192,8 @@ public class GraphBackedTypeStore implements ITypeStore {
break;
default:
throw new IllegalArgumentException("Attribute cannot reference instances of type : " + attrDataType.getTypeCategory());
throw new IllegalArgumentException(
"Attribute cannot reference instances of type : " + attrDataType.getTypeCategory());
}
for (IDataType attrType : attrDataTypes) {
......
......@@ -21,12 +21,11 @@ package org.apache.atlas.services;
import com.google.common.base.Preconditions;
import com.google.common.collect.ImmutableList;
import com.google.inject.Provider;
import org.apache.atlas.AtlasException;
import org.apache.atlas.AtlasClient;
import org.apache.atlas.AtlasException;
import org.apache.atlas.ParamChecker;
import org.apache.atlas.TypeNotFoundException;
import org.apache.atlas.classification.InterfaceAudience;
import org.apache.atlas.discovery.SearchIndexer;
import org.apache.atlas.listener.EntityChangeListener;
import org.apache.atlas.listener.TypesChangeListener;
import org.apache.atlas.repository.IndexCreationException;
......@@ -55,17 +54,14 @@ import org.codehaus.jettison.json.JSONException;
import org.codehaus.jettison.json.JSONObject;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import scala.actors.threadpool.Arrays;
import javax.inject.Inject;
import javax.inject.Singleton;
import java.util.ArrayList;
import java.util.Collection;
import java.util.HashMap;
import java.util.LinkedHashSet;
import java.util.List;
import java.util.Map;
import java.util.Set;
/**
* Simple wrapper over TypeSystem and MetadataRepository services with hooks
......@@ -74,11 +70,9 @@ import java.util.Set;
@Singleton
public class DefaultMetadataService implements MetadataService {
private static final Logger LOG =
LoggerFactory.getLogger(DefaultMetadataService.class);
private static final Logger LOG = LoggerFactory.getLogger(DefaultMetadataService.class);
private final Collection<EntityChangeListener> entityChangeListeners
= new LinkedHashSet<>();
private final Collection<EntityChangeListener> entityChangeListeners = new LinkedHashSet<>();
private final TypeSystem typeSystem;
private final MetadataRepository repository;
......@@ -122,22 +116,20 @@ public class DefaultMetadataService implements MetadataService {
return; // this is already registered
}
HierarchicalTypeDefinition<ClassType> infraType =
TypesUtil.createClassTypeDef(AtlasClient.INFRASTRUCTURE_SUPER_TYPE,
ImmutableList.<String>of(), NAME_ATTRIBUTE, DESCRIPTION_ATTRIBUTE);
HierarchicalTypeDefinition<ClassType> infraType = TypesUtil
.createClassTypeDef(AtlasClient.INFRASTRUCTURE_SUPER_TYPE, ImmutableList.<String>of(), NAME_ATTRIBUTE,
DESCRIPTION_ATTRIBUTE);
HierarchicalTypeDefinition<ClassType> datasetType = TypesUtil
.createClassTypeDef(AtlasClient.DATA_SET_SUPER_TYPE,
ImmutableList.<String>of(),
NAME_ATTRIBUTE, DESCRIPTION_ATTRIBUTE);
.createClassTypeDef(AtlasClient.DATA_SET_SUPER_TYPE, ImmutableList.<String>of(), NAME_ATTRIBUTE,
DESCRIPTION_ATTRIBUTE);
HierarchicalTypeDefinition<ClassType> processType = TypesUtil
.createClassTypeDef(AtlasClient.PROCESS_SUPER_TYPE, ImmutableList.<String>of(),
NAME_ATTRIBUTE, DESCRIPTION_ATTRIBUTE, new AttributeDefinition("inputs",
DataTypes.arrayTypeName(AtlasClient.DATA_SET_SUPER_TYPE),
.createClassTypeDef(AtlasClient.PROCESS_SUPER_TYPE, ImmutableList.<String>of(), NAME_ATTRIBUTE,
DESCRIPTION_ATTRIBUTE,
new AttributeDefinition("inputs", DataTypes.arrayTypeName(AtlasClient.DATA_SET_SUPER_TYPE),
Multiplicity.OPTIONAL, false, null),
new AttributeDefinition("outputs",
DataTypes.arrayTypeName(AtlasClient.DATA_SET_SUPER_TYPE),
new AttributeDefinition("outputs", DataTypes.arrayTypeName(AtlasClient.DATA_SET_SUPER_TYPE),
Multiplicity.OPTIONAL, false, null));
TypesDef typesDef = TypeUtils
......@@ -234,8 +226,7 @@ public class DefaultMetadataService implements MetadataService {
public String createEntity(String entityInstanceDefinition) throws AtlasException {
ParamChecker.notEmpty(entityInstanceDefinition, "Entity instance definition cannot be empty");
ITypedReferenceableInstance entityTypedInstance =
deserializeClassInstance(entityInstanceDefinition);
ITypedReferenceableInstance entityTypedInstance = deserializeClassInstance(entityInstanceDefinition);
final String guid = repository.createEntity(entityTypedInstance);
......@@ -243,13 +234,12 @@ public class DefaultMetadataService implements MetadataService {
return guid;
}
private ITypedReferenceableInstance deserializeClassInstance(
String entityInstanceDefinition) throws AtlasException {
private ITypedReferenceableInstance deserializeClassInstance(String entityInstanceDefinition)
throws AtlasException {
final Referenceable entityInstance;
try {
entityInstance = InstanceSerialization.fromJsonReferenceable(
entityInstanceDefinition, true);
entityInstance = InstanceSerialization.fromJsonReferenceable(entityInstanceDefinition, true);
} catch (Exception e) { // exception from deserializer
LOG.error("Unable to deserialize json={}", entityInstanceDefinition, e);
throw new IllegalArgumentException("Unable to deserialize json");
......@@ -327,8 +317,7 @@ public class DefaultMetadataService implements MetadataService {
* @throws AtlasException
*/
@Override
public void addTrait(String guid,
String traitInstanceDefinition) throws AtlasException {
public void addTrait(String guid, String traitInstanceDefinition) throws AtlasException {
ParamChecker.notEmpty(guid, "entity GUID cannot be null");
ParamChecker.notEmpty(traitInstanceDefinition, "Trait instance cannot be null");
......@@ -336,34 +325,32 @@ public class DefaultMetadataService implements MetadataService {
final String traitName = traitInstance.getTypeName();
// ensure trait type is already registered with the TS
if ( !typeSystem.isRegistered(traitName) ) {
if (!typeSystem.isRegistered(traitName)) {
String msg = String.format("trait=%s should be defined in type system before it can be added", traitName);
LOG.error(msg);
throw new TypeNotFoundException(msg);
}
// ensure trait is not already defined
Preconditions.checkArgument(!getTraitNames(guid).contains(traitName),
"trait=%s is already defined for entity=%s", traitName, guid);
Preconditions
.checkArgument(!getTraitNames(guid).contains(traitName), "trait=%s is already defined for entity=%s",
traitName, guid);
repository.addTrait(guid, traitInstance);
onTraitAddedToEntity(guid, traitName);
}
private ITypedStruct deserializeTraitInstance(String traitInstanceDefinition)
throws AtlasException {
private ITypedStruct deserializeTraitInstance(String traitInstanceDefinition) throws AtlasException {
try {
Struct traitInstance = InstanceSerialization.fromJsonStruct(
traitInstanceDefinition, true);
Struct traitInstance = InstanceSerialization.fromJsonStruct(traitInstanceDefinition, true);
final String entityTypeName = traitInstance.getTypeName();
ParamChecker.notEmpty(entityTypeName, "entity type cannot be null");
TraitType traitType = typeSystem.getDataType(TraitType.class, entityTypeName);
return traitType.convert(
traitInstance, Multiplicity.REQUIRED);
} catch ( TypeNotFoundException e ) {
return traitType.convert(traitInstance, Multiplicity.REQUIRED);
} catch (TypeNotFoundException e) {
throw e;
} catch (Exception e) {
throw new AtlasException("Error deserializing trait instance", e);
......@@ -378,13 +365,12 @@ public class DefaultMetadataService implements MetadataService {
* @throws AtlasException
*/
@Override
public void deleteTrait(String guid,
String traitNameToBeDeleted) throws AtlasException {
public void deleteTrait(String guid, String traitNameToBeDeleted) throws AtlasException {
ParamChecker.notEmpty(guid, "entity GUID cannot be null");
ParamChecker.notEmpty(traitNameToBeDeleted, "Trait name cannot be null");
// ensure trait type is already registered with the TS
if ( !typeSystem.isRegistered(traitNameToBeDeleted)) {
if (!typeSystem.isRegistered(traitNameToBeDeleted)) {
final String msg = String.format("trait=%s should be defined in type system before it can be deleted",
traitNameToBeDeleted);
LOG.error(msg);
......@@ -398,7 +384,7 @@ public class DefaultMetadataService implements MetadataService {
private void onTypesAdded(Map<String, IDataType> typesAdded) throws AtlasException {
Map<TypesChangeListener, Throwable> caughtExceptions = new HashMap<>();
for(Provider<TypesChangeListener> indexerProvider : typeChangeListeners) {
for (Provider<TypesChangeListener> indexerProvider : typeChangeListeners) {
final TypesChangeListener listener = indexerProvider.get();
try {
listener.onAdd(typesAdded.values());
......@@ -413,23 +399,20 @@ public class DefaultMetadataService implements MetadataService {
}
}
private void onEntityAddedToRepo(ITypedReferenceableInstance typedInstance)
throws AtlasException {
private void onEntityAddedToRepo(ITypedReferenceableInstance typedInstance) throws AtlasException {
for (EntityChangeListener listener : entityChangeListeners) {
listener.onEntityAdded(typedInstance);
}
}
private void onTraitAddedToEntity(String typeName,
String traitName) throws AtlasException {
private void onTraitAddedToEntity(String typeName, String traitName) throws AtlasException {
for (EntityChangeListener listener : entityChangeListeners) {
listener.onTraitAdded(typeName, traitName);
}
}
private void onTraitDeletedFromEntity(String typeName,
String traitName) throws AtlasException {
private void onTraitDeletedFromEntity(String typeName, String traitName) throws AtlasException {
for (EntityChangeListener listener : entityChangeListeners) {
listener.onTraitDeleted(typeName, traitName);
}
......
......@@ -94,6 +94,7 @@ public interface MetadataService {
void updateEntity(String guid, String property, String value) throws AtlasException;
// Trait management functions
/**
* Gets the list of trait names for a given entity represented by a guid.
*
......@@ -110,8 +111,7 @@ public interface MetadataService {
* @param traitInstanceDefinition trait instance that needs to be added to entity
* @throws AtlasException
*/
void addTrait(String guid,
String traitInstanceDefinition) throws AtlasException;
void addTrait(String guid, String traitInstanceDefinition) throws AtlasException;
/**
* Deletes a given trait from an existing entity represented by a guid.
......@@ -120,6 +120,5 @@ public interface MetadataService {
* @param traitNameToBeDeleted name of the trait
* @throws AtlasException
*/
void deleteTrait(String guid,
String traitNameToBeDeleted) throws AtlasException;
void deleteTrait(String guid, String traitNameToBeDeleted) throws AtlasException;
}
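A hedged usage sketch for the trait-management methods declared above. The guid and the serialized trait below are illustrative placeholders rather than a real wire format, and getTraitNames is assumed to return a List of trait names; addTrait expects a JSON trait instance whose type is already registered, and deleteTrait removes it again by name:

// All values here are assumptions for illustration only.
String guid = "some-entity-guid";
String piiTraitJson = "...serialized PII trait instance...";
metadataService.addTrait(guid, piiTraitJson);
List<String> traitNames = metadataService.getTraitNames(guid); // now includes "PII"
metadataService.deleteTrait(guid, "PII");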
......@@ -94,44 +94,31 @@ public final class TestUtils {
new EnumTypeDefinition("OrgLevel", new EnumValue("L1", 1), new EnumValue("L2", 2));
ts.defineEnumType(orgLevelEnum);
StructTypeDefinition addressDetails = createStructTypeDef("Address",
createRequiredAttrDef("street", DataTypes.STRING_TYPE),
StructTypeDefinition addressDetails =
createStructTypeDef("Address", createRequiredAttrDef("street", DataTypes.STRING_TYPE),
createRequiredAttrDef("city", DataTypes.STRING_TYPE));
HierarchicalTypeDefinition<ClassType> deptTypeDef =
createClassTypeDef("Department", ImmutableList.<String>of(),
HierarchicalTypeDefinition<ClassType> deptTypeDef = createClassTypeDef("Department", ImmutableList.<String>of(),
createRequiredAttrDef("name", DataTypes.STRING_TYPE),
new AttributeDefinition("employees",
String.format("array<%s>", "Person"), Multiplicity.COLLECTION, true,
"department")
);
new AttributeDefinition("employees", String.format("array<%s>", "Person"), Multiplicity.COLLECTION,
true, "department"));
HierarchicalTypeDefinition<ClassType> personTypeDef = createClassTypeDef("Person",
ImmutableList.<String>of(),
HierarchicalTypeDefinition<ClassType> personTypeDef = createClassTypeDef("Person", ImmutableList.<String>of(),
createRequiredAttrDef("name", DataTypes.STRING_TYPE),
createOptionalAttrDef("orgLevel", ts.getDataType(EnumType.class, "OrgLevel")),
createOptionalAttrDef("address", "Address"),
new AttributeDefinition("department",
"Department", Multiplicity.REQUIRED, false, "employees"),
new AttributeDefinition("manager",
"Manager", Multiplicity.OPTIONAL, false, "subordinates")
);
HierarchicalTypeDefinition<ClassType> managerTypeDef = createClassTypeDef("Manager",
ImmutableList.of("Person"),
new AttributeDefinition("subordinates",
String.format("array<%s>", "Person"), Multiplicity.COLLECTION, false,
"manager")
);
HierarchicalTypeDefinition<TraitType> securityClearanceTypeDef = createTraitTypeDef(
"SecurityClearance",
ImmutableList.<String>of(),
createRequiredAttrDef("level", DataTypes.INT_TYPE)
);
ts.defineTypes(ImmutableList.of(addressDetails),
ImmutableList.of(securityClearanceTypeDef),
new AttributeDefinition("department", "Department", Multiplicity.REQUIRED, false, "employees"),
new AttributeDefinition("manager", "Manager", Multiplicity.OPTIONAL, false, "subordinates"));
HierarchicalTypeDefinition<ClassType> managerTypeDef = createClassTypeDef("Manager", ImmutableList.of("Person"),
new AttributeDefinition("subordinates", String.format("array<%s>", "Person"), Multiplicity.COLLECTION,
false, "manager"));
HierarchicalTypeDefinition<TraitType> securityClearanceTypeDef =
createTraitTypeDef("SecurityClearance", ImmutableList.<String>of(),
createRequiredAttrDef("level", DataTypes.INT_TYPE));
ts.defineTypes(ImmutableList.of(addressDetails), ImmutableList.of(securityClearanceTypeDef),
ImmutableList.of(deptTypeDef, personTypeDef, managerTypeDef));
}
......
......@@ -95,8 +95,7 @@ public class GraphBackedDiscoveryServiceTest {
Bindings bindings = engine.createBindings();
bindings.put("g", titanGraph);
String hiveGraphFile = FileUtils.getTempDirectory().getPath()
+ File.separator + System.nanoTime() + ".gson";
String hiveGraphFile = FileUtils.getTempDirectory().getPath() + File.separator + System.nanoTime() + ".gson";
System.out.println("hiveGraphFile = " + hiveGraphFile);
HiveTitanSample.writeGson(hiveGraphFile);
bindings.put("hiveGraphFile", hiveGraphFile);
......@@ -164,71 +163,56 @@ public class GraphBackedDiscoveryServiceTest {
System.out.println("search result = " + r);
// Property Query: list all Person names
r = discoveryService
.searchByGremlin("g.V.filter{it.typeName == 'Person'}.'Person.name'.toList()");
r = discoveryService.searchByGremlin("g.V.filter{it.typeName == 'Person'}.'Person.name'.toList()");
System.out.println("search result = " + r);
}
@DataProvider(name = "dslQueriesProvider")
private Object[][] createDSLQueries() {
return new String[][] {
{"from DB"},
{"DB"},
{"DB where DB.name=\"Reporting\""},
{"DB DB.name = \"Reporting\""},
{"DB where DB.name=\"Reporting\" select name, owner"},
{"DB has name"},
{"DB, Table"},
return new String[][]{{"from DB"}, {"DB"}, {"DB where DB.name=\"Reporting\""}, {"DB DB.name = \"Reporting\""},
{"DB where DB.name=\"Reporting\" select name, owner"}, {"DB has name"}, {"DB, Table"},
{"DB is JdbcAccess"},
/*
{"DB, LoadProcess has name"},
{"DB as db1, Table where db1.name = \"Reporting\""},
{"DB where DB.name=\"Reporting\" and DB.createTime < " + System.currentTimeMillis()},
*/
{"from Table"},
{"Table"},
{"Table is Dimension"},
{"Column where Column isa PII"},
{"from Table"}, {"Table"}, {"Table is Dimension"}, {"Column where Column isa PII"},
{"View is Dimension"},
/*{"Column where Column isa PII select Column.name"},*/
{"Column select Column.name"},
{"Column select name"},
{"Column where Column.name=\"customer_id\""},
{"from Table select Table.name"},
{"DB where (name = \"Reporting\")"},
{"Column select Column.name"}, {"Column select name"}, {"Column where Column.name=\"customer_id\""},
{"from Table select Table.name"}, {"DB where (name = \"Reporting\")"},
{"DB where (name = \"Reporting\") select name as _col_0, owner as _col_1"},
{"DB where DB is JdbcAccess"},
{"DB where DB has name"},
{"DB Table"},
{"DB where DB has name"},
{"DB where DB is JdbcAccess"}, {"DB where DB has name"}, {"DB Table"}, {"DB where DB has name"},
{"DB as db1 Table where (db1.name = \"Reporting\")"},
{"DB where (name = \"Reporting\") select name as _col_0, (createTime + 1) as _col_1 "},
/*
todo: does not work
{"DB where (name = \"Reporting\") and ((createTime + 1) > 0)"},
{"DB as db1 Table as tab where ((db1.createTime + 1) > 0) and (db1.name = \"Reporting\") select db1.name as dbName, tab.name as tabName"},
{"DB as db1 Table as tab where ((db1.createTime + 1) > 0) or (db1.name = \"Reporting\") select db1.name as dbName, tab.name as tabName"},
{"DB as db1 Table as tab where ((db1.createTime + 1) > 0) and (db1.name = \"Reporting\") or db1 has owner select db1.name as dbName, tab.name as tabName"},
{"DB as db1 Table as tab where ((db1.createTime + 1) > 0) and (db1.name = \"Reporting\") or db1 has owner select db1.name as dbName, tab.name as tabName"},
{"DB as db1 Table as tab where ((db1.createTime + 1) > 0) and (db1.name = \"Reporting\") select db1.name
as dbName, tab.name as tabName"},
{"DB as db1 Table as tab where ((db1.createTime + 1) > 0) or (db1.name = \"Reporting\") select db1.name
as dbName, tab.name as tabName"},
{"DB as db1 Table as tab where ((db1.createTime + 1) > 0) and (db1.name = \"Reporting\") or db1 has owner
select db1.name as dbName, tab.name as tabName"},
{"DB as db1 Table as tab where ((db1.createTime + 1) > 0) and (db1.name = \"Reporting\") or db1 has owner
select db1.name as dbName, tab.name as tabName"},
*/
// trait searches
{"Dimension"},
/*{"Fact"}, - todo: does not work*/
{"JdbcAccess"},
{"ETL"},
{"Metric"},
{"PII"},
{"JdbcAccess"}, {"ETL"}, {"Metric"}, {"PII"},
// Lineage
{"Table LoadProcess outputTable"},
{"Table loop (LoadProcess outputTable)"},
{"Table LoadProcess outputTable"}, {"Table loop (LoadProcess outputTable)"},
{"Table as _loop0 loop (LoadProcess outputTable) withPath"},
{"Table as src loop (LoadProcess outputTable) as dest select src.name as srcTable, dest.name as destTable withPath"},
{"Table as t, sd, Column as c where t.name=\"sales_fact\" select c.name as colName, c.dataType as colType"},
{"Table where name='sales_fact', db where name='Reporting'"}
};
{"Table as src loop (LoadProcess outputTable) as dest select src.name as srcTable, dest.name as "
+ "destTable withPath"},
{"Table as t, sd, Column as c where t.name=\"sales_fact\" select c.name as colName, c.dataType as "
+ "colType"},
{"Table where name='sales_fact', db where name='Reporting'"}};
}
@Test (dataProvider = "dslQueriesProvider")
@Test(dataProvider = "dslQueriesProvider")
public void testSearchByDSLQueries(String dslQuery) throws Exception {
System.out.println("Executing dslQuery = " + dslQuery);
String jsonResults = discoveryService.searchByDSL(dslQuery);
......@@ -254,14 +238,10 @@ public class GraphBackedDiscoveryServiceTest {
@DataProvider(name = "invalidDslQueriesProvider")
private Object[][] createInvalidDSLQueries() {
return new String[][] {
{"from Unknown"},
{"Unknown"},
{"Unknown is Blah"},
};
return new String[][]{{"from Unknown"}, {"Unknown"}, {"Unknown is Blah"},};
}
@Test (dataProvider = "invalidDslQueriesProvider", expectedExceptions = DiscoveryException.class)
@Test(dataProvider = "invalidDslQueriesProvider", expectedExceptions = DiscoveryException.class)
public void testSearchByDSLInvalidQueries(String dslQuery) throws Exception {
System.out.println("Executing dslQuery = " + dslQuery);
discoveryService.searchByDSL(dslQuery);
......@@ -289,17 +269,16 @@ public class GraphBackedDiscoveryServiceTest {
* D(d) extends C
*/
private void createTypesWithMultiLevelInheritance() throws Exception {
HierarchicalTypeDefinition A = createClassTypeDef("A", null,
createRequiredAttrDef("a", DataTypes.INT_TYPE));
HierarchicalTypeDefinition A = createClassTypeDef("A", null, createRequiredAttrDef("a", DataTypes.INT_TYPE));
HierarchicalTypeDefinition B = createClassTypeDef("B", ImmutableList.of("A"),
createOptionalAttrDef("b", DataTypes.BOOLEAN_TYPE));
HierarchicalTypeDefinition B =
createClassTypeDef("B", ImmutableList.of("A"), createOptionalAttrDef("b", DataTypes.BOOLEAN_TYPE));
HierarchicalTypeDefinition C = createClassTypeDef("C", ImmutableList.of("B"),
createOptionalAttrDef("c", DataTypes.BYTE_TYPE));
HierarchicalTypeDefinition C =
createClassTypeDef("C", ImmutableList.of("B"), createOptionalAttrDef("c", DataTypes.BYTE_TYPE));
HierarchicalTypeDefinition D = createClassTypeDef("D", ImmutableList.of("C"),
createOptionalAttrDef("d", DataTypes.SHORT_TYPE));
HierarchicalTypeDefinition D =
createClassTypeDef("D", ImmutableList.of("C"), createOptionalAttrDef("d", DataTypes.SHORT_TYPE));
TypeSystem.getInstance().defineClassTypes(A, B, C, D);
}
......@@ -312,8 +291,7 @@ public class GraphBackedDiscoveryServiceTest {
instance.set("a", 1);
ClassType deptType = TypeSystem.getInstance().getDataType(ClassType.class, "D");
ITypedReferenceableInstance typedInstance =
deptType.convert(instance, Multiplicity.REQUIRED);
ITypedReferenceableInstance typedInstance = deptType.convert(instance, Multiplicity.REQUIRED);
repositoryService.createEntity(typedInstance);
}
......
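A hedged sketch tying the two hunks above together: after the A..D hierarchy is defined and a single "D" instance with a = 1 is created, a DSL query on the subtype can filter on the attribute it inherits from "A" (assuming the DSL resolves inherited attributes, which is what these tests exercise):

String jsonResults = discoveryService.searchByDSL("D where a = 1");
System.out.println("search result = " + jsonResults);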
......@@ -66,8 +66,8 @@ public class HiveLineageServiceTest {
@Inject
private HiveLineageService hiveLineageService;
// @Inject
// private GraphProvider<TitanGraph> graphProvider;
// @Inject
// private GraphProvider<TitanGraph> graphProvider;
@BeforeClass
public void setUp() throws Exception {
......@@ -79,53 +79,47 @@ public class HiveLineageServiceTest {
@DataProvider(name = "dslQueriesProvider")
private Object[][] createDSLQueries() {
return new String[][] {
return new String[][]{
// joins
{"hive_table where name=\"sales_fact\", columns"},
{"hive_table where name=\"sales_fact\", columns select name, dataType, comment"},
{"hive_table where name=\"sales_fact\", columns as c select c.name, c.dataType, c.comment"},
// {"hive_db as db where (db.name=\"Reporting\"), hive_table as table select db.name, table.name"},
{"from hive_db"},
{"hive_db"},
{"hive_db where hive_db.name=\"Reporting\""},
// {"hive_db as db where (db.name=\"Reporting\"), hive_table as table select db.name,
// table.name"},
{"from hive_db"}, {"hive_db"}, {"hive_db where hive_db.name=\"Reporting\""},
{"hive_db hive_db.name = \"Reporting\""},
{"hive_db where hive_db.name=\"Reporting\" select name, owner"},
{"hive_db has name"},
// {"hive_db, hive_table"},
// {"hive_db, hive_process has name"},
// {"hive_db as db1, hive_table where db1.name = \"Reporting\""},
// {"hive_db where hive_db.name=\"Reporting\" and hive_db.createTime < " + System.currentTimeMillis()},
{"from hive_table"},
{"hive_table"},
{"hive_table is Dimension"},
{"hive_db where hive_db.name=\"Reporting\" select name, owner"}, {"hive_db has name"},
// {"hive_db, hive_table"},
// {"hive_db, hive_process has name"},
// {"hive_db as db1, hive_table where db1.name = \"Reporting\""},
// {"hive_db where hive_db.name=\"Reporting\" and hive_db.createTime < " + System
// .currentTimeMillis()},
{"from hive_table"}, {"hive_table"}, {"hive_table is Dimension"},
{"hive_column where hive_column isa PII"},
// {"hive_column where hive_column isa PII select hive_column.name"},
{"hive_column select hive_column.name"},
{"hive_column select name"},
{"hive_column where hive_column.name=\"customer_id\""},
{"from hive_table select hive_table.name"},
// {"hive_column where hive_column isa PII select hive_column.name"},
{"hive_column select hive_column.name"}, {"hive_column select name"},
{"hive_column where hive_column.name=\"customer_id\""}, {"from hive_table select hive_table.name"},
{"hive_db where (name = \"Reporting\")"},
{"hive_db where (name = \"Reporting\") select name as _col_0, owner as _col_1"},
{"hive_db where hive_db has name"},
// {"hive_db hive_table"},
// {"hive_db hive_table"},
{"hive_db where hive_db has name"},
// {"hive_db as db1 hive_table where (db1.name = \"Reporting\")"},
// {"hive_db as db1 hive_table where (db1.name = \"Reporting\")"},
{"hive_db where (name = \"Reporting\") select name as _col_0, (createTime + 1) as _col_1 "},
// {"hive_db where (name = \"Reporting\") and ((createTime + 1) > 0)"},
// {"hive_db as db1 hive_table as tab where ((db1.createTime + 1) > 0) and (db1.name = \"Reporting\") select db1.name as dbName, tab.name as tabName"},
// {"hive_db as db1 hive_table as tab where ((db1.createTime + 1) > 0) or (db1.name = \"Reporting\") select db1.name as dbName, tab.name as tabName"},
// {"hive_db as db1 hive_table as tab where ((db1.createTime + 1) > 0) and (db1.name = \"Reporting\") or db1 has owner select db1.name as dbName, tab.name as tabName"},
// {"hive_db as db1 hive_table as tab where ((db1.createTime + 1) > 0) and (db1.name = \"Reporting\") or db1 has owner select db1.name as dbName, tab.name as tabName"},
// {"hive_db where (name = \"Reporting\") and ((createTime + 1) > 0)"},
// {"hive_db as db1 hive_table as tab where ((db1.createTime + 1) > 0) and (db1.name =
// \"Reporting\") select db1.name as dbName, tab.name as tabName"},
// {"hive_db as db1 hive_table as tab where ((db1.createTime + 1) > 0) or (db1.name =
// \"Reporting\") select db1.name as dbName, tab.name as tabName"},
// {"hive_db as db1 hive_table as tab where ((db1.createTime + 1) > 0) and (db1.name =
// \"Reporting\") or db1 has owner select db1.name as dbName, tab.name as tabName"},
// {"hive_db as db1 hive_table as tab where ((db1.createTime + 1) > 0) and (db1.name =
// \"Reporting\") or db1 has owner select db1.name as dbName, tab.name as tabName"},
// trait searches
{"Dimension"},
{"Fact"},
{"ETL"},
{"Metric"},
{"PII"},
};
{"Dimension"}, {"Fact"}, {"ETL"}, {"Metric"}, {"PII"},};
}
@Test (dataProvider = "dslQueriesProvider")
@Test(dataProvider = "dslQueriesProvider")
public void testSearchByDSLQueries(String dslQuery) throws Exception {
System.out.println("Executing dslQuery = " + dslQuery);
String jsonResults = discoveryService.searchByDSL(dslQuery);
......@@ -163,19 +157,19 @@ public class HiveLineageServiceTest {
Assert.assertTrue(paths.length() > 0);
}
@Test (expectedExceptions = IllegalArgumentException.class)
@Test(expectedExceptions = IllegalArgumentException.class)
public void testGetInputsTableNameNull() throws Exception {
hiveLineageService.getInputs(null);
Assert.fail();
}
@Test (expectedExceptions = IllegalArgumentException.class)
@Test(expectedExceptions = IllegalArgumentException.class)
public void testGetInputsTableNameEmpty() throws Exception {
hiveLineageService.getInputs("");
Assert.fail();
}
@Test (expectedExceptions = EntityNotFoundException.class)
@Test(expectedExceptions = EntityNotFoundException.class)
public void testGetInputsBadTableName() throws Exception {
hiveLineageService.getInputs("blah");
Assert.fail();
......@@ -183,8 +177,7 @@ public class HiveLineageServiceTest {
@Test
public void testGetInputsGraph() throws Exception {
JSONObject results = new JSONObject(
hiveLineageService.getInputsGraph("sales_fact_monthly_mv"));
JSONObject results = new JSONObject(hiveLineageService.getInputsGraph("sales_fact_monthly_mv"));
Assert.assertNotNull(results);
System.out.println("inputs graph = " + results);
......@@ -212,19 +205,19 @@ public class HiveLineageServiceTest {
Assert.assertTrue(paths.length() > 0);
}
@Test (expectedExceptions = IllegalArgumentException.class)
@Test(expectedExceptions = IllegalArgumentException.class)
public void testGetOutputsTableNameNull() throws Exception {
hiveLineageService.getOutputs(null);
Assert.fail();
}
@Test (expectedExceptions = IllegalArgumentException.class)
@Test(expectedExceptions = IllegalArgumentException.class)
public void testGetOutputsTableNameEmpty() throws Exception {
hiveLineageService.getOutputs("");
Assert.fail();
}
@Test (expectedExceptions = EntityNotFoundException.class)
@Test(expectedExceptions = EntityNotFoundException.class)
public void testGetOutputsBadTableName() throws Exception {
hiveLineageService.getOutputs("blah");
Assert.fail();
......@@ -248,15 +241,11 @@ public class HiveLineageServiceTest {
@DataProvider(name = "tableNamesProvider")
private Object[][] tableNames() {
return new String[][] {
{"sales_fact", "4"},
{"time_dim", "3"},
{"sales_fact_daily_mv", "4"},
{"sales_fact_monthly_mv", "4"}
};
return new String[][]{{"sales_fact", "4"}, {"time_dim", "3"}, {"sales_fact_daily_mv", "4"},
{"sales_fact_monthly_mv", "4"}};
}
@Test (dataProvider = "tableNamesProvider")
@Test(dataProvider = "tableNamesProvider")
public void testGetSchema(String tableName, String expected) throws Exception {
JSONObject results = new JSONObject(hiveLineageService.getSchema(tableName));
Assert.assertNotNull(results);
......@@ -274,19 +263,19 @@ public class HiveLineageServiceTest {
}
}
@Test (expectedExceptions = IllegalArgumentException.class)
@Test(expectedExceptions = IllegalArgumentException.class)
public void testGetSchemaTableNameNull() throws Exception {
hiveLineageService.getSchema(null);
Assert.fail();
}
@Test (expectedExceptions = IllegalArgumentException.class)
@Test(expectedExceptions = IllegalArgumentException.class)
public void testGetSchemaTableNameEmpty() throws Exception {
hiveLineageService.getSchema("");
Assert.fail();
}
@Test (expectedExceptions = EntityNotFoundException.class)
@Test(expectedExceptions = EntityNotFoundException.class)
public void testGetSchemaBadTableName() throws Exception {
hiveLineageService.getSchema("blah");
Assert.fail();
......@@ -306,95 +295,62 @@ public class HiveLineageServiceTest {
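A hedged usage sketch for the lineage calls exercised above, reusing the fixture table names; getSchema and getInputsGraph return JSON strings that the tests wrap in a JSONObject:

JSONObject schema = new JSONObject(hiveLineageService.getSchema("sales_fact"));
JSONObject inputsGraph = new JSONObject(hiveLineageService.getInputsGraph("sales_fact_monthly_mv"));
String outputs = hiveLineageService.getOutputs("sales_fact"); // raw JSON, shape not shown here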
private static final String VIEW_TYPE = "View";
private TypesDef createTypeDefinitions() {
HierarchicalTypeDefinition<ClassType> dbClsDef
= TypesUtil.createClassTypeDef(DATABASE_TYPE, null,
attrDef("name", DataTypes.STRING_TYPE),
attrDef("description", DataTypes.STRING_TYPE),
attrDef("locationUri", DataTypes.STRING_TYPE),
attrDef("owner", DataTypes.STRING_TYPE),
attrDef("createTime", DataTypes.LONG_TYPE)
);
HierarchicalTypeDefinition<ClassType> storageDescClsDef =
TypesUtil.createClassTypeDef(STORAGE_DESC_TYPE, null,
attrDef("location", DataTypes.STRING_TYPE),
attrDef("inputFormat", DataTypes.STRING_TYPE),
attrDef("outputFormat", DataTypes.STRING_TYPE),
attrDef("compressed", DataTypes.STRING_TYPE,
Multiplicity.REQUIRED, false, null)
);
HierarchicalTypeDefinition<ClassType> columnClsDef =
TypesUtil.createClassTypeDef(COLUMN_TYPE, null,
attrDef("name", DataTypes.STRING_TYPE),
attrDef("dataType", DataTypes.STRING_TYPE),
attrDef("comment", DataTypes.STRING_TYPE)
);
HierarchicalTypeDefinition<ClassType> tblClsDef =
TypesUtil.createClassTypeDef(HIVE_TABLE_TYPE, ImmutableList.of("DataSet"),
attrDef("owner", DataTypes.STRING_TYPE),
attrDef("createTime", DataTypes.LONG_TYPE),
attrDef("lastAccessTime", DataTypes.LONG_TYPE),
attrDef("tableType", DataTypes.STRING_TYPE),
HierarchicalTypeDefinition<ClassType> dbClsDef = TypesUtil
.createClassTypeDef(DATABASE_TYPE, null, attrDef("name", DataTypes.STRING_TYPE),
attrDef("description", DataTypes.STRING_TYPE), attrDef("locationUri", DataTypes.STRING_TYPE),
attrDef("owner", DataTypes.STRING_TYPE), attrDef("createTime", DataTypes.LONG_TYPE));
HierarchicalTypeDefinition<ClassType> storageDescClsDef = TypesUtil
.createClassTypeDef(STORAGE_DESC_TYPE, null, attrDef("location", DataTypes.STRING_TYPE),
attrDef("inputFormat", DataTypes.STRING_TYPE), attrDef("outputFormat", DataTypes.STRING_TYPE),
attrDef("compressed", DataTypes.STRING_TYPE, Multiplicity.REQUIRED, false, null));
HierarchicalTypeDefinition<ClassType> columnClsDef = TypesUtil
.createClassTypeDef(COLUMN_TYPE, null, attrDef("name", DataTypes.STRING_TYPE),
attrDef("dataType", DataTypes.STRING_TYPE), attrDef("comment", DataTypes.STRING_TYPE));
HierarchicalTypeDefinition<ClassType> tblClsDef = TypesUtil
.createClassTypeDef(HIVE_TABLE_TYPE, ImmutableList.of("DataSet"),
attrDef("owner", DataTypes.STRING_TYPE), attrDef("createTime", DataTypes.LONG_TYPE),
attrDef("lastAccessTime", DataTypes.LONG_TYPE), attrDef("tableType", DataTypes.STRING_TYPE),
attrDef("temporary", DataTypes.BOOLEAN_TYPE),
new AttributeDefinition("db", DATABASE_TYPE,
Multiplicity.REQUIRED, false, null),
new AttributeDefinition("db", DATABASE_TYPE, Multiplicity.REQUIRED, false, null),
// todo - uncomment this, something is broken
// new AttributeDefinition("sd", STORAGE_DESC_TYPE,
// Multiplicity.REQUIRED, true, null),
new AttributeDefinition("columns",
DataTypes.arrayTypeName(COLUMN_TYPE),
Multiplicity.COLLECTION, true, null)
);
HierarchicalTypeDefinition<ClassType> loadProcessClsDef =
TypesUtil.createClassTypeDef(HIVE_PROCESS_TYPE, ImmutableList.of("Process"),
attrDef("userName", DataTypes.STRING_TYPE),
attrDef("startTime", DataTypes.LONG_TYPE),
// new AttributeDefinition("sd", STORAGE_DESC_TYPE,
// Multiplicity.REQUIRED, true, null),
new AttributeDefinition("columns", DataTypes.arrayTypeName(COLUMN_TYPE),
Multiplicity.COLLECTION, true, null));
HierarchicalTypeDefinition<ClassType> loadProcessClsDef = TypesUtil
.createClassTypeDef(HIVE_PROCESS_TYPE, ImmutableList.of("Process"),
attrDef("userName", DataTypes.STRING_TYPE), attrDef("startTime", DataTypes.LONG_TYPE),
attrDef("endTime", DataTypes.LONG_TYPE),
attrDef("queryText", DataTypes.STRING_TYPE, Multiplicity.REQUIRED),
attrDef("queryPlan", DataTypes.STRING_TYPE, Multiplicity.REQUIRED),
attrDef("queryId", DataTypes.STRING_TYPE, Multiplicity.REQUIRED),
attrDef("queryGraph", DataTypes.STRING_TYPE, Multiplicity.REQUIRED)
);
attrDef("queryGraph", DataTypes.STRING_TYPE, Multiplicity.REQUIRED));
HierarchicalTypeDefinition<ClassType> viewClsDef =
TypesUtil.createClassTypeDef(VIEW_TYPE, null,
attrDef("name", DataTypes.STRING_TYPE),
new AttributeDefinition("db", DATABASE_TYPE,
Multiplicity.REQUIRED, false, null),
new AttributeDefinition("inputTables",
DataTypes.arrayTypeName(HIVE_TABLE_TYPE),
Multiplicity.COLLECTION, false, null)
);
HierarchicalTypeDefinition<ClassType> viewClsDef = TypesUtil
.createClassTypeDef(VIEW_TYPE, null, attrDef("name", DataTypes.STRING_TYPE),
new AttributeDefinition("db", DATABASE_TYPE, Multiplicity.REQUIRED, false, null),
new AttributeDefinition("inputTables", DataTypes.arrayTypeName(HIVE_TABLE_TYPE),
Multiplicity.COLLECTION, false, null));
HierarchicalTypeDefinition<TraitType> dimTraitDef =
TypesUtil.createTraitTypeDef("Dimension", null);
HierarchicalTypeDefinition<TraitType> dimTraitDef = TypesUtil.createTraitTypeDef("Dimension", null);
HierarchicalTypeDefinition<TraitType> factTraitDef =
TypesUtil.createTraitTypeDef("Fact", null);
HierarchicalTypeDefinition<TraitType> factTraitDef = TypesUtil.createTraitTypeDef("Fact", null);
HierarchicalTypeDefinition<TraitType> metricTraitDef =
TypesUtil.createTraitTypeDef("Metric", null);
HierarchicalTypeDefinition<TraitType> metricTraitDef = TypesUtil.createTraitTypeDef("Metric", null);
HierarchicalTypeDefinition<TraitType> etlTraitDef =
TypesUtil.createTraitTypeDef("ETL", null);
HierarchicalTypeDefinition<TraitType> etlTraitDef = TypesUtil.createTraitTypeDef("ETL", null);
HierarchicalTypeDefinition<TraitType> piiTraitDef =
TypesUtil.createTraitTypeDef("PII", null);
HierarchicalTypeDefinition<TraitType> piiTraitDef = TypesUtil.createTraitTypeDef("PII", null);
HierarchicalTypeDefinition<TraitType> jdbcTraitDef =
TypesUtil.createTraitTypeDef("JdbcAccess", null);
HierarchicalTypeDefinition<TraitType> jdbcTraitDef = TypesUtil.createTraitTypeDef("JdbcAccess", null);
return TypeUtils.getTypesDef(
ImmutableList.<EnumTypeDefinition>of(),
ImmutableList.<StructTypeDefinition>of(),
ImmutableList.of(dimTraitDef, factTraitDef,
piiTraitDef, metricTraitDef, etlTraitDef, jdbcTraitDef),
ImmutableList.of(dbClsDef, storageDescClsDef, columnClsDef,
tblClsDef, loadProcessClsDef, viewClsDef)
);
return TypeUtils.getTypesDef(ImmutableList.<EnumTypeDefinition>of(), ImmutableList.<StructTypeDefinition>of(),
ImmutableList.of(dimTraitDef, factTraitDef, piiTraitDef, metricTraitDef, etlTraitDef, jdbcTraitDef),
ImmutableList.of(dbClsDef, storageDescClsDef, columnClsDef, tblClsDef, loadProcessClsDef, viewClsDef));
}
AttributeDefinition attrDef(String name, IDataType dT) {
......@@ -405,88 +361,73 @@ public class HiveLineageServiceTest {
return attrDef(name, dT, m, false, null);
}
AttributeDefinition attrDef(String name, IDataType dT,
Multiplicity m, boolean isComposite, String reverseAttributeName) {
AttributeDefinition attrDef(String name, IDataType dT, Multiplicity m, boolean isComposite,
String reverseAttributeName) {
Preconditions.checkNotNull(name);
Preconditions.checkNotNull(dT);
return new AttributeDefinition(name, dT.getName(), m, isComposite, reverseAttributeName);
}
private void setupInstances() throws Exception {
Id salesDB = database(
"Sales", "Sales Database", "John ETL", "hdfs://host:8000/apps/warehouse/sales");
Id salesDB = database("Sales", "Sales Database", "John ETL", "hdfs://host:8000/apps/warehouse/sales");
Referenceable sd = storageDescriptor("hdfs://host:8000/apps/warehouse/sales",
"TextInputFormat", "TextOutputFormat", true);
Referenceable sd =
storageDescriptor("hdfs://host:8000/apps/warehouse/sales", "TextInputFormat", "TextOutputFormat", true);
List<Referenceable> salesFactColumns = ImmutableList.of(
column("time_id", "int", "time id"),
column("product_id", "int", "product id"),
List<Referenceable> salesFactColumns = ImmutableList
.of(column("time_id", "int", "time id"), column("product_id", "int", "product id"),
column("customer_id", "int", "customer id", "PII"),
column("sales", "double", "product id", "Metric")
);
column("sales", "double", "product id", "Metric"));
Id salesFact = table("sales_fact", "sales fact table",
salesDB, sd, "Joe", "Managed", salesFactColumns, "Fact");
Id salesFact = table("sales_fact", "sales fact table", salesDB, sd, "Joe", "Managed", salesFactColumns, "Fact");
List<Referenceable> timeDimColumns = ImmutableList.of(
column("time_id", "int", "time id"),
column("dayOfYear", "int", "day Of Year"),
column("weekDay", "int", "week Day")
);
List<Referenceable> timeDimColumns = ImmutableList
.of(column("time_id", "int", "time id"), column("dayOfYear", "int", "day Of Year"),
column("weekDay", "int", "week Day"));
Id timeDim = table("time_dim", "time dimension table",
salesDB, sd, "John Doe", "External", timeDimColumns, "Dimension");
Id timeDim = table("time_dim", "time dimension table", salesDB, sd, "John Doe", "External", timeDimColumns,
"Dimension");
Id reportingDB = database("Reporting", "reporting database", "Jane BI",
"hdfs://host:8000/apps/warehouse/reporting");
Id reportingDB =
database("Reporting", "reporting database", "Jane BI", "hdfs://host:8000/apps/warehouse/reporting");
Id salesFactDaily = table("sales_fact_daily_mv",
"sales fact daily materialized view",
reportingDB, sd, "Joe BI", "Managed", salesFactColumns, "Metric");
Id salesFactDaily =
table("sales_fact_daily_mv", "sales fact daily materialized view", reportingDB, sd, "Joe BI", "Managed",
salesFactColumns, "Metric");
loadProcess("loadSalesDaily", "hive query for daily summary", "John ETL",
ImmutableList.of(salesFact, timeDim), ImmutableList.of(salesFactDaily),
"create table as select ", "plan", "id", "graph",
"ETL");
loadProcess("loadSalesDaily", "hive query for daily summary", "John ETL", ImmutableList.of(salesFact, timeDim),
ImmutableList.of(salesFactDaily), "create table as select ", "plan", "id", "graph", "ETL");
List<Referenceable> productDimColumns = ImmutableList.of(
column("product_id", "int", "product id"),
column("product_name", "string", "product name"),
column("brand_name", "int", "brand name")
);
List<Referenceable> productDimColumns = ImmutableList
.of(column("product_id", "int", "product id"), column("product_name", "string", "product name"),
column("brand_name", "int", "brand name"));
Id productDim = table("product_dim", "product dimension table",
salesDB, sd, "John Doe", "Managed", productDimColumns, "Dimension");
Id productDim =
table("product_dim", "product dimension table", salesDB, sd, "John Doe", "Managed", productDimColumns,
"Dimension");
view("product_dim_view", reportingDB,
ImmutableList.of(productDim), "Dimension", "JdbcAccess");
view("product_dim_view", reportingDB, ImmutableList.of(productDim), "Dimension", "JdbcAccess");
List<Referenceable> customerDimColumns = ImmutableList.of(
column("customer_id", "int", "customer id", "PII"),
List<Referenceable> customerDimColumns = ImmutableList.of(column("customer_id", "int", "customer id", "PII"),
column("name", "string", "customer name", "PII"),
column("address", "string", "customer address", "PII")
);
column("address", "string", "customer address", "PII"));
Id customerDim = table("customer_dim", "customer dimension table",
salesDB, sd, "fetl", "External", customerDimColumns, "Dimension");
Id customerDim =
table("customer_dim", "customer dimension table", salesDB, sd, "fetl", "External", customerDimColumns,
"Dimension");
view("customer_dim_view", reportingDB,
ImmutableList.of(customerDim), "Dimension", "JdbcAccess");
view("customer_dim_view", reportingDB, ImmutableList.of(customerDim), "Dimension", "JdbcAccess");
Id salesFactMonthly = table("sales_fact_monthly_mv",
"sales fact monthly materialized view",
reportingDB, sd, "Jane BI", "Managed", salesFactColumns, "Metric");
Id salesFactMonthly =
table("sales_fact_monthly_mv", "sales fact monthly materialized view", reportingDB, sd, "Jane BI",
"Managed", salesFactColumns, "Metric");
loadProcess("loadSalesMonthly", "hive query for monthly summary", "John ETL",
ImmutableList.of(salesFactDaily), ImmutableList.of(salesFactMonthly),
"create table as select ", "plan", "id", "graph",
"ETL");
loadProcess("loadSalesMonthly", "hive query for monthly summary", "John ETL", ImmutableList.of(salesFactDaily),
ImmutableList.of(salesFactMonthly), "create table as select ", "plan", "id", "graph", "ETL");
}
Id database(String name, String description,
String owner, String locationUri,
String... traitNames) throws Exception {
Id database(String name, String description, String owner, String locationUri, String... traitNames)
throws Exception {
Referenceable referenceable = new Referenceable(DATABASE_TYPE, traitNames);
referenceable.set("name", name);
referenceable.set("description", description);
......@@ -497,9 +438,8 @@ public class HiveLineageServiceTest {
return createInstance(referenceable);
}
Referenceable storageDescriptor(String location, String inputFormat,
String outputFormat,
boolean compressed) throws Exception {
Referenceable storageDescriptor(String location, String inputFormat, String outputFormat, boolean compressed)
throws Exception {
Referenceable referenceable = new Referenceable(STORAGE_DESC_TYPE);
referenceable.set("location", location);
referenceable.set("inputFormat", inputFormat);
......@@ -509,8 +449,7 @@ public class HiveLineageServiceTest {
return referenceable;
}
Referenceable column(String name, String dataType, String comment,
String... traitNames) throws Exception {
Referenceable column(String name, String dataType, String comment, String... traitNames) throws Exception {
Referenceable referenceable = new Referenceable(COLUMN_TYPE, traitNames);
referenceable.set("name", name);
referenceable.set("dataType", dataType);
......@@ -519,11 +458,8 @@ public class HiveLineageServiceTest {
return referenceable;
}
Id table(String name, String description,
Id dbId, Referenceable sd,
String owner, String tableType,
List<Referenceable> columns,
String... traitNames) throws Exception {
Id table(String name, String description, Id dbId, Referenceable sd, String owner, String tableType,
List<Referenceable> columns, String... traitNames) throws Exception {
Referenceable referenceable = new Referenceable(HIVE_TABLE_TYPE, traitNames);
referenceable.set("name", name);
referenceable.set("description", description);
......@@ -542,12 +478,9 @@ public class HiveLineageServiceTest {
return createInstance(referenceable);
}
Id loadProcess(String name, String description, String user,
List<Id> inputTables,
List<Id> outputTables,
String queryText, String queryPlan,
String queryId, String queryGraph,
String... traitNames) throws Exception {
Id loadProcess(String name, String description, String user, List<Id> inputTables, List<Id> outputTables,
String queryText, String queryPlan, String queryId, String queryGraph, String... traitNames)
throws Exception {
Referenceable referenceable = new Referenceable(HIVE_PROCESS_TYPE, traitNames);
referenceable.set("name", name);
referenceable.set("description", description);
......@@ -566,9 +499,7 @@ public class HiveLineageServiceTest {
return createInstance(referenceable);
}
Id view(String name, Id dbId,
List<Id> inputTables,
String... traitNames) throws Exception {
Id view(String name, Id dbId, List<Id> inputTables, String... traitNames) throws Exception {
Referenceable referenceable = new Referenceable(VIEW_TYPE, traitNames);
referenceable.set("name", name);
referenceable.set("db", dbId);
......
......@@ -50,12 +50,11 @@ public abstract class BaseTest {
public static final String STRUCT_TYPE_1 = "t1";
public static final String STRUCT_TYPE_2 = "t2";
public static final String TEST_DATE = "2014-12-11T02:35:58.440Z";
public static final long TEST_DATE_IN_LONG=1418265358440L;
public static final long TEST_DATE_IN_LONG = 1418265358440L;
protected IRepository repo;
public static Struct createStruct() throws AtlasException {
StructType structType = (StructType) TypeSystem.getInstance()
.getDataType(StructType.class, STRUCT_TYPE_1);
StructType structType = (StructType) TypeSystem.getInstance().getDataType(StructType.class, STRUCT_TYPE_1);
Struct s = new Struct(structType.getName());
s.set("a", 1);
s.set("b", true);
......@@ -70,8 +69,7 @@ public abstract class BaseTest {
s.set("k", new BigDecimal(1));
s.set("l", new Date(1418265358440L));
s.set("m", Lists.<Integer>asList(Integer.valueOf(1), new Integer[]{Integer.valueOf(1)}));
s.set("n", Lists.<BigDecimal>asList(BigDecimal.valueOf(1.1),
new BigDecimal[]{BigDecimal.valueOf(1.1)}));
s.set("n", Lists.<BigDecimal>asList(BigDecimal.valueOf(1.1), new BigDecimal[]{BigDecimal.valueOf(1.1)}));
Map<String, Double> hm = Maps.<String, Double>newHashMap();
hm.put("a", 1.0);
hm.put("b", 2.0);
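A hedged round-trip sketch for the struct populated above: values go in through set(...) and come back from get(...) as plain Objects, so callers cast to the attribute's declared type (inside a method that declares throws AtlasException):

Struct s = createStruct();
int a = (Integer) s.get("a");        // 1, the required int attribute
boolean flag = (Boolean) s.get("b"); // true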
......@@ -94,9 +92,8 @@ public abstract class BaseTest {
ts.reset();
repo = new MemRepository(ts);
StructType structType = ts.defineStructType(STRUCT_TYPE_1,
true,
TypesUtil.createRequiredAttrDef("a", DataTypes.INT_TYPE),
StructType structType =
ts.defineStructType(STRUCT_TYPE_1, true, TypesUtil.createRequiredAttrDef("a", DataTypes.INT_TYPE),
TypesUtil.createOptionalAttrDef("b", DataTypes.BOOLEAN_TYPE),
TypesUtil.createOptionalAttrDef("c", DataTypes.BYTE_TYPE),
TypesUtil.createOptionalAttrDef("d", DataTypes.SHORT_TYPE),
......@@ -109,18 +106,16 @@ public abstract class BaseTest {
TypesUtil.createOptionalAttrDef("k", DataTypes.BIGDECIMAL_TYPE),
TypesUtil.createOptionalAttrDef("l", DataTypes.DATE_TYPE),
TypesUtil.createOptionalAttrDef("m", ts.defineArrayType(DataTypes.INT_TYPE)),
TypesUtil.createOptionalAttrDef("n", ts.defineArrayType(DataTypes.BIGDECIMAL_TYPE)),
TypesUtil.createOptionalAttrDef("o",
TypesUtil.createOptionalAttrDef("n", ts.defineArrayType(DataTypes.BIGDECIMAL_TYPE)), TypesUtil
.createOptionalAttrDef("o",
ts.defineMapType(DataTypes.STRING_TYPE, DataTypes.DOUBLE_TYPE)));
StructType recursiveStructType = ts.defineStructType(STRUCT_TYPE_2,
true,
TypesUtil.createRequiredAttrDef("a", DataTypes.INT_TYPE),
StructType recursiveStructType =
ts.defineStructType(STRUCT_TYPE_2, true, TypesUtil.createRequiredAttrDef("a", DataTypes.INT_TYPE),
TypesUtil.createOptionalAttrDef("s", STRUCT_TYPE_2));
}
protected Map<String, IDataType> defineTraits(HierarchicalTypeDefinition... tDefs)
throws AtlasException {
protected Map<String, IDataType> defineTraits(HierarchicalTypeDefinition... tDefs) throws AtlasException {
return getTypeSystem().defineTraitTypes(tDefs);
}
......@@ -135,45 +130,33 @@ public abstract class BaseTest {
*/
protected void defineDeptEmployeeTypes(TypeSystem ts) throws AtlasException {
HierarchicalTypeDefinition<ClassType> deptTypeDef =
TypesUtil.createClassTypeDef("Department", ImmutableList.<String>of(),
HierarchicalTypeDefinition<ClassType> deptTypeDef = TypesUtil
.createClassTypeDef("Department", ImmutableList.<String>of(),
TypesUtil.createRequiredAttrDef("name", DataTypes.STRING_TYPE),
new AttributeDefinition("employees", String.format("array<%s>", "Person"),
Multiplicity.COLLECTION, true, "department")
);
HierarchicalTypeDefinition<ClassType> personTypeDef =
TypesUtil.createClassTypeDef("Person", ImmutableList.<String>of(),
Multiplicity.COLLECTION, true, "department"));
HierarchicalTypeDefinition<ClassType> personTypeDef = TypesUtil
.createClassTypeDef("Person", ImmutableList.<String>of(),
TypesUtil.createRequiredAttrDef("name", DataTypes.STRING_TYPE),
new AttributeDefinition("department",
"Department", Multiplicity.REQUIRED, false, "employees"),
new AttributeDefinition("manager",
"Manager", Multiplicity.OPTIONAL, false, "subordinates")
);
HierarchicalTypeDefinition<ClassType> managerTypeDef =
TypesUtil.createClassTypeDef("Manager",
ImmutableList.<String>of("Person"),
new AttributeDefinition("subordinates",
String.format("array<%s>", "Person"),
Multiplicity.COLLECTION, false, "manager")
);
HierarchicalTypeDefinition<TraitType> securityClearanceTypeDef =
TypesUtil.createTraitTypeDef("SecurityClearance",
ImmutableList.<String>of(),
TypesUtil.createRequiredAttrDef("level", DataTypes.INT_TYPE)
);
new AttributeDefinition("department", "Department", Multiplicity.REQUIRED, false, "employees"),
new AttributeDefinition("manager", "Manager", Multiplicity.OPTIONAL, false, "subordinates"));
HierarchicalTypeDefinition<ClassType> managerTypeDef = TypesUtil
.createClassTypeDef("Manager", ImmutableList.<String>of("Person"),
new AttributeDefinition("subordinates", String.format("array<%s>", "Person"),
Multiplicity.COLLECTION, false, "manager"));
HierarchicalTypeDefinition<TraitType> securityClearanceTypeDef = TypesUtil
.createTraitTypeDef("SecurityClearance", ImmutableList.<String>of(),
TypesUtil.createRequiredAttrDef("level", DataTypes.INT_TYPE));
ts.defineTypes(ImmutableList.<StructTypeDefinition>of(),
ImmutableList.<HierarchicalTypeDefinition<TraitType>>of(securityClearanceTypeDef),
ImmutableList.<HierarchicalTypeDefinition<ClassType>>of(deptTypeDef, personTypeDef,
managerTypeDef));
ImmutableList<HierarchicalType> types = ImmutableList.of(
ts.getDataType(HierarchicalType.class, "SecurityClearance"),
ts.getDataType(ClassType.class, "Department"),
ts.getDataType(ClassType.class, "Person"),
ts.getDataType(ClassType.class, "Manager")
);
ImmutableList.<HierarchicalTypeDefinition<ClassType>>of(deptTypeDef, personTypeDef, managerTypeDef));
ImmutableList<HierarchicalType> types = ImmutableList
.of(ts.getDataType(HierarchicalType.class, "SecurityClearance"),
ts.getDataType(ClassType.class, "Department"), ts.getDataType(ClassType.class, "Person"),
ts.getDataType(ClassType.class, "Manager"));
repo.defineTypes(types);
......
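Note: the Department/Person/Manager model above wires bidirectional references through the final AttributeDefinition argument (Department.employees pairs with Person.department, and Manager.subordinates with Person.manager). A hedged usage sketch; the instance values are illustrative, and passing a list for a COLLECTION attribute is an assumption from context:

Referenceable hr = new Referenceable("Department");
hr.set("name", "hr");
Referenceable jane = new Referenceable("Person");
jane.set("name", "Jane");
jane.set("department", hr);                  // reverse side of Department.employees
hr.set("employees", ImmutableList.of(jane)); // COLLECTION attribute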
......@@ -128,7 +128,7 @@ public class GraphBackedMetadataRepositoryTest {
Assert.assertNotNull(entity);
}
@Test (expectedExceptions = RepositoryException.class)
@Test(expectedExceptions = RepositoryException.class)
public void testGetEntityDefinitionNonExistent() throws Exception {
repositoryService.getEntityDefinition("blah");
Assert.fail();
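Note: the reformat normalizes "@Test (...)" to "@Test(...)" throughout this file. For reference, the TestNG negative-test idiom these methods rely on, sketched with a hypothetical method name:

@Test(expectedExceptions = RepositoryException.class)
public void failsForUnknownEntity() throws Exception { // hypothetical name
    repositoryService.getEntityDefinition("blah");     // must throw for the test to pass
    Assert.fail();                                     // unreachable when it does
}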
......@@ -144,15 +144,14 @@ public class GraphBackedMetadataRepositoryTest {
@Test
public void testGetTypeAttributeName() throws Exception {
Assert.assertEquals(
repositoryService.getTypeAttributeName(), Constants.ENTITY_TYPE_PROPERTY_KEY);
Assert.assertEquals(repositoryService.getTypeAttributeName(), Constants.ENTITY_TYPE_PROPERTY_KEY);
}
@Test (dependsOnMethods = "testSubmitEntity")
@Test(dependsOnMethods = "testSubmitEntity")
public void testGetTraitLabel() throws Exception {
Assert.assertEquals(repositoryService.getTraitLabel(
typeSystem.getDataType(ClassType.class, TABLE_TYPE),
CLASSIFICATION), TABLE_TYPE + "." + CLASSIFICATION);
Assert.assertEquals(
repositoryService.getTraitLabel(typeSystem.getDataType(ClassType.class, TABLE_TYPE), CLASSIFICATION),
TABLE_TYPE + "." + CLASSIFICATION);
}
@Test
......@@ -174,8 +173,7 @@ public class GraphBackedMetadataRepositoryTest {
String dbGUID = repositoryService.createEntity(db);
System.out.println("added db = " + dbGUID);
Referenceable dbInstance = new Referenceable(
dbGUID, DATABASE_TYPE, databaseInstance.getValuesMap());
Referenceable dbInstance = new Referenceable(dbGUID, DATABASE_TYPE, databaseInstance.getValuesMap());
ITypedReferenceableInstance table = createHiveTableInstance(dbInstance);
String tableGUID = repositoryService.createEntity(table);
......@@ -203,8 +201,7 @@ public class GraphBackedMetadataRepositoryTest {
private Vertex getTableEntityVertex() {
TitanGraph graph = graphProvider.get();
GraphQuery query = graph.query()
.has(Constants.ENTITY_TYPE_PROPERTY_KEY, Compare.EQUAL, TABLE_TYPE);
GraphQuery query = graph.query().has(Constants.ENTITY_TYPE_PROPERTY_KEY, Compare.EQUAL, TABLE_TYPE);
Iterator<Vertex> results = query.vertices().iterator();
// returning one since guid should be unique
Vertex tableVertex = results.hasNext() ? results.next() : null;
......@@ -215,7 +212,7 @@ public class GraphBackedMetadataRepositoryTest {
return tableVertex;
}
@Test (dependsOnMethods = "testCreateEntity")
@Test(dependsOnMethods = "testCreateEntity")
public void testGetTraitNames() throws Exception {
final List<String> traitNames = repositoryService.getTraitNames(getGUID());
Assert.assertEquals(traitNames.size(), 1);
......@@ -228,13 +225,13 @@ public class GraphBackedMetadataRepositoryTest {
Assert.assertEquals(traitNames.size(), 0);
}
@Test (expectedExceptions = EntityNotFoundException.class)
@Test(expectedExceptions = EntityNotFoundException.class)
public void testGetTraitNamesForBadEntity() throws Exception {
repositoryService.getTraitNames(UUID.randomUUID().toString());
Assert.fail();
}
@Test (dependsOnMethods = "testGetTraitNames")
@Test(dependsOnMethods = "testGetTraitNames")
public void testAddTrait() throws Exception {
final String aGUID = getGUID();
......@@ -244,8 +241,7 @@ public class GraphBackedMetadataRepositoryTest {
Assert.assertTrue(traitNames.contains(CLASSIFICATION));
Assert.assertFalse(traitNames.contains(PII));
HierarchicalTypeDefinition<TraitType> piiTrait =
TypesUtil.createTraitTypeDef(PII, ImmutableList.<String>of());
HierarchicalTypeDefinition<TraitType> piiTrait = TypesUtil.createTraitTypeDef(PII, ImmutableList.<String>of());
TraitType traitType = typeSystem.defineTraitType(piiTrait);
ITypedStruct traitInstance = traitType.createInstance();
......@@ -258,13 +254,13 @@ public class GraphBackedMetadataRepositoryTest {
Assert.assertTrue(traitNames.contains(CLASSIFICATION));
}
@Test (dependsOnMethods = "testAddTrait")
@Test(dependsOnMethods = "testAddTrait")
public void testAddTraitWithAttribute() throws Exception {
final String aGUID = getGUID();
final String traitName = "P_I_I";
HierarchicalTypeDefinition<TraitType> piiTrait =
TypesUtil.createTraitTypeDef(traitName, ImmutableList.<String>of(),
HierarchicalTypeDefinition<TraitType> piiTrait = TypesUtil
.createTraitTypeDef(traitName, ImmutableList.<String>of(),
TypesUtil.createRequiredAttrDef("type", DataTypes.STRING_TYPE));
TraitType traitType = typeSystem.defineTraitType(piiTrait);
ITypedStruct traitInstance = traitType.createInstance();
......@@ -285,13 +281,13 @@ public class GraphBackedMetadataRepositoryTest {
Assert.assertEquals(type, "SSN");
}
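Note: the trait tests above all follow one lifecycle: define a trait type, instantiate it, set its attributes, and attach it to an entity by GUID. A condensed sketch of that flow; the trait name and value are illustrative, and the generic set(...) on the instance is assumed from the assertion in the test above:

HierarchicalTypeDefinition<TraitType> def = TypesUtil
        .createTraitTypeDef("Example_PII", ImmutableList.<String>of(),
                TypesUtil.createRequiredAttrDef("type", DataTypes.STRING_TYPE));
TraitType traitType = typeSystem.defineTraitType(def);
ITypedStruct traitInstance = traitType.createInstance();
traitInstance.set("type", "SSN");                 // assumed setter, mirroring the test
repositoryService.addTrait(getGUID(), traitInstance); // attach to an existing entity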
@Test (expectedExceptions = NullPointerException.class)
@Test(expectedExceptions = NullPointerException.class)
public void testAddTraitWithNullInstance() throws Exception {
repositoryService.addTrait(getGUID(), null);
Assert.fail();
}
@Test (dependsOnMethods = "testAddTrait", expectedExceptions = RepositoryException.class)
@Test(dependsOnMethods = "testAddTrait", expectedExceptions = RepositoryException.class)
public void testAddTraitForBadEntity() throws Exception {
TraitType traitType = typeSystem.getDataType(TraitType.class, PII);
ITypedStruct traitInstance = traitType.createInstance();
......@@ -300,7 +296,7 @@ public class GraphBackedMetadataRepositoryTest {
Assert.fail();
}
@Test (dependsOnMethods = "testAddTrait")
@Test(dependsOnMethods = "testAddTrait")
public void testDeleteTrait() throws Exception {
final String aGUID = getGUID();
......@@ -319,20 +315,20 @@ public class GraphBackedMetadataRepositoryTest {
Assert.assertFalse(traitNames.contains(PII));
}
@Test (expectedExceptions = RepositoryException.class)
@Test(expectedExceptions = RepositoryException.class)
public void testDeleteTraitForNonExistentEntity() throws Exception {
repositoryService.deleteTrait(UUID.randomUUID().toString(), PII);
Assert.fail();
}
@Test (expectedExceptions = RepositoryException.class)
@Test(expectedExceptions = RepositoryException.class)
public void testDeleteTraitForNonExistentTrait() throws Exception {
final String aGUID = getGUID();
repositoryService.deleteTrait(aGUID, "PCI");
Assert.fail();
}
@Test (dependsOnMethods = "testCreateEntity")
@Test(dependsOnMethods = "testCreateEntity")
public void testGetIdFromVertex() throws Exception {
Vertex tableVertex = getTableEntityVertex();
......@@ -341,12 +337,11 @@ public class GraphBackedMetadataRepositoryTest {
Assert.fail();
}
Id expected = new Id(guid,
tableVertex.<Integer>getProperty(Constants.VERSION_PROPERTY_KEY), TABLE_TYPE);
Id expected = new Id(guid, tableVertex.<Integer>getProperty(Constants.VERSION_PROPERTY_KEY), TABLE_TYPE);
Assert.assertEquals(repositoryService.getIdFromVertex(TABLE_TYPE, tableVertex), expected);
}
@Test (dependsOnMethods = "testCreateEntity")
@Test(dependsOnMethods = "testCreateEntity")
public void testGetTypeName() throws Exception {
Vertex tableVertex = getTableEntityVertex();
Assert.assertEquals(repositoryService.getTypeName(tableVertex), TABLE_TYPE);
......@@ -414,9 +409,8 @@ public class GraphBackedMetadataRepositoryTest {
@Test(dependsOnMethods = "testCreateEntity")
public void testBug37860() throws Exception {
String dslQuery =
"hive_table as t where name = 'bar' " +
"database where name = 'foo' and description = 'foo database' select t";
String dslQuery = "hive_table as t where name = 'bar' "
+ "database where name = 'foo' and description = 'foo database' select t";
System.out.println("Executing dslQuery = " + dslQuery);
String jsonResults = discoveryService.searchByDSL(dslQuery);
Assert.assertNotNull(jsonResults);
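Note: testBug37860 exercises the DSL search path end to end. A minimal sketch of the same call, with a simplified query string (illustrative, not the bug's reproducer):

String dsl = "hive_table as t where name = 'bar' select t";
String json = discoveryService.searchByDSL(dsl); // results come back serialized as JSON
Assert.assertNotNull(json);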
......@@ -475,114 +469,88 @@ public class GraphBackedMetadataRepositoryTest {
}
private void createHiveTypes() throws Exception {
HierarchicalTypeDefinition<ClassType> superTypeDefinition =
TypesUtil.createClassTypeDef(SUPER_TYPE_NAME,
ImmutableList.<String>of(),
HierarchicalTypeDefinition<ClassType> superTypeDefinition = TypesUtil
.createClassTypeDef(SUPER_TYPE_NAME, ImmutableList.<String>of(),
TypesUtil.createOptionalAttrDef("namespace", DataTypes.STRING_TYPE),
TypesUtil.createOptionalAttrDef("cluster", DataTypes.STRING_TYPE),
TypesUtil.createOptionalAttrDef("colo", DataTypes.STRING_TYPE));
HierarchicalTypeDefinition<ClassType> databaseTypeDefinition =
TypesUtil.createClassTypeDef(DATABASE_TYPE,
ImmutableList.of(SUPER_TYPE_NAME),
HierarchicalTypeDefinition<ClassType> databaseTypeDefinition = TypesUtil
.createClassTypeDef(DATABASE_TYPE, ImmutableList.of(SUPER_TYPE_NAME),
TypesUtil.createUniqueRequiredAttrDef("name", DataTypes.STRING_TYPE),
TypesUtil.createOptionalAttrDef("created", DataTypes.DATE_TYPE),
TypesUtil.createRequiredAttrDef("description", DataTypes.STRING_TYPE));
StructTypeDefinition structTypeDefinition =
new StructTypeDefinition("serdeType",
new AttributeDefinition[]{
TypesUtil.createRequiredAttrDef("name", DataTypes.STRING_TYPE),
TypesUtil.createRequiredAttrDef("serde", DataTypes.STRING_TYPE)
});
StructTypeDefinition structTypeDefinition = new StructTypeDefinition("serdeType",
new AttributeDefinition[]{TypesUtil.createRequiredAttrDef("name", DataTypes.STRING_TYPE),
TypesUtil.createRequiredAttrDef("serde", DataTypes.STRING_TYPE)});
EnumValue values[] = {
new EnumValue("MANAGED", 1),
new EnumValue("EXTERNAL", 2),
};
EnumValue values[] = {new EnumValue("MANAGED", 1), new EnumValue("EXTERNAL", 2),};
EnumTypeDefinition enumTypeDefinition = new EnumTypeDefinition("tableType", values);
typeSystem.defineEnumType(enumTypeDefinition);
HierarchicalTypeDefinition<ClassType> columnsDefinition =
TypesUtil.createClassTypeDef("column_type",
ImmutableList.<String>of(),
HierarchicalTypeDefinition<ClassType> columnsDefinition = TypesUtil
.createClassTypeDef("column_type", ImmutableList.<String>of(),
TypesUtil.createRequiredAttrDef("name", DataTypes.STRING_TYPE),
TypesUtil.createRequiredAttrDef("type", DataTypes.STRING_TYPE));
StructTypeDefinition partitionDefinition =
new StructTypeDefinition("partition_type",
new AttributeDefinition[]{
TypesUtil.createRequiredAttrDef("name", DataTypes.STRING_TYPE),
});
StructTypeDefinition partitionDefinition = new StructTypeDefinition("partition_type",
new AttributeDefinition[]{TypesUtil.createRequiredAttrDef("name", DataTypes.STRING_TYPE),});
HierarchicalTypeDefinition<ClassType> tableTypeDefinition =
TypesUtil.createClassTypeDef(TABLE_TYPE,
ImmutableList.of(SUPER_TYPE_NAME),
HierarchicalTypeDefinition<ClassType> tableTypeDefinition = TypesUtil
.createClassTypeDef(TABLE_TYPE, ImmutableList.of(SUPER_TYPE_NAME),
TypesUtil.createUniqueRequiredAttrDef("name", DataTypes.STRING_TYPE),
TypesUtil.createRequiredAttrDef("description", DataTypes.STRING_TYPE),
TypesUtil.createRequiredAttrDef("type", DataTypes.STRING_TYPE),
TypesUtil.createOptionalAttrDef("created", DataTypes.DATE_TYPE),
// enum
new AttributeDefinition("tableType", "tableType",
Multiplicity.REQUIRED, false, null),
new AttributeDefinition("tableType", "tableType", Multiplicity.REQUIRED, false, null),
// array of strings
new AttributeDefinition("columnNames",
String.format("array<%s>", DataTypes.STRING_TYPE.getName()),
Multiplicity.COLLECTION, false, null),
String.format("array<%s>", DataTypes.STRING_TYPE.getName()), Multiplicity.COLLECTION,
false, null),
// array of classes
new AttributeDefinition("columns",
String.format("array<%s>", "column_type"),
new AttributeDefinition("columns", String.format("array<%s>", "column_type"),
Multiplicity.COLLECTION, true, null),
// array of structs
new AttributeDefinition("partitions",
String.format("array<%s>", "partition_type"),
new AttributeDefinition("partitions", String.format("array<%s>", "partition_type"),
Multiplicity.COLLECTION, true, null),
// map of primitives
new AttributeDefinition("parametersMap",
DataTypes.mapTypeName(DataTypes.STRING_TYPE.getName(),
DataTypes.STRING_TYPE.getName()),
DataTypes.mapTypeName(DataTypes.STRING_TYPE.getName(), DataTypes.STRING_TYPE.getName()),
Multiplicity.COLLECTION, true, null),
// map of classes - todo - enable this
// new AttributeDefinition("columnsMap",
// DataTypes.mapTypeName(DataTypes.STRING_TYPE.getName(),
// "column_type"),
// Multiplicity.COLLECTION, true, null),
// new AttributeDefinition("columnsMap",
// DataTypes.mapTypeName(DataTypes.STRING_TYPE.getName(),
// "column_type"),
// Multiplicity.COLLECTION, true, null),
// map of structs todo - enable this
// new AttributeDefinition("partitionsMap",
// DataTypes.mapTypeName(DataTypes.STRING_TYPE.getName(),
// "partition_type"),
// Multiplicity.COLLECTION, true, null),
// new AttributeDefinition("partitionsMap",
// DataTypes.mapTypeName(DataTypes.STRING_TYPE.getName(),
// "partition_type"),
// Multiplicity.COLLECTION, true, null),
// struct reference
new AttributeDefinition("serde1",
"serdeType", Multiplicity.REQUIRED, false, null),
new AttributeDefinition("serde2",
"serdeType", Multiplicity.REQUIRED, false, null),
new AttributeDefinition("serde1", "serdeType", Multiplicity.REQUIRED, false, null),
new AttributeDefinition("serde2", "serdeType", Multiplicity.REQUIRED, false, null),
// class reference
new AttributeDefinition("database",
DATABASE_TYPE, Multiplicity.REQUIRED, true, null)
);
new AttributeDefinition("database", DATABASE_TYPE, Multiplicity.REQUIRED, true, null));
HierarchicalTypeDefinition<TraitType> classificationTypeDefinition =
TypesUtil.createTraitTypeDef(CLASSIFICATION,
ImmutableList.<String>of(),
HierarchicalTypeDefinition<TraitType> classificationTypeDefinition = TypesUtil
.createTraitTypeDef(CLASSIFICATION, ImmutableList.<String>of(),
TypesUtil.createRequiredAttrDef("tag", DataTypes.STRING_TYPE));
HierarchicalTypeDefinition<TraitType> fetlClassificationTypeDefinition =
TypesUtil.createTraitTypeDef("fetl" + CLASSIFICATION,
ImmutableList.of(CLASSIFICATION),
HierarchicalTypeDefinition<TraitType> fetlClassificationTypeDefinition = TypesUtil
.createTraitTypeDef("fetl" + CLASSIFICATION, ImmutableList.of(CLASSIFICATION),
TypesUtil.createRequiredAttrDef("tag", DataTypes.STRING_TYPE));
typeSystem.defineTypes(
ImmutableList.of(structTypeDefinition, partitionDefinition),
typeSystem.defineTypes(ImmutableList.of(structTypeDefinition, partitionDefinition),
ImmutableList.of(classificationTypeDefinition, fetlClassificationTypeDefinition),
ImmutableList.of(superTypeDefinition, databaseTypeDefinition,
columnsDefinition, tableTypeDefinition));
ImmutableList.of(superTypeDefinition, databaseTypeDefinition, columnsDefinition, tableTypeDefinition));
}
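Note: createHiveTypes leans heavily on the five-argument AttributeDefinition constructor. Reading the positional arguments off the calls above (the meaning of the boolean and the final argument is inferred from usage, not stated in this commit):

new AttributeDefinition(
        "columns",                                  // attribute name
        String.format("array<%s>", "column_type"),  // data type name
        Multiplicity.COLLECTION,                    // how many values are allowed
        true,                                       // owned/composite reference (inferred)
        null);                                      // reverse attribute name, when bidirectional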
private ITypedReferenceableInstance createHiveTableInstance(
Referenceable databaseInstance) throws Exception {
private ITypedReferenceableInstance createHiveTableInstance(Referenceable databaseInstance) throws Exception {
Referenceable tableInstance = new Referenceable(TABLE_TYPE, CLASSIFICATION);
tableInstance.set("name", TABLE_NAME);
tableInstance.set("description", "bar table");
......@@ -630,7 +598,7 @@ public class GraphBackedMetadataRepositoryTest {
tableInstance.set("columns", columns);
// tableInstance.set("columnsMap", columnsMap);
// HashMap<String, Struct> partitionsMap = new HashMap<>();
// HashMap<String, Struct> partitionsMap = new HashMap<>();
ArrayList<Struct> partitions = new ArrayList<>();
for (int index = 0; index < 5; index++) {
Struct partitionInstance = new Struct("partition_type");
......@@ -638,10 +606,10 @@ public class GraphBackedMetadataRepositoryTest {
partitionInstance.set("name", name);
partitions.add(partitionInstance);
// partitionsMap.put(name, partitionInstance);
// partitionsMap.put(name, partitionInstance);
}
tableInstance.set("partitions", partitions);
// tableInstance.set("partitionsMap", partitionsMap);
// tableInstance.set("partitionsMap", partitionsMap);
HashMap<String, String> parametersMap = new HashMap<>();
parametersMap.put("foo", "bar");
......
......@@ -22,10 +22,7 @@ import com.google.common.collect.ImmutableList;
import com.thinkaurelius.titan.core.TitanFactory;
import com.thinkaurelius.titan.core.TitanGraph;
import com.thinkaurelius.titan.core.TitanIndexQuery;
import com.thinkaurelius.titan.core.schema.TitanGraphIndex;
import com.thinkaurelius.titan.diskstorage.BackendException;
import com.thinkaurelius.titan.diskstorage.configuration.Configuration;
import com.thinkaurelius.titan.diskstorage.configuration.ModifiableConfiguration;
import com.thinkaurelius.titan.diskstorage.configuration.ReadConfiguration;
import com.thinkaurelius.titan.diskstorage.configuration.backend.CommonsConfiguration;
import com.thinkaurelius.titan.graphdb.configuration.GraphDatabaseConfiguration;
......@@ -33,7 +30,6 @@ import com.tinkerpop.blueprints.Compare;
import com.tinkerpop.blueprints.GraphQuery;
import com.tinkerpop.blueprints.Vertex;
import org.apache.atlas.GraphTransaction;
import org.apache.atlas.RepositoryMetadataModule;
import org.apache.atlas.repository.Constants;
import org.apache.atlas.typesystem.ITypedReferenceableInstance;
import org.apache.atlas.typesystem.Referenceable;
......@@ -54,14 +50,11 @@ import org.apache.atlas.typesystem.types.utils.TypesUtil;
import org.apache.commons.io.FileUtils;
import org.testng.annotations.AfterClass;
import org.testng.annotations.BeforeClass;
import org.testng.annotations.Guice;
import org.testng.annotations.Test;
import javax.inject.Inject;
import java.io.File;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Iterator;
import java.util.Map;
import java.util.Random;
......@@ -73,7 +66,8 @@ public class GraphRepoMapperScaleTest {
private static final String TABLE_TYPE = "hive_table_type";
private static final String TABLE_NAME = "bar";
private static final String INDEX_DIR = System.getProperty("java.io.tmpdir", "/tmp") + "/atlas-test" + new Random().nextLong();
private static final String INDEX_DIR =
System.getProperty("java.io.tmpdir", "/tmp") + "/atlas-test" + new Random().nextLong();
private GraphProvider<TitanGraph> graphProvider = new GraphProvider<TitanGraph>() {
......@@ -130,7 +124,7 @@ public class GraphRepoMapperScaleTest {
graphProvider.get().shutdown();
try {
FileUtils.deleteDirectory(new File(INDEX_DIR));
} catch(IOException ioe) {
} catch (IOException ioe) {
System.err.println("Failed to cleanup index directory");
}
}
......@@ -147,8 +141,7 @@ public class GraphRepoMapperScaleTest {
dbGUID = repositoryService.createEntity(db);
Referenceable dbInstance = new Referenceable(
dbGUID, DATABASE_TYPE, databaseInstance.getValuesMap());
Referenceable dbInstance = new Referenceable(dbGUID, DATABASE_TYPE, databaseInstance.getValuesMap());
for (int index = 0; index < 1000; index++) {
ITypedReferenceableInstance table = createHiveTableInstance(dbInstance, index);
......@@ -175,14 +168,13 @@ public class GraphRepoMapperScaleTest {
long start = System.currentTimeMillis();
int count = 0;
try {
GraphQuery query = graph.query()
.has(key, Compare.EQUAL, value);
GraphQuery query = graph.query().has(key, Compare.EQUAL, value);
for (Vertex ignored : query.vertices()) {
count++;
}
} finally {
System.out.println("Search on [" + key + "=" + value + "] returned results: " + count
+ ", took " + (System.currentTimeMillis() - start) + " ms");
System.out.println("Search on [" + key + "=" + value + "] returned results: " + count + ", took " + (
System.currentTimeMillis() - start) + " ms");
}
}
......@@ -197,80 +189,62 @@ public class GraphRepoMapperScaleTest {
count++;
}
} finally {
System.out.println("Search on [" + key + "=" + value + "] returned results: " + count
+ ", took " + (System.currentTimeMillis() - start) + " ms");
System.out.println("Search on [" + key + "=" + value + "] returned results: " + count + ", took " + (
System.currentTimeMillis() - start) + " ms");
}
}
private void createHiveTypes() throws Exception {
HierarchicalTypeDefinition<ClassType> databaseTypeDefinition =
TypesUtil.createClassTypeDef(DATABASE_TYPE,
ImmutableList.<String>of(),
HierarchicalTypeDefinition<ClassType> databaseTypeDefinition = TypesUtil
.createClassTypeDef(DATABASE_TYPE, ImmutableList.<String>of(),
TypesUtil.createUniqueRequiredAttrDef("name", DataTypes.STRING_TYPE),
TypesUtil.createRequiredAttrDef("description", DataTypes.STRING_TYPE));
StructTypeDefinition structTypeDefinition =
new StructTypeDefinition("hive_serde_type",
new AttributeDefinition[]{
TypesUtil.createRequiredAttrDef("name", DataTypes.STRING_TYPE),
TypesUtil.createRequiredAttrDef("serde", DataTypes.STRING_TYPE)
});
StructTypeDefinition structTypeDefinition = new StructTypeDefinition("hive_serde_type",
new AttributeDefinition[]{TypesUtil.createRequiredAttrDef("name", DataTypes.STRING_TYPE),
TypesUtil.createRequiredAttrDef("serde", DataTypes.STRING_TYPE)});
EnumValue values[] = {
new EnumValue("MANAGED", 1),
new EnumValue("EXTERNAL", 2),
};
EnumValue values[] = {new EnumValue("MANAGED", 1), new EnumValue("EXTERNAL", 2),};
EnumTypeDefinition enumTypeDefinition = new EnumTypeDefinition("table_type", values);
final EnumType enumType = typeSystem.defineEnumType(enumTypeDefinition);
HierarchicalTypeDefinition<ClassType> columnsDefinition =
TypesUtil.createClassTypeDef("hive_column_type",
ImmutableList.<String>of(),
HierarchicalTypeDefinition<ClassType> columnsDefinition = TypesUtil
.createClassTypeDef("hive_column_type", ImmutableList.<String>of(),
TypesUtil.createRequiredAttrDef("name", DataTypes.STRING_TYPE),
TypesUtil.createRequiredAttrDef("type", DataTypes.STRING_TYPE));
StructTypeDefinition partitionDefinition =
new StructTypeDefinition("hive_partition_type",
new AttributeDefinition[]{
TypesUtil.createRequiredAttrDef("name", DataTypes.STRING_TYPE),
});
StructTypeDefinition partitionDefinition = new StructTypeDefinition("hive_partition_type",
new AttributeDefinition[]{TypesUtil.createRequiredAttrDef("name", DataTypes.STRING_TYPE),});
HierarchicalTypeDefinition<ClassType> tableTypeDefinition =
TypesUtil.createClassTypeDef(TABLE_TYPE,
ImmutableList.<String>of(),
HierarchicalTypeDefinition<ClassType> tableTypeDefinition = TypesUtil
.createClassTypeDef(TABLE_TYPE, ImmutableList.<String>of(),
TypesUtil.createUniqueRequiredAttrDef("name", DataTypes.STRING_TYPE),
TypesUtil.createRequiredAttrDef("description", DataTypes.STRING_TYPE),
TypesUtil.createRequiredAttrDef("type", DataTypes.STRING_TYPE),
// enum
new AttributeDefinition("tableType", "table_type",
Multiplicity.REQUIRED, false, null),
new AttributeDefinition("tableType", "table_type", Multiplicity.REQUIRED, false, null),
// array of strings
new AttributeDefinition("columnNames",
String.format("array<%s>", DataTypes.STRING_TYPE.getName()),
Multiplicity.COLLECTION, false, null),
String.format("array<%s>", DataTypes.STRING_TYPE.getName()), Multiplicity.COLLECTION,
false, null),
// array of classes
new AttributeDefinition("columns",
String.format("array<%s>", "hive_column_type"),
new AttributeDefinition("columns", String.format("array<%s>", "hive_column_type"),
Multiplicity.COLLECTION, true, null),
// array of structs
new AttributeDefinition("partitions",
String.format("array<%s>", "hive_partition_type"),
new AttributeDefinition("partitions", String.format("array<%s>", "hive_partition_type"),
Multiplicity.COLLECTION, true, null),
// struct reference
new AttributeDefinition("serde1",
"hive_serde_type", Multiplicity.REQUIRED, false, null),
new AttributeDefinition("serde2",
"hive_serde_type", Multiplicity.REQUIRED, false, null),
new AttributeDefinition("serde1", "hive_serde_type", Multiplicity.REQUIRED, false, null),
new AttributeDefinition("serde2", "hive_serde_type", Multiplicity.REQUIRED, false, null),
// class reference
new AttributeDefinition("database",
DATABASE_TYPE, Multiplicity.REQUIRED, true, null));
new AttributeDefinition("database", DATABASE_TYPE, Multiplicity.REQUIRED, true, null));
HierarchicalTypeDefinition<TraitType> classificationTypeDefinition =
TypesUtil.createTraitTypeDef("pii_type", ImmutableList.<String>of());
Map<String, IDataType> types = typeSystem.defineTypes(
ImmutableList.of(structTypeDefinition, partitionDefinition),
Map<String, IDataType> types = typeSystem
.defineTypes(ImmutableList.of(structTypeDefinition, partitionDefinition),
ImmutableList.of(classificationTypeDefinition),
ImmutableList.of(databaseTypeDefinition, columnsDefinition, tableTypeDefinition));
......@@ -281,8 +255,8 @@ public class GraphRepoMapperScaleTest {
searchIndexer.onAdd(typesAdded);
}
private ITypedReferenceableInstance createHiveTableInstance(
Referenceable databaseInstance, int uberIndex) throws Exception {
private ITypedReferenceableInstance createHiveTableInstance(Referenceable databaseInstance, int uberIndex)
throws Exception {
Referenceable tableInstance = new Referenceable(TABLE_TYPE, "pii_type");
tableInstance.set("name", TABLE_NAME + "-" + uberIndex);
......
......@@ -55,26 +55,16 @@ public class EnumTest extends BaseTest {
}
void defineEnums(TypeSystem ts) throws AtlasException {
ts.defineEnumType("HiveObjectType",
new EnumValue("GLOBAL", 1),
new EnumValue("DATABASE", 2),
new EnumValue("TABLE", 3),
new EnumValue("PARTITION", 4),
new EnumValue("COLUMN", 5));
ts.defineEnumType("HiveObjectType", new EnumValue("GLOBAL", 1), new EnumValue("DATABASE", 2),
new EnumValue("TABLE", 3), new EnumValue("PARTITION", 4), new EnumValue("COLUMN", 5));
ts.defineEnumType("PrincipalType",
new EnumValue("USER", 1),
new EnumValue("ROLE", 2),
ts.defineEnumType("PrincipalType", new EnumValue("USER", 1), new EnumValue("ROLE", 2),
new EnumValue("GROUP", 3));
ts.defineEnumType("TxnState",
new EnumValue("COMMITTED", 1),
new EnumValue("ABORTED", 2),
ts.defineEnumType("TxnState", new EnumValue("COMMITTED", 1), new EnumValue("ABORTED", 2),
new EnumValue("OPEN", 3));
ts.defineEnumType("LockLevel",
new EnumValue("DB", 1),
new EnumValue("TABLE", 2),
ts.defineEnumType("LockLevel", new EnumValue("DB", 1), new EnumValue("TABLE", 2),
new EnumValue("PARTITION", 3));
}
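Note: each defineEnumType call above registers a named enum whose EnumValue pairs carry an explicit symbol and ordinal. A sketch of defining and then resolving one; "ExampleState" is hypothetical and the fromValue lookup is assumed from the EnumType API:

ts.defineEnumType("ExampleState", new EnumValue("ACTIVE", 1), new EnumValue("RETIRED", 2));
EnumType stateType = ts.getDataType(EnumType.class, "ExampleState");
EnumValue active = stateType.fromValue("ACTIVE"); // assumed lookup by symbol name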
......@@ -93,8 +83,7 @@ public class EnumTest extends BaseTest {
s.set("k", new BigDecimal(1));
s.set("l", new Date(1418265358440L));
s.set("m", Lists.asList(1, new Integer[]{1}));
s.set("n",
Lists.asList(BigDecimal.valueOf(1.1), new BigDecimal[]{BigDecimal.valueOf(1.1)}));
s.set("n", Lists.asList(BigDecimal.valueOf(1.1), new BigDecimal[]{BigDecimal.valueOf(1.1)}));
Map<String, Double> hm = Maps.newHashMap();
hm.put("a", 1.0);
hm.put("b", 2.0);
......@@ -118,65 +107,54 @@ public class EnumTest extends BaseTest {
}
protected ClassType defineClassTypeWithEnum(TypeSystem ts) throws AtlasException {
return ts.defineClassType(TypesUtil.createClassTypeDef("t4",
ImmutableList.<String>of(),
return ts.defineClassType(TypesUtil.createClassTypeDef("t4", ImmutableList.<String>of(),
TypesUtil.createRequiredAttrDef("a", DataTypes.INT_TYPE),
TypesUtil.createOptionalAttrDef("b", DataTypes.BOOLEAN_TYPE),
TypesUtil.createOptionalAttrDef("c", DataTypes.BYTE_TYPE),
TypesUtil.createOptionalAttrDef("d", DataTypes.SHORT_TYPE),
TypesUtil.createOptionalAttrDef("enum1",
ts.getDataType(EnumType.class, "HiveObjectType")),
TypesUtil.createOptionalAttrDef("enum1", ts.getDataType(EnumType.class, "HiveObjectType")),
TypesUtil.createOptionalAttrDef("e", DataTypes.INT_TYPE),
TypesUtil.createOptionalAttrDef("f", DataTypes.INT_TYPE),
TypesUtil.createOptionalAttrDef("g", DataTypes.LONG_TYPE),
TypesUtil.createOptionalAttrDef("enum2",
ts.getDataType(EnumType.class, "PrincipalType")),
TypesUtil.createOptionalAttrDef("enum2", ts.getDataType(EnumType.class, "PrincipalType")),
TypesUtil.createOptionalAttrDef("h", DataTypes.FLOAT_TYPE),
TypesUtil.createOptionalAttrDef("i", DataTypes.DOUBLE_TYPE),
TypesUtil.createOptionalAttrDef("j", DataTypes.BIGINTEGER_TYPE),
TypesUtil.createOptionalAttrDef("k", DataTypes.BIGDECIMAL_TYPE),
TypesUtil
.createOptionalAttrDef("enum3", ts.getDataType(EnumType.class, "TxnState")),
TypesUtil.createOptionalAttrDef("enum3", ts.getDataType(EnumType.class, "TxnState")),
TypesUtil.createOptionalAttrDef("l", DataTypes.DATE_TYPE),
TypesUtil.createOptionalAttrDef("m", ts.defineArrayType(DataTypes.INT_TYPE)),
TypesUtil.createOptionalAttrDef("n", ts.defineArrayType(DataTypes.BIGDECIMAL_TYPE)),
TypesUtil.createOptionalAttrDef("o",
ts.defineMapType(DataTypes.STRING_TYPE, DataTypes.DOUBLE_TYPE)),
TypesUtil.createOptionalAttrDef("enum4",
ts.getDataType(EnumType.class, "LockLevel"))));
TypesUtil.createOptionalAttrDef("o", ts.defineMapType(DataTypes.STRING_TYPE, DataTypes.DOUBLE_TYPE)),
TypesUtil.createOptionalAttrDef("enum4", ts.getDataType(EnumType.class, "LockLevel"))));
}
@Test
public void testStruct() throws AtlasException {
TypeSystem ts = getTypeSystem();
defineEnums(ts);
StructType structType = ts.defineStructType("t3",
true,
TypesUtil.createRequiredAttrDef("a", DataTypes.INT_TYPE),
StructType structType =
ts.defineStructType("t3", true, TypesUtil.createRequiredAttrDef("a", DataTypes.INT_TYPE),
TypesUtil.createOptionalAttrDef("b", DataTypes.BOOLEAN_TYPE),
TypesUtil.createOptionalAttrDef("c", DataTypes.BYTE_TYPE),
TypesUtil.createOptionalAttrDef("d", DataTypes.SHORT_TYPE),
TypesUtil.createOptionalAttrDef("enum1",
ts.getDataType(EnumType.class, "HiveObjectType")),
TypesUtil.createOptionalAttrDef("enum1", ts.getDataType(EnumType.class, "HiveObjectType")),
TypesUtil.createOptionalAttrDef("e", DataTypes.INT_TYPE),
TypesUtil.createOptionalAttrDef("f", DataTypes.INT_TYPE),
TypesUtil.createOptionalAttrDef("g", DataTypes.LONG_TYPE),
TypesUtil.createOptionalAttrDef("enum2",
ts.getDataType(EnumType.class, "PrincipalType")),
TypesUtil.createOptionalAttrDef("enum2", ts.getDataType(EnumType.class, "PrincipalType")),
TypesUtil.createOptionalAttrDef("h", DataTypes.FLOAT_TYPE),
TypesUtil.createOptionalAttrDef("i", DataTypes.DOUBLE_TYPE),
TypesUtil.createOptionalAttrDef("j", DataTypes.BIGINTEGER_TYPE),
TypesUtil.createOptionalAttrDef("k", DataTypes.BIGDECIMAL_TYPE),
TypesUtil
.createOptionalAttrDef("enum3", ts.getDataType(EnumType.class, "TxnState")),
TypesUtil.createOptionalAttrDef("enum3", ts.getDataType(EnumType.class, "TxnState")),
TypesUtil.createOptionalAttrDef("l", DataTypes.DATE_TYPE),
TypesUtil.createOptionalAttrDef("m", ts.defineArrayType(DataTypes.INT_TYPE)),
TypesUtil.createOptionalAttrDef("n", ts.defineArrayType(DataTypes.BIGDECIMAL_TYPE)),
TypesUtil.createOptionalAttrDef("o",
TypesUtil.createOptionalAttrDef("n", ts.defineArrayType(DataTypes.BIGDECIMAL_TYPE)), TypesUtil
.createOptionalAttrDef("o",
ts.defineMapType(DataTypes.STRING_TYPE, DataTypes.DOUBLE_TYPE)),
TypesUtil.createOptionalAttrDef("enum4",
ts.getDataType(EnumType.class, "LockLevel")));
TypesUtil.createOptionalAttrDef("enum4", ts.getDataType(EnumType.class, "LockLevel")));
Struct s = createStructWithEnum("t3");
ITypedStruct typedS = structType.convert(s, Multiplicity.REQUIRED);
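Note: testStruct pairs an untyped Struct with StructType.convert, which validates the raw values against the definition and returns a typed instance. The pattern in miniature:

Struct raw = new Struct("t3"); // untyped name/value bag
raw.set("a", 1);               // "a" is the only required attribute of t3
ITypedStruct typed = structType.convert(raw, Multiplicity.REQUIRED); // validate and type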
......
......@@ -44,43 +44,36 @@ import java.util.List;
public class InstanceE2ETest extends BaseTest {
protected List<HierarchicalTypeDefinition> createHiveTypes(TypeSystem typeSystem)
throws AtlasException {
protected List<HierarchicalTypeDefinition> createHiveTypes(TypeSystem typeSystem) throws AtlasException {
ArrayList<HierarchicalTypeDefinition> typeDefinitions = new ArrayList<>();
HierarchicalTypeDefinition<ClassType> databaseTypeDefinition =
TypesUtil.createClassTypeDef("hive_database",
ImmutableList.<String>of(),
HierarchicalTypeDefinition<ClassType> databaseTypeDefinition = TypesUtil
.createClassTypeDef("hive_database", ImmutableList.<String>of(),
TypesUtil.createRequiredAttrDef("name", DataTypes.STRING_TYPE),
TypesUtil.createRequiredAttrDef("description", DataTypes.STRING_TYPE));
typeDefinitions.add(databaseTypeDefinition);
HierarchicalTypeDefinition<ClassType> tableTypeDefinition = TypesUtil.createClassTypeDef(
"hive_table",
ImmutableList.<String>of(),
HierarchicalTypeDefinition<ClassType> tableTypeDefinition = TypesUtil
.createClassTypeDef("hive_table", ImmutableList.<String>of(),
TypesUtil.createRequiredAttrDef("name", DataTypes.STRING_TYPE),
TypesUtil.createRequiredAttrDef("description", DataTypes.STRING_TYPE),
TypesUtil.createRequiredAttrDef("type", DataTypes.STRING_TYPE),
new AttributeDefinition("hive_database",
"hive_database", Multiplicity.REQUIRED, false, "hive_database"));
new AttributeDefinition("hive_database", "hive_database", Multiplicity.REQUIRED, false,
"hive_database"));
typeDefinitions.add(tableTypeDefinition);
HierarchicalTypeDefinition<TraitType> fetlTypeDefinition = TypesUtil.createTraitTypeDef(
"hive_fetl",
ImmutableList.<String>of(),
HierarchicalTypeDefinition<TraitType> fetlTypeDefinition = TypesUtil
.createTraitTypeDef("hive_fetl", ImmutableList.<String>of(),
TypesUtil.createRequiredAttrDef("level", DataTypes.INT_TYPE));
typeDefinitions.add(fetlTypeDefinition);
typeSystem.defineTypes(
ImmutableList.<StructTypeDefinition>of(),
ImmutableList.of(fetlTypeDefinition),
typeSystem.defineTypes(ImmutableList.<StructTypeDefinition>of(), ImmutableList.of(fetlTypeDefinition),
ImmutableList.of(databaseTypeDefinition, tableTypeDefinition));
return typeDefinitions;
}
protected Referenceable createHiveTableReferenceable()
throws AtlasException {
protected Referenceable createHiveTableReferenceable() throws AtlasException {
Referenceable databaseInstance = new Referenceable("hive_database");
databaseInstance.set("name", "hive_database");
databaseInstance.set("description", "foo database");
......@@ -99,8 +92,7 @@ public class InstanceE2ETest extends BaseTest {
return tableInstance;
}
protected ITypedReferenceableInstance createHiveTableInstance(TypeSystem typeSystem)
throws AtlasException {
protected ITypedReferenceableInstance createHiveTableInstance(TypeSystem typeSystem) throws AtlasException {
ClassType tableType = typeSystem.getDataType(ClassType.class, "hive_table");
return tableType.convert(createHiveTableReferenceable(), Multiplicity.REQUIRED);
}
......@@ -112,8 +104,7 @@ public class InstanceE2ETest extends BaseTest {
createHiveTypes(ts);
String jsonStr = TypesSerialization$.MODULE$
.toJson(ts, ImmutableList.of("hive_database", "hive_table"));
String jsonStr = TypesSerialization$.MODULE$.toJson(ts, ImmutableList.of("hive_database", "hive_table"));
System.out.println(jsonStr);
TypesDef typesDef1 = TypesSerialization$.MODULE$.fromJson(jsonStr);
......@@ -121,8 +112,7 @@ public class InstanceE2ETest extends BaseTest {
ts.reset();
ts.defineTypes(typesDef1);
jsonStr = TypesSerialization$.MODULE$
.toJson(ts, ImmutableList.of("hive_database", "hive_table"));
jsonStr = TypesSerialization$.MODULE$.toJson(ts, ImmutableList.of("hive_database", "hive_table"));
System.out.println(jsonStr);
}
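Note: the serialization test round-trips type definitions through JSON: serialize a subset of registered types, reset the type system, and re-register from the parsed TypesDef. The essential loop, as used above:

String json = TypesSerialization$.MODULE$.toJson(ts, ImmutableList.of("hive_database", "hive_table"));
TypesDef typesDef = TypesSerialization$.MODULE$.fromJson(json);
ts.reset();
ts.defineTypes(typesDef); // the types survive the round trip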
......
......@@ -38,8 +38,7 @@ public class StructTest extends BaseTest {
public void setup() throws Exception {
super.setup();
structType = (StructType) getTypeSystem().getDataType(StructType.class, STRUCT_TYPE_1);
recursiveStructType = (StructType) getTypeSystem()
.getDataType(StructType.class, STRUCT_TYPE_2);
recursiveStructType = (StructType) getTypeSystem().getDataType(StructType.class, STRUCT_TYPE_2);
}
@Test
......
......@@ -65,15 +65,13 @@ public class TraitTest extends BaseTest {
*/
@Test
public void test1() throws AtlasException {
HierarchicalTypeDefinition A = createTraitTypeDef("A", null,
createRequiredAttrDef("a", DataTypes.INT_TYPE),
createOptionalAttrDef("b", DataTypes.BOOLEAN_TYPE),
createOptionalAttrDef("c", DataTypes.BYTE_TYPE),
HierarchicalTypeDefinition A = createTraitTypeDef("A", null, createRequiredAttrDef("a", DataTypes.INT_TYPE),
createOptionalAttrDef("b", DataTypes.BOOLEAN_TYPE), createOptionalAttrDef("c", DataTypes.BYTE_TYPE),
createOptionalAttrDef("d", DataTypes.SHORT_TYPE));
HierarchicalTypeDefinition B = createTraitTypeDef("B", ImmutableList.<String>of("A"),
createOptionalAttrDef("b", DataTypes.BOOLEAN_TYPE));
HierarchicalTypeDefinition C = createTraitTypeDef("C", ImmutableList.<String>of("A"),
createOptionalAttrDef("c", DataTypes.BYTE_TYPE));
HierarchicalTypeDefinition C =
createTraitTypeDef("C", ImmutableList.<String>of("A"), createOptionalAttrDef("c", DataTypes.BYTE_TYPE));
HierarchicalTypeDefinition D = createTraitTypeDef("D", ImmutableList.<String>of("B", "C"),
createOptionalAttrDef("d", DataTypes.SHORT_TYPE));
......@@ -155,15 +153,13 @@ public class TraitTest extends BaseTest {
@Test
public void testRandomOrder() throws AtlasException {
HierarchicalTypeDefinition A = createTraitTypeDef("A", null,
createRequiredAttrDef("a", DataTypes.INT_TYPE),
createOptionalAttrDef("b", DataTypes.BOOLEAN_TYPE),
createOptionalAttrDef("c", DataTypes.BYTE_TYPE),
HierarchicalTypeDefinition A = createTraitTypeDef("A", null, createRequiredAttrDef("a", DataTypes.INT_TYPE),
createOptionalAttrDef("b", DataTypes.BOOLEAN_TYPE), createOptionalAttrDef("c", DataTypes.BYTE_TYPE),
createOptionalAttrDef("d", DataTypes.SHORT_TYPE));
HierarchicalTypeDefinition B = createTraitTypeDef("B", ImmutableList.<String>of("A"),
createOptionalAttrDef("b", DataTypes.BOOLEAN_TYPE));
HierarchicalTypeDefinition C = createTraitTypeDef("C", ImmutableList.<String>of("A"),
createOptionalAttrDef("c", DataTypes.BYTE_TYPE));
HierarchicalTypeDefinition C =
createTraitTypeDef("C", ImmutableList.<String>of("A"), createOptionalAttrDef("c", DataTypes.BYTE_TYPE));
HierarchicalTypeDefinition D = createTraitTypeDef("D", ImmutableList.<String>of("B", "C"),
createOptionalAttrDef("d", DataTypes.SHORT_TYPE));
......
......@@ -23,8 +23,8 @@ import com.tinkerpop.blueprints.Direction;
import com.tinkerpop.blueprints.Edge;
import com.tinkerpop.blueprints.Vertex;
import junit.framework.Assert;
import org.apache.atlas.GraphTransaction;
import org.apache.atlas.AtlasException;
import org.apache.atlas.GraphTransaction;
import org.apache.atlas.RepositoryMetadataModule;
import org.apache.atlas.TestUtils;
import org.apache.atlas.repository.graph.GraphHelper;
......@@ -80,7 +80,7 @@ public class GraphBackedTypeStoreTest {
}
}
@Test (dependsOnMethods = "testStore")
@Test(dependsOnMethods = "testStore")
public void testRestore() throws Exception {
TypesDef types = typeStore.restore();
......
......@@ -67,8 +67,7 @@ public class Referenceable extends Struct implements IReferenceableInstance {
* @param values
*/
@InterfaceAudience.Private
public Referenceable(String guid, String typeName, Map<String, Object> values,
List<String> _traitNames,
public Referenceable(String guid, String typeName, Map<String, Object> values, List<String> _traitNames,
Map<String, IStruct> _traits) {
super(typeName, values);
id = new Id(guid, 0, typeName);
......
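Note: two Referenceable constructors appear in this commit: one for a not-yet-persisted instance identified only by its type (plus optional trait names), and one that rewraps known values under a server-assigned GUID. Side by side, with illustrative arguments:

Referenceable pending = new Referenceable("hive_table", "hive_fetl");                   // unassigned id, one trait
Referenceable stored = new Referenceable(guid, "hive_table", pending.getValuesMap());   // known guid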
......@@ -31,8 +31,7 @@ public class DownCastStructInstance implements IStruct {
public final DownCastFieldMapping fieldMapping;
public final IStruct backingInstance;
public DownCastStructInstance(String typeName, DownCastFieldMapping fieldMapping,
IStruct backingInstance) {
public DownCastStructInstance(String typeName, DownCastFieldMapping fieldMapping, IStruct backingInstance) {
this.typeName = typeName;
this.fieldMapping = fieldMapping;
this.backingInstance = backingInstance;
......@@ -60,7 +59,7 @@ public class DownCastStructInstance implements IStruct {
@Override
public Map<String, Object> getValuesMap() throws AtlasException {
Map<String,Object> m = new HashMap<>();
Map<String, Object> m = new HashMap<>();
for (String attr : fieldMapping.fieldNameMap.keySet()) {
m.put(attr, get(attr));
}
......
......@@ -70,8 +70,7 @@ public class Id implements ITypedReferenceableInstance {
}
public String toString() {
return String
.format("(type: %s, id: %s)", className, isUnassigned() ? "<unassigned>" : "" + id);
return String.format("(type: %s, id: %s)", className, isUnassigned() ? "<unassigned>" : "" + id);
}
public String getClassName() {
......@@ -88,14 +87,24 @@ public class Id implements ITypedReferenceableInstance {
@Override
public boolean equals(Object o) {
if (this == o) return true;
if (o == null || getClass() != o.getClass()) return false;
if (this == o) {
return true;
}
if (o == null || getClass() != o.getClass()) {
return false;
}
Id id1 = (Id) o;
if (version != id1.version) return false;
if (!className.equals(id1.className)) return false;
if (!id.equals(id1.id)) return false;
if (version != id1.version) {
return false;
}
if (!className.equals(id1.className)) {
return false;
}
if (!id.equals(id1.id)) {
return false;
}
return true;
}
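Note: Id.equals compares id, className, and version, so any hashCode must combine the same three fields to keep the equals/hashCode contract. A sketch of what that would look like; this commit does not show the class's actual hashCode:

@Override
public int hashCode() {
    int result = id.hashCode();
    result = 31 * result + className.hashCode();
    result = 31 * result + version; // version assumed to be an int, per new Id(guid, 0, typeName)
    return result;
}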
......
......@@ -59,8 +59,7 @@ public class MapIds implements ObjectGraphWalker.NodeProcessor {
}
} else if (nd.aInfo.dataType().getTypeCategory() == DataTypes.TypeCategory.ARRAY) {
DataTypes.ArrayType aT = (DataTypes.ArrayType) nd.aInfo.dataType();
Object v = aT
.mapIds((ImmutableCollection) nd.value, nd.aInfo.multiplicity, idToNewIdMap);
Object v = aT.mapIds((ImmutableCollection) nd.value, nd.aInfo.multiplicity, idToNewIdMap);
nd.instance.set(nd.attributeName, v);
} else if (nd.aInfo.dataType().getTypeCategory() == DataTypes.TypeCategory.MAP) {
DataTypes.MapType mT = (DataTypes.MapType) nd.aInfo.dataType();
......
......@@ -40,20 +40,12 @@ public class ReferenceableInstance extends StructInstance implements ITypedRefer
private Id id;
public ReferenceableInstance(Id id, String dataTypeName, FieldMapping fieldMapping,
boolean[] nullFlags,
boolean[] bools, byte[] bytes, short[] shorts, int[] ints,
long[] longs,
float[] floats, double[] doubles, BigDecimal[] bigDecimals,
BigInteger[] bigIntegers, Date[] dates, String[] strings,
ImmutableList<Object>[] arrays,
ImmutableMap<Object, Object>[] maps,
StructInstance[] structs,
ReferenceableInstance[] referenceableInstances,
Id[] ids,
ImmutableMap<String, ITypedStruct> traits) {
super(dataTypeName, fieldMapping, nullFlags, bools, bytes, shorts, ints, longs, floats,
doubles, bigDecimals,
public ReferenceableInstance(Id id, String dataTypeName, FieldMapping fieldMapping, boolean[] nullFlags,
boolean[] bools, byte[] bytes, short[] shorts, int[] ints, long[] longs, float[] floats, double[] doubles,
BigDecimal[] bigDecimals, BigInteger[] bigIntegers, Date[] dates, String[] strings,
ImmutableList<Object>[] arrays, ImmutableMap<Object, Object>[] maps, StructInstance[] structs,
ReferenceableInstance[] referenceableInstances, Id[] ids, ImmutableMap<String, ITypedStruct> traits) {
super(dataTypeName, fieldMapping, nullFlags, bools, bytes, shorts, ints, longs, floats, doubles, bigDecimals,
bigIntegers, dates, strings, arrays, maps, structs, referenceableInstances, ids);
this.id = id;
this.traits = traits;
......
......@@ -60,15 +60,11 @@ public class StructInstance implements ITypedStruct {
public final ReferenceableInstance[] referenceables;
public final Id[] ids;
public StructInstance(String dataTypeName, FieldMapping fieldMapping,
boolean[] nullFlags, boolean[] bools, byte[] bytes, short[] shorts,
int[] ints,
long[] longs, float[] floats, double[] doubles,
BigDecimal[] bigDecimals, BigInteger[] bigIntegers, Date[] dates,
String[] strings,
ImmutableList<Object>[] arrays, ImmutableMap<Object, Object>[] maps,
StructInstance[] structs, ReferenceableInstance[] referenceables,
Id[] ids) {
public StructInstance(String dataTypeName, FieldMapping fieldMapping, boolean[] nullFlags, boolean[] bools,
byte[] bytes, short[] shorts, int[] ints, long[] longs, float[] floats, double[] doubles,
BigDecimal[] bigDecimals, BigInteger[] bigIntegers, Date[] dates, String[] strings,
ImmutableList<Object>[] arrays, ImmutableMap<Object, Object>[] maps, StructInstance[] structs,
ReferenceableInstance[] referenceables, Id[] ids) {
assert dataTypeName != null;
this.dataTypeName = dataTypeName;
this.fieldMapping = fieldMapping;
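Note: StructInstance stores every attribute value in a per-category array (bools, bytes, ..., arrays, maps, structs, referenceables, ids) indexed through fieldMapping.fieldPos, with nulls tracked separately in nullFlags via fieldNullPos. That layout is what the typed accessors below guard. A usage sketch, assuming StructType offers createInstance() like TraitType does and that the type declares a boolean field "b":

ITypedStruct s = structType.createInstance(); // hypothetical struct type with boolean field "b"
s.setBoolean("b", true);                      // typed write into the bools[] slot
boolean flag = s.getBoolean("b");             // typed read; AtlasException on type mismatch
Object any = s.get("b");                      // generic accessor works for every declared field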
......@@ -115,8 +111,7 @@ public class StructInstance implements ITypedStruct {
Object cVal = null;
if (val != null && val instanceof Id) {
ClassType clsType =
TypeSystem.getInstance().getDataType(ClassType.class, i.dataType().getName());
ClassType clsType = TypeSystem.getInstance().getDataType(ClassType.class, i.dataType().getName());
clsType.validateId((Id) val);
cVal = val;
} else {
......@@ -155,8 +150,8 @@ public class StructInstance implements ITypedStruct {
arrays[pos] = (ImmutableList) cVal;
} else if (i.dataType().getTypeCategory() == DataTypes.TypeCategory.MAP) {
maps[pos] = (ImmutableMap) cVal;
} else if (i.dataType().getTypeCategory() == DataTypes.TypeCategory.STRUCT ||
i.dataType().getTypeCategory() == DataTypes.TypeCategory.TRAIT) {
} else if (i.dataType().getTypeCategory() == DataTypes.TypeCategory.STRUCT
|| i.dataType().getTypeCategory() == DataTypes.TypeCategory.TRAIT) {
structs[pos] = (StructInstance) cVal;
} else if (i.dataType().getTypeCategory() == DataTypes.TypeCategory.CLASS) {
if (cVal instanceof Id) {
......@@ -172,8 +167,7 @@ public class StructInstance implements ITypedStruct {
public Object get(String attrName) throws AtlasException {
AttributeInfo i = fieldMapping.fields.get(attrName);
if (i == null) {
throw new AtlasException(
String.format("Unknown field %s for Struct %s", attrName, getTypeName()));
throw new AtlasException(String.format("Unknown field %s for Struct %s", attrName, getTypeName()));
}
int pos = fieldMapping.fieldPos.get(attrName);
int nullPos = fieldMapping.fieldNullPos.get(attrName);
......@@ -210,8 +204,8 @@ public class StructInstance implements ITypedStruct {
return arrays[pos];
} else if (i.dataType().getTypeCategory() == DataTypes.TypeCategory.MAP) {
return maps[pos];
} else if (i.dataType().getTypeCategory() == DataTypes.TypeCategory.STRUCT ||
i.dataType().getTypeCategory() == DataTypes.TypeCategory.TRAIT) {
} else if (i.dataType().getTypeCategory() == DataTypes.TypeCategory.STRUCT
|| i.dataType().getTypeCategory() == DataTypes.TypeCategory.TRAIT) {
return structs[pos];
} else if (i.dataType().getTypeCategory() == DataTypes.TypeCategory.CLASS) {
if (ids[pos] != null) {
......@@ -227,8 +221,7 @@ public class StructInstance implements ITypedStruct {
public void setNull(String attrName) throws AtlasException {
AttributeInfo i = fieldMapping.fields.get(attrName);
if (i == null) {
throw new AtlasException(
String.format("Unknown field %s for Struct %s", attrName, getTypeName()));
throw new AtlasException(String.format("Unknown field %s for Struct %s", attrName, getTypeName()));
}
int nullPos = fieldMapping.fieldNullPos.get(attrName);
nullFlags[nullPos] = true;
......@@ -241,7 +234,7 @@ public class StructInstance implements ITypedStruct {
@Override
public Map<String, Object> getValuesMap() throws AtlasException {
Map<String,Object> m = new HashMap<>();
Map<String, Object> m = new HashMap<>();
for (String attr : fieldMapping.fields.keySet()) {
m.put(attr, get(attr));
}
......@@ -251,14 +244,13 @@ public class StructInstance implements ITypedStruct {
public boolean getBoolean(String attrName) throws AtlasException {
AttributeInfo i = fieldMapping.fields.get(attrName);
if (i == null) {
throw new AtlasException(
String.format("Unknown field %s for Struct %s", attrName, getTypeName()));
throw new AtlasException(String.format("Unknown field %s for Struct %s", attrName, getTypeName()));
}
if (i.dataType() != DataTypes.BOOLEAN_TYPE) {
throw new AtlasException(
String.format("Field %s for Struct %s is not a %s, call generic get method",
attrName, getTypeName(), DataTypes.BOOLEAN_TYPE.getName()));
String.format("Field %s for Struct %s is not a %s, call generic get method", attrName,
getTypeName(), DataTypes.BOOLEAN_TYPE.getName()));
}
int pos = fieldMapping.fieldPos.get(attrName);
......@@ -274,14 +266,13 @@ public class StructInstance implements ITypedStruct {
public byte getByte(String attrName) throws AtlasException {
AttributeInfo i = fieldMapping.fields.get(attrName);
if (i == null) {
throw new AtlasException(
String.format("Unknown field %s for Struct %s", attrName, getTypeName()));
throw new AtlasException(String.format("Unknown field %s for Struct %s", attrName, getTypeName()));
}
if (i.dataType() != DataTypes.BYTE_TYPE) {
throw new AtlasException(
String.format("Field %s for Struct %s is not a %s, call generic get method",
attrName, getTypeName(), DataTypes.BYTE_TYPE.getName()));
String.format("Field %s for Struct %s is not a %s, call generic get method", attrName,
getTypeName(), DataTypes.BYTE_TYPE.getName()));
}
int pos = fieldMapping.fieldPos.get(attrName);
......@@ -297,14 +288,13 @@ public class StructInstance implements ITypedStruct {
public short getShort(String attrName) throws AtlasException {
AttributeInfo i = fieldMapping.fields.get(attrName);
if (i == null) {
throw new AtlasException(
String.format("Unknown field %s for Struct %s", attrName, getTypeName()));
throw new AtlasException(String.format("Unknown field %s for Struct %s", attrName, getTypeName()));
}
if (i.dataType() != DataTypes.SHORT_TYPE) {
throw new AtlasException(
String.format("Field %s for Struct %s is not a %s, call generic get method",
attrName, getTypeName(), DataTypes.SHORT_TYPE.getName()));
String.format("Field %s for Struct %s is not a %s, call generic get method", attrName,
getTypeName(), DataTypes.SHORT_TYPE.getName()));
}
int pos = fieldMapping.fieldPos.get(attrName);
......@@ -320,15 +310,14 @@ public class StructInstance implements ITypedStruct {
public int getInt(String attrName) throws AtlasException {
AttributeInfo i = fieldMapping.fields.get(attrName);
if (i == null) {
throw new AtlasException(
String.format("Unknown field %s for Struct %s", attrName, getTypeName()));
throw new AtlasException(String.format("Unknown field %s for Struct %s", attrName, getTypeName()));
}
if (i.dataType() != DataTypes.INT_TYPE && !(i.dataType() instanceof EnumType)) {
throw new AtlasException(
String.format("Field %s for Struct %s is not a %s, call generic get method",
attrName, getTypeName(), DataTypes.INT_TYPE.getName()));
String.format("Field %s for Struct %s is not a %s, call generic get method", attrName,
getTypeName(), DataTypes.INT_TYPE.getName()));
}
int pos = fieldMapping.fieldPos.get(attrName);
......@@ -344,14 +333,13 @@ public class StructInstance implements ITypedStruct {
public long getLong(String attrName) throws AtlasException {
AttributeInfo i = fieldMapping.fields.get(attrName);
if (i == null) {
throw new AtlasException(
String.format("Unknown field %s for Struct %s", attrName, getTypeName()));
throw new AtlasException(String.format("Unknown field %s for Struct %s", attrName, getTypeName()));
}
if (i.dataType() != DataTypes.LONG_TYPE) {
throw new AtlasException(
String.format("Field %s for Struct %s is not a %s, call generic get method",
attrName, getTypeName(), DataTypes.LONG_TYPE.getName()));
String.format("Field %s for Struct %s is not a %s, call generic get method", attrName,
getTypeName(), DataTypes.LONG_TYPE.getName()));
}
int pos = fieldMapping.fieldPos.get(attrName);
......@@ -367,14 +355,13 @@ public class StructInstance implements ITypedStruct {
public float getFloat(String attrName) throws AtlasException {
AttributeInfo i = fieldMapping.fields.get(attrName);
if (i == null) {
throw new AtlasException(
String.format("Unknown field %s for Struct %s", attrName, getTypeName()));
throw new AtlasException(String.format("Unknown field %s for Struct %s", attrName, getTypeName()));
}
if (i.dataType() != DataTypes.FLOAT_TYPE) {
throw new AtlasException(
String.format("Field %s for Struct %s is not a %s, call generic get method",
attrName, getTypeName(), DataTypes.FLOAT_TYPE.getName()));
String.format("Field %s for Struct %s is not a %s, call generic get method", attrName,
getTypeName(), DataTypes.FLOAT_TYPE.getName()));
}
int pos = fieldMapping.fieldPos.get(attrName);
......@@ -390,14 +377,13 @@ public class StructInstance implements ITypedStruct {
public double getDouble(String attrName) throws AtlasException {
AttributeInfo i = fieldMapping.fields.get(attrName);
if (i == null) {
throw new AtlasException(
String.format("Unknown field %s for Struct %s", attrName, getTypeName()));
throw new AtlasException(String.format("Unknown field %s for Struct %s", attrName, getTypeName()));
}
if (i.dataType() != DataTypes.DOUBLE_TYPE) {
throw new AtlasException(
String.format("Field %s for Struct %s is not a %s, call generic get method",
attrName, getTypeName(), DataTypes.DOUBLE_TYPE.getName()));
String.format("Field %s for Struct %s is not a %s, call generic get method", attrName,
getTypeName(), DataTypes.DOUBLE_TYPE.getName()));
}
int pos = fieldMapping.fieldPos.get(attrName);
......@@ -413,14 +399,13 @@ public class StructInstance implements ITypedStruct {
public BigInteger getBigInt(String attrName) throws AtlasException {
AttributeInfo i = fieldMapping.fields.get(attrName);
if (i == null) {
throw new AtlasException(
String.format("Unknown field %s for Struct %s", attrName, getTypeName()));
throw new AtlasException(String.format("Unknown field %s for Struct %s", attrName, getTypeName()));
}
if (i.dataType() != DataTypes.BIGINTEGER_TYPE) {
throw new AtlasException(
String.format("Field %s for Struct %s is not a %s, call generic get method",
attrName, getTypeName(), DataTypes.BIGINTEGER_TYPE.getName()));
String.format("Field %s for Struct %s is not a %s, call generic get method", attrName,
getTypeName(), DataTypes.BIGINTEGER_TYPE.getName()));
}
int pos = fieldMapping.fieldPos.get(attrName);
......@@ -436,14 +421,13 @@ public class StructInstance implements ITypedStruct {
public BigDecimal getBigDecimal(String attrName) throws AtlasException {
AttributeInfo i = fieldMapping.fields.get(attrName);
if (i == null) {
throw new AtlasException(
String.format("Unknown field %s for Struct %s", attrName, getTypeName()));
throw new AtlasException(String.format("Unknown field %s for Struct %s", attrName, getTypeName()));
}
if (i.dataType() != DataTypes.BIGDECIMAL_TYPE) {
throw new AtlasException(
String.format("Field %s for Struct %s is not a %s, call generic get method",
attrName, getTypeName(), DataTypes.BIGDECIMAL_TYPE.getName()));
String.format("Field %s for Struct %s is not a %s, call generic get method", attrName,
getTypeName(), DataTypes.BIGDECIMAL_TYPE.getName()));
}
int pos = fieldMapping.fieldPos.get(attrName);
......@@ -459,14 +443,13 @@ public class StructInstance implements ITypedStruct {
public Date getDate(String attrName) throws AtlasException {
AttributeInfo i = fieldMapping.fields.get(attrName);
if (i == null) {
throw new AtlasException(
String.format("Unknown field %s for Struct %s", attrName, getTypeName()));
throw new AtlasException(String.format("Unknown field %s for Struct %s", attrName, getTypeName()));
}
if (i.dataType() != DataTypes.DATE_TYPE) {
throw new AtlasException(
String.format("Field %s for Struct %s is not a %s, call generic get method",
attrName, getTypeName(), DataTypes.DATE_TYPE.getName()));
String.format("Field %s for Struct %s is not a %s, call generic get method", attrName,
getTypeName(), DataTypes.DATE_TYPE.getName()));
}
int pos = fieldMapping.fieldPos.get(attrName);
......@@ -482,14 +465,13 @@ public class StructInstance implements ITypedStruct {
public String getString(String attrName) throws AtlasException {
AttributeInfo i = fieldMapping.fields.get(attrName);
if (i == null) {
throw new AtlasException(
String.format("Unknown field %s for Struct %s", attrName, getTypeName()));
throw new AtlasException(String.format("Unknown field %s for Struct %s", attrName, getTypeName()));
}
if (i.dataType() != DataTypes.STRING_TYPE) {
throw new AtlasException(
String.format("Field %s for Struct %s is not a %s, call generic get method",
attrName, getTypeName(), DataTypes.STRING_TYPE.getName()));
String.format("Field %s for Struct %s is not a %s, call generic get method", attrName,
getTypeName(), DataTypes.STRING_TYPE.getName()));
}
int pos = fieldMapping.fieldPos.get(attrName);
......@@ -505,14 +487,13 @@ public class StructInstance implements ITypedStruct {
public void setBoolean(String attrName, boolean val) throws AtlasException {
AttributeInfo i = fieldMapping.fields.get(attrName);
if (i == null) {
throw new AtlasException(String.format("Unknown field %s for Struct %s", attrName, getTypeName()));
}
if (i.dataType() != DataTypes.BOOLEAN_TYPE) {
throw new AtlasException(
String.format("Field %s for Struct %s is not a %s, call generic set method", attrName,
getTypeName(), DataTypes.BOOLEAN_TYPE.getName()));
}
int pos = fieldMapping.fieldPos.get(attrName);
......@@ -525,14 +506,13 @@ public class StructInstance implements ITypedStruct {
public void setByte(String attrName, byte val) throws AtlasException {
AttributeInfo i = fieldMapping.fields.get(attrName);
if (i == null) {
throw new AtlasException(String.format("Unknown field %s for Struct %s", attrName, getTypeName()));
}
if (i.dataType() != DataTypes.BYTE_TYPE) {
throw new AtlasException(
String.format("Field %s for Struct %s is not a %s, call generic set method", attrName,
getTypeName(), DataTypes.BYTE_TYPE.getName()));
}
int pos = fieldMapping.fieldPos.get(attrName);
......@@ -545,14 +525,13 @@ public class StructInstance implements ITypedStruct {
public void setShort(String attrName, short val) throws AtlasException {
AttributeInfo i = fieldMapping.fields.get(attrName);
if (i == null) {
throw new AtlasException(String.format("Unknown field %s for Struct %s", attrName, getTypeName()));
}
if (i.dataType() != DataTypes.SHORT_TYPE) {
throw new AtlasException(
String.format("Field %s for Struct %s is not a %s, call generic set method", attrName,
getTypeName(), DataTypes.SHORT_TYPE.getName()));
}
int pos = fieldMapping.fieldPos.get(attrName);
......@@ -565,14 +544,13 @@ public class StructInstance implements ITypedStruct {
public void setInt(String attrName, int val) throws AtlasException {
AttributeInfo i = fieldMapping.fields.get(attrName);
if (i == null) {
throw new AtlasException(String.format("Unknown field %s for Struct %s", attrName, getTypeName()));
}
if (i.dataType() != DataTypes.INT_TYPE && !(i.dataType() instanceof EnumType)) {
throw new AtlasException(
String.format("Field %s for Struct %s is not a %s, call generic set method", attrName,
getTypeName(), DataTypes.INT_TYPE.getName()));
}
int pos = fieldMapping.fieldPos.get(attrName);
......@@ -585,14 +563,13 @@ public class StructInstance implements ITypedStruct {
public void setLong(String attrName, long val) throws AtlasException {
AttributeInfo i = fieldMapping.fields.get(attrName);
if (i == null) {
throw new AtlasException(String.format("Unknown field %s for Struct %s", attrName, getTypeName()));
}
if (i.dataType() != DataTypes.LONG_TYPE) {
throw new AtlasException(
String.format("Field %s for Struct %s is not a %s, call generic set method", attrName,
getTypeName(), DataTypes.LONG_TYPE.getName()));
}
int pos = fieldMapping.fieldPos.get(attrName);
......@@ -605,14 +582,13 @@ public class StructInstance implements ITypedStruct {
public void setFloat(String attrName, float val) throws AtlasException {
AttributeInfo i = fieldMapping.fields.get(attrName);
if (i == null) {
throw new AtlasException(String.format("Unknown field %s for Struct %s", attrName, getTypeName()));
}
if (i.dataType() != DataTypes.FLOAT_TYPE) {
throw new AtlasException(
String.format("Field %s for Struct %s is not a %s, call generic set method", attrName,
getTypeName(), DataTypes.FLOAT_TYPE.getName()));
}
int pos = fieldMapping.fieldPos.get(attrName);
......@@ -625,14 +601,13 @@ public class StructInstance implements ITypedStruct {
public void setDouble(String attrName, double val) throws AtlasException {
AttributeInfo i = fieldMapping.fields.get(attrName);
if (i == null) {
throw new AtlasException(String.format("Unknown field %s for Struct %s", attrName, getTypeName()));
}
if (i.dataType() != DataTypes.DOUBLE_TYPE) {
throw new AtlasException(
String.format("Field %s for Struct %s is not a %s, call generic set method", attrName,
getTypeName(), DataTypes.DOUBLE_TYPE.getName()));
}
int pos = fieldMapping.fieldPos.get(attrName);
......@@ -645,14 +620,13 @@ public class StructInstance implements ITypedStruct {
public void setBigInt(String attrName, BigInteger val) throws AtlasException {
AttributeInfo i = fieldMapping.fields.get(attrName);
if (i == null) {
throw new AtlasException(String.format("Unknown field %s for Struct %s", attrName, getTypeName()));
}
if (i.dataType() != DataTypes.BIGINTEGER_TYPE) {
throw new AtlasException(
String.format("Field %s for Struct %s is not a %s, call generic set method", attrName,
getTypeName(), DataTypes.BIGINTEGER_TYPE.getName()));
}
int pos = fieldMapping.fieldPos.get(attrName);
......@@ -665,14 +639,13 @@ public class StructInstance implements ITypedStruct {
public void setBigDecimal(String attrName, BigDecimal val) throws AtlasException {
AttributeInfo i = fieldMapping.fields.get(attrName);
if (i == null) {
throw new AtlasException(String.format("Unknown field %s for Struct %s", attrName, getTypeName()));
}
if (i.dataType() != DataTypes.BIGDECIMAL_TYPE) {
throw new AtlasException(
String.format("Field %s for Struct %s is not a %s, call generic set method", attrName,
getTypeName(), DataTypes.BIGDECIMAL_TYPE.getName()));
}
int pos = fieldMapping.fieldPos.get(attrName);
......@@ -685,14 +658,13 @@ public class StructInstance implements ITypedStruct {
public void setDate(String attrName, Date val) throws AtlasException {
AttributeInfo i = fieldMapping.fields.get(attrName);
if (i == null) {
throw new AtlasException(String.format("Unknown field %s for Struct %s", attrName, getTypeName()));
}
if (i.dataType() != DataTypes.DATE_TYPE) {
throw new AtlasException(
String.format("Field %s for Struct %s is not a %s, call generic set method", attrName,
getTypeName(), DataTypes.DATE_TYPE.getName()));
}
int pos = fieldMapping.fieldPos.get(attrName);
......@@ -705,14 +677,13 @@ public class StructInstance implements ITypedStruct {
public void setString(String attrName, String val) throws AtlasException {
AttributeInfo i = fieldMapping.fields.get(attrName);
if (i == null) {
throw new AtlasException(String.format("Unknown field %s for Struct %s", attrName, getTypeName()));
}
if (i.dataType() != DataTypes.STRING_TYPE) {
throw new AtlasException(
String.format("Field %s for Struct %s is not a %s, call generic set method", attrName,
getTypeName(), DataTypes.STRING_TYPE.getName()));
}
int pos = fieldMapping.fieldPos.get(attrName);
......
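Usage sketch for the typed accessors above: an unknown attribute or a mismatched declared type throws, and callers are pointed at the generic get/set. Assumes `ts` is the TypeSystem singleton and a hypothetical "Address" struct type with a STRING_TYPE attribute "city" has been registered; the cast to StructInstance reflects the concrete instance type created here.

    StructType addressType = ts.getDataType(StructType.class, "Address");
    ITypedStruct address = addressType.createInstance();
    address.set("city", "Palo Alto");                              // generic set: any declared type
    String city = ((StructInstance) address).getString("city");    // typed get: STRING_TYPE only
    // ((StructInstance) address).getDate("city") throws AtlasException: call generic get method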
......@@ -35,15 +35,14 @@ public final class AttributeDefinition {
*/
public final String reverseAttributeName;
public AttributeDefinition(String name, String dataTypeName, Multiplicity multiplicity, boolean isComposite,
String reverseAttributeName) {
this(name, dataTypeName, multiplicity, isComposite, false, true, reverseAttributeName);
}
public AttributeDefinition(String name, String dataTypeName, Multiplicity multiplicity, boolean isComposite,
boolean isUnique, boolean isIndexable, String reverseAttributeName) {
this.name = ParamChecker.notEmpty(name, "Attribute name");
this.dataTypeName = ParamChecker.notEmpty(dataTypeName, "Attribute type");
this.multiplicity = multiplicity;
......@@ -55,21 +54,37 @@ public final class AttributeDefinition {
@Override
public boolean equals(Object o) {
if (this == o) {
return true;
}
if (o == null || getClass() != o.getClass()) {
return false;
}
AttributeDefinition that = (AttributeDefinition) o;
if (isComposite != that.isComposite) {
return false;
}
if (isUnique != that.isUnique) {
return false;
}
if (isIndexable != that.isIndexable) {
return false;
}
if (!dataTypeName.equals(that.dataTypeName)) {
return false;
}
if (!multiplicity.equals(that.multiplicity)) {
return false;
}
if (!name.equals(that.name)) {
return false;
}
if (reverseAttributeName != null ? !reverseAttributeName.equals(that.reverseAttributeName) :
that.reverseAttributeName != null) {
return false;
}
return true;
}
......
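The shorter AttributeDefinition constructor above delegates with isUnique = false and isIndexable = true, so the two calls in this sketch build equal definitions (attribute name illustrative):

    AttributeDefinition shortForm = new AttributeDefinition(
            "name", DataTypes.STRING_TYPE.getName(), Multiplicity.REQUIRED, false, null);
    AttributeDefinition longForm = new AttributeDefinition(
            "name", DataTypes.STRING_TYPE.getName(), Multiplicity.REQUIRED, false, false, true, null);
    // shortForm.equals(longForm) == true: same name, type, multiplicity and flags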
......@@ -39,8 +39,9 @@ public class AttributeInfo {
AttributeInfo(TypeSystem t, AttributeDefinition def, Map<String, IDataType> tempTypes) throws AtlasException {
this.name = def.name;
this.dataType =
(tempTypes != null && tempTypes.containsKey(def.dataTypeName)) ? tempTypes.get(def.dataTypeName) :
t.getDataType(IDataType.class, def.dataTypeName);
this.multiplicity = def.multiplicity;
this.isComposite = def.isComposite;
this.isUnique = def.isUnique;
......
......@@ -53,8 +53,7 @@ public class ClassType extends HierarchicalType<ClassType, IReferenceableInstanc
infoToNameMap = null;
}
ClassType(TypeSystem typeSystem, String name, ImmutableList<String> superTypes, AttributeInfo... fields)
throws AtlasException {
super(typeSystem, ClassType.class, name, superTypes, fields);
infoToNameMap = TypeUtils.buildAttrInfoToNameMap(fieldMapping);
......@@ -71,8 +70,7 @@ public class ClassType extends HierarchicalType<ClassType, IReferenceableInstanc
if (isSubType(cType.getName())) {
return;
}
throw new AtlasException(String.format("Id %s is not valid for class %s", id, getName()));
}
}
......@@ -84,8 +82,7 @@ public class ClassType extends HierarchicalType<ClassType, IReferenceableInstanc
}
@Override
public ITypedReferenceableInstance convert(Object val, Multiplicity m) throws AtlasException {
if (val != null) {
if (val instanceof ITypedReferenceableInstance) {
......@@ -122,9 +119,9 @@ public class ClassType extends HierarchicalType<ClassType, IReferenceableInstanc
id = r.getId();
}
ITypedReferenceableInstance tr =
r != null ? createInstanceWithTraits(id, r, r.getTraits().toArray(new String[0])) :
createInstance(id);
if (id != null && id.isAssigned()) {
return tr;
......@@ -134,8 +131,7 @@ public class ClassType extends HierarchicalType<ClassType, IReferenceableInstanc
String attrKey = e.getKey();
AttributeInfo i = e.getValue();
Object aVal = s.get(attrKey);
if (aVal != null && i.dataType().getTypeCategory() == DataTypes.TypeCategory.CLASS) {
if (!i.isComposite) {
aVal = ((IReferenceableInstance) aVal).getId();
}
......@@ -164,25 +160,21 @@ public class ClassType extends HierarchicalType<ClassType, IReferenceableInstanc
@Override
public ITypedReferenceableInstance createInstance() throws AtlasException {
return createInstance((String[]) null);
}
public ITypedReferenceableInstance createInstance(String... traitNames) throws AtlasException {
return createInstance(null, traitNames);
}
public ITypedReferenceableInstance createInstance(Id id, String... traitNames) throws AtlasException {
return createInstanceWithTraits(id, null, traitNames);
}
public ITypedReferenceableInstance createInstanceWithTraits(Id id, Referenceable r, String... traitNames)
throws AtlasException {
ImmutableMap.Builder<String, ITypedStruct> b = new ImmutableBiMap.Builder<String, ITypedStruct>();
if (traitNames != null) {
for (String t : traitNames) {
TraitType tType = typeSystem.getDataType(TraitType.class, t);
......@@ -193,9 +185,7 @@ public class ClassType extends HierarchicalType<ClassType, IReferenceableInstanc
}
}
return new ReferenceableInstance(id == null ? new Id(getName()) : id, getName(), fieldMapping,
new boolean[fieldMapping.fields.size()],
fieldMapping.numBools == 0 ? null : new boolean[fieldMapping.numBools],
fieldMapping.numBytes == 0 ? null : new byte[fieldMapping.numBytes],
......@@ -204,23 +194,19 @@ public class ClassType extends HierarchicalType<ClassType, IReferenceableInstanc
fieldMapping.numLongs == 0 ? null : new long[fieldMapping.numLongs],
fieldMapping.numFloats == 0 ? null : new float[fieldMapping.numFloats],
fieldMapping.numDoubles == 0 ? null : new double[fieldMapping.numDoubles],
fieldMapping.numBigDecimals == 0 ? null : new BigDecimal[fieldMapping.numBigDecimals],
fieldMapping.numBigInts == 0 ? null : new BigInteger[fieldMapping.numBigInts],
fieldMapping.numDates == 0 ? null : new Date[fieldMapping.numDates],
fieldMapping.numStrings == 0 ? null : new String[fieldMapping.numStrings],
fieldMapping.numArrays == 0 ? null : new ImmutableList[fieldMapping.numArrays],
fieldMapping.numMaps == 0 ? null : new ImmutableMap[fieldMapping.numMaps],
fieldMapping.numStructs == 0 ? null : new StructInstance[fieldMapping.numStructs],
fieldMapping.numReferenceables == 0 ? null : new ReferenceableInstance[fieldMapping.numReferenceables],
fieldMapping.numReferenceables == 0 ? null : new Id[fieldMapping.numReferenceables], b.build());
}
@Override
public void output(IReferenceableInstance s, Appendable buf, String prefix) throws AtlasException {
fieldMapping.output(s, buf, prefix);
}
......
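createInstanceWithTraits above pre-sizes one backing array per storage category from the FieldMapping and attaches a typed struct per trait. A sketch, assuming hypothetical "Table" class and "PII" trait types are already registered with `ts`:

    ClassType tableType = ts.getDataType(ClassType.class, "Table");
    ITypedReferenceableInstance table = tableType.createInstance(new Id("Table"), "PII");
    // table.getTraits() now contains "PII"; unset attributes live in the pre-sized arrays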
......@@ -62,8 +62,7 @@ public class DataTypes {
}
public static String mapTypeName(String keyTypeName, String valueTypeName) {
return String.format("%s%s,%s%s", MAP_TYPE_PREFIX,
keyTypeName, valueTypeName, MAP_TYPE_SUFFIX);
return String.format("%s%s,%s%s", MAP_TYPE_PREFIX, keyTypeName, valueTypeName, MAP_TYPE_SUFFIX);
}
public static String mapTypeName(IDataType keyType, IDataType valueType) {
......@@ -433,8 +432,8 @@ public class DataTypes {
@Override
public void output(Date val, Appendable buf, String prefix) throws AtlasException {
TypeUtils.outputVal(val == null ? "<null>" :
TypeSystem.getInstance().getDateFormat().format(val), buf, prefix);
TypeUtils.outputVal(val == null ? "<null>" : TypeSystem.getInstance().getDateFormat().format(val), buf,
prefix);
}
public Date nullValue() {
......@@ -502,19 +501,18 @@ public class DataTypes {
it = (Iterator) val;
}
if (it != null) {
ImmutableCollection.Builder b = m.isUnique ? ImmutableSet.builder() : ImmutableList.builder();
while (it.hasNext()) {
b.add(elemType.convert(it.next(),
TypeSystem.getInstance().allowNullsInCollections() ? Multiplicity.OPTIONAL :
Multiplicity.REQUIRED));
}
return m.isUnique ? b.build().asList() : b.build();
} else {
try {
return ImmutableList.of(elemType.convert(val,
TypeSystem.getInstance().allowNullsInCollections() ? Multiplicity.OPTIONAL :
Multiplicity.REQUIRED));
} catch (Exception e) {
throw new ValueConversionException(this, val, e);
}
......@@ -526,15 +524,13 @@ public class DataTypes {
return null;
}
public ImmutableCollection<?> mapIds(ImmutableCollection<?> val, Multiplicity m, Map<Id, Id> transientToNewIds)
throws AtlasException {
if (val == null || elemType.getTypeCategory() != TypeCategory.CLASS) {
return val;
}
ImmutableCollection.Builder b = m.isUnique ? ImmutableSet.builder() : ImmutableList.builder();
Iterator it = val.iterator();
while (it.hasNext()) {
Object elem = it.next();
......@@ -600,11 +596,10 @@ public class DataTypes {
while (it.hasNext()) {
Map.Entry e = it.next();
b.put(keyType.convert(e.getKey(),
TypeSystem.getInstance().allowNullsInCollections() ? Multiplicity.OPTIONAL :
Multiplicity.REQUIRED), valueType.convert(e.getValue(),
TypeSystem.getInstance().allowNullsInCollections() ? Multiplicity.OPTIONAL :
Multiplicity.REQUIRED));
}
return b.build();
} else {
......@@ -617,12 +612,11 @@ public class DataTypes {
return null;
}
public ImmutableMap<?, ?> mapIds(ImmutableMap val, Multiplicity m, Map<Id, Id> transientToNewIds)
throws AtlasException {
if (val == null || (keyType.getTypeCategory() != TypeCategory.CLASS
&& valueType.getTypeCategory() != TypeCategory.CLASS)) {
return val;
}
ImmutableMap.Builder b = ImmutableMap.builder();
......
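mapTypeName composes a synthetic type name from the MAP_TYPE_PREFIX/MAP_TYPE_SUFFIX constants, which TypeSystem.getDataType later splits back apart via TypeUtils.parseAsMapType. A sketch, assuming the constants are "map<" and ">":

    String mapName = DataTypes.mapTypeName(DataTypes.STRING_TYPE.getName(), DataTypes.INT_TYPE.getName());
    // "map<string,int>" under the assumed constants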
......@@ -30,8 +30,7 @@ public class DownCastFieldMapping {
this.fieldNameMap = fieldNameMap;
}
public void set(DownCastStructInstance s, String attrName, Object val) throws AtlasException {
String mappedNm = fieldNameMap.get(attrName);
if (mappedNm == null) {
......
......@@ -54,13 +54,13 @@ public class EnumType extends AbstractDataType<EnumValue> {
if (val != null) {
EnumValue e = null;
if (val instanceof EnumValue) {
e = valueMap.get(((EnumValue) val).value);
} else if (val instanceof Integer || val instanceof BigInt) {
e = ordinalMap.get(val);
} else if (val instanceof String) {
e = valueMap.get(val);
} else if (val instanceof Number) {
e = ordinalMap.get(((Number) val).intValue());
}
if (e == null) {
......
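EnumType.convert above accepts four input shapes. A sketch, assuming a registered enum type "Priority" whose value "HIGH" has ordinal 2:

    EnumType priority = ts.getDataType(EnumType.class, "Priority");
    EnumValue byName = priority.convert("HIGH", Multiplicity.REQUIRED);  // String -> valueMap
    EnumValue byOrdinal = priority.convert(2, Multiplicity.REQUIRED);    // Integer -> ordinalMap
    EnumValue byNumber = priority.convert(2L, Multiplicity.REQUIRED);    // Number -> intValue()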
......@@ -34,13 +34,21 @@ public final class EnumTypeDefinition {
@Override
public boolean equals(Object o) {
if (this == o) {
return true;
}
if (o == null || getClass() != o.getClass()) {
return false;
}
EnumTypeDefinition that = (EnumTypeDefinition) o;
if (!Arrays.equals(enumValues, that.enumValues)) {
return false;
}
if (!name.equals(that.name)) {
return false;
}
return true;
}
......
......@@ -32,13 +32,21 @@ public class EnumValue {
@Override
public boolean equals(Object o) {
if (this == o) {
return true;
}
if (o == null || getClass() != o.getClass()) {
return false;
}
EnumValue enumValue = (EnumValue) o;
if (ordinal != enumValue.ordinal) {
return false;
}
if (!value.equals(enumValue.value)) {
return false;
}
return true;
}
......
......@@ -47,12 +47,9 @@ public class FieldMapping {
public final int numReferenceables;
public FieldMapping(Map<String, AttributeInfo> fields, Map<String, Integer> fieldPos,
Map<String, Integer> fieldNullPos, int numBools, int numBytes, int numShorts, int numInts, int numLongs,
int numFloats, int numDoubles, int numBigInts, int numBigDecimals, int numDates, int numStrings,
int numArrays, int numMaps, int numStructs, int numReferenceables) {
this.fields = fields;
this.fieldPos = fieldPos;
this.fieldNullPos = fieldNullPos;
......@@ -73,8 +70,7 @@ public class FieldMapping {
this.numReferenceables = numReferenceables;
}
protected void outputFields(IStruct s, Appendable buf, String fieldPrefix) throws AtlasException {
for (Map.Entry<String, AttributeInfo> e : fields.entrySet()) {
String attrName = e.getKey();
AttributeInfo i = e.getValue();
......@@ -104,8 +100,7 @@ public class FieldMapping {
TypeUtils.outputVal("}", buf, prefix);
}
public void output(IReferenceableInstance s, Appendable buf, String prefix) throws AtlasException {
if (s == null) {
TypeUtils.outputVal("<null>\n", buf, "");
return;
......
......@@ -61,8 +61,8 @@ public abstract class HierarchicalType<ST extends HierarchicalType, T> extends A
/**
* Used when creating a Type, to support recursive Structs.
*/
HierarchicalType(TypeSystem typeSystem, Class<ST> superTypeClass, String name, ImmutableList<String> superTypes,
int numFields) {
this.typeSystem = typeSystem;
this.superTypeClass = superTypeClass;
this.name = name;
......@@ -73,14 +73,12 @@ public abstract class HierarchicalType<ST extends HierarchicalType, T> extends A
this.attributeNameToType = null;
}
HierarchicalType(TypeSystem typeSystem, Class<ST> superTypeClass, String name, ImmutableList<String> superTypes,
AttributeInfo... fields) throws AtlasException {
this.typeSystem = typeSystem;
this.superTypeClass = superTypeClass;
this.name = name;
Pair<FieldMapping, ImmutableMap<String, String>> p = constructFieldMapping(superTypes, fields);
this.fieldMapping = p.left;
this.attributeNameToType = p.right;
this.numFields = this.fieldMapping.fields.size();
......@@ -107,13 +105,11 @@ public abstract class HierarchicalType<ST extends HierarchicalType, T> extends A
return (cType == this || cType.superTypePaths.containsKey(getName()));
}
protected void setupSuperTypesGraph() throws AtlasException {
setupSuperTypesGraph(superTypes);
}
private void setupSuperTypesGraph(ImmutableList<String> superTypes) throws AtlasException {
Map<String, List<Path>> superTypePaths = new HashMap<String, List<Path>>();
Map<String, Path> pathNameToPathMap = new HashMap<String, Path>();
Queue<Path> queue = new LinkedList<Path>();
......@@ -149,8 +145,7 @@ public abstract class HierarchicalType<ST extends HierarchicalType, T> extends A
}
protected Pair<FieldMapping, ImmutableMap<String, String>> constructFieldMapping(ImmutableList<String> superTypes,
AttributeInfo... fields) throws AtlasException {
Map<String, AttributeInfo> fieldsMap = new LinkedHashMap<String, AttributeInfo>();
Map<String, Integer> fieldPos = new HashMap<String, Integer>();
......@@ -182,19 +177,16 @@ public abstract class HierarchicalType<ST extends HierarchicalType, T> extends A
ST superType = currentPath.typeName == getName() ? (ST) this :
(ST) typeSystem.getDataType(superTypeClass, currentPath.typeName);
ImmutableList<AttributeInfo> superTypeFields =
superType == this ? ImmutableList.copyOf(fields) : superType.immediateAttrs;
Set<String> immediateFields = new HashSet<String>();
for (AttributeInfo i : superTypeFields) {
if (superType == this) {
if (immediateFields.contains(i.name)) {
throw new AtlasException(String.format(
"Struct definition cannot contain multiple fields with the same name %s", i.name));
}
immediateFields.add(i.name);
}
......@@ -249,8 +241,8 @@ public abstract class HierarchicalType<ST extends HierarchicalType, T> extends A
} else if (i.dataType().getTypeCategory() == DataTypes.TypeCategory.MAP) {
fieldPos.put(attrName, numMaps);
numMaps++;
} else if (i.dataType().getTypeCategory() == DataTypes.TypeCategory.STRUCT
|| i.dataType().getTypeCategory() == DataTypes.TypeCategory.TRAIT) {
fieldPos.put(attrName, numStructs);
numStructs++;
} else if (i.dataType().getTypeCategory() == DataTypes.TypeCategory.CLASS) {
......@@ -265,24 +257,10 @@ public abstract class HierarchicalType<ST extends HierarchicalType, T> extends A
this.superTypePaths = ImmutableMap.copyOf(superTypePaths);
this.pathNameToPathMap = ImmutableMap.copyOf(pathNameToPathMap);
FieldMapping fm =
new FieldMapping(fieldsMap, fieldPos, fieldNullPos, numBools, numBytes, numShorts, numInts, numLongs,
numFloats, numDoubles, numBigInts, numBigDecimals, numDates, numStrings, numArrays, numMaps,
numStructs, numReferenceables);
return new Pair(fm, ImmutableMap.copyOf(attributeNameToType));
}
......@@ -290,31 +268,26 @@ public abstract class HierarchicalType<ST extends HierarchicalType, T> extends A
public IStruct castAs(IStruct s, String superTypeName) throws AtlasException {
if (!superTypePaths.containsKey(superTypeName)) {
throw new AtlasException(String.format("Cannot downcast to %s from type %s", superTypeName, getName()));
}
if (s != null) {
if (s.getTypeName() != getName()) {
throw new AtlasException(
String.format("Downcast called on wrong type %s, instance type is %s",
getName(), s.getTypeName()));
String.format("Downcast called on wrong type %s, instance type is %s", getName(),
s.getTypeName()));
}
List<Path> pathToSuper = superTypePaths.get(superTypeName);
if (pathToSuper.size() > 1) {
throw new AtlasException(String.format(
"Cannot downcast to %s from %s: there are multiple paths to SuperType",
superTypeName, getName()));
}
ST superType = (ST) typeSystem.getDataType(superTypeClass, superTypeName);
Map<String, String> downCastMap = superType.constructDowncastFieldMap(this, pathToSuper.get(0));
return new DownCastStructInstance(superTypeName, new DownCastFieldMapping(ImmutableMap.copyOf(downCastMap)),
s);
}
......@@ -347,14 +320,12 @@ public abstract class HierarchicalType<ST extends HierarchicalType, T> extends A
Iterator<Path> itr = pathIterator();
while (itr.hasNext()) {
Path p = itr.next();
Path pInSubType = (Path) subType.pathNameToPathMap.get(p.pathName + "." + pathToSubTypeName);
if (pInSubType.hiddenAttributeMap != null) {
for (Map.Entry<String, String> e : pInSubType.hiddenAttributeMap.entrySet()) {
String mappedInThisType =
p.hiddenAttributeMap != null ? p.hiddenAttributeMap.get(e.getKey()) : null;
if (mappedInThisType == null) {
dCMap.put(e.getKey(), e.getValue());
} else {
......@@ -393,8 +364,7 @@ public abstract class HierarchicalType<ST extends HierarchicalType, T> extends A
private final Path subTypePath;
/*
* name mapping for attributes hidden by a SubType.
*/
Map<String, String> hiddenAttributeMap;
Path(String typeName, Path childPath) throws AtlasException {
this.typeName = typeName;
......@@ -415,8 +385,7 @@ public abstract class HierarchicalType<ST extends HierarchicalType, T> extends A
}
public boolean contains(String typeName) {
return this.typeName.equals(typeName) || (subTypePath != null && subTypePath.contains(typeName));
}
public String pathString(String nodeSep) {
......@@ -435,8 +404,7 @@ public abstract class HierarchicalType<ST extends HierarchicalType, T> extends A
}
String addOverrideAttr(String name) {
hiddenAttributeMap = hiddenAttributeMap == null ? new HashMap<String, String>() : hiddenAttributeMap;
String oName = pathName + "." + name;
hiddenAttributeMap.put(name, oName);
return oName;
......
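castAs above only downcasts when exactly one path leads to the supertype. A sketch of the single-path case, with hypothetical type names:

    ClassType employeeType = ts.getDataType(ClassType.class, "Employee");  // extends "Person"
    ITypedReferenceableInstance emp = employeeType.createInstance();
    IStruct asPerson = employeeType.castAs(emp, "Person");  // a DownCastStructInstance view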
......@@ -36,16 +36,13 @@ public class HierarchicalTypeDefinition<T extends HierarchicalType> extends Stru
* @throws ClassNotFoundException
*/
@InterfaceAudience.Private
public HierarchicalTypeDefinition(String hierarchicalMetaTypeName, String typeName, String[] superTypes,
AttributeDefinition[] attributeDefinitions) throws ClassNotFoundException {
this((Class<T>) Class.forName(hierarchicalMetaTypeName), typeName, ImmutableList.copyOf(superTypes),
attributeDefinitions);
}
public HierarchicalTypeDefinition(Class<T> hierarchicalMetaType, String typeName, ImmutableList<String> superTypes,
AttributeDefinition[] attributeDefinitions) {
super(typeName, false, attributeDefinitions);
hierarchicalMetaTypeName = hierarchicalMetaType.getName();
......@@ -54,14 +51,24 @@ public class HierarchicalTypeDefinition<T extends HierarchicalType> extends Stru
@Override
public boolean equals(Object o) {
if (this == o) {
return true;
}
if (o == null || getClass() != o.getClass()) {
return false;
}
if (!super.equals(o)) {
return false;
}
HierarchicalTypeDefinition that = (HierarchicalTypeDefinition) o;
if (!hierarchicalMetaTypeName.equals(that.hierarchicalMetaTypeName)) {
return false;
}
if (!superTypes.equals(that.superTypes)) {
return false;
}
return true;
}
......
......@@ -31,6 +31,7 @@ public final class Multiplicity {
public final int lower;
public final int upper;
public final boolean isUnique;
public Multiplicity(int lower, int upper, boolean isUnique) {
assert lower >= 0;
assert upper >= 1;
......@@ -46,14 +47,24 @@ public final class Multiplicity {
@Override
public boolean equals(Object o) {
if (this == o) {
return true;
}
if (o == null || getClass() != o.getClass()) {
return false;
}
Multiplicity that = (Multiplicity) o;
if (isUnique != that.isUnique) {
return false;
}
if (lower != that.lower) {
return false;
}
if (upper != that.upper) {
return false;
}
return true;
}
......
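The (lower, upper, isUnique) triple covers the usual attribute shapes; a sketch, assuming the predefined OPTIONAL/REQUIRED constants used elsewhere in this commit follow these bounds:

    Multiplicity optional = new Multiplicity(0, 1, false);            // like Multiplicity.OPTIONAL
    Multiplicity required = new Multiplicity(1, 1, false);            // like Multiplicity.REQUIRED
    Multiplicity set = new Multiplicity(1, Integer.MAX_VALUE, true);  // a unique collection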
......@@ -39,8 +39,7 @@ public class ObjectGraphTraversal implements Iterator<ObjectGraphTraversal.Insta
final TypeSystem typeSystem;
Set<Id> processedIds;
public ObjectGraphTraversal(TypeSystem typeSystem, IReferenceableInstance start) throws AtlasException {
this.typeSystem = typeSystem;
queue = new LinkedList<InstanceTuple>();
processedIds = new HashSet<Id>();
......@@ -56,8 +55,8 @@ public class ObjectGraphTraversal implements Iterator<ObjectGraphTraversal.Insta
IDataType keyType = ((DataTypes.MapType) dT).getKeyType();
IDataType valueType = ((DataTypes.MapType) dT).getValueType();
processMap(keyType, valueType, val);
} else if (dT.getTypeCategory() == DataTypes.TypeCategory.STRUCT
|| dT.getTypeCategory() == DataTypes.TypeCategory.TRAIT) {
processStruct(val);
} else if (dT.getTypeCategory() == DataTypes.TypeCategory.CLASS) {
processReferenceableInstance(val);
......@@ -66,8 +65,8 @@ public class ObjectGraphTraversal implements Iterator<ObjectGraphTraversal.Insta
}
void processMap(IDataType keyType, IDataType valueType, Object val) throws AtlasException {
if (keyType.getTypeCategory() == DataTypes.TypeCategory.PRIMITIVE
&& valueType.getTypeCategory() == DataTypes.TypeCategory.PRIMITIVE) {
return;
}
......
......@@ -47,13 +47,11 @@ public class ObjectGraphWalker {
final NodeProcessor nodeProcessor;
Set<Id> processedIds;
public ObjectGraphWalker(TypeSystem typeSystem, NodeProcessor nodeProcessor) throws AtlasException {
this(typeSystem, nodeProcessor, (IReferenceableInstance) null);
}
public ObjectGraphWalker(TypeSystem typeSystem, NodeProcessor nodeProcessor, IReferenceableInstance start)
throws AtlasException {
this.typeSystem = typeSystem;
this.nodeProcessor = nodeProcessor;
......@@ -65,8 +63,7 @@ public class ObjectGraphWalker {
}
public ObjectGraphWalker(TypeSystem typeSystem, NodeProcessor nodeProcessor,
List<? extends IReferenceableInstance> roots) throws AtlasException {
this.typeSystem = typeSystem;
this.nodeProcessor = nodeProcessor;
queue = new LinkedList<IReferenceableInstance>();
......@@ -96,8 +93,8 @@ public class ObjectGraphWalker {
IDataType keyType = ((DataTypes.MapType) dT).getKeyType();
IDataType valueType = ((DataTypes.MapType) dT).getValueType();
visitMap(keyType, valueType, val);
} else if (dT.getTypeCategory() == DataTypes.TypeCategory.STRUCT
|| dT.getTypeCategory() == DataTypes.TypeCategory.TRAIT) {
visitStruct(val);
} else if (dT.getTypeCategory() == DataTypes.TypeCategory.CLASS) {
visitReferenceableInstance(val);
......@@ -106,8 +103,8 @@ public class ObjectGraphWalker {
}
void visitMap(IDataType keyType, IDataType valueType, Object val) throws AtlasException {
if (keyType.getTypeCategory() == DataTypes.TypeCategory.PRIMITIVE
&& valueType.getTypeCategory() == DataTypes.TypeCategory.PRIMITIVE) {
return;
}
......
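A sketch of driving the walker, assuming NodeProcessor's single callback is processNode(Node) and walk() starts the traversal; `start` is some ITypedReferenceableInstance:

    ObjectGraphWalker walker = new ObjectGraphWalker(ts, new ObjectGraphWalker.NodeProcessor() {
        @Override
        public void processNode(ObjectGraphWalker.Node nd) throws AtlasException {
            // invoked once per attribute node; processedIds keeps cycles from re-walking
        }
    }, start);
    walker.walk();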
......@@ -28,8 +28,7 @@ import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;
public class StructType extends AbstractDataType<IStruct> implements IConstructableType<IStruct, ITypedStruct> {
public final TypeSystem typeSystem;
public final String name;
......@@ -50,13 +49,11 @@ public class StructType extends AbstractDataType<IStruct>
this.handler = null;
}
protected StructType(TypeSystem typeSystem, String name, ImmutableList<String> superTypes, AttributeInfo... fields)
throws AtlasException {
this.typeSystem = typeSystem;
this.name = name;
this.fieldMapping = constructFieldMapping(superTypes, fields);
infoToNameMap = TypeUtils.buildAttrInfoToNameMap(this.fieldMapping);
this.numFields = this.fieldMapping.fields.size();
this.handler = new TypedStructHandler(this);
......@@ -71,8 +68,7 @@ public class StructType extends AbstractDataType<IStruct>
return name;
}
protected FieldMapping constructFieldMapping(ImmutableList<String> superTypes, AttributeInfo... fields)
throws AtlasException {
Map<String, AttributeInfo> fieldsMap = new LinkedHashMap<String, AttributeInfo>();
......@@ -97,9 +93,7 @@ public class StructType extends AbstractDataType<IStruct>
for (AttributeInfo i : fields) {
if (fieldsMap.containsKey(i.name)) {
throw new AtlasException(
String.format("Struct definition cannot contain multiple fields with the same name %s",
i.name));
}
fieldsMap.put(i.name, i);
......@@ -146,8 +140,8 @@ public class StructType extends AbstractDataType<IStruct>
} else if (i.dataType().getTypeCategory() == DataTypes.TypeCategory.MAP) {
fieldPos.put(i.name, numMaps);
numMaps++;
} else if (i.dataType().getTypeCategory() == DataTypes.TypeCategory.STRUCT
|| i.dataType().getTypeCategory() == DataTypes.TypeCategory.TRAIT) {
fieldPos.put(i.name, numStructs);
numStructs++;
} else if (i.dataType().getTypeCategory() == DataTypes.TypeCategory.CLASS) {
......@@ -158,23 +152,8 @@ public class StructType extends AbstractDataType<IStruct>
}
}
return new FieldMapping(fieldsMap, fieldPos, fieldNullPos, numBools, numBytes, numShorts, numInts, numLongs,
numFloats, numDoubles, numBigInts, numBigDecimals, numDates, numStrings, numArrays, numMaps, numStructs,
numReferenceables);
}
......
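The duplicate-field guard in constructFieldMapping above makes a definition like this fail; struct and attribute names are hypothetical:

    ts.defineStructType("Address", true,
            new AttributeDefinition("line", DataTypes.STRING_TYPE.getName(), Multiplicity.REQUIRED, false, null),
            new AttributeDefinition("line", DataTypes.STRING_TYPE.getName(), Multiplicity.REQUIRED, false, null));
    // throws AtlasException: Struct definition cannot contain multiple fields with the same name line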
......@@ -42,13 +42,21 @@ public class StructTypeDefinition {
@Override
public boolean equals(Object o) {
if (this == o) {
return true;
}
if (o == null || getClass() != o.getClass()) {
return false;
}
StructTypeDefinition that = (StructTypeDefinition) o;
if (!Arrays.equals(attributeDefinitions, that.attributeDefinitions)) {
return false;
}
if (!typeName.equals(that.typeName)) {
return false;
}
return true;
}
......
......@@ -35,15 +35,13 @@ public class TraitType extends HierarchicalType<TraitType, IStruct>
/**
* Used when creating a TraitType, to support recursive Structs.
*/
TraitType(TypeSystem typeSystem, String name, ImmutableList<String> superTraits, int numFields) {
super(typeSystem, TraitType.class, name, superTraits, numFields);
handler = null;
infoToNameMap = null;
}
TraitType(TypeSystem typeSystem, String name, ImmutableList<String> superTraits, AttributeInfo... fields)
throws AtlasException {
super(typeSystem, TraitType.class, name, superTraits, fields);
handler = new TypedStructHandler(this);
......
......@@ -29,7 +29,16 @@ import org.apache.atlas.typesystem.TypesDef;
import javax.inject.Singleton;
import java.lang.reflect.Constructor;
import java.text.SimpleDateFormat;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Collections;
import java.util.HashMap;
import java.util.HashSet;
import java.util.LinkedHashSet;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.TimeZone;
import java.util.concurrent.ConcurrentHashMap;
@Singleton
......@@ -130,7 +139,7 @@ public class TypeSystem {
if (types.containsKey(name)) {
try {
return cls.cast(types.get(name));
} catch (ClassCastException cce) {
throw new AtlasException(cce);
}
}
......@@ -149,20 +158,18 @@ public class TypeSystem {
*/
String[] mapType = TypeUtils.parseAsMapType(name);
if (mapType != null) {
IDataType dT =
defineMapType(getDataType(IDataType.class, mapType[0]), getDataType(IDataType.class, mapType[1]));
return cls.cast(dT);
}
throw new TypeNotFoundException(String.format("Unknown datatype: %s", name));
}
public StructType defineStructType(String name, boolean errorIfExists, AttributeDefinition... attrDefs)
throws AtlasException {
StructTypeDefinition structDef = new StructTypeDefinition(name, attrDefs);
defineTypes(ImmutableList.of(structDef), ImmutableList.<HierarchicalTypeDefinition<TraitType>>of(),
ImmutableList.<HierarchicalTypeDefinition<ClassType>>of());
return getDataType(StructType.class, structDef.typeName);
......@@ -177,10 +184,8 @@ public class TypeSystem {
* @return temporary struct type
* @throws AtlasException
*/
public StructType defineQueryResultType(String name, Map<String, IDataType> tempTypes,
AttributeDefinition... attrDefs) throws AtlasException {
AttributeInfo[] infos = new AttributeInfo[attrDefs.length];
for (int i = 0; i < attrDefs.length; i++) {
......@@ -190,53 +195,44 @@ public class TypeSystem {
return new StructType(this, name, null, infos);
}
public TraitType defineTraitType(HierarchicalTypeDefinition<TraitType> traitDef) throws AtlasException {
defineTypes(ImmutableList.<StructTypeDefinition>of(), ImmutableList.of(traitDef),
ImmutableList.<HierarchicalTypeDefinition<ClassType>>of());
return getDataType(TraitType.class, traitDef.typeName);
}
public ClassType defineClassType(HierarchicalTypeDefinition<ClassType> classDef) throws AtlasException {
defineTypes(ImmutableList.<StructTypeDefinition>of(), ImmutableList.<HierarchicalTypeDefinition<TraitType>>of(),
ImmutableList.of(classDef));
return getDataType(ClassType.class, classDef.typeName);
}
public Map<String, IDataType> defineTraitTypes(HierarchicalTypeDefinition<TraitType>... traitDefs)
throws AtlasException {
TransientTypeSystem transientTypes =
new TransientTypeSystem(ImmutableList.<StructTypeDefinition>of(), ImmutableList.copyOf(traitDefs),
ImmutableList.<HierarchicalTypeDefinition<ClassType>>of());
return transientTypes.defineTypes();
}
public Map<String, IDataType> defineClassTypes(HierarchicalTypeDefinition<ClassType>... classDefs)
throws AtlasException {
TransientTypeSystem transientTypes = new TransientTypeSystem(ImmutableList.<StructTypeDefinition>of(),
ImmutableList.<HierarchicalTypeDefinition<TraitType>>of(), ImmutableList.copyOf(classDefs));
return transientTypes.defineTypes();
}
public Map<String, IDataType> defineTypes(TypesDef typesDef) throws AtlasException {
Map<String, IDataType> typesAdded = new HashMap<>();
for (EnumTypeDefinition enumDef : typesDef.enumTypesAsJavaList()) {
typesAdded.put(enumDef.name, defineEnumType(enumDef));
}
ImmutableList<StructTypeDefinition> structDefs = ImmutableList.copyOf(typesDef.structTypesAsJavaList());
ImmutableList<HierarchicalTypeDefinition<TraitType>> traitDefs =
ImmutableList.copyOf(typesDef.traitTypesAsJavaList());
ImmutableList<HierarchicalTypeDefinition<ClassType>> classDefs =
......@@ -248,31 +244,28 @@ public class TypeSystem {
public Map<String, IDataType> defineTypes(ImmutableList<StructTypeDefinition> structDefs,
ImmutableList<HierarchicalTypeDefinition<TraitType>> traitDefs,
ImmutableList<HierarchicalTypeDefinition<ClassType>> classDefs) throws AtlasException {
TransientTypeSystem transientTypes = new TransientTypeSystem(structDefs, traitDefs, classDefs);
Map<String, IDataType> definedTypes = transientTypes.defineTypes();
// LOG.debug("Defined new types " + Arrays.toString(definedTypes.keySet().toArray(new String[definedTypes.size()])));
// LOG.debug("Defined new types " + Arrays.toString(definedTypes.keySet().toArray(new
// String[definedTypes.size()])));
return definedTypes;
}
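A sketch of registering one class type through the list-based defineTypes overload above (type and attribute names illustrative):

    HierarchicalTypeDefinition<ClassType> tableDef = new HierarchicalTypeDefinition<ClassType>(
            ClassType.class, "Table", ImmutableList.<String>of(),
            new AttributeDefinition[]{new AttributeDefinition(
                    "name", DataTypes.STRING_TYPE.getName(), Multiplicity.REQUIRED, false, null)});
    ts.defineTypes(ImmutableList.<StructTypeDefinition>of(),
            ImmutableList.<HierarchicalTypeDefinition<TraitType>>of(), ImmutableList.of(tableDef));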
public DataTypes.ArrayType defineArrayType(IDataType elemType) throws AtlasException {
assert elemType != null;
DataTypes.ArrayType dT = new DataTypes.ArrayType(elemType);
// types.put(dT.getName(), dT);
// typeCategoriesToTypeNamesMap.put(DataTypes.TypeCategory.ARRAY, dT.getName());
return dT;
}
public DataTypes.MapType defineMapType(IDataType keyType, IDataType valueType) throws AtlasException {
assert keyType != null;
assert valueType != null;
DataTypes.MapType dT = new DataTypes.MapType(keyType, valueType);
// types.put(dT.getName(), dT);
// typeCategoriesToTypeNamesMap.put(DataTypes.TypeCategory.MAP, dT.getName());
return dT;
}
......@@ -283,8 +276,7 @@ public class TypeSystem {
public EnumType defineEnumType(EnumTypeDefinition eDef) throws AtlasException {
assert eDef.name != null;
if (types.containsKey(eDef.name)) {
throw new AtlasException(String.format("Redefinition of type %s not supported", eDef.name));
}
EnumType eT = new EnumType(this, eDef.name, eDef.enumValues);
......@@ -302,7 +294,7 @@ public class TypeSystem {
}
public void removeTypes(Collection<String> typeNames) {
for (String typeName : typeNames) {
IDataType dataType = types.get(typeName);
final DataTypes.TypeCategory typeCategory = dataType.getTypeCategory();
typeCategoriesToTypeNamesMap.get(typeCategory).remove(typeName);
......@@ -316,10 +308,8 @@ public class TypeSystem {
final ImmutableList<HierarchicalTypeDefinition<TraitType>> traitDefs;
final ImmutableList<HierarchicalTypeDefinition<ClassType>> classDefs;
Map<String, StructTypeDefinition> structNameToDefMap = new HashMap<>();
Map<String, HierarchicalTypeDefinition<TraitType>> traitNameToDefMap = new HashMap<>();
Map<String, HierarchicalTypeDefinition<ClassType>> classNameToDefMap = new HashMap<>();
Set<String> transientTypes;
......@@ -358,11 +348,10 @@ public class TypeSystem {
for (StructTypeDefinition sDef : structDefs) {
assert sDef.typeName != null;
if (dataType(sDef.typeName) != null) {
throw new AtlasException(String.format("Cannot redefine type %s", sDef.typeName));
}
TypeSystem.this.types
.put(sDef.typeName, new StructType(this, sDef.typeName, sDef.attributeDefinitions.length));
structNameToDefMap.put(sDef.typeName, sDef);
transientTypes.add(sDef.typeName);
}
......@@ -370,12 +359,10 @@ public class TypeSystem {
for (HierarchicalTypeDefinition<TraitType> traitDef : traitDefs) {
assert traitDef.typeName != null;
if (types.containsKey(traitDef.typeName)) {
throw new AtlasException(String.format("Cannot redefine type %s", traitDef.typeName));
}
TypeSystem.this.types.put(traitDef.typeName, new TraitType(this, traitDef.typeName, traitDef.superTypes,
traitDef.attributeDefinitions.length));
traitNameToDefMap.put(traitDef.typeName, traitDef);
transientTypes.add(traitDef.typeName);
......@@ -384,20 +371,17 @@ public class TypeSystem {
for (HierarchicalTypeDefinition<ClassType> classDef : classDefs) {
assert classDef.typeName != null;
if (types.containsKey(classDef.typeName)) {
throw new AtlasException(String.format("Cannot redefine type %s", classDef.typeName));
}
TypeSystem.this.types.put(classDef.typeName, new ClassType(this, classDef.typeName, classDef.superTypes,
classDef.attributeDefinitions.length));
classNameToDefMap.put(classDef.typeName, classDef);
transientTypes.add(classDef.typeName);
}
}
private <U extends HierarchicalType> void validateSuperTypes(Class<U> cls, HierarchicalTypeDefinition<U> def)
throws AtlasException {
Set<String> s = new HashSet<>();
ImmutableList<String> superTypes = def.superTypes;
......@@ -405,22 +389,21 @@ public class TypeSystem {
if (s.contains(superTypeName)) {
throw new AtlasException(
String.format("Type %s extends superType %s multiple times",
def.typeName, superTypeName));
String.format("Type %s extends superType %s multiple times", def.typeName, superTypeName));
}
IDataType dT = dataType(superTypeName);
if (dT == null) {
throw new AtlasException(
String.format("Unknown superType %s in definition of type %s",
superTypeName, def.typeName));
String.format("Unknown superType %s in definition of type %s", superTypeName,
def.typeName));
}
if (!cls.isAssignableFrom(dT.getClass())) {
throw new AtlasException(
String.format("SuperType %s must be a %s, in definition of type %s",
superTypeName, cls.getName(), def.typeName));
String.format("SuperType %s must be a %s, in definition of type %s", superTypeName,
cls.getName(), def.typeName));
}
s.add(superTypeName);
}
......@@ -445,8 +428,7 @@ public class TypeSystem {
}
}
private AttributeInfo constructAttributeInfo(AttributeDefinition attrDef) throws AtlasException {
AttributeInfo info = new AttributeInfo(this, attrDef, null);
if (transientTypes.contains(attrDef.dataTypeName)) {
recursiveRefs.add(info);
......@@ -466,9 +448,8 @@ public class TypeSystem {
}
}
if (info.multiplicity.upper > 1 && !(info.dataType().getTypeCategory() == DataTypes.TypeCategory.MAP
|| info.dataType().getTypeCategory() == DataTypes.TypeCategory.ARRAY)) {
throw new AtlasException(
String.format("A multiplicty of more than one requires a collection type for attribute '%s'",
info.name));
......@@ -477,8 +458,7 @@ public class TypeSystem {
return info;
}
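This guard is why a multi-valued attribute has to be declared against an array&lt;...&gt; or map&lt;...&gt; type. A hedged method-body sketch (imports as in the surrounding file; the attribute name is hypothetical):

TypeSystem ts = TypeSystem.getInstance();

// Accepted: COLLECTION multiplicity backed by an ARRAY-category data type.
AttributeDefinition ok = new AttributeDefinition("tags",
        ts.defineArrayType(DataTypes.STRING_TYPE).getName(), Multiplicity.COLLECTION, false, null);

// Constructs fine here, but constructAttributeInfo rejects it during
// defineTypes: upper bound > 1 with a scalar (non-MAP, non-ARRAY) type.
AttributeDefinition bad = new AttributeDefinition("tags",
        DataTypes.STRING_TYPE.getName(), Multiplicity.COLLECTION, false, null);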
private StructType constructStructureType(StructTypeDefinition def)
throws AtlasException {
private StructType constructStructureType(StructTypeDefinition def) throws AtlasException {
AttributeInfo[] infos = new AttributeInfo[def.attributeDefinitions.length];
for (int i = 0; i < def.attributeDefinitions.length; i++) {
infos[i] = constructAttributeInfo(def.attributeDefinitions[i]);
......@@ -490,25 +470,20 @@ public class TypeSystem {
}
private <U extends HierarchicalType> U constructHierarchicalType(Class<U> cls,
HierarchicalTypeDefinition<U> def)
throws AtlasException {
HierarchicalTypeDefinition<U> def) throws AtlasException {
AttributeInfo[] infos = new AttributeInfo[def.attributeDefinitions.length];
for (int i = 0; i < def.attributeDefinitions.length; i++) {
infos[i] = constructAttributeInfo(def.attributeDefinitions[i]);
}
try {
Constructor<U> cons = cls.getDeclaredConstructor(
TypeSystem.class,
String.class,
ImmutableList.class,
Constructor<U> cons = cls.getDeclaredConstructor(TypeSystem.class, String.class, ImmutableList.class,
AttributeInfo[].class);
U type = cons.newInstance(TypeSystem.this, def.typeName, def.superTypes, infos);
TypeSystem.this.types.put(def.typeName, type);
return type;
} catch (Exception e) {
throw new AtlasException(
String.format("Cannot construct Type of MetaType %s", cls.getName()), e);
throw new AtlasException(String.format("Cannot construct Type of MetaType %s", cls.getName()), e);
}
}
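constructHierarchicalType funnels TraitType and ClassType through one code path by resolving a four-argument constructor reflectively. The same JDK pattern in isolation (the Node class is a stand-in):

import java.lang.reflect.Constructor;

public class ReflectiveFactorySketch {
    static class Node {
        final String name;
        Node(String name) { this.name = name; }
    }

    // Mirrors the lookup above: match a declared constructor by its exact
    // parameter list, then instantiate through it.
    static <U> U construct(Class<U> cls, String name) throws Exception {
        Constructor<U> cons = cls.getDeclaredConstructor(String.class);
        return cons.newInstance(name);
    }

    public static void main(String[] args) throws Exception {
        System.out.println(construct(Node.class, "t1").name);   // t1
    }
}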
......@@ -537,14 +512,12 @@ public class TypeSystem {
}
for (TraitType traitType : traitTypes) {
constructHierarchicalType(TraitType.class,
traitNameToDefMap.get(traitType.getName()));
constructHierarchicalType(TraitType.class, traitNameToDefMap.get(traitType.getName()));
typeCategoriesToTypeNamesMap.put(DataTypes.TypeCategory.TRAIT, traitType.getName());
}
for (ClassType classType : classTypes) {
constructHierarchicalType(ClassType.class,
classNameToDefMap.get(classType.getName()));
constructHierarchicalType(ClassType.class, classNameToDefMap.get(classType.getName()));
typeCategoriesToTypeNamesMap.put(DataTypes.TypeCategory.CLASS, classType.getName());
}
}
......@@ -599,29 +572,25 @@ public class TypeSystem {
}
@Override
public StructType defineStructType(String name, boolean errorIfExists,
AttributeDefinition... attrDefs)
public StructType defineStructType(String name, boolean errorIfExists, AttributeDefinition... attrDefs)
throws AtlasException {
throw new AtlasException("Internal Error: define type called on TrasientTypeSystem");
}
@Override
public TraitType defineTraitType(HierarchicalTypeDefinition traitDef)
throws AtlasException {
public TraitType defineTraitType(HierarchicalTypeDefinition traitDef) throws AtlasException {
throw new AtlasException("Internal Error: define type called on TrasientTypeSystem");
}
@Override
public ClassType defineClassType(HierarchicalTypeDefinition<ClassType> classDef
) throws AtlasException {
public ClassType defineClassType(HierarchicalTypeDefinition<ClassType> classDef) throws AtlasException {
throw new AtlasException("Internal Error: define type called on TrasientTypeSystem");
}
@Override
public Map<String, IDataType> defineTypes(ImmutableList<StructTypeDefinition> structDefs,
ImmutableList<HierarchicalTypeDefinition<TraitType>> traitDefs,
ImmutableList<HierarchicalTypeDefinition<ClassType>> classDefs)
throws AtlasException {
ImmutableList<HierarchicalTypeDefinition<ClassType>> classDefs) throws AtlasException {
throw new AtlasException("Internal Error: define type called on TrasientTypeSystem");
}
......@@ -631,8 +600,7 @@ public class TypeSystem {
}
@Override
public DataTypes.MapType defineMapType(IDataType keyType, IDataType valueType)
throws AtlasException {
public DataTypes.MapType defineMapType(IDataType keyType, IDataType valueType) throws AtlasException {
throw new AtlasException("Internal Error: define type called on TrasientTypeSystem");
}
}
......@@ -643,11 +611,12 @@ public class TypeSystem {
private static final String TYP_NAME = "__IdType";
private IdType() {
AttributeDefinition idAttr = new AttributeDefinition(ID_ATTRNAME,
DataTypes.STRING_TYPE.getName(), Multiplicity.REQUIRED, false, null);
AttributeDefinition idAttr =
new AttributeDefinition(ID_ATTRNAME, DataTypes.STRING_TYPE.getName(), Multiplicity.REQUIRED, false,
null);
AttributeDefinition typNmAttr =
new AttributeDefinition(TYPENAME_ATTRNAME,
DataTypes.STRING_TYPE.getName(), Multiplicity.REQUIRED, false, null);
new AttributeDefinition(TYPENAME_ATTRNAME, DataTypes.STRING_TYPE.getName(), Multiplicity.REQUIRED,
false, null);
try {
AttributeInfo[] infos = new AttributeInfo[2];
infos[0] = new AttributeInfo(TypeSystem.this, idAttr, null);
......@@ -665,9 +634,17 @@ public class TypeSystem {
return getDataType(StructType.class, TYP_NAME);
}
public String getName() { return TYP_NAME; }
public String idAttrName() { return ID_ATTRNAME;}
public String typeNameAttrName() { return TYPENAME_ATTRNAME;}
public String getName() {
return TYP_NAME;
}
public String idAttrName() {
return ID_ATTRNAME;
}
public String typeNameAttrName() {
return TYPENAME_ATTRNAME;
}
}
public static final String ID_STRUCT_ID_ATTRNAME = IdType.ID_ATTRNAME;
......
......@@ -36,13 +36,11 @@ public class TypeUtils {
public static final String NAME_REGEX = "[a-zA-Z][a-zA-Z0-9_]*";
public static final Pattern NAME_PATTERN = Pattern.compile(NAME_REGEX);
public static final Pattern ARRAY_TYPE_NAME_PATTERN = Pattern
.compile(String.format("array<(%s)>", NAME_REGEX));
public static final Pattern ARRAY_TYPE_NAME_PATTERN = Pattern.compile(String.format("array<(%s)>", NAME_REGEX));
public static final Pattern MAP_TYPE_NAME_PATTERN =
Pattern.compile(String.format("map<(%s),(%s)>", NAME_REGEX, NAME_REGEX));
public static void outputVal(String val, Appendable buf, String prefix)
throws AtlasException {
public static void outputVal(String val, Appendable buf, String prefix) throws AtlasException {
try {
buf.append(prefix).append(val);
} catch (IOException ie) {
......@@ -73,14 +71,14 @@ public class TypeUtils {
return ImmutableMap.copyOf(b);
}
public static TypesDef getTypesDef(ImmutableList<EnumTypeDefinition> enums, ImmutableList<StructTypeDefinition> structs,
ImmutableList<HierarchicalTypeDefinition<TraitType>> traits,
public static TypesDef getTypesDef(ImmutableList<EnumTypeDefinition> enums,
ImmutableList<StructTypeDefinition> structs, ImmutableList<HierarchicalTypeDefinition<TraitType>> traits,
ImmutableList<HierarchicalTypeDefinition<ClassType>> classes) {
return new TypesDef(JavaConversions.asScalaBuffer(enums), JavaConversions.asScalaBuffer(structs),
JavaConversions.asScalaBuffer(traits), JavaConversions.asScalaBuffer(classes));
}
protected static class Pair<L,R> {
protected static class Pair<L, R> {
protected L left;
protected R right;
......
......@@ -68,8 +68,7 @@ public class TypedStructHandler {
}
}
return ts;
} else if (val instanceof StructInstance &&
((StructInstance) val).getTypeName() == structType.getName()) {
} else if (val instanceof StructInstance && ((StructInstance) val).getTypeName() == structType.getName()) {
return (StructInstance) val;
} else {
throw new ValueConversionException(structType, val);
......@@ -86,9 +85,7 @@ public class TypedStructHandler {
}
public ITypedStruct createInstance() {
return new StructInstance(structType.getName(),
fieldMapping,
new boolean[fieldMapping.fields.size()],
return new StructInstance(structType.getName(), fieldMapping, new boolean[fieldMapping.fields.size()],
fieldMapping.numBools == 0 ? null : new boolean[fieldMapping.numBools],
fieldMapping.numBytes == 0 ? null : new byte[fieldMapping.numBytes],
fieldMapping.numShorts == 0 ? null : new short[fieldMapping.numShorts],
......@@ -96,18 +93,15 @@ public class TypedStructHandler {
fieldMapping.numLongs == 0 ? null : new long[fieldMapping.numLongs],
fieldMapping.numFloats == 0 ? null : new float[fieldMapping.numFloats],
fieldMapping.numDoubles == 0 ? null : new double[fieldMapping.numDoubles],
fieldMapping.numBigDecimals == 0 ? null
: new BigDecimal[fieldMapping.numBigDecimals],
fieldMapping.numBigDecimals == 0 ? null : new BigDecimal[fieldMapping.numBigDecimals],
fieldMapping.numBigInts == 0 ? null : new BigInteger[fieldMapping.numBigInts],
fieldMapping.numDates == 0 ? null : new Date[fieldMapping.numDates],
fieldMapping.numStrings == 0 ? null : new String[fieldMapping.numStrings],
fieldMapping.numArrays == 0 ? null : new ImmutableList[fieldMapping.numArrays],
fieldMapping.numMaps == 0 ? null : new ImmutableMap[fieldMapping.numMaps],
fieldMapping.numStructs == 0 ? null : new StructInstance[fieldMapping.numStructs],
fieldMapping.numReferenceables == 0 ? null
: new ReferenceableInstance[fieldMapping.numReferenceables],
fieldMapping.numReferenceables == 0 ? null
: new Id[fieldMapping.numReferenceables]);
fieldMapping.numReferenceables == 0 ? null : new ReferenceableInstance[fieldMapping.numReferenceables],
fieldMapping.numReferenceables == 0 ? null : new Id[fieldMapping.numReferenceables]);
}
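createInstance allocates one backing array per physical kind and leaves unused kinds null, so an instance only pays for the field kinds its type declares. The allocation idiom in isolation (counts are hypothetical, method-body sketch):

// e.g. a struct type with two int fields, one string field, no booleans
int numInts = 2, numStrings = 1, numBools = 0;

int[] ints = numInts == 0 ? null : new int[numInts];
String[] strings = numStrings == 0 ? null : new String[numStrings];
boolean[] bools = numBools == 0 ? null : new boolean[numBools];   // stays null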
public void output(IStruct s, Appendable buf, String prefix) throws AtlasException {
......
......@@ -27,19 +27,16 @@ public class ValueConversionException extends AtlasException {
}
public ValueConversionException(IDataType typ, Object val, Throwable t) {
super(String
.format("Cannot convert value '%s' to datatype %s", val.toString(), typ.getName()),
t);
super(String.format("Cannot convert value '%s' to datatype %s", val.toString(), typ.getName()), t);
}
public ValueConversionException(IDataType typ, Object val, String msg) {
super(String.format("Cannot convert value '%s' to datatype %s because: %s",
val.toString(), typ.getName(), msg));
super(String
.format("Cannot convert value '%s' to datatype %s because: %s", val.toString(), typ.getName(), msg));
}
public ValueConversionException(String typeName, Object val, String msg) {
super(String.format("Cannot convert value '%s' to datatype %s because: %s",
val.toString(), typeName, msg));
super(String.format("Cannot convert value '%s' to datatype %s because: %s", val.toString(), typeName, msg));
}
protected ValueConversionException(String msg) {
......
......@@ -37,40 +37,32 @@ public class TypesUtil {
private TypesUtil() {
}
public static AttributeDefinition createOptionalAttrDef(String name,
IDataType dataType) {
return new AttributeDefinition(name, dataType.getName(),
Multiplicity.OPTIONAL, false, null);
public static AttributeDefinition createOptionalAttrDef(String name, IDataType dataType) {
return new AttributeDefinition(name, dataType.getName(), Multiplicity.OPTIONAL, false, null);
}
public static AttributeDefinition createOptionalAttrDef(String name,
String dataType) {
public static AttributeDefinition createOptionalAttrDef(String name, String dataType) {
return new AttributeDefinition(name, dataType, Multiplicity.OPTIONAL, false, null);
}
public static AttributeDefinition createRequiredAttrDef(String name,
String dataType) {
public static AttributeDefinition createRequiredAttrDef(String name, String dataType) {
return new AttributeDefinition(name, dataType, Multiplicity.REQUIRED, false, null);
}
public static AttributeDefinition createUniqueRequiredAttrDef(String name,
IDataType dataType) {
return new AttributeDefinition(name, dataType.getName(),
Multiplicity.REQUIRED, false, true, true, null);
public static AttributeDefinition createUniqueRequiredAttrDef(String name, IDataType dataType) {
return new AttributeDefinition(name, dataType.getName(), Multiplicity.REQUIRED, false, true, true, null);
}
public static AttributeDefinition createRequiredAttrDef(String name,
IDataType dataType) {
return new AttributeDefinition(name, dataType.getName(),
Multiplicity.REQUIRED, false, null);
public static AttributeDefinition createRequiredAttrDef(String name, IDataType dataType) {
return new AttributeDefinition(name, dataType.getName(), Multiplicity.REQUIRED, false, null);
}
public static EnumTypeDefinition createEnumTypeDef(String name, EnumValue... enumValues) {
return new EnumTypeDefinition(name, enumValues);
}
public static HierarchicalTypeDefinition<TraitType> createTraitTypeDef(
String name, ImmutableList<String> superTypes, AttributeDefinition... attrDefs) {
public static HierarchicalTypeDefinition<TraitType> createTraitTypeDef(String name,
ImmutableList<String> superTypes, AttributeDefinition... attrDefs) {
return new HierarchicalTypeDefinition<>(TraitType.class, name, superTypes, attrDefs);
}
......@@ -78,8 +70,8 @@ public class TypesUtil {
return new StructTypeDefinition(name, attrDefs);
}
public static HierarchicalTypeDefinition<ClassType> createClassTypeDef(
String name, ImmutableList<String> superTypes, AttributeDefinition... attrDefs) {
public static HierarchicalTypeDefinition<ClassType> createClassTypeDef(String name,
ImmutableList<String> superTypes, AttributeDefinition... attrDefs) {
return new HierarchicalTypeDefinition<>(ClassType.class, name, superTypes, attrDefs);
}
}
......@@ -62,38 +62,26 @@ public class SerializationJavaTest extends BaseTest {
TypeSystem ts = getTypeSystem();
HierarchicalTypeDefinition<ClassType> deptTypeDef = createClassTypeDef("Department",
ImmutableList.<String>of(),
HierarchicalTypeDefinition<ClassType> deptTypeDef = createClassTypeDef("Department", ImmutableList.<String>of(),
createRequiredAttrDef("name", DataTypes.STRING_TYPE),
new AttributeDefinition("employees",
String.format("array<%s>", "Person"), Multiplicity.COLLECTION, true,
"department")
);
HierarchicalTypeDefinition<ClassType> personTypeDef = createClassTypeDef("Person",
ImmutableList.<String>of(),
new AttributeDefinition("employees", String.format("array<%s>", "Person"), Multiplicity.COLLECTION,
true, "department"));
HierarchicalTypeDefinition<ClassType> personTypeDef = createClassTypeDef("Person", ImmutableList.<String>of(),
createRequiredAttrDef("name", DataTypes.STRING_TYPE),
new AttributeDefinition("department",
"Department", Multiplicity.REQUIRED, false, "employees"),
new AttributeDefinition("manager",
"Manager", Multiplicity.OPTIONAL, false, "subordinates")
);
HierarchicalTypeDefinition<ClassType> managerTypeDef = createClassTypeDef("Manager",
ImmutableList.<String>of("Person"),
new AttributeDefinition("subordinates",
String.format("array<%s>", "Person"), Multiplicity.COLLECTION, false,
"manager")
);
HierarchicalTypeDefinition<TraitType> securityClearanceTypeDef = createTraitTypeDef(
"SecurityClearance",
ImmutableList.<String>of(),
createRequiredAttrDef("level", DataTypes.INT_TYPE)
);
new AttributeDefinition("department", "Department", Multiplicity.REQUIRED, false, "employees"),
new AttributeDefinition("manager", "Manager", Multiplicity.OPTIONAL, false, "subordinates"));
HierarchicalTypeDefinition<ClassType> managerTypeDef =
createClassTypeDef("Manager", ImmutableList.<String>of("Person"),
new AttributeDefinition("subordinates", String.format("array<%s>", "Person"),
Multiplicity.COLLECTION, false, "manager"));
HierarchicalTypeDefinition<TraitType> securityClearanceTypeDef =
createTraitTypeDef("SecurityClearance", ImmutableList.<String>of(),
createRequiredAttrDef("level", DataTypes.INT_TYPE));
ts.defineTypes(ImmutableList.<StructTypeDefinition>of(),
ImmutableList.<HierarchicalTypeDefinition<TraitType>>of(securityClearanceTypeDef),
ImmutableList.<HierarchicalTypeDefinition<ClassType>>of(deptTypeDef, personTypeDef,
managerTypeDef));
ImmutableList.<HierarchicalTypeDefinition<ClassType>>of(deptTypeDef, personTypeDef, managerTypeDef));
Referenceable hrDept = new Referenceable("Department");
Referenceable john = new Referenceable("Person");
......@@ -155,11 +143,9 @@ public class SerializationJavaTest extends BaseTest {
public void testTrait() throws AtlasException {
TypeSystem ts = getTypeSystem();
HierarchicalTypeDefinition<TraitType> securityClearanceTypeDef = createTraitTypeDef(
"SecurityClearance2",
ImmutableList.<String>of(),
createRequiredAttrDef("level", DataTypes.INT_TYPE)
);
HierarchicalTypeDefinition<TraitType> securityClearanceTypeDef =
createTraitTypeDef("SecurityClearance2", ImmutableList.<String>of(),
createRequiredAttrDef("level", DataTypes.INT_TYPE));
ts.defineTypes(ImmutableList.<StructTypeDefinition>of(),
ImmutableList.<HierarchicalTypeDefinition<TraitType>>of(securityClearanceTypeDef),
......
......@@ -37,11 +37,10 @@ public abstract class BaseTest {
public static final String STRUCT_TYPE_1 = "t1";
public static final String STRUCT_TYPE_2 = "t2";
public static final String TEST_DATE = "2014-12-11T02:35:58.440Z";
public static final long TEST_DATE_IN_LONG=1418265358440L;
public static final long TEST_DATE_IN_LONG = 1418265358440L;
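TEST_DATE and TEST_DATE_IN_LONG encode the same instant; the long is just the ISO timestamp in epoch milliseconds, which java.time confirms:

import java.time.Instant;

public class TestDateSketch {
    public static void main(String[] args) {
        long millis = Instant.parse("2014-12-11T02:35:58.440Z").toEpochMilli();
        System.out.println(millis == 1418265358440L);   // true
    }
}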
public static Struct createStruct() throws AtlasException {
StructType structType = TypeSystem.getInstance().getDataType(
StructType.class, STRUCT_TYPE_1);
StructType structType = TypeSystem.getInstance().getDataType(StructType.class, STRUCT_TYPE_1);
Struct s = new Struct(structType.getName());
s.set("a", 1);
s.set("b", true);
......@@ -56,8 +55,7 @@ public abstract class BaseTest {
s.set("k", new BigDecimal(1));
s.set("l", new Date(1418265358440L));
s.set("m", Lists.asList(1, new Integer[]{1}));
s.set("n", Lists.asList(BigDecimal.valueOf(1.1),
new BigDecimal[]{BigDecimal.valueOf(1.1)}));
s.set("n", Lists.asList(BigDecimal.valueOf(1.1), new BigDecimal[]{BigDecimal.valueOf(1.1)}));
Map<String, Double> hm = Maps.newHashMap();
hm.put("a", 1.0);
hm.put("b", 2.0);
......@@ -74,9 +72,8 @@ public abstract class BaseTest {
TypeSystem ts = TypeSystem.getInstance();
ts.reset();
StructType structType = ts.defineStructType(STRUCT_TYPE_1,
true,
TypesUtil.createRequiredAttrDef("a", DataTypes.INT_TYPE),
StructType structType =
ts.defineStructType(STRUCT_TYPE_1, true, TypesUtil.createRequiredAttrDef("a", DataTypes.INT_TYPE),
TypesUtil.createOptionalAttrDef("b", DataTypes.BOOLEAN_TYPE),
TypesUtil.createOptionalAttrDef("c", DataTypes.BYTE_TYPE),
TypesUtil.createOptionalAttrDef("d", DataTypes.SHORT_TYPE),
......@@ -89,26 +86,24 @@ public abstract class BaseTest {
TypesUtil.createOptionalAttrDef("k", DataTypes.BIGDECIMAL_TYPE),
TypesUtil.createOptionalAttrDef("l", DataTypes.DATE_TYPE),
TypesUtil.createOptionalAttrDef("m", ts.defineArrayType(DataTypes.INT_TYPE)),
TypesUtil.createOptionalAttrDef("n", ts.defineArrayType(DataTypes.BIGDECIMAL_TYPE)),
TypesUtil.createOptionalAttrDef("o",
TypesUtil.createOptionalAttrDef("n", ts.defineArrayType(DataTypes.BIGDECIMAL_TYPE)), TypesUtil
.createOptionalAttrDef("o",
ts.defineMapType(DataTypes.STRING_TYPE, DataTypes.DOUBLE_TYPE)));
System.out.println("defined structType = " + structType);
StructType recursiveStructType = ts.defineStructType(STRUCT_TYPE_2,
true,
TypesUtil.createRequiredAttrDef("a", DataTypes.INT_TYPE),
StructType recursiveStructType =
ts.defineStructType(STRUCT_TYPE_2, true, TypesUtil.createRequiredAttrDef("a", DataTypes.INT_TYPE),
TypesUtil.createOptionalAttrDef("s", STRUCT_TYPE_2));
System.out.println("defined recursiveStructType = " + recursiveStructType);
}
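t2 is self-referential: a required int "a" plus an optional "s" typed as t2 itself, so instances can nest. A short method-body sketch of building one, using the Struct API exactly as createStruct does above:

Struct inner = new Struct(STRUCT_TYPE_2);
inner.set("a", 1);

Struct outer = new Struct(STRUCT_TYPE_2);
outer.set("a", 2);
outer.set("s", inner);   // the optional self-typed field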
protected Map<String, IDataType> defineTraits(HierarchicalTypeDefinition... tDefs)
throws AtlasException {
protected Map<String, IDataType> defineTraits(HierarchicalTypeDefinition... tDefs) throws AtlasException {
return getTypeSystem().defineTraitTypes(tDefs);
}
protected Map<String, IDataType> defineClasses(
HierarchicalTypeDefinition<ClassType>... classDefs) throws AtlasException {
protected Map<String, IDataType> defineClasses(HierarchicalTypeDefinition<ClassType>... classDefs)
throws AtlasException {
return getTypeSystem().defineClassTypes(classDefs);
}
......@@ -123,47 +118,30 @@ public abstract class BaseTest {
protected void defineDeptEmployeeTypes(TypeSystem ts) throws AtlasException {
HierarchicalTypeDefinition<ClassType> deptTypeDef = TypesUtil
.createClassTypeDef("Department",
ImmutableList.<String>of(),
.createClassTypeDef("Department", ImmutableList.<String>of(),
TypesUtil.createRequiredAttrDef("name", DataTypes.STRING_TYPE),
new AttributeDefinition("employees",
String.format("array<%s>", "Person"), Multiplicity.COLLECTION, true,
"department")
);
HierarchicalTypeDefinition<ClassType> personTypeDef = TypesUtil.createClassTypeDef("Person",
ImmutableList.<String>of(),
new AttributeDefinition("employees", String.format("array<%s>", "Person"),
Multiplicity.COLLECTION, true, "department"));
HierarchicalTypeDefinition<ClassType> personTypeDef = TypesUtil
.createClassTypeDef("Person", ImmutableList.<String>of(),
TypesUtil.createRequiredAttrDef("name", DataTypes.STRING_TYPE),
new AttributeDefinition("department",
"Department", Multiplicity.REQUIRED, false, "employees"),
new AttributeDefinition("manager",
"Manager", Multiplicity.OPTIONAL, false, "subordinates")
);
HierarchicalTypeDefinition<ClassType> managerTypeDef =
TypesUtil.createClassTypeDef("Manager",
ImmutableList.of("Person"),
new AttributeDefinition("subordinates",
String.format("array<%s>", "Person"),
Multiplicity.COLLECTION, false, "manager")
);
HierarchicalTypeDefinition<TraitType> securityClearanceTypeDef =
TypesUtil.createTraitTypeDef(
"SecurityClearance",
ImmutableList.<String>of(),
TypesUtil.createRequiredAttrDef("level", DataTypes.INT_TYPE)
);
ts.defineTypes(ImmutableList.<StructTypeDefinition>of(),
ImmutableList.of(securityClearanceTypeDef),
ImmutableList.of(deptTypeDef, personTypeDef,
managerTypeDef));
ImmutableList.of(
ts.getDataType(HierarchicalType.class, "SecurityClearance"),
ts.getDataType(ClassType.class, "Department"),
ts.getDataType(ClassType.class, "Person"),
ts.getDataType(ClassType.class, "Manager")
);
new AttributeDefinition("department", "Department", Multiplicity.REQUIRED, false, "employees"),
new AttributeDefinition("manager", "Manager", Multiplicity.OPTIONAL, false, "subordinates"));
HierarchicalTypeDefinition<ClassType> managerTypeDef = TypesUtil
.createClassTypeDef("Manager", ImmutableList.of("Person"),
new AttributeDefinition("subordinates", String.format("array<%s>", "Person"),
Multiplicity.COLLECTION, false, "manager"));
HierarchicalTypeDefinition<TraitType> securityClearanceTypeDef = TypesUtil
.createTraitTypeDef("SecurityClearance", ImmutableList.<String>of(),
TypesUtil.createRequiredAttrDef("level", DataTypes.INT_TYPE));
ts.defineTypes(ImmutableList.<StructTypeDefinition>of(), ImmutableList.of(securityClearanceTypeDef),
ImmutableList.of(deptTypeDef, personTypeDef, managerTypeDef));
ImmutableList.of(ts.getDataType(HierarchicalType.class, "SecurityClearance"),
ts.getDataType(ClassType.class, "Department"), ts.getDataType(ClassType.class, "Person"),
ts.getDataType(ClassType.class, "Manager"));
}
protected Referenceable createDeptEg1(TypeSystem ts) throws AtlasException {
......
......@@ -48,26 +48,16 @@ public class EnumTest extends BaseTest {
}
void defineEnums(TypeSystem ts) throws AtlasException {
ts.defineEnumType("HiveObjectType",
new EnumValue("GLOBAL", 1),
new EnumValue("DATABASE", 2),
new EnumValue("TABLE", 3),
new EnumValue("PARTITION", 4),
new EnumValue("COLUMN", 5));
ts.defineEnumType("PrincipalType",
new EnumValue("USER", 1),
new EnumValue("ROLE", 2),
ts.defineEnumType("HiveObjectType", new EnumValue("GLOBAL", 1), new EnumValue("DATABASE", 2),
new EnumValue("TABLE", 3), new EnumValue("PARTITION", 4), new EnumValue("COLUMN", 5));
ts.defineEnumType("PrincipalType", new EnumValue("USER", 1), new EnumValue("ROLE", 2),
new EnumValue("GROUP", 3));
ts.defineEnumType("TxnState",
new EnumValue("COMMITTED", 1),
new EnumValue("ABORTED", 2),
ts.defineEnumType("TxnState", new EnumValue("COMMITTED", 1), new EnumValue("ABORTED", 2),
new EnumValue("OPEN", 3));
ts.defineEnumType("LockLevel",
new EnumValue("DB", 1),
new EnumValue("TABLE", 2),
ts.defineEnumType("LockLevel", new EnumValue("DB", 1), new EnumValue("TABLE", 2),
new EnumValue("PARTITION", 3));
}
......@@ -86,8 +76,7 @@ public class EnumTest extends BaseTest {
s.set("k", new BigDecimal(1));
s.set("l", new Date(1418265358440L));
s.set("m", Lists.asList(1, new Integer[]{1}));
s.set("n",
Lists.asList(BigDecimal.valueOf(1.1), new BigDecimal[]{BigDecimal.valueOf(1.1)}));
s.set("n", Lists.asList(BigDecimal.valueOf(1.1), new BigDecimal[]{BigDecimal.valueOf(1.1)}));
Map<String, Double> hm = Maps.newHashMap();
hm.put("a", 1.0);
hm.put("b", 2.0);
......@@ -111,15 +100,13 @@ public class EnumTest extends BaseTest {
}
protected ClassType defineClassTypeWithEnum(TypeSystem ts) throws AtlasException {
return ts.defineClassType(createClassTypeDef("t4",
ImmutableList.<String>of(),
createRequiredAttrDef("a", DataTypes.INT_TYPE),
return ts.defineClassType(
createClassTypeDef("t4", ImmutableList.<String>of(), createRequiredAttrDef("a", DataTypes.INT_TYPE),
createOptionalAttrDef("b", DataTypes.BOOLEAN_TYPE),
createOptionalAttrDef("c", DataTypes.BYTE_TYPE),
createOptionalAttrDef("d", DataTypes.SHORT_TYPE),
createOptionalAttrDef("enum1", ts.getDataType(EnumType.class, "HiveObjectType")),
createOptionalAttrDef("e", DataTypes.INT_TYPE),
createOptionalAttrDef("f", DataTypes.INT_TYPE),
createOptionalAttrDef("e", DataTypes.INT_TYPE), createOptionalAttrDef("f", DataTypes.INT_TYPE),
createOptionalAttrDef("g", DataTypes.LONG_TYPE),
createOptionalAttrDef("enum2", ts.getDataType(EnumType.class, "PrincipalType")),
createOptionalAttrDef("h", DataTypes.FLOAT_TYPE),
......@@ -130,8 +117,7 @@ public class EnumTest extends BaseTest {
createOptionalAttrDef("l", DataTypes.DATE_TYPE),
createOptionalAttrDef("m", ts.defineArrayType(DataTypes.INT_TYPE)),
createOptionalAttrDef("n", ts.defineArrayType(DataTypes.BIGDECIMAL_TYPE)),
createOptionalAttrDef("o",
ts.defineMapType(DataTypes.STRING_TYPE, DataTypes.DOUBLE_TYPE)),
createOptionalAttrDef("o", ts.defineMapType(DataTypes.STRING_TYPE, DataTypes.DOUBLE_TYPE)),
createOptionalAttrDef("enum4", ts.getDataType(EnumType.class, "LockLevel"))));
}
......@@ -139,27 +125,21 @@ public class EnumTest extends BaseTest {
public void testStruct() throws AtlasException {
TypeSystem ts = getTypeSystem();
defineEnums(ts);
StructType structType = ts.defineStructType("t3",
true,
createRequiredAttrDef("a", DataTypes.INT_TYPE),
createOptionalAttrDef("b", DataTypes.BOOLEAN_TYPE),
createOptionalAttrDef("c", DataTypes.BYTE_TYPE),
StructType structType = ts.defineStructType("t3", true, createRequiredAttrDef("a", DataTypes.INT_TYPE),
createOptionalAttrDef("b", DataTypes.BOOLEAN_TYPE), createOptionalAttrDef("c", DataTypes.BYTE_TYPE),
createOptionalAttrDef("d", DataTypes.SHORT_TYPE),
createOptionalAttrDef("enum1", ts.getDataType(EnumType.class, "HiveObjectType")),
createOptionalAttrDef("e", DataTypes.INT_TYPE),
createOptionalAttrDef("f", DataTypes.INT_TYPE),
createOptionalAttrDef("e", DataTypes.INT_TYPE), createOptionalAttrDef("f", DataTypes.INT_TYPE),
createOptionalAttrDef("g", DataTypes.LONG_TYPE),
createOptionalAttrDef("enum2", ts.getDataType(EnumType.class, "PrincipalType")),
createOptionalAttrDef("h", DataTypes.FLOAT_TYPE),
createOptionalAttrDef("i", DataTypes.DOUBLE_TYPE),
createOptionalAttrDef("h", DataTypes.FLOAT_TYPE), createOptionalAttrDef("i", DataTypes.DOUBLE_TYPE),
createOptionalAttrDef("j", DataTypes.BIGINTEGER_TYPE),
createOptionalAttrDef("k", DataTypes.BIGDECIMAL_TYPE),
createOptionalAttrDef("enum3", ts.getDataType(EnumType.class, "TxnState")),
createOptionalAttrDef("l", DataTypes.DATE_TYPE),
createOptionalAttrDef("m", ts.defineArrayType(DataTypes.INT_TYPE)),
createOptionalAttrDef("n", ts.defineArrayType(DataTypes.BIGDECIMAL_TYPE)),
createOptionalAttrDef("o",
ts.defineMapType(DataTypes.STRING_TYPE, DataTypes.DOUBLE_TYPE)),
createOptionalAttrDef("o", ts.defineMapType(DataTypes.STRING_TYPE, DataTypes.DOUBLE_TYPE)),
createOptionalAttrDef("enum4", ts.getDataType(EnumType.class, "LockLevel")));
Struct s = createStructWithEnum("t3");
......
......@@ -34,8 +34,7 @@ public class StructTest extends BaseTest {
public void setup() throws Exception {
super.setup();
structType = getTypeSystem().getDataType(StructType.class, STRUCT_TYPE_1);
recursiveStructType = getTypeSystem()
.getDataType(StructType.class, STRUCT_TYPE_2);
recursiveStructType = getTypeSystem().getDataType(StructType.class, STRUCT_TYPE_2);
}
@Test
......
......@@ -63,15 +63,13 @@ public class TraitTest extends BaseTest {
*/
@Test
public void test1() throws AtlasException {
HierarchicalTypeDefinition A = createTraitTypeDef("A", null,
createRequiredAttrDef("a", DataTypes.INT_TYPE),
createOptionalAttrDef("b", DataTypes.BOOLEAN_TYPE),
createOptionalAttrDef("c", DataTypes.BYTE_TYPE),
HierarchicalTypeDefinition A = createTraitTypeDef("A", null, createRequiredAttrDef("a", DataTypes.INT_TYPE),
createOptionalAttrDef("b", DataTypes.BOOLEAN_TYPE), createOptionalAttrDef("c", DataTypes.BYTE_TYPE),
createOptionalAttrDef("d", DataTypes.SHORT_TYPE));
HierarchicalTypeDefinition B = createTraitTypeDef("B", ImmutableList.<String>of("A"),
createOptionalAttrDef("b", DataTypes.BOOLEAN_TYPE));
HierarchicalTypeDefinition C = createTraitTypeDef("C", ImmutableList.<String>of("A"),
createOptionalAttrDef("c", DataTypes.BYTE_TYPE));
HierarchicalTypeDefinition C =
createTraitTypeDef("C", ImmutableList.<String>of("A"), createOptionalAttrDef("c", DataTypes.BYTE_TYPE));
HierarchicalTypeDefinition D = createTraitTypeDef("D", ImmutableList.<String>of("B", "C"),
createOptionalAttrDef("d", DataTypes.SHORT_TYPE));
......@@ -79,11 +77,12 @@ public class TraitTest extends BaseTest {
TraitType DType = (TraitType) getTypeSystem().getDataType(TraitType.class, "D");
// for(String aName : DType.fieldMapping().fields.keySet()) {
// System.out.println(String.format("nameToQualifiedName.put(\"%s\", \"%s\");", aName, DType.getQualifiedName(aName)));
// }
// for(String aName : DType.fieldMapping().fields.keySet()) {
// System.out.println(String.format("nameToQualifiedName.put(\"%s\", \"%s\");", aName, DType
// .getQualifiedName(aName)));
// }
Map<String,String> nameToQualifiedName = new HashMap();
Map<String, String> nameToQualifiedName = new HashMap();
{
nameToQualifiedName.put("d", "D.d");
nameToQualifiedName.put("b", "B.b");
......@@ -172,15 +171,13 @@ public class TraitTest extends BaseTest {
@Test
public void testRandomOrder() throws AtlasException {
HierarchicalTypeDefinition A = createTraitTypeDef("A", null,
createRequiredAttrDef("a", DataTypes.INT_TYPE),
createOptionalAttrDef("b", DataTypes.BOOLEAN_TYPE),
createOptionalAttrDef("c", DataTypes.BYTE_TYPE),
HierarchicalTypeDefinition A = createTraitTypeDef("A", null, createRequiredAttrDef("a", DataTypes.INT_TYPE),
createOptionalAttrDef("b", DataTypes.BOOLEAN_TYPE), createOptionalAttrDef("c", DataTypes.BYTE_TYPE),
createOptionalAttrDef("d", DataTypes.SHORT_TYPE));
HierarchicalTypeDefinition B = createTraitTypeDef("B", ImmutableList.<String>of("A"),
createOptionalAttrDef("b", DataTypes.BOOLEAN_TYPE));
HierarchicalTypeDefinition C = createTraitTypeDef("C", ImmutableList.<String>of("A"),
createOptionalAttrDef("c", DataTypes.BYTE_TYPE));
HierarchicalTypeDefinition C =
createTraitTypeDef("C", ImmutableList.<String>of("A"), createOptionalAttrDef("c", DataTypes.BYTE_TYPE));
HierarchicalTypeDefinition D = createTraitTypeDef("D", ImmutableList.<String>of("B", "C"),
createOptionalAttrDef("d", DataTypes.SHORT_TYPE));
......
......@@ -51,11 +51,10 @@ public class TypeInheritanceTest extends BaseTest {
*/
@Test
public void testSimpleInheritance() throws AtlasException {
HierarchicalTypeDefinition A = createClassTypeDef("A", null,
createRequiredAttrDef("a", DataTypes.INT_TYPE));
HierarchicalTypeDefinition A = createClassTypeDef("A", null, createRequiredAttrDef("a", DataTypes.INT_TYPE));
HierarchicalTypeDefinition B = createClassTypeDef("B", ImmutableList.of("A"),
createOptionalAttrDef("b", DataTypes.BOOLEAN_TYPE));
HierarchicalTypeDefinition B =
createClassTypeDef("B", ImmutableList.of("A"), createOptionalAttrDef("b", DataTypes.BOOLEAN_TYPE));
defineClasses(A, B);
......@@ -80,12 +79,11 @@ public class TypeInheritanceTest extends BaseTest {
*/
@Test
public void testSimpleInheritanceWithOverrides() throws AtlasException {
HierarchicalTypeDefinition A = createClassTypeDef("A", null,
createRequiredAttrDef("a", DataTypes.INT_TYPE),
HierarchicalTypeDefinition A = createClassTypeDef("A", null, createRequiredAttrDef("a", DataTypes.INT_TYPE),
createRequiredAttrDef("b", DataTypes.BOOLEAN_TYPE));
HierarchicalTypeDefinition B = createClassTypeDef("B", ImmutableList.of("A"),
createOptionalAttrDef("b", DataTypes.BOOLEAN_TYPE));
HierarchicalTypeDefinition B =
createClassTypeDef("B", ImmutableList.of("A"), createOptionalAttrDef("b", DataTypes.BOOLEAN_TYPE));
defineClasses(A, B);
......@@ -114,17 +112,16 @@ public class TypeInheritanceTest extends BaseTest {
*/
@Test
public void testMultiLevelInheritance() throws AtlasException {
HierarchicalTypeDefinition A = createClassTypeDef("A", null,
createRequiredAttrDef("a", DataTypes.INT_TYPE));
HierarchicalTypeDefinition A = createClassTypeDef("A", null, createRequiredAttrDef("a", DataTypes.INT_TYPE));
HierarchicalTypeDefinition B = createClassTypeDef("B", ImmutableList.of("A"),
createOptionalAttrDef("b", DataTypes.BOOLEAN_TYPE));
HierarchicalTypeDefinition B =
createClassTypeDef("B", ImmutableList.of("A"), createOptionalAttrDef("b", DataTypes.BOOLEAN_TYPE));
HierarchicalTypeDefinition C = createClassTypeDef("C", ImmutableList.of("B"),
createOptionalAttrDef("c", DataTypes.BYTE_TYPE));
HierarchicalTypeDefinition C =
createClassTypeDef("C", ImmutableList.of("B"), createOptionalAttrDef("c", DataTypes.BYTE_TYPE));
HierarchicalTypeDefinition D = createClassTypeDef("D", ImmutableList.of("C"),
createOptionalAttrDef("d", DataTypes.SHORT_TYPE));
HierarchicalTypeDefinition D =
createClassTypeDef("D", ImmutableList.of("C"), createOptionalAttrDef("d", DataTypes.SHORT_TYPE));
defineClasses(A, B, C, D);
......@@ -167,17 +164,15 @@ public class TypeInheritanceTest extends BaseTest {
*/
@Test
public void testDiamondInheritance() throws AtlasException {
HierarchicalTypeDefinition A = createTraitTypeDef("A", null,
createRequiredAttrDef("a", DataTypes.INT_TYPE),
createOptionalAttrDef("b", DataTypes.BOOLEAN_TYPE),
createOptionalAttrDef("c", DataTypes.BYTE_TYPE),
createOptionalAttrDef("d", DataTypes.SHORT_TYPE));
HierarchicalTypeDefinition B = createTraitTypeDef("B", ImmutableList.of("A"),
createOptionalAttrDef("b", DataTypes.BOOLEAN_TYPE));
HierarchicalTypeDefinition C = createTraitTypeDef("C", ImmutableList.of("A"),
createOptionalAttrDef("c", DataTypes.BYTE_TYPE));
HierarchicalTypeDefinition D = createTraitTypeDef("D", ImmutableList.of("B", "C"),
HierarchicalTypeDefinition A = createTraitTypeDef("A", null, createRequiredAttrDef("a", DataTypes.INT_TYPE),
createOptionalAttrDef("b", DataTypes.BOOLEAN_TYPE), createOptionalAttrDef("c", DataTypes.BYTE_TYPE),
createOptionalAttrDef("d", DataTypes.SHORT_TYPE));
HierarchicalTypeDefinition B =
createTraitTypeDef("B", ImmutableList.of("A"), createOptionalAttrDef("b", DataTypes.BOOLEAN_TYPE));
HierarchicalTypeDefinition C =
createTraitTypeDef("C", ImmutableList.of("A"), createOptionalAttrDef("c", DataTypes.BYTE_TYPE));
HierarchicalTypeDefinition D =
createTraitTypeDef("D", ImmutableList.of("B", "C"), createOptionalAttrDef("d", DataTypes.SHORT_TYPE));
defineTraits(A, B, C, D);
......
......@@ -49,29 +49,22 @@ public class TypeSystemTest extends BaseTest {
@Test
public void testGetTypeNames() throws Exception {
getTypeSystem().defineEnumType("enum_test",
new EnumValue("0", 0),
new EnumValue("1", 1),
new EnumValue("2", 2),
getTypeSystem().defineEnumType("enum_test", new EnumValue("0", 0), new EnumValue("1", 1), new EnumValue("2", 2),
new EnumValue("3", 3));
Assert.assertTrue(getTypeSystem().getTypeNames().contains("enum_test"));
}
@Test
public void testIsRegistered() throws Exception {
getTypeSystem().defineEnumType("enum_test",
new EnumValue("0", 0),
new EnumValue("1", 1),
new EnumValue("2", 2),
getTypeSystem().defineEnumType("enum_test", new EnumValue("0", 0), new EnumValue("1", 1), new EnumValue("2", 2),
new EnumValue("3", 3));
Assert.assertTrue(getTypeSystem().isRegistered("enum_test"));
}
@Test
public void testGetTraitsNames() throws Exception {
HierarchicalTypeDefinition<TraitType> classificationTraitDefinition =
TypesUtil.createTraitTypeDef("Classification",
ImmutableList.<String>of(),
HierarchicalTypeDefinition<TraitType> classificationTraitDefinition = TypesUtil
.createTraitTypeDef("Classification", ImmutableList.<String>of(),
TypesUtil.createRequiredAttrDef("tag", DataTypes.STRING_TYPE));
HierarchicalTypeDefinition<TraitType> piiTrait =
TypesUtil.createTraitTypeDef("PII", ImmutableList.<String>of());
......@@ -86,23 +79,13 @@ public class TypeSystemTest extends BaseTest {
HierarchicalTypeDefinition<TraitType> financeTrait =
TypesUtil.createTraitTypeDef("Finance", ImmutableList.<String>of());
getTypeSystem().defineTypes(
ImmutableList.<StructTypeDefinition>of(),
ImmutableList.of(classificationTraitDefinition, piiTrait, phiTrait, pciTrait,
soxTrait, secTrait, financeTrait),
ImmutableList.<HierarchicalTypeDefinition<ClassType>>of());
getTypeSystem().defineTypes(ImmutableList.<StructTypeDefinition>of(), ImmutableList
.of(classificationTraitDefinition, piiTrait, phiTrait, pciTrait, soxTrait, secTrait,
financeTrait), ImmutableList.<HierarchicalTypeDefinition<ClassType>>of());
final ImmutableList<String> traitsNames = getTypeSystem().getTypeNamesByCategory(DataTypes.TypeCategory.TRAIT);
Assert.assertEquals(traitsNames.size(), 7);
List traits = Arrays.asList(new String[]{
"Classification",
"PII",
"PHI",
"PCI",
"SOX",
"SEC",
"Finance",
});
List traits = Arrays.asList(new String[]{"Classification", "PII", "PHI", "PCI", "SOX", "SEC", "Finance",});
Assert.assertFalse(Collections.disjoint(traitsNames, traits));
}
......@@ -122,17 +105,16 @@ public class TypeSystemTest extends BaseTest {
String structName = random();
String attrType = random();
StructTypeDefinition structType = createStructTypeDef(structName,
createRequiredAttrDef(attrType, DataTypes.STRING_TYPE));
StructTypeDefinition structType =
createStructTypeDef(structName, createRequiredAttrDef(attrType, DataTypes.STRING_TYPE));
String className = random();
HierarchicalTypeDefinition<ClassType> classType =
createClassTypeDef(className, ImmutableList.<String>of(),
HierarchicalTypeDefinition<ClassType> classType = createClassTypeDef(className, ImmutableList.<String>of(),
createRequiredAttrDef(attrType, DataTypes.STRING_TYPE));
String traitName = random();
HierarchicalTypeDefinition<TraitType> traitType = createTraitTypeDef(traitName,
ImmutableList.<String>of(), createRequiredAttrDef(attrType, DataTypes.INT_TYPE));
HierarchicalTypeDefinition<TraitType> traitType = createTraitTypeDef(traitName, ImmutableList.<String>of(),
createRequiredAttrDef(attrType, DataTypes.INT_TYPE));
ts.defineTypes(ImmutableList.of(structType), ImmutableList.of(traitType), ImmutableList.of(classType));
}
......
......@@ -25,11 +25,10 @@ import org.testng.annotations.Test;
public class ValidationTest {
@DataProvider(name = "attributeData")
private Object[][] createAttributeData() {
return new String[][]{
{null, "type"}, {"", "type"}, {"name", null}, {"name", ""}};
return new String[][]{{null, "type"}, {"", "type"}, {"name", null}, {"name", ""}};
}
@Test (dataProvider = "attributeData", expectedExceptions = {IllegalArgumentException.class})
@Test(dataProvider = "attributeData", expectedExceptions = {IllegalArgumentException.class})
public void testAttributes(String name, String type) {
TypesUtil.createRequiredAttrDef(name, type);
}
......@@ -39,7 +38,7 @@ public class ValidationTest {
return new String[][]{{null}, {""}};
}
@Test (dataProvider = "enumValueData", expectedExceptions = {IllegalArgumentException.class})
@Test(dataProvider = "enumValueData", expectedExceptions = {IllegalArgumentException.class})
public void testEnumValue(String name) {
new EnumValue(name, 1);
}
......@@ -50,7 +49,7 @@ public class ValidationTest {
return new Object[][]{{null, value}, {"", value}, {"name"}};
}
@Test (dataProvider = "enumTypeData", expectedExceptions = {IllegalArgumentException.class})
@Test(dataProvider = "enumTypeData", expectedExceptions = {IllegalArgumentException.class})
public void testEnumType(String name, EnumValue... values) {
new EnumTypeDefinition(name, values);
}
......@@ -61,7 +60,7 @@ public class ValidationTest {
return new Object[][]{{null, value}, {"", value}, {"name"}};
}
@Test (dataProvider = "structTypeData", expectedExceptions = {IllegalArgumentException.class})
@Test(dataProvider = "structTypeData", expectedExceptions = {IllegalArgumentException.class})
public void testStructType(String name, AttributeDefinition... values) {
new StructTypeDefinition(name, values);
}
......@@ -71,15 +70,17 @@ public class ValidationTest {
return new Object[][]{{null}, {""}};
}
@Test (dataProvider = "classTypeData", expectedExceptions = {IllegalArgumentException.class})
@Test(dataProvider = "classTypeData", expectedExceptions = {IllegalArgumentException.class})
public void testClassType(String name) {
AttributeDefinition value = TypesUtil.createRequiredAttrDef("name", "type");;
AttributeDefinition value = TypesUtil.createRequiredAttrDef("name", "type");
;
TypesUtil.createClassTypeDef(name, ImmutableList.of("super"), value);
}
@Test (dataProvider = "classTypeData", expectedExceptions = {IllegalArgumentException.class})
@Test(dataProvider = "classTypeData", expectedExceptions = {IllegalArgumentException.class})
public void testTraitType(String name) {
AttributeDefinition value = TypesUtil.createRequiredAttrDef("name", "type");;
AttributeDefinition value = TypesUtil.createRequiredAttrDef("name", "type");
;
TypesUtil.createTraitTypeDef(name, ImmutableList.of("super"), value);
}
......
......@@ -307,7 +307,7 @@
</systemProperty>
<systemProperty>
<name>truststore.file</name>
<value>${project.build.directory}/../../webapp/target/atlas.keystore </value>
<value>${project.build.directory}/../../webapp/target/atlas.keystore</value>
</systemProperty>
<systemProperty>
<name>atlas.home</name>
......
......@@ -64,8 +64,7 @@ public final class Main {
public static void main(String[] args) throws Exception {
CommandLine cmd = parseArgs(args);
PropertiesConfiguration buildConfiguration =
new PropertiesConfiguration("atlas-buildinfo.properties");
PropertiesConfiguration buildConfiguration = new PropertiesConfiguration("atlas-buildinfo.properties");
String appPath = "webapp/target/atlas-webapp-" + getProjectVersion(buildConfiguration);
if (cmd.hasOption(APP_PATH)) {
......@@ -103,21 +102,18 @@ public final class Main {
appPort = Integer.valueOf(cmd.getOptionValue(APP_PORT));
} else {
// default : atlas.enableTLS is true
appPort = StringUtils.isEmpty(enableTLSFlag)
|| enableTLSFlag.equals("true") ? 21443 : 21000;
appPort = StringUtils.isEmpty(enableTLSFlag) || enableTLSFlag.equals("true") ? 21443 : 21000;
}
return appPort;
}
private static boolean isTLSEnabled(String enableTLSFlag, int appPort) {
return Boolean.valueOf(StringUtils.isEmpty(enableTLSFlag)
? System.getProperty("atlas.enableTLS", (appPort % 1000) == 443 ? "true" : "false")
: enableTLSFlag);
return Boolean.valueOf(StringUtils.isEmpty(enableTLSFlag) ?
System.getProperty("atlas.enableTLS", (appPort % 1000) == 443 ? "true" : "false") : enableTLSFlag);
}
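The fallback above keys TLS off the port number: any port ending in 443 defaults to secure when neither the CLI flag nor the atlas.enableTLS property is set. The rule in isolation:

public class TlsDefaultSketch {
    // Same expression as isTLSEnabled's fallback default.
    static boolean tlsByPort(int appPort) {
        return (appPort % 1000) == 443;
    }

    public static void main(String[] args) {
        System.out.println(tlsByPort(21443));   // true  (the TLS default port)
        System.out.println(tlsByPort(21000));   // false (the plain default port)
    }
}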
private static void showStartupInfo(PropertiesConfiguration buildConfiguration,
boolean enableTLS, int appPort) {
private static void showStartupInfo(PropertiesConfiguration buildConfiguration, boolean enableTLS, int appPort) {
StringBuilder buffer = new StringBuilder();
buffer.append("\n############################################");
buffer.append("############################################");
......
......@@ -79,10 +79,9 @@ public class QuickStart {
private static final String LOAD_PROCESS_TYPE = "LoadProcess";
private static final String STORAGE_DESC_TYPE = "StorageDesc";
private static final String[] TYPES = {
DATABASE_TYPE, TABLE_TYPE, STORAGE_DESC_TYPE, COLUMN_TYPE, LOAD_PROCESS_TYPE, VIEW_TYPE,
"JdbcAccess", "ETL", "Metric", "PII", "Fact", "Dimension"
};
private static final String[] TYPES =
{DATABASE_TYPE, TABLE_TYPE, STORAGE_DESC_TYPE, COLUMN_TYPE, LOAD_PROCESS_TYPE, VIEW_TYPE, "JdbcAccess",
"ETL", "Metric", "PII", "Fact", "Dimension"};
private final AtlasClient metadataServiceClient;
......@@ -102,97 +101,62 @@ public class QuickStart {
}
TypesDef createTypeDefinitions() throws Exception {
HierarchicalTypeDefinition<ClassType> dbClsDef
= TypesUtil.createClassTypeDef(DATABASE_TYPE, null,
attrDef("name", DataTypes.STRING_TYPE),
attrDef("description", DataTypes.STRING_TYPE),
attrDef("locationUri", DataTypes.STRING_TYPE),
attrDef("owner", DataTypes.STRING_TYPE),
attrDef("createTime", DataTypes.INT_TYPE)
);
HierarchicalTypeDefinition<ClassType> storageDescClsDef =
TypesUtil.createClassTypeDef(STORAGE_DESC_TYPE, null,
attrDef("location", DataTypes.STRING_TYPE),
attrDef("inputFormat", DataTypes.STRING_TYPE),
attrDef("outputFormat", DataTypes.STRING_TYPE),
attrDef("compressed", DataTypes.STRING_TYPE,
Multiplicity.REQUIRED, false, null)
);
HierarchicalTypeDefinition<ClassType> columnClsDef =
TypesUtil.createClassTypeDef(COLUMN_TYPE, null,
attrDef("name", DataTypes.STRING_TYPE),
attrDef("dataType", DataTypes.STRING_TYPE),
attrDef("comment", DataTypes.STRING_TYPE)
);
HierarchicalTypeDefinition<ClassType> tblClsDef =
TypesUtil.createClassTypeDef(TABLE_TYPE, ImmutableList.of("DataSet"),
new AttributeDefinition("db", DATABASE_TYPE,
Multiplicity.REQUIRED, false, null),
new AttributeDefinition("sd", STORAGE_DESC_TYPE,
Multiplicity.REQUIRED, true, null),
attrDef("owner", DataTypes.STRING_TYPE),
attrDef("createTime", DataTypes.INT_TYPE),
attrDef("lastAccessTime", DataTypes.INT_TYPE),
attrDef("retention", DataTypes.INT_TYPE),
HierarchicalTypeDefinition<ClassType> dbClsDef = TypesUtil
.createClassTypeDef(DATABASE_TYPE, null, attrDef("name", DataTypes.STRING_TYPE),
attrDef("description", DataTypes.STRING_TYPE), attrDef("locationUri", DataTypes.STRING_TYPE),
attrDef("owner", DataTypes.STRING_TYPE), attrDef("createTime", DataTypes.INT_TYPE));
HierarchicalTypeDefinition<ClassType> storageDescClsDef = TypesUtil
.createClassTypeDef(STORAGE_DESC_TYPE, null, attrDef("location", DataTypes.STRING_TYPE),
attrDef("inputFormat", DataTypes.STRING_TYPE), attrDef("outputFormat", DataTypes.STRING_TYPE),
attrDef("compressed", DataTypes.STRING_TYPE, Multiplicity.REQUIRED, false, null));
HierarchicalTypeDefinition<ClassType> columnClsDef = TypesUtil
.createClassTypeDef(COLUMN_TYPE, null, attrDef("name", DataTypes.STRING_TYPE),
attrDef("dataType", DataTypes.STRING_TYPE), attrDef("comment", DataTypes.STRING_TYPE));
HierarchicalTypeDefinition<ClassType> tblClsDef = TypesUtil
.createClassTypeDef(TABLE_TYPE, ImmutableList.of("DataSet"),
new AttributeDefinition("db", DATABASE_TYPE, Multiplicity.REQUIRED, false, null),
new AttributeDefinition("sd", STORAGE_DESC_TYPE, Multiplicity.REQUIRED, true, null),
attrDef("owner", DataTypes.STRING_TYPE), attrDef("createTime", DataTypes.INT_TYPE),
attrDef("lastAccessTime", DataTypes.INT_TYPE), attrDef("retention", DataTypes.INT_TYPE),
attrDef("viewOriginalText", DataTypes.STRING_TYPE),
attrDef("viewExpandedText", DataTypes.STRING_TYPE),
attrDef("tableType", DataTypes.STRING_TYPE),
attrDef("viewExpandedText", DataTypes.STRING_TYPE), attrDef("tableType", DataTypes.STRING_TYPE),
attrDef("temporary", DataTypes.BOOLEAN_TYPE),
new AttributeDefinition("columns",
DataTypes.arrayTypeName(COLUMN_TYPE),
Multiplicity.COLLECTION, true, null)
);
HierarchicalTypeDefinition<ClassType> loadProcessClsDef =
TypesUtil.createClassTypeDef(LOAD_PROCESS_TYPE, ImmutableList.of("Process"),
attrDef("userName", DataTypes.STRING_TYPE),
attrDef("startTime", DataTypes.INT_TYPE),
new AttributeDefinition("columns", DataTypes.arrayTypeName(COLUMN_TYPE),
Multiplicity.COLLECTION, true, null));
HierarchicalTypeDefinition<ClassType> loadProcessClsDef = TypesUtil
.createClassTypeDef(LOAD_PROCESS_TYPE, ImmutableList.of("Process"),
attrDef("userName", DataTypes.STRING_TYPE), attrDef("startTime", DataTypes.INT_TYPE),
attrDef("endTime", DataTypes.INT_TYPE),
attrDef("queryText", DataTypes.STRING_TYPE, Multiplicity.REQUIRED),
attrDef("queryPlan", DataTypes.STRING_TYPE, Multiplicity.REQUIRED),
attrDef("queryId", DataTypes.STRING_TYPE, Multiplicity.REQUIRED),
attrDef("queryGraph", DataTypes.STRING_TYPE, Multiplicity.REQUIRED)
);
HierarchicalTypeDefinition<ClassType> viewClsDef =
TypesUtil.createClassTypeDef(VIEW_TYPE, null,
attrDef("name", DataTypes.STRING_TYPE),
new AttributeDefinition("db", DATABASE_TYPE,
Multiplicity.REQUIRED, false, null),
new AttributeDefinition("inputTables",
DataTypes.arrayTypeName(TABLE_TYPE),
Multiplicity.COLLECTION, false, null)
);
HierarchicalTypeDefinition<TraitType> dimTraitDef =
TypesUtil.createTraitTypeDef("Dimension", null);
HierarchicalTypeDefinition<TraitType> factTraitDef =
TypesUtil.createTraitTypeDef("Fact", null);
HierarchicalTypeDefinition<TraitType> piiTraitDef =
TypesUtil.createTraitTypeDef("PII", null);
HierarchicalTypeDefinition<TraitType> metricTraitDef =
TypesUtil.createTraitTypeDef("Metric", null);
HierarchicalTypeDefinition<TraitType> etlTraitDef =
TypesUtil.createTraitTypeDef("ETL", null);
HierarchicalTypeDefinition<TraitType> jdbcTraitDef =
TypesUtil.createTraitTypeDef("JdbcAccess", null);
return TypeUtils.getTypesDef(
ImmutableList.<EnumTypeDefinition>of(),
ImmutableList.<StructTypeDefinition>of(),
ImmutableList.of(dimTraitDef, factTraitDef,
piiTraitDef, metricTraitDef, etlTraitDef, jdbcTraitDef),
ImmutableList.of(dbClsDef, storageDescClsDef, columnClsDef,
tblClsDef, loadProcessClsDef, viewClsDef)
);
attrDef("queryGraph", DataTypes.STRING_TYPE, Multiplicity.REQUIRED));
HierarchicalTypeDefinition<ClassType> viewClsDef = TypesUtil
.createClassTypeDef(VIEW_TYPE, null, attrDef("name", DataTypes.STRING_TYPE),
new AttributeDefinition("db", DATABASE_TYPE, Multiplicity.REQUIRED, false, null),
new AttributeDefinition("inputTables", DataTypes.arrayTypeName(TABLE_TYPE),
Multiplicity.COLLECTION, false, null));
HierarchicalTypeDefinition<TraitType> dimTraitDef = TypesUtil.createTraitTypeDef("Dimension", null);
HierarchicalTypeDefinition<TraitType> factTraitDef = TypesUtil.createTraitTypeDef("Fact", null);
HierarchicalTypeDefinition<TraitType> piiTraitDef = TypesUtil.createTraitTypeDef("PII", null);
HierarchicalTypeDefinition<TraitType> metricTraitDef = TypesUtil.createTraitTypeDef("Metric", null);
HierarchicalTypeDefinition<TraitType> etlTraitDef = TypesUtil.createTraitTypeDef("ETL", null);
HierarchicalTypeDefinition<TraitType> jdbcTraitDef = TypesUtil.createTraitTypeDef("JdbcAccess", null);
return TypeUtils.getTypesDef(ImmutableList.<EnumTypeDefinition>of(), ImmutableList.<StructTypeDefinition>of(),
ImmutableList.of(dimTraitDef, factTraitDef, piiTraitDef, metricTraitDef, etlTraitDef, jdbcTraitDef),
ImmutableList.of(dbClsDef, storageDescClsDef, columnClsDef, tblClsDef, loadProcessClsDef, viewClsDef));
}
AttributeDefinition attrDef(String name, IDataType dT) {
......@@ -203,86 +167,73 @@ public class QuickStart {
return attrDef(name, dT, m, false, null);
}
AttributeDefinition attrDef(String name, IDataType dT,
Multiplicity m, boolean isComposite, String reverseAttributeName) {
AttributeDefinition attrDef(String name, IDataType dT, Multiplicity m, boolean isComposite,
String reverseAttributeName) {
Preconditions.checkNotNull(name);
Preconditions.checkNotNull(dT);
return new AttributeDefinition(name, dT.getName(), m, isComposite, reverseAttributeName);
}
void createEntities() throws Exception {
Id salesDB = database(
"Sales", "Sales Database", "John ETL", "hdfs://host:8000/apps/warehouse/sales");
Id salesDB = database("Sales", "Sales Database", "John ETL", "hdfs://host:8000/apps/warehouse/sales");
Referenceable sd = rawStorageDescriptor("hdfs://host:8000/apps/warehouse/sales",
"TextInputFormat", "TextOutputFormat", true);
Referenceable sd =
rawStorageDescriptor("hdfs://host:8000/apps/warehouse/sales", "TextInputFormat", "TextOutputFormat",
true);
List<Referenceable> salesFactColumns = ImmutableList.of(
rawColumn("time_id", "int", "time id"),
rawColumn("product_id", "int", "product id"),
List<Referenceable> salesFactColumns = ImmutableList
.of(rawColumn("time_id", "int", "time id"), rawColumn("product_id", "int", "product id"),
rawColumn("customer_id", "int", "customer id", "PII"),
rawColumn("sales", "double", "product id", "Metric")
);
rawColumn("sales", "double", "product id", "Metric"));
Id salesFact = table("sales_fact", "sales fact table",
salesDB, sd, "Joe", "Managed", salesFactColumns, "Fact");
Id salesFact = table("sales_fact", "sales fact table", salesDB, sd, "Joe", "Managed", salesFactColumns, "Fact");
List<Referenceable> productDimColumns = ImmutableList.of(
rawColumn("product_id", "int", "product id"),
rawColumn("product_name", "string", "product name"),
rawColumn("brand_name", "int", "brand name")
);
List<Referenceable> productDimColumns = ImmutableList
.of(rawColumn("product_id", "int", "product id"), rawColumn("product_name", "string", "product name"),
rawColumn("brand_name", "int", "brand name"));
Id productDim = table("product_dim", "product dimension table",
salesDB, sd, "John Doe", "Managed", productDimColumns, "Dimension");
Id productDim =
table("product_dim", "product dimension table", salesDB, sd, "John Doe", "Managed", productDimColumns,
"Dimension");
List<Referenceable> timeDimColumns = ImmutableList.of(
rawColumn("time_id", "int", "time id"),
rawColumn("dayOfYear", "int", "day Of Year"),
rawColumn("weekDay", "int", "week Day")
);
List<Referenceable> timeDimColumns = ImmutableList
.of(rawColumn("time_id", "int", "time id"), rawColumn("dayOfYear", "int", "day Of Year"),
rawColumn("weekDay", "int", "week Day"));
Id timeDim = table("time_dim", "time dimension table",
salesDB, sd, "John Doe", "External", timeDimColumns, "Dimension");
Id timeDim = table("time_dim", "time dimension table", salesDB, sd, "John Doe", "External", timeDimColumns,
"Dimension");
List<Referenceable> customerDimColumns = ImmutableList.of(
rawColumn("customer_id", "int", "customer id", "PII"),
List<Referenceable> customerDimColumns = ImmutableList.of(rawColumn("customer_id", "int", "customer id", "PII"),
rawColumn("name", "string", "customer name", "PII"),
rawColumn("address", "string", "customer address", "PII")
);
rawColumn("address", "string", "customer address", "PII"));
Id customerDim = table("customer_dim", "customer dimension table",
salesDB, sd, "fetl", "External", customerDimColumns, "Dimension");
Id customerDim =
table("customer_dim", "customer dimension table", salesDB, sd, "fetl", "External", customerDimColumns,
"Dimension");
Id reportingDB = database("Reporting", "reporting database", "Jane BI",
"hdfs://host:8000/apps/warehouse/reporting");
Id reportingDB =
database("Reporting", "reporting database", "Jane BI", "hdfs://host:8000/apps/warehouse/reporting");
Id salesFactDaily = table("sales_fact_daily_mv",
"sales fact daily materialized view", reportingDB, sd,
"Joe BI", "Managed", salesFactColumns, "Metric");
Id salesFactDaily =
table("sales_fact_daily_mv", "sales fact daily materialized view", reportingDB, sd, "Joe BI", "Managed",
salesFactColumns, "Metric");
loadProcess("loadSalesDaily", "hive query for daily summary", "John ETL",
ImmutableList.of(salesFact, timeDim), ImmutableList.of(salesFactDaily),
"create table as select ", "plan", "id", "graph",
"ETL");
loadProcess("loadSalesDaily", "hive query for daily summary", "John ETL", ImmutableList.of(salesFact, timeDim),
ImmutableList.of(salesFactDaily), "create table as select ", "plan", "id", "graph", "ETL");
view("product_dim_view", reportingDB,
ImmutableList.of(productDim), "Dimension", "JdbcAccess");
view("product_dim_view", reportingDB, ImmutableList.of(productDim), "Dimension", "JdbcAccess");
view("customer_dim_view", reportingDB,
ImmutableList.of(customerDim), "Dimension", "JdbcAccess");
view("customer_dim_view", reportingDB, ImmutableList.of(customerDim), "Dimension", "JdbcAccess");
Id salesFactMonthly = table("sales_fact_monthly_mv",
"sales fact monthly materialized view",
reportingDB, sd, "Jane BI", "Managed", salesFactColumns, "Metric");
Id salesFactMonthly =
table("sales_fact_monthly_mv", "sales fact monthly materialized view", reportingDB, sd, "Jane BI",
"Managed", salesFactColumns, "Metric");
loadProcess("loadSalesMonthly", "hive query for monthly summary", "John ETL",
ImmutableList.of(salesFactDaily), ImmutableList.of(salesFactMonthly),
"create table as select ", "plan", "id", "graph",
"ETL");
loadProcess("loadSalesMonthly", "hive query for monthly summary", "John ETL", ImmutableList.of(salesFactDaily),
ImmutableList.of(salesFactMonthly), "create table as select ", "plan", "id", "graph", "ETL");
}
private Id createInstance(Referenceable referenceable) throws Exception {
......@@ -298,9 +249,8 @@ public class QuickStart {
return new Id(guid, referenceable.getId().getVersion(), referenceable.getTypeName());
}
Id database(String name, String description,
String owner, String locationUri,
String... traitNames) throws Exception {
Id database(String name, String description, String owner, String locationUri, String... traitNames)
throws Exception {
Referenceable referenceable = new Referenceable(DATABASE_TYPE, traitNames);
referenceable.set("name", name);
referenceable.set("description", description);
......@@ -311,9 +261,8 @@ public class QuickStart {
return createInstance(referenceable);
}
Referenceable rawStorageDescriptor(String location, String inputFormat,
String outputFormat,
boolean compressed) throws Exception {
Referenceable rawStorageDescriptor(String location, String inputFormat, String outputFormat, boolean compressed)
throws Exception {
Referenceable referenceable = new Referenceable(STORAGE_DESC_TYPE);
referenceable.set("location", location);
referenceable.set("inputFormat", inputFormat);
......@@ -323,8 +272,7 @@ public class QuickStart {
return referenceable;
}
Referenceable rawColumn(String name, String dataType, String comment,
String... traitNames) throws Exception {
Referenceable rawColumn(String name, String dataType, String comment, String... traitNames) throws Exception {
Referenceable referenceable = new Referenceable(COLUMN_TYPE, traitNames);
referenceable.set("name", name);
referenceable.set("dataType", dataType);
......@@ -333,11 +281,8 @@ public class QuickStart {
return referenceable;
}
Id table(String name, String description,
Id dbId, Referenceable sd,
String owner, String tableType,
List<Referenceable> columns,
String... traitNames) throws Exception {
Id table(String name, String description, Id dbId, Referenceable sd, String owner, String tableType,
List<Referenceable> columns, String... traitNames) throws Exception {
Referenceable referenceable = new Referenceable(TABLE_TYPE, traitNames);
referenceable.set("name", name);
referenceable.set("description", description);
......@@ -353,12 +298,9 @@ public class QuickStart {
return createInstance(referenceable);
}
Id loadProcess(String name, String description, String user,
List<Id> inputTables,
List<Id> outputTables,
String queryText, String queryPlan,
String queryId, String queryGraph,
String... traitNames) throws Exception {
Id loadProcess(String name, String description, String user, List<Id> inputTables, List<Id> outputTables,
String queryText, String queryPlan, String queryId, String queryGraph, String... traitNames)
throws Exception {
Referenceable referenceable = new Referenceable(LOAD_PROCESS_TYPE, traitNames);
// super type attributes
referenceable.set("name", name);
......@@ -378,9 +320,7 @@ public class QuickStart {
return createInstance(referenceable);
}
Id view(String name, Id dbId,
List<Id> inputTables,
String... traitNames) throws Exception {
Id view(String name, Id dbId, List<Id> inputTables, String... traitNames) throws Exception {
Referenceable referenceable = new Referenceable(VIEW_TYPE, traitNames);
referenceable.set("name", name);
referenceable.set("db", dbId);
......@@ -398,69 +338,51 @@ public class QuickStart {
}
private String[] getDSLQueries() {
return new String[]{
"from DB",
"DB",
"DB where name=\"Reporting\"",
"DB where DB.name=\"Reporting\"",
"DB name = \"Reporting\"",
"DB DB.name = \"Reporting\"",
"DB where name=\"Reporting\" select name, owner",
"DB where DB.name=\"Reporting\" select name, owner",
"DB has name",
"DB where DB has name",
"DB, Table",
"DB is JdbcAccess",
return new String[]{"from DB", "DB", "DB where name=\"Reporting\"", "DB where DB.name=\"Reporting\"",
"DB name = \"Reporting\"", "DB DB.name = \"Reporting\"",
"DB where name=\"Reporting\" select name, owner", "DB where DB.name=\"Reporting\" select name, owner",
"DB has name", "DB where DB has name", "DB, Table", "DB is JdbcAccess",
/*
"DB, hive_process has name",
"DB as db1, Table where db1.name = \"Reporting\"",
"DB where DB.name=\"Reporting\" and DB.createTime < " + System.currentTimeMillis()},
*/
"from Table",
"Table",
"Table is Dimension",
"Column where Column isa PII",
"View is Dimension",
"from Table", "Table", "Table is Dimension", "Column where Column isa PII", "View is Dimension",
/*"Column where Column isa PII select Column.name",*/
"Column select Column.name",
"Column select name",
"Column where Column.name=\"customer_id\"",
"from Table select Table.name",
"DB where (name = \"Reporting\")",
"DB where (name = \"Reporting\") select name as _col_0, owner as _col_1",
"DB where DB is JdbcAccess",
"DB where DB has name",
"DB Table",
"DB where DB has name",
"Column select Column.name", "Column select name", "Column where Column.name=\"customer_id\"",
"from Table select Table.name", "DB where (name = \"Reporting\")",
"DB where (name = \"Reporting\") select name as _col_0, owner as _col_1", "DB where DB is JdbcAccess",
"DB where DB has name", "DB Table", "DB where DB has name",
"DB as db1 Table where (db1.name = \"Reporting\")",
"DB where (name = \"Reporting\") select name as _col_0, (createTime + 1) as _col_1 ",
/*
todo: does not work
"DB where (name = \"Reporting\") and ((createTime + 1) > 0)",
"DB as db1 Table as tab where ((db1.createTime + 1) > 0) and (db1.name = \"Reporting\") select db1.name as dbName, tab.name as tabName",
"DB as db1 Table as tab where ((db1.createTime + 1) > 0) or (db1.name = \"Reporting\") select db1.name as dbName, tab.name as tabName",
"DB as db1 Table as tab where ((db1.createTime + 1) > 0) and (db1.name = \"Reporting\") or db1 has owner select db1.name as dbName, tab.name as tabName",
"DB as db1 Table as tab where ((db1.createTime + 1) > 0) and (db1.name = \"Reporting\") or db1 has owner select db1.name as dbName, tab.name as tabName",
"DB as db1 Table as tab where ((db1.createTime + 1) > 0) and (db1.name = \"Reporting\") select db1.name
as dbName, tab.name as tabName",
"DB as db1 Table as tab where ((db1.createTime + 1) > 0) or (db1.name = \"Reporting\") select db1.name as
dbName, tab.name as tabName",
"DB as db1 Table as tab where ((db1.createTime + 1) > 0) and (db1.name = \"Reporting\") or db1 has owner
select db1.name as dbName, tab.name as tabName",
"DB as db1 Table as tab where ((db1.createTime + 1) > 0) and (db1.name = \"Reporting\") or db1 has owner
select db1.name as dbName, tab.name as tabName",
*/
// trait searches
"Dimension",
/*"Fact", - todo: does not work*/
"JdbcAccess",
"ETL",
"Metric",
"PII",
"JdbcAccess", "ETL", "Metric", "PII",
/*
// Lineage - todo - fix this, its not working
"Table hive_process outputTables",
"Table loop (hive_process outputTables)",
"Table as _loop0 loop (hive_process outputTables) withPath",
"Table as src loop (hive_process outputTables) as dest select src.name as srcTable, dest.name as destTable withPath",
"Table as src loop (hive_process outputTables) as dest select src.name as srcTable, dest.name as
destTable withPath",
*/
"Table where name=\"sales_fact\", columns",
"Table where name=\"sales_fact\", columns as column select column.name, column.dataType, column.comment",
"from DataSet",
"from Process",
};
"Table where name=\"sales_fact\", columns as column select column.name, column.dataType, column"
+ ".comment",
"from DataSet", "from Process",};
}
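The DSL strings above are exercised through the discovery REST resource further down in this commit. A minimal sketch of running one of them, in the same Jersey style the integration tests below use; the api/atlas/discovery/search/dsl path and the pre-built "service" WebResource are assumptions, not shown in this diff:

    // Hypothetical usage sketch, assuming a configured Jersey WebResource named "service"
    // and that the DSL search endpoint is mounted at api/atlas/discovery/search/dsl.
    WebResource resource = service.path("api/atlas/discovery/search/dsl")
            .queryParam("query", "DB where name=\"Reporting\" select name, owner");
    ClientResponse clientResponse = resource.accept(Servlets.JSON_MEDIA_TYPE)
            .method(HttpMethod.GET, ClientResponse.class);
    JSONObject results = new JSONObject(clientResponse.getEntity(String.class));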
private void search() throws Exception {
......
......@@ -32,19 +32,20 @@ import static org.apache.atlas.security.SecurityProperties.SERVER_CERT_PASSWORD_
import static org.apache.atlas.security.SecurityProperties.TRUSTSTORE_PASSWORD_KEY;
/**
* A utility class for generating a credential provider containing the entries required for supporting the SSL implementation
* A utility class for generating a credential provider containing the entries required for supporting the SSL
* implementation
* of the DGC server.
*/
public class CredentialProviderUtility {
private static final String[] KEYS = new String[] {KEYSTORE_PASSWORD_KEY,
TRUSTSTORE_PASSWORD_KEY, SERVER_CERT_PASSWORD_KEY};
private static final String[] KEYS =
new String[]{KEYSTORE_PASSWORD_KEY, TRUSTSTORE_PASSWORD_KEY, SERVER_CERT_PASSWORD_KEY};
public static abstract class TextDevice {
public abstract void printf(String fmt, Object... params);
public abstract String readLine(String fmt, Object ... args);
public abstract String readLine(String fmt, Object... args);
public abstract char[] readPassword(String fmt, Object ... args);
public abstract char[] readPassword(String fmt, Object... args);
}
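TextDevice exists so console interaction can be swapped out; the integration tests later in this commit assign scripted implementations to CredentialProviderUtility.textDevice. A condensed version of that pattern (the path and password values here are illustrative):

    // Scripted TextDevice, mirroring the pattern the ITs below use:
    CredentialProviderUtility.textDevice = new CredentialProviderUtility.TextDevice() {
        @Override
        public void printf(String fmt, Object... params) {
            System.out.print(String.format(fmt, params));
        }

        @Override
        public String readLine(String fmt, Object... args) {
            return "/tmp/test.jceks";           // scripted provider path (illustrative)
        }

        @Override
        public char[] readPassword(String fmt, Object... args) {
            return "testpass".toCharArray();    // scripted password (illustrative)
        }
    };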
......@@ -57,12 +58,12 @@ public class CredentialProviderUtility {
}
@Override
public String readLine(String fmt, Object ... args) {
public String readLine(String fmt, Object... args) {
return console.readLine(fmt, args);
}
@Override
public char[] readPassword(String fmt, Object ... args) {
public char[] readPassword(String fmt, Object... args) {
return console.readPassword(fmt, args);
}
};
......@@ -113,7 +114,9 @@ public class CredentialProviderUtility {
passwd2 = textDevice.readPassword("Please enter the password value for %s again:", key);
noMatch = !Arrays.equals(passwd1, passwd2);
if (noMatch) {
if (passwd1 != null) Arrays.fill(passwd1, ' ');
if (passwd1 != null) {
Arrays.fill(passwd1, ' ');
}
textDevice.printf("Password entries don't match. Please try again.\n");
} else {
if (passwd1.length == 0) {
......@@ -123,7 +126,9 @@ public class CredentialProviderUtility {
cred = passwd1;
}
}
if (passwd2 != null) Arrays.fill(passwd2, ' ');
if (passwd2 != null) {
Arrays.fill(passwd2, ' ');
}
} while (noMatch);
return cred;
}
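Read on its own, the confirm-and-clear loop this hunk reformats reduces to the following standalone sketch; the method name is illustrative, the zero-length-password branch visible in the hunk is elided, and only java.util.Arrays plus the TextDevice above are assumed:

    // Illustrative sketch of the pattern above: prompt twice, wipe mismatched
    // buffers, and only return a confirmed password.
    char[] readConfirmedPassword(TextDevice textDevice, String key) {
        char[] cred = null;
        boolean noMatch;
        do {
            char[] passwd1 = textDevice.readPassword("Please enter the password value for %s:", key);
            char[] passwd2 = textDevice.readPassword("Please enter the password value for %s again:", key);
            noMatch = !Arrays.equals(passwd1, passwd2);
            if (noMatch) {
                if (passwd1 != null) {
                    Arrays.fill(passwd1, ' ');   // wipe the first attempt before retrying
                }
                textDevice.printf("Password entries don't match. Please try again.\n");
            } else {
                cred = passwd1;                  // confirmed; the caller owns wiping this copy
            }
            if (passwd2 != null) {
                Arrays.fill(passwd2, ' ');       // the duplicate buffer is never needed again
            }
        } while (noMatch);
        return cred;
    }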
......@@ -138,8 +143,10 @@ public class CredentialProviderUtility {
String providerPath = textDevice.readLine("Please enter the full path to the credential provider:");
File file = new File(providerPath);
if (file.exists()) {
textDevice.printf("%s already exists. You will need to specify whether existing entries should be overwritten " +
"(default is 'yes')\n", providerPath);
textDevice
.printf("%s already exists. You will need to specify whether existing entries should be "
+ "overwritten "
+ "(default is 'yes')\n", providerPath);
}
String providerURI = JavaKeyStoreProvider.SCHEME_NAME + "://file" + providerPath;
Configuration conf = new Configuration(false);
......
......@@ -41,15 +41,12 @@ public class LoggingExceptionMapper<E extends Throwable> implements ExceptionMap
final long id = ThreadLocalRandom.current().nextLong();
logException(id, exception);
return Response.serverError()
.entity(formatErrorMessage(id, exception))
.build();
return Response.serverError().entity(formatErrorMessage(id, exception)).build();
}
@SuppressWarnings("UnusedParameters")
protected String formatErrorMessage(long id, E exception) {
return String.format(
"There was an error processing your request. It has been logged (ID %016x).", id);
return String.format("There was an error processing your request. It has been logged (ID %016x).", id);
}
protected void logException(long id, E exception) {
......
......@@ -53,9 +53,8 @@ public class AuditFilter implements Filter {
}
@Override
public void doFilter(ServletRequest request,
ServletResponse response,
FilterChain filterChain) throws IOException, ServletException {
public void doFilter(ServletRequest request, ServletResponse response, FilterChain filterChain)
throws IOException, ServletException {
final String requestTimeISO9601 = DateTimeHelper.formatDateUTC(new Date());
final HttpServletRequest httpRequest = (HttpServletRequest) request;
final String requestId = UUID.randomUUID().toString();
......@@ -85,8 +84,8 @@ public class AuditFilter implements Filter {
final String whatURL = Servlets.getRequestURL(httpRequest);
final String whatAddrs = httpRequest.getLocalAddr();
LOG.debug("Audit: {}/{} performed request {} {} ({}) at time {}",
who, fromAddress, whatRequest, whatURL, whatAddrs, whenISO9601);
LOG.debug("Audit: {}/{} performed request {} {} ({}) at time {}", who, fromAddress, whatRequest, whatURL,
whatAddrs, whenISO9601);
audit(who, fromAddress, fromHost, whatURL, whatAddrs, whenISO9601);
}
......@@ -96,10 +95,10 @@ public class AuditFilter implements Filter {
return userFromRequest == null ? "UNKNOWN" : userFromRequest;
}
private void audit(String who, String fromAddress, String fromHost, String whatURL,
String whatAddrs, String whenISO9601) {
AUDIT_LOG.info("Audit: {}/{}-{} performed request {} ({}) at time {}",
who, fromAddress, fromHost, whatURL, whatAddrs, whenISO9601);
private void audit(String who, String fromAddress, String fromHost, String whatURL, String whatAddrs,
String whenISO9601) {
AUDIT_LOG.info("Audit: {}/{}-{} performed request {} ({}) at time {}", who, fromAddress, fromHost, whatURL,
whatAddrs, whenISO9601);
}
@Override
......
......@@ -24,12 +24,12 @@ import com.google.inject.servlet.GuiceServletContextListener;
import com.sun.jersey.api.core.PackagesResourceConfig;
import com.sun.jersey.guice.JerseyServletModule;
import com.sun.jersey.guice.spi.container.servlet.GuiceContainer;
import org.apache.atlas.AtlasException;
import org.apache.atlas.AtlasClient;
import org.apache.atlas.AtlasException;
import org.apache.atlas.PropertiesUtil;
import org.apache.atlas.RepositoryMetadataModule;
import org.apache.atlas.web.filters.AuditFilter;
import org.apache.atlas.web.filters.AtlasAuthenticationFilter;
import org.apache.atlas.web.filters.AuditFilter;
import org.apache.commons.configuration.ConfigurationException;
import org.apache.commons.configuration.PropertiesConfiguration;
import org.slf4j.Logger;
......@@ -57,9 +57,7 @@ public class GuiceServletConfig extends GuiceServletContextListener {
* .html
*/
if (injector == null) {
injector = Guice.createInjector(
new RepositoryMetadataModule(),
new JerseyServletModule() {
injector = Guice.createInjector(new RepositoryMetadataModule(), new JerseyServletModule() {
@Override
protected void configureServlets() {
filter("/*").through(AuditFilter.class);
......
......@@ -37,8 +37,7 @@ import java.net.UnknownHostException;
*/
public class LoginProcessor {
private static final Logger LOG = LoggerFactory
.getLogger(LoginProcessor.class);
private static final Logger LOG = LoggerFactory.getLogger(LoginProcessor.class);
public static final String METADATA_AUTHENTICATION_PREFIX = "atlas.authentication.";
public static final String AUTHENTICATION_METHOD = METADATA_AUTHENTICATION_PREFIX + "method";
public static final String AUTHENTICATION_PRINCIPAL = METADATA_AUTHENTICATION_PREFIX + "principal";
......@@ -107,8 +106,8 @@ public class LoginProcessor {
LOG.info("No authentication method configured. Defaulting to simple authentication");
authMethod = "simple";
}
SecurityUtil.setAuthenticationMethod(
UserGroupInformation.AuthenticationMethod.valueOf(authMethod.toUpperCase()),
SecurityUtil
.setAuthenticationMethod(UserGroupInformation.AuthenticationMethod.valueOf(authMethod.toUpperCase()),
hadoopConfig);
}
......
......@@ -35,8 +35,7 @@ public abstract class AbstractParam<T> {
*
* @param input an input value from a client request
*/
@SuppressWarnings({"AbstractMethodCallInConstructor",
"OverriddenMethodCallDuringObjectConstruction"})
@SuppressWarnings({"AbstractMethodCallInConstructor", "OverriddenMethodCallDuringObjectConstruction"})
protected AbstractParam(String input) {
try {
this.value = parse(input);
......@@ -57,10 +56,7 @@ public abstract class AbstractParam<T> {
* @return the {@link javax.ws.rs.core.Response} to be sent to the client
*/
protected Response error(String input, Exception e) {
return Response.status(getErrorStatus())
.entity(errorMessage(input, e))
.type(mediaType())
.build();
return Response.status(getErrorStatus()).entity(errorMessage(input, e)).type(mediaType()).build();
}
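For skimmers, AbstractParam follows the standard JAX-RS parameter-object pattern: parse(input) runs in the constructor shown earlier in this hunk's context, and a parse failure becomes the error Response built above. A hypothetical subclass, not part of this commit and assuming parse(String) is an overridable hook as the constructor's try/catch suggests:

    // Hypothetical subclass sketch; a bad value surfaces through error(input, e) above.
    public class IntParam extends AbstractParam<Integer> {
        public IntParam(String input) {
            super(input);
        }

        @Override
        protected Integer parse(String input) {
            return Integer.valueOf(input.trim());
        }
    }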
/**
......
......@@ -80,13 +80,11 @@ public class AdminResource {
public Response getVersion() {
if (version == null) {
try {
PropertiesConfiguration configProperties =
new PropertiesConfiguration("atlas-buildinfo.properties");
PropertiesConfiguration configProperties = new PropertiesConfiguration("atlas-buildinfo.properties");
JSONObject response = new JSONObject();
response.put("Version", configProperties.getString("build.version", "UNKNOWN"));
response.put("Name",
configProperties.getString("project.name", "apache-atlas"));
response.put("Name", configProperties.getString("project.name", "apache-atlas"));
response.put("Description", configProperties.getString("project.description",
"Metadata Management and Data Governance Platform over Hadoop"));
......@@ -94,8 +92,7 @@ public class AdminResource {
// response.put("Hadoop", VersionInfo.getVersion() + "-r" + VersionInfo.getRevision());
version = Response.ok(response).build();
} catch (JSONException | ConfigurationException e) {
throw new WebApplicationException(
Servlets.getErrorResponse(e, Response.Status.INTERNAL_SERVER_ERROR));
throw new WebApplicationException(Servlets.getErrorResponse(e, Response.Status.INTERNAL_SERVER_ERROR));
}
}
......
......@@ -19,8 +19,8 @@
package org.apache.atlas.web.resources;
import com.google.common.base.Preconditions;
import org.apache.atlas.AtlasException;
import org.apache.atlas.AtlasClient;
import org.apache.atlas.AtlasException;
import org.apache.atlas.ParamChecker;
import org.apache.atlas.TypeNotFoundException;
import org.apache.atlas.repository.EntityNotFoundException;
......@@ -101,23 +101,19 @@ public class EntityResource {
JSONObject response = new JSONObject();
response.put(AtlasClient.REQUEST_ID, Servlets.getRequestId());
response.put(AtlasClient.GUID, guid);
response.put(AtlasClient.DEFINITION,
metadataService.getEntityDefinition(guid));
response.put(AtlasClient.DEFINITION, metadataService.getEntityDefinition(guid));
return Response.created(locationURI).entity(response).build();
} catch(ValueConversionException ve) {
} catch (ValueConversionException ve) {
LOG.error("Unable to persist entity instance due to a desrialization error ", ve);
throw new WebApplicationException(
Servlets.getErrorResponse(ve.getCause(), Response.Status.BAD_REQUEST));
throw new WebApplicationException(Servlets.getErrorResponse(ve.getCause(), Response.Status.BAD_REQUEST));
} catch (AtlasException | IllegalArgumentException e) {
LOG.error("Unable to persist entity instance", e);
throw new WebApplicationException(
Servlets.getErrorResponse(e, Response.Status.BAD_REQUEST));
throw new WebApplicationException(Servlets.getErrorResponse(e, Response.Status.BAD_REQUEST));
} catch (Throwable e) {
LOG.error("Unable to persist entity instance", e);
throw new WebApplicationException(
Servlets.getErrorResponse(e, Response.Status.INTERNAL_SERVER_ERROR));
throw new WebApplicationException(Servlets.getErrorResponse(e, Response.Status.INTERNAL_SERVER_ERROR));
}
}
......@@ -144,24 +140,21 @@ public class EntityResource {
response.put(AtlasClient.DEFINITION, entityDefinition);
status = Response.Status.OK;
} else {
response.put(AtlasClient.ERROR, Servlets.escapeJsonString(
String.format("An entity with GUID={%s} does not exist", guid)));
response.put(AtlasClient.ERROR,
Servlets.escapeJsonString(String.format("An entity with GUID={%s} does not exist", guid)));
}
return Response.status(status).entity(response).build();
} catch (EntityNotFoundException e) {
LOG.error("An entity with GUID={} does not exist", guid, e);
throw new WebApplicationException(
Servlets.getErrorResponse(e, Response.Status.NOT_FOUND));
throw new WebApplicationException(Servlets.getErrorResponse(e, Response.Status.NOT_FOUND));
} catch (AtlasException | IllegalArgumentException e) {
LOG.error("Bad GUID={}", guid, e);
throw new WebApplicationException(
Servlets.getErrorResponse(e, Response.Status.BAD_REQUEST));
throw new WebApplicationException(Servlets.getErrorResponse(e, Response.Status.BAD_REQUEST));
} catch (Throwable e) {
LOG.error("Unable to get instance definition for GUID {}", guid, e);
throw new WebApplicationException(
Servlets.getErrorResponse(e, Response.Status.INTERNAL_SERVER_ERROR));
throw new WebApplicationException(Servlets.getErrorResponse(e, Response.Status.INTERNAL_SERVER_ERROR));
}
}
......@@ -188,16 +181,13 @@ public class EntityResource {
return Response.ok(response).build();
} catch (NullPointerException e) {
LOG.error("Entity type cannot be null", e);
throw new WebApplicationException(
Servlets.getErrorResponse(e, Response.Status.BAD_REQUEST));
throw new WebApplicationException(Servlets.getErrorResponse(e, Response.Status.BAD_REQUEST));
} catch (AtlasException | IllegalArgumentException e) {
LOG.error("Unable to get entity list for type {}", entityType, e);
throw new WebApplicationException(
Servlets.getErrorResponse(e, Response.Status.BAD_REQUEST));
throw new WebApplicationException(Servlets.getErrorResponse(e, Response.Status.BAD_REQUEST));
} catch (Throwable e) {
LOG.error("Unable to get entity list for type {}", entityType, e);
throw new WebApplicationException(
Servlets.getErrorResponse(e, Response.Status.INTERNAL_SERVER_ERROR));
throw new WebApplicationException(Servlets.getErrorResponse(e, Response.Status.INTERNAL_SERVER_ERROR));
}
}
......@@ -212,8 +202,7 @@ public class EntityResource {
@Path("{guid}")
@Consumes(Servlets.JSON_MEDIA_TYPE)
@Produces(Servlets.JSON_MEDIA_TYPE)
public Response update(@PathParam("guid") String guid,
@QueryParam("property") String property,
public Response update(@PathParam("guid") String guid, @QueryParam("property") String property,
@QueryParam("value") String value) {
try {
Preconditions.checkNotNull(property, "Entity property cannot be null");
......@@ -226,20 +215,18 @@ public class EntityResource {
return Response.ok(response).build();
} catch (EntityNotFoundException e) {
LOG.error("An entity with GUID={} does not exist", guid, e);
throw new WebApplicationException(
Servlets.getErrorResponse(e, Response.Status.NOT_FOUND));
throw new WebApplicationException(Servlets.getErrorResponse(e, Response.Status.NOT_FOUND));
} catch (AtlasException | IllegalArgumentException e) {
LOG.error("Unable to add property {} to entity id {}", property, guid, e);
throw new WebApplicationException(
Servlets.getErrorResponse(e, Response.Status.BAD_REQUEST));
throw new WebApplicationException(Servlets.getErrorResponse(e, Response.Status.BAD_REQUEST));
} catch (Throwable e) {
LOG.error("Unable to add property {} to entity id {}", property, guid, e);
throw new WebApplicationException(
Servlets.getErrorResponse(e, Response.Status.INTERNAL_SERVER_ERROR));
throw new WebApplicationException(Servlets.getErrorResponse(e, Response.Status.INTERNAL_SERVER_ERROR));
}
}
// Trait management functions
/**
* Gets the list of trait names for a given entity represented by a guid.
*
......@@ -263,16 +250,13 @@ public class EntityResource {
return Response.ok(response).build();
} catch (EntityNotFoundException e) {
LOG.error("An entity with GUID={} does not exist", guid, e);
throw new WebApplicationException(
Servlets.getErrorResponse(e, Response.Status.NOT_FOUND));
throw new WebApplicationException(Servlets.getErrorResponse(e, Response.Status.NOT_FOUND));
} catch (AtlasException | IllegalArgumentException e) {
LOG.error("Unable to get trait names for entity {}", guid, e);
throw new WebApplicationException(
Servlets.getErrorResponse(e, Response.Status.BAD_REQUEST));
throw new WebApplicationException(Servlets.getErrorResponse(e, Response.Status.BAD_REQUEST));
} catch (Throwable e) {
LOG.error("Unable to get trait names for entity {}", guid, e);
throw new WebApplicationException(
Servlets.getErrorResponse(e, Response.Status.INTERNAL_SERVER_ERROR));
throw new WebApplicationException(Servlets.getErrorResponse(e, Response.Status.INTERNAL_SERVER_ERROR));
}
}
......@@ -285,8 +269,7 @@ public class EntityResource {
@Path("{guid}/traits")
@Consumes(Servlets.JSON_MEDIA_TYPE)
@Produces(Servlets.JSON_MEDIA_TYPE)
public Response addTrait(@Context HttpServletRequest request,
@PathParam("guid") String guid) {
public Response addTrait(@Context HttpServletRequest request, @PathParam("guid") String guid) {
try {
final String traitDefinition = Servlets.getRequestPayload(request);
LOG.debug("Adding trait={} for entity={} ", traitDefinition, guid);
......@@ -302,16 +285,13 @@ public class EntityResource {
return Response.created(locationURI).entity(response).build();
} catch (EntityNotFoundException | TypeNotFoundException e) {
LOG.error("An entity with GUID={} does not exist", guid, e);
throw new WebApplicationException(
Servlets.getErrorResponse(e, Response.Status.NOT_FOUND));
throw new WebApplicationException(Servlets.getErrorResponse(e, Response.Status.NOT_FOUND));
} catch (AtlasException | IllegalArgumentException e) {
LOG.error("Unable to add trait for entity={}", guid, e);
throw new WebApplicationException(
Servlets.getErrorResponse(e, Response.Status.BAD_REQUEST));
throw new WebApplicationException(Servlets.getErrorResponse(e, Response.Status.BAD_REQUEST));
} catch (Throwable e) {
LOG.error("Unable to add trait for entity={}", guid, e);
throw new WebApplicationException(
Servlets.getErrorResponse(e, Response.Status.INTERNAL_SERVER_ERROR));
throw new WebApplicationException(Servlets.getErrorResponse(e, Response.Status.INTERNAL_SERVER_ERROR));
}
}
......@@ -325,8 +305,7 @@ public class EntityResource {
@Path("{guid}/traits/{traitName}")
@Consumes(Servlets.JSON_MEDIA_TYPE)
@Produces(Servlets.JSON_MEDIA_TYPE)
public Response deleteTrait(@Context HttpServletRequest request,
@PathParam("guid") String guid,
public Response deleteTrait(@Context HttpServletRequest request, @PathParam("guid") String guid,
@PathParam(TRAIT_NAME) String traitName) {
LOG.debug("Deleting trait={} from entity={} ", traitName, guid);
try {
......@@ -340,16 +319,13 @@ public class EntityResource {
return Response.ok(response).build();
} catch (EntityNotFoundException | TypeNotFoundException e) {
LOG.error("An entity with GUID={} does not exist", guid, e);
throw new WebApplicationException(
Servlets.getErrorResponse(e, Response.Status.NOT_FOUND));
throw new WebApplicationException(Servlets.getErrorResponse(e, Response.Status.NOT_FOUND));
} catch (AtlasException | IllegalArgumentException e) {
LOG.error("Unable to delete trait name={} for entity={}", traitName, guid, e);
throw new WebApplicationException(
Servlets.getErrorResponse(e, Response.Status.BAD_REQUEST));
throw new WebApplicationException(Servlets.getErrorResponse(e, Response.Status.BAD_REQUEST));
} catch (Throwable e) {
LOG.error("Unable to delete trait name={} for entity={}", traitName, guid, e);
throw new WebApplicationException(
Servlets.getErrorResponse(e, Response.Status.INTERNAL_SERVER_ERROR));
throw new WebApplicationException(Servlets.getErrorResponse(e, Response.Status.INTERNAL_SERVER_ERROR));
}
}
}
......@@ -71,8 +71,7 @@ public class HiveLineageResource {
@Path("table/{tableName}/inputs/graph")
@Consumes(Servlets.JSON_MEDIA_TYPE)
@Produces(Servlets.JSON_MEDIA_TYPE)
public Response inputsGraph(@Context HttpServletRequest request,
@PathParam("tableName") String tableName) {
public Response inputsGraph(@Context HttpServletRequest request, @PathParam("tableName") String tableName) {
LOG.info("Fetching lineage inputs graph for tableName={}", tableName);
try {
......@@ -87,16 +86,13 @@ public class HiveLineageResource {
return Response.ok(response).build();
} catch (EntityNotFoundException e) {
LOG.error("table entity not found for {}", tableName, e);
throw new WebApplicationException(
Servlets.getErrorResponse(e, Response.Status.NOT_FOUND));
throw new WebApplicationException(Servlets.getErrorResponse(e, Response.Status.NOT_FOUND));
} catch (DiscoveryException | IllegalArgumentException e) {
LOG.error("Unable to get lineage inputs graph for table {}", tableName, e);
throw new WebApplicationException(
Servlets.getErrorResponse(e, Response.Status.BAD_REQUEST));
throw new WebApplicationException(Servlets.getErrorResponse(e, Response.Status.BAD_REQUEST));
} catch (Throwable e) {
LOG.error("Unable to get lineage inputs graph for table {}", tableName, e);
throw new WebApplicationException(
Servlets.getErrorResponse(e, Response.Status.INTERNAL_SERVER_ERROR));
throw new WebApplicationException(Servlets.getErrorResponse(e, Response.Status.INTERNAL_SERVER_ERROR));
}
}
......@@ -109,8 +105,7 @@ public class HiveLineageResource {
@Path("table/{tableName}/outputs/graph")
@Consumes(Servlets.JSON_MEDIA_TYPE)
@Produces(Servlets.JSON_MEDIA_TYPE)
public Response outputsGraph(@Context HttpServletRequest request,
@PathParam("tableName") String tableName) {
public Response outputsGraph(@Context HttpServletRequest request, @PathParam("tableName") String tableName) {
LOG.info("Fetching lineage outputs graph for tableName={}", tableName);
try {
......@@ -125,16 +120,13 @@ public class HiveLineageResource {
return Response.ok(response).build();
} catch (EntityNotFoundException e) {
LOG.error("table entity not found for {}", tableName, e);
throw new WebApplicationException(
Servlets.getErrorResponse(e, Response.Status.NOT_FOUND));
throw new WebApplicationException(Servlets.getErrorResponse(e, Response.Status.NOT_FOUND));
} catch (DiscoveryException | IllegalArgumentException e) {
LOG.error("Unable to get lineage outputs graph for table {}", tableName, e);
throw new WebApplicationException(
Servlets.getErrorResponse(e, Response.Status.BAD_REQUEST));
throw new WebApplicationException(Servlets.getErrorResponse(e, Response.Status.BAD_REQUEST));
} catch (Throwable e) {
LOG.error("Unable to get lineage outputs graph for table {}", tableName, e);
throw new WebApplicationException(
Servlets.getErrorResponse(e, Response.Status.INTERNAL_SERVER_ERROR));
throw new WebApplicationException(Servlets.getErrorResponse(e, Response.Status.INTERNAL_SERVER_ERROR));
}
}
......@@ -147,8 +139,7 @@ public class HiveLineageResource {
@Path("table/{tableName}/schema")
@Consumes(Servlets.JSON_MEDIA_TYPE)
@Produces(Servlets.JSON_MEDIA_TYPE)
public Response schema(@Context HttpServletRequest request,
@PathParam("tableName") String tableName) {
public Response schema(@Context HttpServletRequest request, @PathParam("tableName") String tableName) {
LOG.info("Fetching schema for tableName={}", tableName);
try {
......@@ -163,16 +154,13 @@ public class HiveLineageResource {
return Response.ok(response).build();
} catch (EntityNotFoundException e) {
LOG.error("table entity not found for {}", tableName, e);
throw new WebApplicationException(
Servlets.getErrorResponse(e, Response.Status.NOT_FOUND));
throw new WebApplicationException(Servlets.getErrorResponse(e, Response.Status.NOT_FOUND));
} catch (DiscoveryException | IllegalArgumentException e) {
LOG.error("Unable to get schema for table {}", tableName, e);
throw new WebApplicationException(
Servlets.getErrorResponse(e, Response.Status.BAD_REQUEST));
throw new WebApplicationException(Servlets.getErrorResponse(e, Response.Status.BAD_REQUEST));
} catch (Throwable e) {
LOG.error("Unable to get schema for table {}", tableName, e);
throw new WebApplicationException(
Servlets.getErrorResponse(e, Response.Status.INTERNAL_SERVER_ERROR));
throw new WebApplicationException(Servlets.getErrorResponse(e, Response.Status.INTERNAL_SERVER_ERROR));
}
}
}
......@@ -87,9 +87,7 @@ public class MetadataDiscoveryResource {
}
final String jsonResultStr = discoveryService.searchByDSL(query);
response = new DSLJSONResponseBuilder().results(jsonResultStr)
.query(query)
.build();
response = new DSLJSONResponseBuilder().results(jsonResultStr).query(query).build();
} catch (IllegalArgumentException e) {
LOG.error("Unable to get entity list for empty query", e);
......@@ -99,22 +97,18 @@ public class MetadataDiscoveryResource {
try { //fall back to full-text
final String jsonResultStr = discoveryService.searchByFullText(query);
response = new FullTextJSonResponseBuilder().results(jsonResultStr)
.query(query)
.build();
response = new FullTextJSonResponseBuilder().results(jsonResultStr).query(query).build();
} catch (DiscoveryException | IllegalArgumentException e) {
LOG.error("Unable to get entity list for query {}", query, e);
throw new WebApplicationException(Servlets.getErrorResponse(e, Response.Status.BAD_REQUEST));
} catch(Throwable e) {
} catch (Throwable e) {
LOG.error("Unable to get entity list for query {}", query, e);
throw new WebApplicationException(
Servlets.getErrorResponse(e, Response.Status.INTERNAL_SERVER_ERROR));
throw new WebApplicationException(Servlets.getErrorResponse(e, Response.Status.INTERNAL_SERVER_ERROR));
}
}
return Response.ok(response)
.build();
return Response.ok(response).build();
}
......@@ -133,20 +127,15 @@ public class MetadataDiscoveryResource {
ParamChecker.notEmpty(dslQuery, "dslQuery cannot be null");
final String jsonResultStr = discoveryService.searchByDSL(dslQuery);
JSONObject response = new DSLJSONResponseBuilder().results(jsonResultStr)
.query(dslQuery)
.build();
JSONObject response = new DSLJSONResponseBuilder().results(jsonResultStr).query(dslQuery).build();
return Response.ok(response)
.build();
return Response.ok(response).build();
} catch (DiscoveryException | IllegalArgumentException e) {
LOG.error("Unable to get entity list for dslQuery {}", dslQuery, e);
throw new WebApplicationException(
Servlets.getErrorResponse(e, Response.Status.BAD_REQUEST));
} catch(Throwable e) {
throw new WebApplicationException(Servlets.getErrorResponse(e, Response.Status.BAD_REQUEST));
} catch (Throwable e) {
LOG.error("Unable to get entity list for dslQuery {}", dslQuery, e);
throw new WebApplicationException(
Servlets.getErrorResponse(e, Response.Status.INTERNAL_SERVER_ERROR));
throw new WebApplicationException(Servlets.getErrorResponse(e, Response.Status.INTERNAL_SERVER_ERROR));
}
}
......@@ -163,8 +152,7 @@ public class MetadataDiscoveryResource {
public Response searchUsingGremlinQuery(@QueryParam("query") String gremlinQuery) {
try {
ParamChecker.notEmpty(gremlinQuery, "gremlinQuery cannot be null or empty");
final List<Map<String, String>> results = discoveryService
.searchByGremlin(gremlinQuery);
final List<Map<String, String>> results = discoveryService.searchByGremlin(gremlinQuery);
JSONObject response = new JSONObject();
response.put(AtlasClient.REQUEST_ID, Servlets.getRequestId());
......@@ -178,16 +166,13 @@ public class MetadataDiscoveryResource {
response.put(AtlasClient.RESULTS, list);
response.put(AtlasClient.COUNT, list.length());
return Response.ok(response)
.build();
return Response.ok(response).build();
} catch (DiscoveryException | IllegalArgumentException e) {
LOG.error("Unable to get entity list for gremlinQuery {}", gremlinQuery, e);
throw new WebApplicationException(
Servlets.getErrorResponse(e, Response.Status.BAD_REQUEST));
} catch(Throwable e) {
throw new WebApplicationException(Servlets.getErrorResponse(e, Response.Status.BAD_REQUEST));
} catch (Throwable e) {
LOG.error("Unable to get entity list for gremlinQuery {}", gremlinQuery, e);
throw new WebApplicationException(
Servlets.getErrorResponse(e, Response.Status.INTERNAL_SERVER_ERROR));
throw new WebApplicationException(Servlets.getErrorResponse(e, Response.Status.INTERNAL_SERVER_ERROR));
}
}
......@@ -207,15 +192,12 @@ public class MetadataDiscoveryResource {
final String jsonResultStr = discoveryService.searchByFullText(query);
JSONArray rowsJsonArr = new JSONArray(jsonResultStr);
JSONObject response = new FullTextJSonResponseBuilder().results(rowsJsonArr)
.query(query)
.build();
return Response.ok(response)
.build();
JSONObject response = new FullTextJSonResponseBuilder().results(rowsJsonArr).query(query).build();
return Response.ok(response).build();
} catch (DiscoveryException | IllegalArgumentException e) {
LOG.error("Unable to get entity list for query {}", query, e);
throw new WebApplicationException(Servlets.getErrorResponse(e, Response.Status.BAD_REQUEST));
} catch(Throwable e) {
} catch (Throwable e) {
LOG.error("Unable to get entity list for query {}", query, e);
throw new WebApplicationException(Servlets.getErrorResponse(e, Response.Status.INTERNAL_SERVER_ERROR));
}
......
......@@ -86,10 +86,8 @@ public class RexsterGraphResource {
private static void validateInputs(String errorMsg, String... inputs) {
for (String input : inputs) {
if (StringUtils.isEmpty(input)) {
throw new WebApplicationException(Response.status(Response.Status.BAD_REQUEST)
.entity(errorMsg)
.type("text/plain")
.build());
throw new WebApplicationException(
Response.status(Response.Status.BAD_REQUEST).entity(errorMsg).type("text/plain").build());
}
}
}
......@@ -122,12 +120,11 @@ public class RexsterGraphResource {
Vertex vertex = findVertex(vertexId);
JSONObject response = new JSONObject();
response.put(AtlasClient.RESULTS, GraphSONUtility.jsonFromElement(
vertex, getVertexIndexedKeys(), GraphSONMode.NORMAL));
response.put(AtlasClient.RESULTS,
GraphSONUtility.jsonFromElement(vertex, getVertexIndexedKeys(), GraphSONMode.NORMAL));
return Response.ok(response).build();
} catch (JSONException e) {
throw new WebApplicationException(
Servlets.getErrorResponse(e, Response.Status.INTERNAL_SERVER_ERROR));
throw new WebApplicationException(Servlets.getErrorResponse(e, Response.Status.INTERNAL_SERVER_ERROR));
}
}
......@@ -136,8 +133,7 @@ public class RexsterGraphResource {
if (vertex == null) {
String message = "Vertex with [" + vertexId + "] cannot be found.";
LOG.info(message);
throw new WebApplicationException(
Servlets.getErrorResponse(message, Response.Status.NOT_FOUND));
throw new WebApplicationException(Servlets.getErrorResponse(message, Response.Status.NOT_FOUND));
}
return vertex;
......@@ -153,8 +149,7 @@ public class RexsterGraphResource {
@Path("/vertices/properties/{id}")
@Produces({Servlets.JSON_MEDIA_TYPE})
public Response getVertexProperties(@PathParam("id") final String vertexId,
@DefaultValue("false") @QueryParam("relationships")
final String relationships) {
@DefaultValue("false") @QueryParam("relationships") final String relationships) {
LOG.info("Get vertex for vertexId= {}", vertexId);
validateInputs("Invalid argument: vertex id passed is null or empty.", vertexId);
try {
......@@ -167,8 +162,7 @@ public class RexsterGraphResource {
response.put(AtlasClient.COUNT, vertexProperties.size());
return Response.ok(response).build();
} catch (JSONException e) {
throw new WebApplicationException(
Servlets.getErrorResponse(e, Response.Status.INTERNAL_SERVER_ERROR));
throw new WebApplicationException(Servlets.getErrorResponse(e, Response.Status.INTERNAL_SERVER_ERROR));
}
}
......@@ -192,8 +186,7 @@ public class RexsterGraphResource {
@GET
@Path("/vertices")
@Produces({Servlets.JSON_MEDIA_TYPE})
public Response getVertices(@QueryParam("key") final String key,
@QueryParam("value") final String value) {
public Response getVertices(@QueryParam("key") final String key, @QueryParam("value") final String value) {
LOG.info("Get vertices for property key= {}, value= {}", key, value);
validateInputs("Invalid argument: key or value passed is null or empty.", key, value);
try {
......@@ -201,8 +194,7 @@ public class RexsterGraphResource {
return Response.ok(response).build();
} catch (JSONException e) {
throw new WebApplicationException(
Servlets.getErrorResponse(e, Response.Status.INTERNAL_SERVER_ERROR));
throw new WebApplicationException(Servlets.getErrorResponse(e, Response.Status.INTERNAL_SERVER_ERROR));
}
}
......@@ -216,20 +208,17 @@ public class RexsterGraphResource {
@GET
@Path("vertices/{id}/{direction}")
@Produces({Servlets.JSON_MEDIA_TYPE})
public Response getVertexEdges(@PathParam("id") String vertexId,
@PathParam("direction") String direction) {
public Response getVertexEdges(@PathParam("id") String vertexId, @PathParam("direction") String direction) {
LOG.info("Get vertex edges for vertexId= {}, direction= {}", vertexId, direction);
// Validate vertex id. Direction is validated in VertexQueryArguments.
validateInputs("Invalid argument: vertex id or direction passed is null or empty.",
vertexId, direction);
validateInputs("Invalid argument: vertex id or direction passed is null or empty.", vertexId, direction);
try {
Vertex vertex = findVertex(vertexId);
return getVertexEdges(vertex, direction);
} catch (JSONException e) {
throw new WebApplicationException(
Servlets.getErrorResponse(e, Response.Status.INTERNAL_SERVER_ERROR));
throw new WebApplicationException(Servlets.getErrorResponse(e, Response.Status.INTERNAL_SERVER_ERROR));
}
}
......@@ -253,8 +242,7 @@ public class RexsterGraphResource {
Iterable<Vertex> vertexQueryResults = query.vertices();
for (Vertex v : vertexQueryResults) {
if (returnType.equals(ReturnType.VERTICES)) {
elementArray.put(GraphSONUtility.jsonFromElement(
v, getVertexIndexedKeys(), GraphSONMode.NORMAL));
elementArray.put(GraphSONUtility.jsonFromElement(v, getVertexIndexedKeys(), GraphSONMode.NORMAL));
} else {
elementArray.put(v.getId());
}
......@@ -263,8 +251,7 @@ public class RexsterGraphResource {
} else if (returnType == ReturnType.EDGES) {
Iterable<Edge> edgeQueryResults = query.edges();
for (Edge e : edgeQueryResults) {
elementArray.put(GraphSONUtility.jsonFromElement(
e, getEdgeIndexedKeys(), GraphSONMode.NORMAL));
elementArray.put(GraphSONUtility.jsonFromElement(e, getEdgeIndexedKeys(), GraphSONMode.NORMAL));
counter++;
}
} else if (returnType == ReturnType.COUNT) {
......@@ -296,28 +283,25 @@ public class RexsterGraphResource {
if (edge == null) {
String message = "Edge with [" + edgeId + "] cannot be found.";
LOG.info(message);
throw new WebApplicationException(Response.status(Response.Status.NOT_FOUND)
.entity(Servlets.escapeJsonString(message)).build());
throw new WebApplicationException(
Response.status(Response.Status.NOT_FOUND).entity(Servlets.escapeJsonString(message)).build());
}
JSONObject response = new JSONObject();
response.put(AtlasClient.RESULTS, GraphSONUtility.jsonFromElement(
edge, getEdgeIndexedKeys(), GraphSONMode.NORMAL));
response.put(AtlasClient.RESULTS,
GraphSONUtility.jsonFromElement(edge, getEdgeIndexedKeys(), GraphSONMode.NORMAL));
return Response.ok(response).build();
} catch (JSONException e) {
throw new WebApplicationException(
Servlets.getErrorResponse(e, Response.Status.INTERNAL_SERVER_ERROR));
throw new WebApplicationException(Servlets.getErrorResponse(e, Response.Status.INTERNAL_SERVER_ERROR));
}
}
private <T extends Element> JSONObject buildJSONResponse(Iterable<T> elements)
throws JSONException {
private <T extends Element> JSONObject buildJSONResponse(Iterable<T> elements) throws JSONException {
JSONArray vertexArray = new JSONArray();
long counter = 0;
for (Element element : elements) {
counter++;
vertexArray.put(GraphSONUtility.jsonFromElement(
element, getVertexIndexedKeys(), GraphSONMode.NORMAL));
vertexArray.put(GraphSONUtility.jsonFromElement(element, getVertexIndexedKeys(), GraphSONMode.NORMAL));
}
JSONObject response = new JSONObject();
......@@ -326,6 +310,7 @@ public class RexsterGraphResource {
return response;
}
private enum ReturnType {VERTICES, EDGES, COUNT, VERTEX_IDS}
/**
......@@ -388,8 +373,7 @@ public class RexsterGraphResource {
countOnly = false;
} else {
throw new WebApplicationException(Response.status(Response.Status.BAD_REQUEST)
.entity(Servlets.escapeJsonString(directionSegment + " segment was invalid."))
.build());
.entity(Servlets.escapeJsonString(directionSegment + " segment was invalid.")).build());
}
}
......
......@@ -19,8 +19,8 @@
package org.apache.atlas.web.resources;
import com.sun.jersey.api.client.ClientResponse;
import org.apache.atlas.AtlasException;
import org.apache.atlas.AtlasClient;
import org.apache.atlas.AtlasException;
import org.apache.atlas.services.MetadataService;
import org.apache.atlas.typesystem.types.DataTypes;
import org.apache.atlas.web.util.Servlets;
......@@ -98,12 +98,10 @@ public class TypesResource {
return Response.status(ClientResponse.Status.CREATED).entity(response).build();
} catch (AtlasException | IllegalArgumentException e) {
LOG.error("Unable to persist types", e);
throw new WebApplicationException(
Servlets.getErrorResponse(e, Response.Status.BAD_REQUEST));
throw new WebApplicationException(Servlets.getErrorResponse(e, Response.Status.BAD_REQUEST));
} catch (Throwable e) {
LOG.error("Unable to persist types", e);
throw new WebApplicationException(
Servlets.getErrorResponse(e, Response.Status.INTERNAL_SERVER_ERROR));
throw new WebApplicationException(Servlets.getErrorResponse(e, Response.Status.INTERNAL_SERVER_ERROR));
}
}
......@@ -115,8 +113,7 @@ public class TypesResource {
@GET
@Path("{typeName}")
@Produces(Servlets.JSON_MEDIA_TYPE)
public Response getDefinition(@Context HttpServletRequest request,
@PathParam("typeName") String typeName) {
public Response getDefinition(@Context HttpServletRequest request, @PathParam("typeName") String typeName) {
try {
final String typeDefinition = metadataService.getTypeDefinition(typeName);
......@@ -128,16 +125,13 @@ public class TypesResource {
return Response.ok(response).build();
} catch (AtlasException e) {
LOG.error("Unable to get type definition for type {}", typeName, e);
throw new WebApplicationException(
Servlets.getErrorResponse(e, Response.Status.NOT_FOUND));
throw new WebApplicationException(Servlets.getErrorResponse(e, Response.Status.NOT_FOUND));
} catch (JSONException | IllegalArgumentException e) {
LOG.error("Unable to get type definition for type {}", typeName, e);
throw new WebApplicationException(
Servlets.getErrorResponse(e, Response.Status.BAD_REQUEST));
throw new WebApplicationException(Servlets.getErrorResponse(e, Response.Status.BAD_REQUEST));
} catch (Throwable e) {
LOG.error("Unable to get type definition for type {}", typeName, e);
throw new WebApplicationException(
Servlets.getErrorResponse(e, Response.Status.INTERNAL_SERVER_ERROR));
throw new WebApplicationException(Servlets.getErrorResponse(e, Response.Status.INTERNAL_SERVER_ERROR));
}
}
......@@ -174,8 +168,7 @@ public class TypesResource {
Servlets.getErrorResponse("Unsupported type " + type, Response.Status.BAD_REQUEST));
} catch (Throwable e) {
LOG.error("Unable to get types list", e);
throw new WebApplicationException(
Servlets.getErrorResponse(e, Response.Status.INTERNAL_SERVER_ERROR));
throw new WebApplicationException(Servlets.getErrorResponse(e, Response.Status.INTERNAL_SERVER_ERROR));
}
}
}
......@@ -66,8 +66,7 @@ public class EmbeddedServer {
private Integer getBufferSize() {
try {
PropertiesConfiguration configuration = new PropertiesConfiguration(
"application.properties");
PropertiesConfiguration configuration = new PropertiesConfiguration("application.properties");
return configuration.getInt("atlas.jetty.request.buffer.size", DEFAULT_BUFFER_SIZE);
} catch (ConfigurationException e) {
// do nothing
......
......@@ -85,12 +85,11 @@ public class SecureEmbeddedServer extends EmbeddedServer {
LOG.info("Attempting to retrieve password from configured credential provider path");
Configuration c = new Configuration();
c.set(CredentialProviderFactory.CREDENTIAL_PROVIDER_PATH, provider);
CredentialProvider credentialProvider =
CredentialProviderFactory.getProviders(c).get(0);
CredentialProvider credentialProvider = CredentialProviderFactory.getProviders(c).get(0);
CredentialProvider.CredentialEntry entry = credentialProvider.getCredentialEntry(key);
if (entry == null) {
throw new IOException(String.format("No credential entry found for %s. " +
"Please create an entry in the configured credential provider", key));
throw new IOException(String.format("No credential entry found for %s. "
+ "Please create an entry in the configured credential provider", key));
} else {
password = String.valueOf(entry.getCredential());
}
......
......@@ -33,8 +33,7 @@ public final class DateTimeHelper {
public static final String ISO8601_FORMAT = "yyyy-MM-dd'T'HH:mm'Z'";
private static final String DATE_PATTERN =
"(2\\d\\d\\d|19\\d\\d)-(0[1-9]|1[012])-(0[1-9]|1[0-9]|2[0-9]|3[01])T" +
"([0-1][0-9]|2[0-3]):([0-5][0-9])Z";
"(2\\d\\d\\d|19\\d\\d)-(0[1-9]|1[012])-(0[1-9]|1[0-9]|2[0-9]|3[01])T" + "([0-1][0-9]|2[0-3]):([0-5][0-9])Z";
private static final Pattern PATTERN = Pattern.compile(DATE_PATTERN);
private DateTimeHelper() {
......@@ -65,12 +64,10 @@ public final class DateTimeHelper {
}
}
public static String formatDateUTCToISO8601(final String dateString,
final String dateStringFormat) {
public static String formatDateUTCToISO8601(final String dateString, final String dateStringFormat) {
try {
DateFormat dateFormat = new SimpleDateFormat(
dateStringFormat.substring(0, dateString.length()));
DateFormat dateFormat = new SimpleDateFormat(dateStringFormat.substring(0, dateString.length()));
dateFormat.setTimeZone(TimeZone.getTimeZone("UTC"));
return DateTimeHelper.formatDateUTC(dateFormat.parse(dateString));
} catch (ParseException e) {
......@@ -98,10 +95,8 @@ public final class DateTimeHelper {
String month = matcher.group(2);
String day = matcher.group(3);
if (day.equals("31")
&& (month.equals("4") || month.equals("6")
|| month.equals("9") || month.equals("11")
|| month.equals("04") || month.equals("06") || month.equals("09"))) {
if (day.equals("31") && (month.equals("4") || month.equals("6") || month.equals("9") || month
.equals("11") || month.equals("04") || month.equals("06") || month.equals("09"))) {
return false; // only 1,3,5,7,8,10,12 has 31 days
} else if (month.equals("2") || month.equals("02")) {
// leap year
......
......@@ -41,6 +41,7 @@ import java.io.StringWriter;
public final class Servlets {
private static final Logger LOG = LoggerFactory.getLogger(Servlets.class);
private Servlets() {
/* singleton */
}
......@@ -128,11 +129,7 @@ public final class Servlets {
} catch (JSONException jsonE) {
LOG.warn("Could not construct error Json rensponse", jsonE);
}
return Response
.status(status)
.entity(errorEntity)
.type(JSON_MEDIA_TYPE)
.build();
return Response.status(status).entity(errorEntity).type(JSON_MEDIA_TYPE).build();
}
public static String getRequestPayload(HttpServletRequest request) throws IOException {
......
......@@ -56,25 +56,24 @@ public class CredentialProviderUtilityIT {
System.out.print(String.format(fmt, params));
}
public String readLine(String fmt, Object ... args) {
public String readLine(String fmt, Object... args) {
return finalTestPath.toString();
}
@Override
public char[] readPassword(String fmt, Object ... args) {
public char[] readPassword(String fmt, Object... args) {
return defaultPass;
}
};
CredentialProviderUtility.main(new String[] {});
CredentialProviderUtility.main(new String[]{});
String providerUrl = JavaKeyStoreProvider.SCHEME_NAME + "://file" + testPath.toUri();
Configuration conf = new Configuration(false);
conf.set(CredentialProviderFactory.CREDENTIAL_PROVIDER_PATH, providerUrl);
CredentialProvider provider =
CredentialProviderFactory.getProviders(conf).get(0);
CredentialProvider provider = CredentialProviderFactory.getProviders(conf).get(0);
CredentialProvider.CredentialEntry entry =
provider.getCredentialEntry(SecurityProperties.KEYSTORE_PASSWORD_KEY);
......@@ -113,12 +112,12 @@ public class CredentialProviderUtilityIT {
System.out.print(String.format(fmt, params));
}
public String readLine(String fmt, Object ... args) {
public String readLine(String fmt, Object... args) {
return finalTestPath.toString();
}
@Override
public char[] readPassword(String fmt, Object ... args) {
public char[] readPassword(String fmt, Object... args) {
List<char[]> responses = new ArrayList<>();
responses.add(new char[0]);
responses.add(defaultPass);
......@@ -129,15 +128,14 @@ public class CredentialProviderUtilityIT {
}
};
CredentialProviderUtility.main(new String[] {});
CredentialProviderUtility.main(new String[]{});
String providerUrl = JavaKeyStoreProvider.SCHEME_NAME + "://file" + testPath.toUri();
Configuration conf = new Configuration(false);
conf.set(CredentialProviderFactory.CREDENTIAL_PROVIDER_PATH, providerUrl);
CredentialProvider provider =
CredentialProviderFactory.getProviders(conf).get(0);
CredentialProvider provider = CredentialProviderFactory.getProviders(conf).get(0);
CredentialProvider.CredentialEntry entry =
provider.getCredentialEntry(SecurityProperties.KEYSTORE_PASSWORD_KEY);
......@@ -161,20 +159,21 @@ public class CredentialProviderUtilityIT {
CredentialProviderUtility.textDevice = new CredentialProviderUtility.TextDevice() {
int i = 0;
@Override
public void printf(String fmt, Object... params) {
System.out.print(String.format(fmt, params));
}
public String readLine(String fmt, Object ... args) {
public String readLine(String fmt, Object... args) {
return finalTestPath.toString();
}
@Override
public char[] readPassword(String fmt, Object ... args) {
public char[] readPassword(String fmt, Object... args) {
List<char[]> responses = new ArrayList<>();
responses.add(defaultPass);
responses.add(new char[] {'b', 'a', 'd', 'p', 'a', 's', 's'});
responses.add(new char[]{'b', 'a', 'd', 'p', 'a', 's', 's'});
responses.add(defaultPass);
int item = i % 3;
......@@ -183,15 +182,14 @@ public class CredentialProviderUtilityIT {
}
};
CredentialProviderUtility.main(new String[] {});
CredentialProviderUtility.main(new String[]{});
String providerUrl = JavaKeyStoreProvider.SCHEME_NAME + "://file" + testPath.toUri();
Configuration conf = new Configuration(false);
conf.set(CredentialProviderFactory.CREDENTIAL_PROVIDER_PATH, providerUrl);
CredentialProvider provider =
CredentialProviderFactory.getProviders(conf).get(0);
CredentialProvider provider = CredentialProviderFactory.getProviders(conf).get(0);
CredentialProvider.CredentialEntry entry =
provider.getCredentialEntry(SecurityProperties.KEYSTORE_PASSWORD_KEY);
......@@ -218,17 +216,17 @@ public class CredentialProviderUtilityIT {
System.out.print(String.format(fmt, params));
}
public String readLine(String fmt, Object ... args) {
public String readLine(String fmt, Object... args) {
return finalTestPath.toString();
}
@Override
public char[] readPassword(String fmt, Object ... args) {
public char[] readPassword(String fmt, Object... args) {
return defaultPass;
}
};
CredentialProviderUtility.main(new String[] {});
CredentialProviderUtility.main(new String[]{});
// now attempt to overwrite values
CredentialProviderUtility.textDevice = new CredentialProviderUtility.TextDevice() {
......@@ -240,25 +238,24 @@ public class CredentialProviderUtilityIT {
System.out.print(String.format(fmt, params));
}
public String readLine(String fmt, Object ... args) {
public String readLine(String fmt, Object... args) {
return i++ == 0 ? finalTestPath.toString() : "y";
}
@Override
public char[] readPassword(String fmt, Object ... args) {
return new char[] {'n', 'e', 'w', 'p', 'a', 's', 's'};
public char[] readPassword(String fmt, Object... args) {
return new char[]{'n', 'e', 'w', 'p', 'a', 's', 's'};
}
};
CredentialProviderUtility.main(new String[] {});
CredentialProviderUtility.main(new String[]{});
String providerUrl = JavaKeyStoreProvider.SCHEME_NAME + "://file" + testPath.toUri();
Configuration conf = new Configuration(false);
conf.set(CredentialProviderFactory.CREDENTIAL_PROVIDER_PATH, providerUrl);
CredentialProvider provider =
CredentialProviderFactory.getProviders(conf).get(0);
CredentialProvider provider = CredentialProviderFactory.getProviders(conf).get(0);
char[] newpass = "newpass".toCharArray();
CredentialProvider.CredentialEntry entry =
......
......@@ -61,7 +61,7 @@ public class MetadataAuthenticationKerberosFilterIT extends BaseSecurityTest {
}
}
@Test (enabled = false)
@Test(enabled = false)
public void testKerberosBasedLogin() throws Exception {
String originalConf = System.getProperty("metadata.conf");
System.setProperty("metadata.conf", System.getProperty("user.dir"));
......
......@@ -43,7 +43,7 @@ public class MetadataAuthenticationSimpleFilterIT extends BaseSecurityTest {
}
}
@Test (enabled = false)
@Test(enabled = false)
public void testSimpleLogin() throws Exception {
String originalConf = System.getProperty("metadata.conf");
System.setProperty("metadata.conf", System.getProperty("user.dir"));
......@@ -55,7 +55,7 @@ public class MetadataAuthenticationSimpleFilterIT extends BaseSecurityTest {
startEmbeddedServer(server.getServer());
URL url = new URL("http://localhost:23001");
HttpURLConnection connection = (HttpURLConnection)url.openConnection();
HttpURLConnection connection = (HttpURLConnection) url.openConnection();
connection.setRequestMethod("GET");
connection.connect();
......@@ -66,7 +66,7 @@ public class MetadataAuthenticationSimpleFilterIT extends BaseSecurityTest {
}
url = new URL("http://localhost:23001/?user.name=testuser");
connection = (HttpURLConnection)url.openConnection();
connection = (HttpURLConnection) url.openConnection();
connection.setRequestMethod("GET");
connection.connect();
......
......@@ -32,8 +32,7 @@ import java.io.File;
*/
public class LoginProcessorIT extends BaseSecurityTest {
protected static final String kerberosRule =
"RULE:[1:$1@$0](.*@EXAMPLE.COM)s/@.*//\nDEFAULT";
protected static final String kerberosRule = "RULE:[1:$1@$0](.*@EXAMPLE.COM)s/@.*//\nDEFAULT";
@Test
public void testDefaultSimpleLogin() throws Exception {
......
......@@ -42,28 +42,21 @@ public class AdminJerseyResourceIT extends BaseResourceIT {
@Test
public void testGetVersion() throws Exception {
WebResource resource = service
.path("api/atlas/admin/version");
WebResource resource = service.path("api/atlas/admin/version");
ClientResponse clientResponse = resource
.accept(Servlets.JSON_MEDIA_TYPE)
.type(Servlets.JSON_MEDIA_TYPE)
ClientResponse clientResponse = resource.accept(Servlets.JSON_MEDIA_TYPE).type(Servlets.JSON_MEDIA_TYPE)
.method(HttpMethod.GET, ClientResponse.class);
Assert.assertEquals(clientResponse.getStatus(), Response.Status.OK.getStatusCode());
String responseAsString = clientResponse.getEntity(String.class);
Assert.assertNotNull(responseAsString);
PropertiesConfiguration buildConfiguration =
new PropertiesConfiguration("atlas-buildinfo.properties");
PropertiesConfiguration buildConfiguration = new PropertiesConfiguration("atlas-buildinfo.properties");
JSONObject response = new JSONObject(responseAsString);
Assert.assertEquals(response.get("Version"),
buildConfiguration.getString("build.version"));
Assert.assertEquals(response.get("Name"),
buildConfiguration.getString("project.name"));
Assert.assertEquals(response.get("Description"),
buildConfiguration.getString("project.description"));
Assert.assertEquals(response.get("Version"), buildConfiguration.getString("build.version"));
Assert.assertEquals(response.get("Name"), buildConfiguration.getString("project.name"));
Assert.assertEquals(response.get("Description"), buildConfiguration.getString("project.description"));
}
}
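Note: throughout these web tests the reformat collapses one-call-per-line Jersey builder chains onto filled lines, wrapping only before the final .method(...) call. A hypothetical sketch of the collapsed style using the Jersey 1.x client these tests build on; the base URL is assumed, and MediaType.APPLICATION_JSON stands in for the tests' Servlets.JSON_MEDIA_TYPE constant.

import com.sun.jersey.api.client.Client;
import com.sun.jersey.api.client.ClientResponse;
import com.sun.jersey.api.client.WebResource;

import javax.ws.rs.HttpMethod;
import javax.ws.rs.core.MediaType;

public class JerseyChainStyleDemo {
    public static void main(String[] args) {
        // Assumed endpoint; a live server must be running for the call to succeed.
        WebResource service = Client.create().resource("http://localhost:21000");

        ClientResponse clientResponse = service.path("api/atlas/admin/version")
                .accept(MediaType.APPLICATION_JSON).type(MediaType.APPLICATION_JSON)
                .method(HttpMethod.GET, ClientResponse.class);

        System.out.println("HTTP status: " + clientResponse.getStatus());
    }
}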
......@@ -62,19 +62,16 @@ public abstract class BaseResourceIT {
protected void createType(TypesDef typesDef) throws Exception {
HierarchicalTypeDefinition<ClassType> sampleType = typesDef.classTypesAsJavaList().get(0);
if (serviceClient.getType(sampleType.typeName) == null ) {
if (serviceClient.getType(sampleType.typeName) == null) {
String typesAsJSON = TypesSerialization.toJson(typesDef);
createType(typesAsJSON);
}
}
protected void createType(String typesAsJSON) throws Exception {
WebResource resource = service
.path("api/atlas/types");
WebResource resource = service.path("api/atlas/types");
ClientResponse clientResponse = resource
.accept(Servlets.JSON_MEDIA_TYPE)
.type(Servlets.JSON_MEDIA_TYPE)
ClientResponse clientResponse = resource.accept(Servlets.JSON_MEDIA_TYPE).type(Servlets.JSON_MEDIA_TYPE)
.method(HttpMethod.POST, ClientResponse.class, typesAsJSON);
Assert.assertEquals(clientResponse.getStatus(), Response.Status.CREATED.getStatusCode());
......
......@@ -98,8 +98,7 @@ public class EntityJerseyResourceIT extends BaseResourceIT {
@DataProvider
public Object[][] invalidAttrValues() {
return new Object[][]{
{null}, {""}, {" "}};
return new Object[][]{{null}, {""}, {" "}};
}
@Test(dataProvider = "invalidAttrValues")
......@@ -111,7 +110,7 @@ public class EntityJerseyResourceIT extends BaseResourceIT {
try {
createInstance(databaseInstance);
Assert.fail("Exptected MetadataServiceException");
} catch(AtlasServiceException e) {
} catch (AtlasServiceException e) {
Assert.assertEquals(e.getStatus(), ClientResponse.Status.BAD_REQUEST);
}
}
......@@ -124,8 +123,8 @@ public class EntityJerseyResourceIT extends BaseResourceIT {
databaseInstance.set("name", DATABASE_NAME);
databaseInstance.set("description", "foo database");
Referenceable tableInstance = new Referenceable(TABLE_TYPE,
"classification", "pii", "phi", "pci", "sox", "sec", "finance");
Referenceable tableInstance =
new Referenceable(TABLE_TYPE, "classification", "pii", "phi", "pci", "sox", "sec", "finance");
tableInstance.set("name", TABLE_NAME);
tableInstance.set("description", "bar table");
tableInstance.set("date", "2014-07-11");
......@@ -234,22 +233,15 @@ public class EntityJerseyResourceIT extends BaseResourceIT {
}
private ClientResponse addProperty(String guid, String property, String value) {
WebResource resource = service
.path("api/atlas/entities")
.path(guid);
WebResource resource = service.path("api/atlas/entities").path(guid);
return resource.queryParam("property", property).queryParam("value", value)
.accept(Servlets.JSON_MEDIA_TYPE)
.type(Servlets.JSON_MEDIA_TYPE)
.method(HttpMethod.PUT, ClientResponse.class);
return resource.queryParam("property", property).queryParam("value", value).accept(Servlets.JSON_MEDIA_TYPE)
.type(Servlets.JSON_MEDIA_TYPE).method(HttpMethod.PUT, ClientResponse.class);
}
private ClientResponse getEntityDefinition(String guid) {
WebResource resource = service
.path("api/atlas/entities")
.path(guid);
return resource.accept(Servlets.JSON_MEDIA_TYPE)
.type(Servlets.JSON_MEDIA_TYPE)
WebResource resource = service.path("api/atlas/entities").path(guid);
return resource.accept(Servlets.JSON_MEDIA_TYPE).type(Servlets.JSON_MEDIA_TYPE)
.method(HttpMethod.GET, ClientResponse.class);
}
......@@ -264,13 +256,9 @@ public class EntityJerseyResourceIT extends BaseResourceIT {
@Test
public void testGetInvalidEntityDefinition() throws Exception {
WebResource resource = service
.path("api/atlas/entities")
.path("blah");
WebResource resource = service.path("api/atlas/entities").path("blah");
ClientResponse clientResponse = resource
.accept(Servlets.JSON_MEDIA_TYPE)
.type(Servlets.JSON_MEDIA_TYPE)
ClientResponse clientResponse = resource.accept(Servlets.JSON_MEDIA_TYPE).type(Servlets.JSON_MEDIA_TYPE)
.method(HttpMethod.GET, ClientResponse.class);
Assert.assertEquals(clientResponse.getStatus(), Response.Status.NOT_FOUND.getStatusCode());
......@@ -284,12 +272,9 @@ public class EntityJerseyResourceIT extends BaseResourceIT {
@Test(dependsOnMethods = "testSubmitEntity")
public void testGetEntityList() throws Exception {
ClientResponse clientResponse = service
.path("api/atlas/entities")
.queryParam("type", TABLE_TYPE)
.accept(Servlets.JSON_MEDIA_TYPE)
.type(Servlets.JSON_MEDIA_TYPE)
.method(HttpMethod.GET, ClientResponse.class);
ClientResponse clientResponse =
service.path("api/atlas/entities").queryParam("type", TABLE_TYPE).accept(Servlets.JSON_MEDIA_TYPE)
.type(Servlets.JSON_MEDIA_TYPE).method(HttpMethod.GET, ClientResponse.class);
Assert.assertEquals(clientResponse.getStatus(), Response.Status.OK.getStatusCode());
String responseAsString = clientResponse.getEntity(String.class);
......@@ -305,12 +290,9 @@ public class EntityJerseyResourceIT extends BaseResourceIT {
@Test
public void testGetEntityListForBadEntityType() throws Exception {
ClientResponse clientResponse = service
.path("api/atlas/entities")
.queryParam("type", "blah")
.accept(Servlets.JSON_MEDIA_TYPE)
.type(Servlets.JSON_MEDIA_TYPE)
.method(HttpMethod.GET, ClientResponse.class);
ClientResponse clientResponse =
service.path("api/atlas/entities").queryParam("type", "blah").accept(Servlets.JSON_MEDIA_TYPE)
.type(Servlets.JSON_MEDIA_TYPE).method(HttpMethod.GET, ClientResponse.class);
Assert.assertEquals(clientResponse.getStatus(), Response.Status.BAD_REQUEST.getStatusCode());
String responseAsString = clientResponse.getEntity(String.class);
......@@ -326,12 +308,9 @@ public class EntityJerseyResourceIT extends BaseResourceIT {
public void testGetEntityListForNoInstances() throws Exception {
addNewType();
ClientResponse clientResponse = service
.path("api/atlas/entities")
.queryParam("type", "test")
.accept(Servlets.JSON_MEDIA_TYPE)
.type(Servlets.JSON_MEDIA_TYPE)
.method(HttpMethod.GET, ClientResponse.class);
ClientResponse clientResponse =
service.path("api/atlas/entities").queryParam("type", "test").accept(Servlets.JSON_MEDIA_TYPE)
.type(Servlets.JSON_MEDIA_TYPE).method(HttpMethod.GET, ClientResponse.class);
Assert.assertEquals(clientResponse.getStatus(), Response.Status.OK.getStatusCode());
String responseAsString = clientResponse.getEntity(String.class);
......@@ -345,8 +324,8 @@ public class EntityJerseyResourceIT extends BaseResourceIT {
}
private void addNewType() throws Exception {
HierarchicalTypeDefinition<ClassType> testTypeDefinition =
TypesUtil.createClassTypeDef("test", ImmutableList.<String>of(),
HierarchicalTypeDefinition<ClassType> testTypeDefinition = TypesUtil
.createClassTypeDef("test", ImmutableList.<String>of(),
TypesUtil.createRequiredAttrDef("name", DataTypes.STRING_TYPE),
TypesUtil.createRequiredAttrDef("description", DataTypes.STRING_TYPE));
......@@ -357,13 +336,9 @@ public class EntityJerseyResourceIT extends BaseResourceIT {
@Test(dependsOnMethods = "testSubmitEntity")
public void testGetTraitNames() throws Exception {
final String guid = tableId._getId();
ClientResponse clientResponse = service
.path("api/atlas/entities")
.path(guid)
.path(TRAITS)
.accept(Servlets.JSON_MEDIA_TYPE)
.type(Servlets.JSON_MEDIA_TYPE)
.method(HttpMethod.GET, ClientResponse.class);
ClientResponse clientResponse =
service.path("api/atlas/entities").path(guid).path(TRAITS).accept(Servlets.JSON_MEDIA_TYPE)
.type(Servlets.JSON_MEDIA_TYPE).method(HttpMethod.GET, ClientResponse.class);
Assert.assertEquals(clientResponse.getStatus(), Response.Status.OK.getStatusCode());
String responseAsString = clientResponse.getEntity(String.class);
......@@ -391,11 +366,8 @@ public class EntityJerseyResourceIT extends BaseResourceIT {
LOG.debug("traitInstanceAsJSON = " + traitInstanceAsJSON);
final String guid = tableId._getId();
ClientResponse clientResponse = service
.path("api/atlas/entities")
.path(guid)
.path(TRAITS)
.accept(Servlets.JSON_MEDIA_TYPE)
ClientResponse clientResponse =
service.path("api/atlas/entities").path(guid).path(TRAITS).accept(Servlets.JSON_MEDIA_TYPE)
.type(Servlets.JSON_MEDIA_TYPE)
.method(HttpMethod.POST, ClientResponse.class, traitInstanceAsJSON);
Assert.assertEquals(clientResponse.getStatus(), Response.Status.CREATED.getStatusCode());
......@@ -417,11 +389,8 @@ public class EntityJerseyResourceIT extends BaseResourceIT {
LOG.debug("traitInstanceAsJSON = " + traitInstanceAsJSON);
final String guid = tableId._getId();
ClientResponse clientResponse = service
.path("api/atlas/entities")
.path(guid)
.path(TRAITS)
.accept(Servlets.JSON_MEDIA_TYPE)
ClientResponse clientResponse =
service.path("api/atlas/entities").path(guid).path(TRAITS).accept(Servlets.JSON_MEDIA_TYPE)
.type(Servlets.JSON_MEDIA_TYPE)
.method(HttpMethod.POST, ClientResponse.class, traitInstanceAsJSON);
Assert.assertEquals(clientResponse.getStatus(), Response.Status.NOT_FOUND.getStatusCode());
......@@ -430,8 +399,8 @@ public class EntityJerseyResourceIT extends BaseResourceIT {
@Test(dependsOnMethods = "testGetTraitNames")
public void testAddTraitWithAttribute() throws Exception {
final String traitName = "PII_Trait" + randomString();
HierarchicalTypeDefinition<TraitType> piiTrait =
TypesUtil.createTraitTypeDef(traitName, ImmutableList.<String>of(),
HierarchicalTypeDefinition<TraitType> piiTrait = TypesUtil
.createTraitTypeDef(traitName, ImmutableList.<String>of(),
TypesUtil.createRequiredAttrDef("type", DataTypes.STRING_TYPE));
String traitDefinitionAsJSON = TypesSerialization$.MODULE$.toJson(piiTrait, true);
LOG.debug("traitDefinitionAsJSON = " + traitDefinitionAsJSON);
......@@ -443,11 +412,8 @@ public class EntityJerseyResourceIT extends BaseResourceIT {
LOG.debug("traitInstanceAsJSON = " + traitInstanceAsJSON);
final String guid = tableId._getId();
ClientResponse clientResponse = service
.path("api/atlas/entities")
.path(guid)
.path(TRAITS)
.accept(Servlets.JSON_MEDIA_TYPE)
ClientResponse clientResponse =
service.path("api/atlas/entities").path(guid).path(TRAITS).accept(Servlets.JSON_MEDIA_TYPE)
.type(Servlets.JSON_MEDIA_TYPE)
.method(HttpMethod.POST, ClientResponse.class, traitInstanceAsJSON);
Assert.assertEquals(clientResponse.getStatus(), Response.Status.CREATED.getStatusCode());
......@@ -487,11 +453,8 @@ public class EntityJerseyResourceIT extends BaseResourceIT {
String traitInstanceAsJSON = InstanceSerialization$.MODULE$.toJson(traitInstance, true);
LOG.debug("traitInstanceAsJSON = " + traitInstanceAsJSON);
ClientResponse clientResponse = service
.path("api/atlas/entities")
.path("random")
.path(TRAITS)
.accept(Servlets.JSON_MEDIA_TYPE)
ClientResponse clientResponse =
service.path("api/atlas/entities").path("random").path(TRAITS).accept(Servlets.JSON_MEDIA_TYPE)
.type(Servlets.JSON_MEDIA_TYPE)
.method(HttpMethod.POST, ClientResponse.class, traitInstanceAsJSON);
Assert.assertEquals(clientResponse.getStatus(), Response.Status.NOT_FOUND.getStatusCode());
......@@ -501,13 +464,8 @@ public class EntityJerseyResourceIT extends BaseResourceIT {
public void testDeleteTrait() throws Exception {
final String guid = tableId._getId();
ClientResponse clientResponse = service
.path("api/atlas/entities")
.path(guid)
.path(TRAITS)
.path(traitName)
.accept(Servlets.JSON_MEDIA_TYPE)
.type(Servlets.JSON_MEDIA_TYPE)
ClientResponse clientResponse = service.path("api/atlas/entities").path(guid).path(TRAITS).path(traitName)
.accept(Servlets.JSON_MEDIA_TYPE).type(Servlets.JSON_MEDIA_TYPE)
.method(HttpMethod.DELETE, ClientResponse.class);
Assert.assertEquals(clientResponse.getStatus(), Response.Status.OK.getStatusCode());
......@@ -524,13 +482,8 @@ public class EntityJerseyResourceIT extends BaseResourceIT {
public void testDeleteTraitNonExistent() throws Exception {
final String traitName = "blah_trait";
ClientResponse clientResponse = service
.path("api/atlas/entities")
.path("random")
.path(TRAITS)
.path(traitName)
.accept(Servlets.JSON_MEDIA_TYPE)
.type(Servlets.JSON_MEDIA_TYPE)
ClientResponse clientResponse = service.path("api/atlas/entities").path("random").path(TRAITS).path(traitName)
.accept(Servlets.JSON_MEDIA_TYPE).type(Servlets.JSON_MEDIA_TYPE)
.method(HttpMethod.DELETE, ClientResponse.class);
Assert.assertEquals(clientResponse.getStatus(), Response.Status.NOT_FOUND.getStatusCode());
......@@ -558,11 +511,12 @@ public class EntityJerseyResourceIT extends BaseResourceIT {
String attrName = random();
String attrValue = random();
HierarchicalTypeDefinition<ClassType> classTypeDefinition =
TypesUtil.createClassTypeDef(classType, ImmutableList.<String>of(),
HierarchicalTypeDefinition<ClassType> classTypeDefinition = TypesUtil
.createClassTypeDef(classType, ImmutableList.<String>of(),
TypesUtil.createUniqueRequiredAttrDef(attrName, DataTypes.STRING_TYPE));
TypesDef typesDef = TypeUtils.getTypesDef(ImmutableList.<EnumTypeDefinition>of(),
ImmutableList.<StructTypeDefinition>of(), ImmutableList.<HierarchicalTypeDefinition<TraitType>>of(),
TypesDef typesDef = TypeUtils
.getTypesDef(ImmutableList.<EnumTypeDefinition>of(), ImmutableList.<StructTypeDefinition>of(),
ImmutableList.<HierarchicalTypeDefinition<TraitType>>of(),
ImmutableList.of(classTypeDefinition));
createType(typesDef);
......@@ -577,48 +531,35 @@ public class EntityJerseyResourceIT extends BaseResourceIT {
}
private void createHiveTypes() throws Exception {
HierarchicalTypeDefinition<ClassType> databaseTypeDefinition =
TypesUtil.createClassTypeDef(DATABASE_TYPE,
ImmutableList.<String>of(),
HierarchicalTypeDefinition<ClassType> databaseTypeDefinition = TypesUtil
.createClassTypeDef(DATABASE_TYPE, ImmutableList.<String>of(),
TypesUtil.createUniqueRequiredAttrDef("name", DataTypes.STRING_TYPE),
TypesUtil.createRequiredAttrDef("description", DataTypes.STRING_TYPE));
StructTypeDefinition structTypeDefinition =
new StructTypeDefinition("serdeType",
new AttributeDefinition[]{
TypesUtil.createRequiredAttrDef("name", DataTypes.STRING_TYPE),
TypesUtil.createRequiredAttrDef("serde", DataTypes.STRING_TYPE)
});
StructTypeDefinition structTypeDefinition = new StructTypeDefinition("serdeType",
new AttributeDefinition[]{TypesUtil.createRequiredAttrDef("name", DataTypes.STRING_TYPE),
TypesUtil.createRequiredAttrDef("serde", DataTypes.STRING_TYPE)});
EnumValue values[] = {
new EnumValue("MANAGED", 1),
new EnumValue("EXTERNAL", 2),
};
EnumValue values[] = {new EnumValue("MANAGED", 1), new EnumValue("EXTERNAL", 2),};
EnumTypeDefinition enumTypeDefinition = new EnumTypeDefinition("tableType", values);
HierarchicalTypeDefinition<ClassType> tableTypeDefinition =
TypesUtil.createClassTypeDef(TABLE_TYPE,
ImmutableList.<String>of(),
HierarchicalTypeDefinition<ClassType> tableTypeDefinition = TypesUtil
.createClassTypeDef(TABLE_TYPE, ImmutableList.<String>of(),
TypesUtil.createUniqueRequiredAttrDef("name", DataTypes.STRING_TYPE),
TypesUtil.createOptionalAttrDef("description", DataTypes.STRING_TYPE),
TypesUtil.createRequiredAttrDef("type", DataTypes.STRING_TYPE),
TypesUtil.createRequiredAttrDef("date", DataTypes.DATE_TYPE),
TypesUtil.createRequiredAttrDef("level", DataTypes.INT_TYPE),
new AttributeDefinition("tableType", "tableType",
Multiplicity.REQUIRED, false, null),
new AttributeDefinition("serde1",
"serdeType", Multiplicity.REQUIRED, false, null),
new AttributeDefinition("serde2",
"serdeType", Multiplicity.REQUIRED, false, null),
new AttributeDefinition("database",
DATABASE_TYPE, Multiplicity.REQUIRED, true, null),
new AttributeDefinition("compressed",
DataTypes.BOOLEAN_TYPE.getName(), Multiplicity.OPTIONAL, true, null));
HierarchicalTypeDefinition<TraitType> classificationTraitDefinition =
TypesUtil.createTraitTypeDef("classification",
ImmutableList.<String>of(),
new AttributeDefinition("tableType", "tableType", Multiplicity.REQUIRED, false, null),
new AttributeDefinition("serde1", "serdeType", Multiplicity.REQUIRED, false, null),
new AttributeDefinition("serde2", "serdeType", Multiplicity.REQUIRED, false, null),
new AttributeDefinition("database", DATABASE_TYPE, Multiplicity.REQUIRED, true, null),
new AttributeDefinition("compressed", DataTypes.BOOLEAN_TYPE.getName(), Multiplicity.OPTIONAL,
true, null));
HierarchicalTypeDefinition<TraitType> classificationTraitDefinition = TypesUtil
.createTraitTypeDef("classification", ImmutableList.<String>of(),
TypesUtil.createRequiredAttrDef("tag", DataTypes.STRING_TYPE));
HierarchicalTypeDefinition<TraitType> piiTrait =
TypesUtil.createTraitTypeDef("pii", ImmutableList.<String>of());
......@@ -633,12 +574,10 @@ public class EntityJerseyResourceIT extends BaseResourceIT {
HierarchicalTypeDefinition<TraitType> financeTrait =
TypesUtil.createTraitTypeDef("finance", ImmutableList.<String>of());
TypesDef typesDef = TypeUtils.getTypesDef(
ImmutableList.of(enumTypeDefinition),
ImmutableList.of(structTypeDefinition),
ImmutableList.of(classificationTraitDefinition, piiTrait, phiTrait, pciTrait,
soxTrait, secTrait, financeTrait),
ImmutableList.of(databaseTypeDefinition, tableTypeDefinition));
TypesDef typesDef = TypeUtils
.getTypesDef(ImmutableList.of(enumTypeDefinition), ImmutableList.of(structTypeDefinition), ImmutableList
.of(classificationTraitDefinition, piiTrait, phiTrait, pciTrait, soxTrait, secTrait,
financeTrait), ImmutableList.of(databaseTypeDefinition, tableTypeDefinition));
createType(typesDef);
}
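Note: the type-definition builders above show the reformat's wrapping rule for long initializers: break after the assignment target, keep the factory call on the next line, and indent continuation arguments. A small sketch in that style, reusing only helpers that appear in this diff; the import paths are assumed from the Atlas typesystem module and the type name is made up.

import com.google.common.collect.ImmutableList;
import org.apache.atlas.typesystem.types.ClassType;
import org.apache.atlas.typesystem.types.DataTypes;
import org.apache.atlas.typesystem.types.HierarchicalTypeDefinition;
import org.apache.atlas.typesystem.types.utils.TypesUtil;

public class TypeDefStyleDemo {
    public static void main(String[] args) {
        // Wrapped at the assignment, continuation arguments indented.
        HierarchicalTypeDefinition<ClassType> demoTypeDefinition = TypesUtil
                .createClassTypeDef("demo_type", ImmutableList.<String>of(),
                        TypesUtil.createUniqueRequiredAttrDef("name", DataTypes.STRING_TYPE),
                        TypesUtil.createRequiredAttrDef("description", DataTypes.STRING_TYPE));

        System.out.println(demoTypeDefinition.typeName);
    }
}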
......@@ -647,8 +586,8 @@ public class EntityJerseyResourceIT extends BaseResourceIT {
databaseInstance.set("name", DATABASE_NAME);
databaseInstance.set("description", "foo database");
Referenceable tableInstance = new Referenceable(TABLE_TYPE,
"classification", "pii", "phi", "pci", "sox", "sec", "finance");
Referenceable tableInstance =
new Referenceable(TABLE_TYPE, "classification", "pii", "phi", "pci", "sox", "sec", "finance");
tableInstance.set("name", TABLE_NAME);
tableInstance.set("description", "bar table");
tableInstance.set("date", "2014-07-11T08:00:00.000Z");
......
......@@ -65,15 +65,9 @@ public class HiveLineageJerseyResourceIT extends BaseResourceIT {
@Test
public void testInputsGraph() throws Exception {
WebResource resource = service
.path(BASE_URI)
.path("sales_fact_monthly_mv")
.path("inputs")
.path("graph");
ClientResponse clientResponse = resource
.accept(Servlets.JSON_MEDIA_TYPE)
.type(Servlets.JSON_MEDIA_TYPE)
WebResource resource = service.path(BASE_URI).path("sales_fact_monthly_mv").path("inputs").path("graph");
ClientResponse clientResponse = resource.accept(Servlets.JSON_MEDIA_TYPE).type(Servlets.JSON_MEDIA_TYPE)
.method(HttpMethod.GET, ClientResponse.class);
Assert.assertEquals(clientResponse.getStatus(), Response.Status.OK.getStatusCode());
......@@ -99,15 +93,9 @@ public class HiveLineageJerseyResourceIT extends BaseResourceIT {
@Test
public void testOutputsGraph() throws Exception {
WebResource resource = service
.path(BASE_URI)
.path("sales_fact")
.path("outputs")
.path("graph");
ClientResponse clientResponse = resource
.accept(Servlets.JSON_MEDIA_TYPE)
.type(Servlets.JSON_MEDIA_TYPE)
WebResource resource = service.path(BASE_URI).path("sales_fact").path("outputs").path("graph");
ClientResponse clientResponse = resource.accept(Servlets.JSON_MEDIA_TYPE).type(Servlets.JSON_MEDIA_TYPE)
.method(HttpMethod.GET, ClientResponse.class);
Assert.assertEquals(clientResponse.getStatus(), Response.Status.OK.getStatusCode());
......@@ -133,14 +121,9 @@ public class HiveLineageJerseyResourceIT extends BaseResourceIT {
@Test
public void testSchema() throws Exception {
WebResource resource = service
.path(BASE_URI)
.path("sales_fact")
.path("schema");
ClientResponse clientResponse = resource
.accept(Servlets.JSON_MEDIA_TYPE)
.type(Servlets.JSON_MEDIA_TYPE)
WebResource resource = service.path(BASE_URI).path("sales_fact").path("schema");
ClientResponse clientResponse = resource.accept(Servlets.JSON_MEDIA_TYPE).type(Servlets.JSON_MEDIA_TYPE)
.method(HttpMethod.GET, ClientResponse.class);
Assert.assertEquals(clientResponse.getStatus(), Response.Status.OK.getStatusCode());
......@@ -168,32 +151,20 @@ public class HiveLineageJerseyResourceIT extends BaseResourceIT {
@Test
public void testSchemaForEmptyTable() throws Exception {
WebResource resource = service
.path(BASE_URI)
.path("")
.path("schema");
ClientResponse clientResponse = resource
.accept(Servlets.JSON_MEDIA_TYPE)
.type(Servlets.JSON_MEDIA_TYPE)
WebResource resource = service.path(BASE_URI).path("").path("schema");
ClientResponse clientResponse = resource.accept(Servlets.JSON_MEDIA_TYPE).type(Servlets.JSON_MEDIA_TYPE)
.method(HttpMethod.GET, ClientResponse.class);
Assert.assertEquals(clientResponse.getStatus(),
Response.Status.NOT_FOUND.getStatusCode());
Assert.assertEquals(clientResponse.getStatus(), Response.Status.NOT_FOUND.getStatusCode());
}
@Test
public void testSchemaForInvalidTable() throws Exception {
WebResource resource = service
.path(BASE_URI)
.path("blah")
.path("schema");
ClientResponse clientResponse = resource
.accept(Servlets.JSON_MEDIA_TYPE)
.type(Servlets.JSON_MEDIA_TYPE)
WebResource resource = service.path(BASE_URI).path("blah").path("schema");
ClientResponse clientResponse = resource.accept(Servlets.JSON_MEDIA_TYPE).type(Servlets.JSON_MEDIA_TYPE)
.method(HttpMethod.GET, ClientResponse.class);
Assert.assertEquals(clientResponse.getStatus(),
Response.Status.NOT_FOUND.getStatusCode());
Assert.assertEquals(clientResponse.getStatus(), Response.Status.NOT_FOUND.getStatusCode());
}
private void setUpTypes() throws Exception {
......@@ -207,69 +178,47 @@ public class HiveLineageJerseyResourceIT extends BaseResourceIT {
private static final String HIVE_PROCESS_TYPE = "hive_process";
private TypesDef createTypeDefinitions() {
HierarchicalTypeDefinition<ClassType> dbClsDef
= TypesUtil.createClassTypeDef(DATABASE_TYPE, null,
attrDef("name", DataTypes.STRING_TYPE),
attrDef("description", DataTypes.STRING_TYPE),
attrDef("locationUri", DataTypes.STRING_TYPE),
attrDef("owner", DataTypes.STRING_TYPE),
attrDef("createTime", DataTypes.INT_TYPE)
);
HierarchicalTypeDefinition<ClassType> columnClsDef =
TypesUtil.createClassTypeDef(COLUMN_TYPE, null,
attrDef("name", DataTypes.STRING_TYPE),
attrDef("dataType", DataTypes.STRING_TYPE),
attrDef("comment", DataTypes.STRING_TYPE)
);
HierarchicalTypeDefinition<ClassType> tblClsDef =
TypesUtil.createClassTypeDef(HIVE_TABLE_TYPE, ImmutableList.of("DataSet"),
attrDef("owner", DataTypes.STRING_TYPE),
attrDef("createTime", DataTypes.INT_TYPE),
attrDef("lastAccessTime", DataTypes.INT_TYPE),
attrDef("tableType", DataTypes.STRING_TYPE),
HierarchicalTypeDefinition<ClassType> dbClsDef = TypesUtil
.createClassTypeDef(DATABASE_TYPE, null, attrDef("name", DataTypes.STRING_TYPE),
attrDef("description", DataTypes.STRING_TYPE), attrDef("locationUri", DataTypes.STRING_TYPE),
attrDef("owner", DataTypes.STRING_TYPE), attrDef("createTime", DataTypes.INT_TYPE));
HierarchicalTypeDefinition<ClassType> columnClsDef = TypesUtil
.createClassTypeDef(COLUMN_TYPE, null, attrDef("name", DataTypes.STRING_TYPE),
attrDef("dataType", DataTypes.STRING_TYPE), attrDef("comment", DataTypes.STRING_TYPE));
HierarchicalTypeDefinition<ClassType> tblClsDef = TypesUtil
.createClassTypeDef(HIVE_TABLE_TYPE, ImmutableList.of("DataSet"),
attrDef("owner", DataTypes.STRING_TYPE), attrDef("createTime", DataTypes.INT_TYPE),
attrDef("lastAccessTime", DataTypes.INT_TYPE), attrDef("tableType", DataTypes.STRING_TYPE),
attrDef("temporary", DataTypes.BOOLEAN_TYPE),
new AttributeDefinition("db", DATABASE_TYPE,
Multiplicity.REQUIRED, false, null),
new AttributeDefinition("columns",
DataTypes.arrayTypeName(COLUMN_TYPE),
Multiplicity.COLLECTION, true, null)
);
HierarchicalTypeDefinition<ClassType> loadProcessClsDef =
TypesUtil.createClassTypeDef(HIVE_PROCESS_TYPE, ImmutableList.of("Process"),
attrDef("userName", DataTypes.STRING_TYPE),
attrDef("startTime", DataTypes.INT_TYPE),
new AttributeDefinition("db", DATABASE_TYPE, Multiplicity.REQUIRED, false, null),
new AttributeDefinition("columns", DataTypes.arrayTypeName(COLUMN_TYPE),
Multiplicity.COLLECTION, true, null));
HierarchicalTypeDefinition<ClassType> loadProcessClsDef = TypesUtil
.createClassTypeDef(HIVE_PROCESS_TYPE, ImmutableList.of("Process"),
attrDef("userName", DataTypes.STRING_TYPE), attrDef("startTime", DataTypes.INT_TYPE),
attrDef("endTime", DataTypes.INT_TYPE),
attrDef("queryText", DataTypes.STRING_TYPE, Multiplicity.REQUIRED),
attrDef("queryPlan", DataTypes.STRING_TYPE, Multiplicity.REQUIRED),
attrDef("queryId", DataTypes.STRING_TYPE, Multiplicity.REQUIRED),
attrDef("queryGraph", DataTypes.STRING_TYPE, Multiplicity.REQUIRED)
);
attrDef("queryGraph", DataTypes.STRING_TYPE, Multiplicity.REQUIRED));
HierarchicalTypeDefinition<TraitType> dimTraitDef =
TypesUtil.createTraitTypeDef("Dimension", null);
HierarchicalTypeDefinition<TraitType> dimTraitDef = TypesUtil.createTraitTypeDef("Dimension", null);
HierarchicalTypeDefinition<TraitType> factTraitDef =
TypesUtil.createTraitTypeDef("Fact", null);
HierarchicalTypeDefinition<TraitType> factTraitDef = TypesUtil.createTraitTypeDef("Fact", null);
HierarchicalTypeDefinition<TraitType> metricTraitDef =
TypesUtil.createTraitTypeDef("Metric", null);
HierarchicalTypeDefinition<TraitType> metricTraitDef = TypesUtil.createTraitTypeDef("Metric", null);
HierarchicalTypeDefinition<TraitType> etlTraitDef =
TypesUtil.createTraitTypeDef("ETL", null);
HierarchicalTypeDefinition<TraitType> etlTraitDef = TypesUtil.createTraitTypeDef("ETL", null);
HierarchicalTypeDefinition<TraitType> piiTraitDef =
TypesUtil.createTraitTypeDef("PII", null);
HierarchicalTypeDefinition<TraitType> piiTraitDef = TypesUtil.createTraitTypeDef("PII", null);
return TypeUtils.getTypesDef(
ImmutableList.<EnumTypeDefinition>of(),
ImmutableList.<StructTypeDefinition>of(),
return TypeUtils.getTypesDef(ImmutableList.<EnumTypeDefinition>of(), ImmutableList.<StructTypeDefinition>of(),
ImmutableList.of(dimTraitDef, factTraitDef, metricTraitDef, etlTraitDef, piiTraitDef),
ImmutableList.of(dbClsDef, columnClsDef, tblClsDef, loadProcessClsDef)
);
ImmutableList.of(dbClsDef, columnClsDef, tblClsDef, loadProcessClsDef));
}
AttributeDefinition attrDef(String name, IDataType dT) {
......@@ -280,8 +229,8 @@ public class HiveLineageJerseyResourceIT extends BaseResourceIT {
return attrDef(name, dT, m, false, null);
}
AttributeDefinition attrDef(String name, IDataType dT,
Multiplicity m, boolean isComposite, String reverseAttributeName) {
AttributeDefinition attrDef(String name, IDataType dT, Multiplicity m, boolean isComposite,
String reverseAttributeName) {
Preconditions.checkNotNull(name);
Preconditions.checkNotNull(dT);
return new AttributeDefinition(name, dT.getName(), m, isComposite, reverseAttributeName);
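Note: the attrDef helper above folds its five parameters onto two lines and guards required arguments with Guava's Preconditions. A standalone sketch of that null-guard pattern; the method and values are illustrative.

import com.google.common.base.Preconditions;

public class PreconditionsDemo {

    static String describe(String name, String type) {
        // Fail fast with a NullPointerException if a required argument is missing.
        Preconditions.checkNotNull(name);
        Preconditions.checkNotNull(type);
        return name + ": " + type;
    }

    public static void main(String[] args) {
        System.out.println(describe("name", "string"));
    }
}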
......@@ -290,43 +239,40 @@ public class HiveLineageJerseyResourceIT extends BaseResourceIT {
private void setupInstances() throws Exception {
Id salesDB = database("Sales", "Sales Database", "John ETL", "hdfs://host:8000/apps/warehouse/sales");
List<Referenceable> salesFactColumns = ImmutableList.of(column("time_id", "int", "time id"),
column("product_id", "int", "product id"), column("customer_id", "int", "customer id", "PII"),
List<Referenceable> salesFactColumns = ImmutableList
.of(column("time_id", "int", "time id"), column("product_id", "int", "product id"),
column("customer_id", "int", "customer id", "PII"),
column("sales", "double", "product id", "Metric"));
Id salesFact = table("sales_fact", "sales fact table", salesDB, "Joe", "Managed", salesFactColumns, "Fact");
List<Referenceable> timeDimColumns = ImmutableList.of(column("time_id", "int", "time id"),
column("dayOfYear", "int", "day Of Year"), column("weekDay", "int", "week Day"));
List<Referenceable> timeDimColumns = ImmutableList
.of(column("time_id", "int", "time id"), column("dayOfYear", "int", "day Of Year"),
column("weekDay", "int", "week Day"));
Id timeDim = table("time_dim", "time dimension table", salesDB, "John Doe", "External", timeDimColumns,
"Dimension");
Id timeDim =
table("time_dim", "time dimension table", salesDB, "John Doe", "External", timeDimColumns, "Dimension");
Id reportingDB = database("Reporting", "reporting database", "Jane BI",
"hdfs://host:8000/apps/warehouse/reporting");
Id reportingDB =
database("Reporting", "reporting database", "Jane BI", "hdfs://host:8000/apps/warehouse/reporting");
Id salesFactDaily = table("sales_fact_daily_mv",
"sales fact daily materialized view",
reportingDB, "Joe BI", "Managed", salesFactColumns, "Metric");
Id salesFactDaily =
table("sales_fact_daily_mv", "sales fact daily materialized view", reportingDB, "Joe BI", "Managed",
salesFactColumns, "Metric");
loadProcess("loadSalesDaily", "John ETL",
ImmutableList.of(salesFact, timeDim), ImmutableList.of(salesFactDaily),
"create table as select ", "plan", "id", "graph",
"ETL");
loadProcess("loadSalesDaily", "John ETL", ImmutableList.of(salesFact, timeDim),
ImmutableList.of(salesFactDaily), "create table as select ", "plan", "id", "graph", "ETL");
Id salesFactMonthly = table("sales_fact_monthly_mv",
"sales fact monthly materialized view",
reportingDB, "Jane BI", "Managed", salesFactColumns, "Metric");
Id salesFactMonthly =
table("sales_fact_monthly_mv", "sales fact monthly materialized view", reportingDB, "Jane BI",
"Managed", salesFactColumns, "Metric");
loadProcess("loadSalesMonthly", "John ETL",
ImmutableList.of(salesFactDaily), ImmutableList.of(salesFactMonthly),
"create table as select ", "plan", "id", "graph",
"ETL");
loadProcess("loadSalesMonthly", "John ETL", ImmutableList.of(salesFactDaily),
ImmutableList.of(salesFactMonthly), "create table as select ", "plan", "id", "graph", "ETL");
}
Id database(String name, String description,
String owner, String locationUri,
String... traitNames) throws Exception {
Id database(String name, String description, String owner, String locationUri, String... traitNames)
throws Exception {
Referenceable referenceable = new Referenceable(DATABASE_TYPE, traitNames);
referenceable.set("name", name);
referenceable.set("description", description);
......@@ -337,8 +283,7 @@ public class HiveLineageJerseyResourceIT extends BaseResourceIT {
return createInstance(referenceable);
}
Referenceable column(String name, String dataType, String comment,
String... traitNames) throws Exception {
Referenceable column(String name, String dataType, String comment, String... traitNames) throws Exception {
Referenceable referenceable = new Referenceable(COLUMN_TYPE, traitNames);
referenceable.set("name", name);
referenceable.set("dataType", dataType);
......@@ -347,9 +292,7 @@ public class HiveLineageJerseyResourceIT extends BaseResourceIT {
return referenceable;
}
Id table(String name, String description, Id dbId,
String owner, String tableType,
List<Referenceable> columns,
Id table(String name, String description, Id dbId, String owner, String tableType, List<Referenceable> columns,
String... traitNames) throws Exception {
Referenceable referenceable = new Referenceable(HIVE_TABLE_TYPE, traitNames);
referenceable.set("name", name);
......@@ -366,12 +309,8 @@ public class HiveLineageJerseyResourceIT extends BaseResourceIT {
return createInstance(referenceable);
}
Id loadProcess(String name, String user,
List<Id> inputTables,
List<Id> outputTables,
String queryText, String queryPlan,
String queryId, String queryGraph,
String... traitNames) throws Exception {
Id loadProcess(String name, String user, List<Id> inputTables, List<Id> outputTables, String queryText,
String queryPlan, String queryId, String queryGraph, String... traitNames) throws Exception {
Referenceable referenceable = new Referenceable(HIVE_PROCESS_TYPE, traitNames);
referenceable.set("name", name);
referenceable.set("user", user);
......
......@@ -61,13 +61,9 @@ public class MetadataDiscoveryJerseyResourceIT extends BaseResourceIT {
@Test
public void testSearchByDSL() throws Exception {
String dslQuery = "from dsl_test_type";
WebResource resource = service
.path("api/atlas/discovery/search/dsl")
.queryParam("query", dslQuery);
WebResource resource = service.path("api/atlas/discovery/search/dsl").queryParam("query", dslQuery);
ClientResponse clientResponse = resource
.accept(Servlets.JSON_MEDIA_TYPE)
.type(Servlets.JSON_MEDIA_TYPE)
ClientResponse clientResponse = resource.accept(Servlets.JSON_MEDIA_TYPE).type(Servlets.JSON_MEDIA_TYPE)
.method(HttpMethod.GET, ClientResponse.class);
Assert.assertEquals(clientResponse.getStatus(), Response.Status.OK.getStatusCode());
......@@ -94,28 +90,19 @@ public class MetadataDiscoveryJerseyResourceIT extends BaseResourceIT {
@Test
public void testSearchByDSLForUnknownType() throws Exception {
String dslQuery = "from blah";
WebResource resource = service
.path("api/atlas/discovery/search/dsl")
.queryParam("query", dslQuery);
WebResource resource = service.path("api/atlas/discovery/search/dsl").queryParam("query", dslQuery);
ClientResponse clientResponse = resource
.accept(Servlets.JSON_MEDIA_TYPE)
.type(Servlets.JSON_MEDIA_TYPE)
ClientResponse clientResponse = resource.accept(Servlets.JSON_MEDIA_TYPE).type(Servlets.JSON_MEDIA_TYPE)
.method(HttpMethod.GET, ClientResponse.class);
Assert.assertEquals(clientResponse.getStatus(),
Response.Status.BAD_REQUEST.getStatusCode());
Assert.assertEquals(clientResponse.getStatus(), Response.Status.BAD_REQUEST.getStatusCode());
}
@Test
public void testSearchUsingGremlin() throws Exception {
String query = "g.V.has('type', 'dsl_test_type').toList()";
WebResource resource = service
.path("api/atlas/discovery/search")
.queryParam("query", query);
WebResource resource = service.path("api/atlas/discovery/search").queryParam("query", query);
ClientResponse clientResponse = resource
.accept(Servlets.JSON_MEDIA_TYPE)
.type(Servlets.JSON_MEDIA_TYPE)
ClientResponse clientResponse = resource.accept(Servlets.JSON_MEDIA_TYPE).type(Servlets.JSON_MEDIA_TYPE)
.method(HttpMethod.GET, ClientResponse.class);
Assert.assertEquals(clientResponse.getStatus(), Response.Status.OK.getStatusCode());
......@@ -132,13 +119,9 @@ public class MetadataDiscoveryJerseyResourceIT extends BaseResourceIT {
@Test
public void testSearchUsingDSL() throws Exception {
String query = "from dsl_test_type";
WebResource resource = service
.path("api/atlas/discovery/search")
.queryParam("query", query);
WebResource resource = service.path("api/atlas/discovery/search").queryParam("query", query);
ClientResponse clientResponse = resource
.accept(Servlets.JSON_MEDIA_TYPE)
.type(Servlets.JSON_MEDIA_TYPE)
ClientResponse clientResponse = resource.accept(Servlets.JSON_MEDIA_TYPE).type(Servlets.JSON_MEDIA_TYPE)
.method(HttpMethod.GET, ClientResponse.class);
Assert.assertEquals(clientResponse.getStatus(), Response.Status.OK.getStatusCode());
......@@ -174,15 +157,13 @@ public class MetadataDiscoveryJerseyResourceIT extends BaseResourceIT {
}
private void createTypes() throws Exception {
HierarchicalTypeDefinition<ClassType> dslTestTypeDefinition =
TypesUtil.createClassTypeDef("dsl_test_type",
ImmutableList.<String>of(),
HierarchicalTypeDefinition<ClassType> dslTestTypeDefinition = TypesUtil
.createClassTypeDef("dsl_test_type", ImmutableList.<String>of(),
TypesUtil.createUniqueRequiredAttrDef("name", DataTypes.STRING_TYPE),
TypesUtil.createRequiredAttrDef("description", DataTypes.STRING_TYPE));
HierarchicalTypeDefinition<TraitType> classificationTraitDefinition =
TypesUtil.createTraitTypeDef("Classification",
ImmutableList.<String>of(),
HierarchicalTypeDefinition<TraitType> classificationTraitDefinition = TypesUtil
.createTraitTypeDef("Classification", ImmutableList.<String>of(),
TypesUtil.createRequiredAttrDef("tag", DataTypes.STRING_TYPE));
HierarchicalTypeDefinition<TraitType> piiTrait =
TypesUtil.createTraitTypeDef("PII_TYPE", ImmutableList.<String>of());
......@@ -197,18 +178,17 @@ public class MetadataDiscoveryJerseyResourceIT extends BaseResourceIT {
HierarchicalTypeDefinition<TraitType> financeTrait =
TypesUtil.createTraitTypeDef("Finance", ImmutableList.<String>of());
TypesDef typesDef = TypeUtils.getTypesDef(
ImmutableList.<EnumTypeDefinition>of(),
ImmutableList.<StructTypeDefinition>of(),
ImmutableList.of(classificationTraitDefinition, piiTrait, phiTrait, pciTrait,
soxTrait, secTrait, financeTrait),
ImmutableList.of(dslTestTypeDefinition));
TypesDef typesDef = TypeUtils
.getTypesDef(ImmutableList.<EnumTypeDefinition>of(), ImmutableList.<StructTypeDefinition>of(),
ImmutableList
.of(classificationTraitDefinition, piiTrait, phiTrait, pciTrait, soxTrait, secTrait,
financeTrait), ImmutableList.of(dslTestTypeDefinition));
createType(typesDef);
}
private Id createInstance() throws Exception {
Referenceable entityInstance = new Referenceable("dsl_test_type",
"Classification", "PII_TYPE", "PHI", "PCI", "SOX", "SEC", "Finance");
Referenceable entityInstance =
new Referenceable("dsl_test_type", "Classification", "PII_TYPE", "PHI", "PCI", "SOX", "SEC", "Finance");
entityInstance.set("name", "foo name");
entityInstance.set("description", "bar description");
......
......@@ -44,13 +44,9 @@ public class RexsterGraphJerseyResourceIT extends BaseResourceIT {
public void testGetVertex() throws Exception {
// todo: add a vertex before fetching it
WebResource resource = service
.path("api/atlas/graph/vertices")
.path("0");
WebResource resource = service.path("api/atlas/graph/vertices").path("0");
ClientResponse clientResponse = resource
.accept(Servlets.JSON_MEDIA_TYPE)
.type(Servlets.JSON_MEDIA_TYPE)
ClientResponse clientResponse = resource.accept(Servlets.JSON_MEDIA_TYPE).type(Servlets.JSON_MEDIA_TYPE)
.method(HttpMethod.GET, ClientResponse.class);
Assert.assertEquals(clientResponse.getStatus(), Response.Status.OK.getStatusCode());
String response = clientResponse.getEntity(String.class);
......@@ -58,12 +54,9 @@ public class RexsterGraphJerseyResourceIT extends BaseResourceIT {
}
public void testGetVertexWithInvalidId() throws Exception {
WebResource resource = service
.path("api/atlas/graph/vertices/blah");
WebResource resource = service.path("api/atlas/graph/vertices/blah");
ClientResponse clientResponse = resource
.accept(Servlets.JSON_MEDIA_TYPE)
.type(Servlets.JSON_MEDIA_TYPE)
ClientResponse clientResponse = resource.accept(Servlets.JSON_MEDIA_TYPE).type(Servlets.JSON_MEDIA_TYPE)
.method(HttpMethod.GET, ClientResponse.class);
Assert.assertEquals(clientResponse.getStatus(), Response.Status.NOT_FOUND.getStatusCode());
}
......
......@@ -70,12 +70,9 @@ public class TypesJerseyResourceIT extends BaseResourceIT {
String typesAsJSON = TypesSerialization.toJson(typeDefinition);
System.out.println("typesAsJSON = " + typesAsJSON);
WebResource resource = service
.path("api/atlas/types");
WebResource resource = service.path("api/atlas/types");
ClientResponse clientResponse = resource
.accept(Servlets.JSON_MEDIA_TYPE)
.type(Servlets.JSON_MEDIA_TYPE)
ClientResponse clientResponse = resource.accept(Servlets.JSON_MEDIA_TYPE).type(Servlets.JSON_MEDIA_TYPE)
.method(HttpMethod.POST, ClientResponse.class, typesAsJSON);
Assert.assertEquals(clientResponse.getStatus(), Response.Status.CREATED.getStatusCode());
......@@ -95,13 +92,9 @@ public class TypesJerseyResourceIT extends BaseResourceIT {
for (HierarchicalTypeDefinition typeDefinition : typeDefinitions) {
System.out.println("typeName = " + typeDefinition.typeName);
WebResource resource = service
.path("api/atlas/types")
.path(typeDefinition.typeName);
WebResource resource = service.path("api/atlas/types").path(typeDefinition.typeName);
ClientResponse clientResponse = resource
.accept(Servlets.JSON_MEDIA_TYPE)
.type(Servlets.JSON_MEDIA_TYPE)
ClientResponse clientResponse = resource.accept(Servlets.JSON_MEDIA_TYPE).type(Servlets.JSON_MEDIA_TYPE)
.method(HttpMethod.GET, ClientResponse.class);
Assert.assertEquals(clientResponse.getStatus(), Response.Status.OK.getStatusCode());
......@@ -114,9 +107,9 @@ public class TypesJerseyResourceIT extends BaseResourceIT {
String typesJson = response.getString(AtlasClient.DEFINITION);
final TypesDef typesDef = TypesSerialization.fromJson(typesJson);
List<HierarchicalTypeDefinition<ClassType>> hierarchicalTypeDefinitions = typesDef.classTypesAsJavaList();
for(HierarchicalTypeDefinition<ClassType> classType : hierarchicalTypeDefinitions) {
for(AttributeDefinition attrDef : classType.attributeDefinitions) {
if("name".equals(attrDef.name)) {
for (HierarchicalTypeDefinition<ClassType> classType : hierarchicalTypeDefinitions) {
for (AttributeDefinition attrDef : classType.attributeDefinitions) {
if ("name".equals(attrDef.name)) {
Assert.assertEquals(attrDef.isIndexable, true);
Assert.assertEquals(attrDef.isUnique, true);
}
......@@ -127,25 +120,18 @@ public class TypesJerseyResourceIT extends BaseResourceIT {
@Test
public void testGetDefinitionForNonexistentType() throws Exception {
WebResource resource = service
.path("api/atlas/types")
.path("blah");
WebResource resource = service.path("api/atlas/types").path("blah");
ClientResponse clientResponse = resource
.accept(Servlets.JSON_MEDIA_TYPE)
.type(Servlets.JSON_MEDIA_TYPE)
ClientResponse clientResponse = resource.accept(Servlets.JSON_MEDIA_TYPE).type(Servlets.JSON_MEDIA_TYPE)
.method(HttpMethod.GET, ClientResponse.class);
Assert.assertEquals(clientResponse.getStatus(), Response.Status.NOT_FOUND.getStatusCode());
}
@Test(dependsOnMethods = "testSubmit")
public void testGetTypeNames() throws Exception {
WebResource resource = service
.path("api/atlas/types");
WebResource resource = service.path("api/atlas/types");
ClientResponse clientResponse = resource
.accept(Servlets.JSON_MEDIA_TYPE)
.type(Servlets.JSON_MEDIA_TYPE)
ClientResponse clientResponse = resource.accept(Servlets.JSON_MEDIA_TYPE).type(Servlets.JSON_MEDIA_TYPE)
.method(HttpMethod.GET, ClientResponse.class);
Assert.assertEquals(clientResponse.getStatus(), Response.Status.OK.getStatusCode());
......@@ -168,14 +154,11 @@ public class TypesJerseyResourceIT extends BaseResourceIT {
public void testGetTraitNames() throws Exception {
String[] traitsAdded = addTraits();
WebResource resource = service
.path("api/atlas/types");
WebResource resource = service.path("api/atlas/types");
ClientResponse clientResponse = resource
.queryParam("type", DataTypes.TypeCategory.TRAIT.name())
.accept(Servlets.JSON_MEDIA_TYPE)
.type(Servlets.JSON_MEDIA_TYPE)
.method(HttpMethod.GET, ClientResponse.class);
ClientResponse clientResponse =
resource.queryParam("type", DataTypes.TypeCategory.TRAIT.name()).accept(Servlets.JSON_MEDIA_TYPE)
.type(Servlets.JSON_MEDIA_TYPE).method(HttpMethod.GET, ClientResponse.class);
Assert.assertEquals(clientResponse.getStatus(), Response.Status.OK.getStatusCode());
String responseAsString = clientResponse.getEntity(String.class);
......@@ -190,14 +173,7 @@ public class TypesJerseyResourceIT extends BaseResourceIT {
}
private String[] addTraits() throws Exception {
String[] traitNames = {
"class_trait",
"secure_trait",
"pii_trait",
"ssn_trait",
"salary_trait",
"sox_trait",
};
String[] traitNames = {"class_trait", "secure_trait", "pii_trait", "ssn_trait", "salary_trait", "sox_trait",};
for (String traitName : traitNames) {
HierarchicalTypeDefinition<TraitType> traitTypeDef =
......@@ -212,29 +188,26 @@ public class TypesJerseyResourceIT extends BaseResourceIT {
private List<HierarchicalTypeDefinition> createHiveTypes() throws Exception {
ArrayList<HierarchicalTypeDefinition> typeDefinitions = new ArrayList<>();
HierarchicalTypeDefinition<ClassType> databaseTypeDefinition =
TypesUtil.createClassTypeDef("database",
ImmutableList.<String>of(),
HierarchicalTypeDefinition<ClassType> databaseTypeDefinition = TypesUtil
.createClassTypeDef("database", ImmutableList.<String>of(),
TypesUtil.createUniqueRequiredAttrDef("name", DataTypes.STRING_TYPE),
TypesUtil.createRequiredAttrDef("description", DataTypes.STRING_TYPE));
typeDefinitions.add(databaseTypeDefinition);
HierarchicalTypeDefinition<ClassType> tableTypeDefinition = TypesUtil.createClassTypeDef(
"table",
ImmutableList.<String>of(),
HierarchicalTypeDefinition<ClassType> tableTypeDefinition = TypesUtil
.createClassTypeDef("table", ImmutableList.<String>of(),
TypesUtil.createUniqueRequiredAttrDef("name", DataTypes.STRING_TYPE),
TypesUtil.createRequiredAttrDef("description", DataTypes.STRING_TYPE),
TypesUtil.createOptionalAttrDef("columnNames", DataTypes.arrayTypeName(DataTypes.STRING_TYPE)),
TypesUtil.createOptionalAttrDef("created", DataTypes.DATE_TYPE),
TypesUtil.createOptionalAttrDef("parameters", DataTypes.mapTypeName(DataTypes.STRING_TYPE, DataTypes.STRING_TYPE)),
TypesUtil.createOptionalAttrDef("created", DataTypes.DATE_TYPE), TypesUtil
.createOptionalAttrDef("parameters",
DataTypes.mapTypeName(DataTypes.STRING_TYPE, DataTypes.STRING_TYPE)),
TypesUtil.createRequiredAttrDef("type", DataTypes.STRING_TYPE),
new AttributeDefinition("database",
"database", Multiplicity.REQUIRED, false, "database"));
new AttributeDefinition("database", "database", Multiplicity.REQUIRED, false, "database"));
typeDefinitions.add(tableTypeDefinition);
HierarchicalTypeDefinition<TraitType> fetlTypeDefinition = TypesUtil.createTraitTypeDef(
"fetl",
ImmutableList.<String>of(),
HierarchicalTypeDefinition<TraitType> fetlTypeDefinition = TypesUtil
.createTraitTypeDef("fetl", ImmutableList.<String>of(),
TypesUtil.createRequiredAttrDef("level", DataTypes.INT_TYPE));
typeDefinitions.add(fetlTypeDefinition);
......
......@@ -26,7 +26,7 @@ import java.net.URL;
import static org.apache.atlas.security.SecurityProperties.CERT_STORES_CREDENTIAL_PROVIDER_PATH;
public class SecureEmbeddedServerIT extends SecureEmbeddedServerITBase{
public class SecureEmbeddedServerIT extends SecureEmbeddedServerITBase {
@Test
public void testServerConfiguredUsingCredentialProvider() throws Exception {
// setup the configuration
......@@ -47,7 +47,7 @@ public class SecureEmbeddedServerIT extends SecureEmbeddedServerITBase{
secureEmbeddedServer.server.start();
URL url = new URL("https://localhost:21443/");
HttpURLConnection connection = (HttpURLConnection)url.openConnection();
HttpURLConnection connection = (HttpURLConnection) url.openConnection();
connection.setRequestMethod("GET");
connection.connect();
......
......@@ -64,11 +64,9 @@ public class SecureEmbeddedServerITBase {
static {
//for localhost testing only
javax.net.ssl.HttpsURLConnection.setDefaultHostnameVerifier(
new javax.net.ssl.HostnameVerifier(){
javax.net.ssl.HttpsURLConnection.setDefaultHostnameVerifier(new javax.net.ssl.HostnameVerifier() {
public boolean verify(String hostname,
javax.net.ssl.SSLSession sslSession) {
public boolean verify(String hostname, javax.net.ssl.SSLSession sslSession) {
if (hostname.equals("localhost")) {
return true;
}
......@@ -81,7 +79,7 @@ public class SecureEmbeddedServerITBase {
}
@BeforeClass
public void setupServerURI () throws Exception {
public void setupServerURI() throws Exception {
BaseResourceIT.baseUrl = "https://localhost:21443";
}
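Note: the static initializer above collapses the anonymous HostnameVerifier onto the registration call. A runnable sketch of that localhost-only verifier in the collapsed style; as the original comment warns, this is for local testing only.

import javax.net.ssl.HostnameVerifier;
import javax.net.ssl.HttpsURLConnection;
import javax.net.ssl.SSLSession;

public class LocalhostVerifierDemo {
    public static void main(String[] args) {
        // Never relax hostname verification outside of local tests.
        HttpsURLConnection.setDefaultHostnameVerifier(new HostnameVerifier() {
            public boolean verify(String hostname, SSLSession sslSession) {
                return hostname.equals("localhost");
            }
        });
        System.out.println("default verifier installed");
    }
}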
......@@ -118,7 +116,8 @@ public class SecureEmbeddedServerITBase {
Assert.fail("Should have thrown an exception");
} catch (IOException e) {
Assert.assertEquals("No credential provider path configured for storage of certificate store passwords", e.getMessage());
Assert.assertEquals("No credential provider path configured for storage of certificate store passwords",
e.getMessage());
} finally {
secureEmbeddedServer.server.stop();
}
......@@ -173,8 +172,9 @@ public class SecureEmbeddedServerITBase {
TestListenerAdapter tla = new TestListenerAdapter();
TestNG testng = new TestNG();
testng.setTestClasses(new Class[] { AdminJerseyResourceIT.class, EntityJerseyResourceIT.class,
MetadataDiscoveryJerseyResourceIT.class, RexsterGraphJerseyResourceIT.class, TypesJerseyResourceIT.class});
testng.setTestClasses(new Class[]{AdminJerseyResourceIT.class, EntityJerseyResourceIT.class,
MetadataDiscoveryJerseyResourceIT.class, RexsterGraphJerseyResourceIT.class,
TypesJerseyResourceIT.class});
testng.addListener(tla);
testng.run();
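Note: the block above drives TestNG programmatically; only the Class[] initializer spacing changed. A minimal runner in the reformatted style, pointed at the hypothetical AnnotationStyleDemo sketch from earlier in this review:

import org.testng.TestListenerAdapter;
import org.testng.TestNG;

public class ProgrammaticRunnerDemo {
    public static void main(String[] args) {
        TestListenerAdapter tla = new TestListenerAdapter();
        TestNG testng = new TestNG();
        // Array initializer spacing after the reformat: new Class[]{...}.
        testng.setTestClasses(new Class[]{AnnotationStyleDemo.class});
        testng.addListener(tla);
        testng.run();
        System.out.println("passed: " + tla.getPassedTests().size());
    }
}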
......@@ -185,8 +185,8 @@ public class SecureEmbeddedServerITBase {
}
protected String getWarPath() {
return String.format("/target/atlas-webapp-%s",
System.getProperty("project.version", "0.1-incubating-SNAPSHOT"));
return String
.format("/target/atlas-webapp-%s", System.getProperty("project.version", "0.1-incubating-SNAPSHOT"));
}
protected void setupCredentials() throws Exception {
......@@ -196,23 +196,19 @@ public class SecureEmbeddedServerITBase {
file.delete();
conf.set(CredentialProviderFactory.CREDENTIAL_PROVIDER_PATH, providerUrl);
CredentialProvider provider =
CredentialProviderFactory.getProviders(conf).get(0);
CredentialProvider provider = CredentialProviderFactory.getProviders(conf).get(0);
// create new aliases
try {
char[] storepass = {'k', 'e', 'y', 'p', 'a', 's', 's'};
provider.createCredentialEntry(
KEYSTORE_PASSWORD_KEY, storepass);
provider.createCredentialEntry(KEYSTORE_PASSWORD_KEY, storepass);
char[] trustpass = {'k', 'e', 'y', 'p', 'a', 's', 's'};
provider.createCredentialEntry(
TRUSTSTORE_PASSWORD_KEY, trustpass);
provider.createCredentialEntry(TRUSTSTORE_PASSWORD_KEY, trustpass);
char[] certpass = {'k', 'e', 'y', 'p', 'a', 's', 's'};
provider.createCredentialEntry(
SERVER_CERT_PASSWORD_KEY, certpass);
provider.createCredentialEntry(SERVER_CERT_PASSWORD_KEY, certpass);
// write out so that it can be found in checks
provider.flush();
......
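Note: setupCredentials above uses Hadoop's credential provider API; the reformat only rejoined the wrapped createCredentialEntry calls. A sketch of the same flow against a throwaway keystore; the /tmp/demo.jceks path and alias name are assumptions, and the keystore must not already contain the alias.

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.security.alias.CredentialProvider;
import org.apache.hadoop.security.alias.CredentialProviderFactory;
import org.apache.hadoop.security.alias.JavaKeyStoreProvider;

public class CredentialSetupDemo {
    public static void main(String[] args) throws Exception {
        // Assumed keystore location; the tests derive theirs from the build dir.
        String providerUrl = JavaKeyStoreProvider.SCHEME_NAME + "://file/tmp/demo.jceks";

        Configuration conf = new Configuration(false);
        conf.set(CredentialProviderFactory.CREDENTIAL_PROVIDER_PATH, providerUrl);
        CredentialProvider provider = CredentialProviderFactory.getProviders(conf).get(0);

        // Store an alias, then flush so later lookups can see it.
        provider.createCredentialEntry("keystore.password", "keypass".toCharArray());
        provider.flush();
        System.out.println("stored aliases: " + provider.getAliases());
    }
}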