Commit fef50cee by Shwetha GS

IDE java code reformat

parent 22624786
......@@ -61,14 +61,16 @@ public class FalconImporter {
this.repository = repo;
}
private Entity getEntity(FalconClient client, EntityType type, String name) throws FalconCLIException, JAXBException {
private Entity getEntity(FalconClient client, EntityType type, String name)
throws FalconCLIException, JAXBException {
String entityStr = client.getDefinition(type.name(), name);
return (Entity) type.getUnmarshaller().unmarshal(new StringReader(entityStr));
}
public void importClusters() throws MetadataException {
try {
EntityList clusters = client.getEntityList(EntityType.CLUSTER.name(), null, null, null, null, null, null, null);
EntityList clusters =
client.getEntityList(EntityType.CLUSTER.name(), null, null, null, null, null, null, null);
for (EntityList.EntityElement element : clusters.getElements()) {
Cluster cluster = (Cluster) getEntity(client, EntityType.CLUSTER, element.name);
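Aside: the re-wrapped getEntity above fetches an entity definition as XML and unmarshals it through JAXB. A minimal, self-contained sketch of that unmarshalling step using only javax.xml.bind (the Cluster bean and XML payload here are illustrative stand-ins, not the Falcon types):

    import java.io.StringReader;
    import javax.xml.bind.JAXBContext;
    import javax.xml.bind.JAXBException;
    import javax.xml.bind.annotation.XmlRootElement;

    public class UnmarshalSketch {
        @XmlRootElement(name = "cluster")
        public static class Cluster {
            public String name;   // JAXB binds public fields by default
        }

        public static void main(String[] args) throws JAXBException {
            String entityStr = "<cluster><name>primary</name></cluster>";
            Cluster cluster = (Cluster) JAXBContext.newInstance(Cluster.class)
                    .createUnmarshaller().unmarshal(new StringReader(entityStr));
            System.out.println(cluster.name);   // prints: primary
        }
    }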
......@@ -80,7 +82,8 @@ public class FalconImporter {
acl.set("owner", cluster.getACL().getOwner());
acl.set("group", cluster.getACL().getGroup());
acl.set("permission", cluster.getACL().getPermission());
StructType aclType = typeSystem.getDataType(StructType.class, FalconTypeSystem.DefinedTypes.ACL.name());
StructType aclType =
typeSystem.getDataType(StructType.class, FalconTypeSystem.DefinedTypes.ACL.name());
clusterRef.set("acl", aclType.convert(acl, Multiplicity.REQUIRED));
}
......@@ -88,7 +91,8 @@ public class FalconImporter {
String[] parts = cluster.getTags().split(",");
List<ITypedInstance> tags = new ArrayList<>();
for (String part : parts) {
TraitType tagType = typeSystem.getDataType(TraitType.class, FalconTypeSystem.DefinedTypes.TAG.name());
TraitType tagType =
typeSystem.getDataType(TraitType.class, FalconTypeSystem.DefinedTypes.TAG.name());
String[] kv = part.trim().split("=");
Struct tag = new Struct(FalconTypeSystem.DefinedTypes.TAG.name());
tag.set("name", kv[0]);
......@@ -106,10 +110,12 @@ public class FalconImporter {
List<ITypedInstance> locations = new ArrayList<>();
for (Location loc : cluster.getLocations().getLocations()) {
Struct location = new Struct(FalconTypeSystem.DefinedTypes.CLUSTER_LOCATION.name());
EnumType locationType = typeSystem.getDataType(EnumType.class, FalconTypeSystem.DefinedTypes.CLUSTER_LOCATION_TYPE.name());
EnumType locationType = typeSystem.getDataType(EnumType.class,
FalconTypeSystem.DefinedTypes.CLUSTER_LOCATION_TYPE.name());
location.set("type", locationType.fromValue(loc.getName().toUpperCase()));
location.set("path", loc.getPath());
StructType type = typeSystem.getDataType(StructType.class, FalconTypeSystem.DefinedTypes.CLUSTER_LOCATION.name());
StructType type = typeSystem
.getDataType(StructType.class, FalconTypeSystem.DefinedTypes.CLUSTER_LOCATION.name());
locations.add(type.convert(location, Multiplicity.REQUIRED));
}
clusterRef.set("locations", locations);
......@@ -122,7 +128,8 @@ public class FalconImporter {
interfaceStruct.set("type", interfaceFld.getType().name());
interfaceStruct.set("endpoint", interfaceFld.getEndpoint());
interfaceStruct.set("version", interfaceFld.getVersion());
StructType type = typeSystem.getDataType(StructType.class, FalconTypeSystem.DefinedTypes.CLUSTER_INTERFACE.name());
StructType type = typeSystem
.getDataType(StructType.class, FalconTypeSystem.DefinedTypes.CLUSTER_INTERFACE.name());
interfaces.add(type.convert(interfaceStruct, Multiplicity.REQUIRED));
}
clusterRef.set("interfaces", interfaces);
......
......@@ -54,7 +54,7 @@ public class FalconTypeSystem {
public static FalconTypeSystem getInstance() throws MetadataException {
if (INSTANCE == null) {
synchronized(LOG) {
synchronized (LOG) {
if (INSTANCE == null) {
INSTANCE = new FalconTypeSystem();
}
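Aside: the only change in this getInstance() hunk is a space after synchronized, but the surrounding idiom is double-checked locking. A compilable sketch of the same shape, assuming a volatile instance field, which the safe form of the pattern requires under the JVM memory model (class and field names are illustrative):

    public final class SingletonSketch {
        // volatile is what makes the unsynchronized first check safe
        private static volatile SingletonSketch instance;
        private static final Object LOCK = new Object();

        private SingletonSketch() { }

        public static SingletonSketch getInstance() {
            if (instance == null) {                // first check, no lock taken
                synchronized (LOCK) {
                    if (instance == null) {        // second check, under the lock
                        instance = new SingletonSketch();
                    }
                }
            }
            return instance;
        }
    }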
......@@ -73,12 +73,16 @@ public class FalconTypeSystem {
new AttributeDefinition("name", DataTypes.STRING_TYPE.getName(), Multiplicity.REQUIRED, false, null),
new AttributeDefinition("acl", DefinedTypes.ACL.name(), Multiplicity.OPTIONAL, false, null),
new AttributeDefinition("tags", DefinedTypes.TAG.name(), Multiplicity.COLLECTION, false, null),
new AttributeDefinition("locations", TYPE_SYSTEM.defineMapType(DataTypes.STRING_TYPE, DataTypes.STRING_TYPE).getName(), Multiplicity.COLLECTION, false, null),
new AttributeDefinition("interfaces", DefinedTypes.CLUSTER_INTERFACE.name(), Multiplicity.COLLECTION, false, null),
new AttributeDefinition("properties", TYPE_SYSTEM.defineMapType(DataTypes.STRING_TYPE, DataTypes.STRING_TYPE).getName(), Multiplicity.OPTIONAL, false, null),
};
new AttributeDefinition("locations",
TYPE_SYSTEM.defineMapType(DataTypes.STRING_TYPE, DataTypes.STRING_TYPE).getName(),
Multiplicity.COLLECTION, false, null),
new AttributeDefinition("interfaces", DefinedTypes.CLUSTER_INTERFACE.name(), Multiplicity.COLLECTION,
false, null), new AttributeDefinition("properties",
TYPE_SYSTEM.defineMapType(DataTypes.STRING_TYPE, DataTypes.STRING_TYPE).getName(),
Multiplicity.OPTIONAL, false, null),};
HierarchicalTypeDefinition<ClassType> cluster =
new HierarchicalTypeDefinition<>(ClassType.class, DefinedTypes.CLUSTER.name(), ImmutableList.<String>of(), attributeDefinitions);
new HierarchicalTypeDefinition<>(ClassType.class, DefinedTypes.CLUSTER.name(),
ImmutableList.<String>of(), attributeDefinitions);
LOG.debug("Created definition for " + DefinedTypes.CLUSTER.name());
return cluster;
}
......@@ -86,57 +90,52 @@ public class FalconTypeSystem {
private HierarchicalTypeDefinition<TraitType> defineTags() {
AttributeDefinition[] attributeDefinitions = new AttributeDefinition[]{
new AttributeDefinition("name", DataTypes.STRING_TYPE.getName(), Multiplicity.REQUIRED, false, null),
new AttributeDefinition("value", DataTypes.STRING_TYPE.getName(), Multiplicity.REQUIRED, false, null)
};
new AttributeDefinition("value", DataTypes.STRING_TYPE.getName(), Multiplicity.REQUIRED, false, null)};
HierarchicalTypeDefinition<TraitType> traitType = new HierarchicalTypeDefinition<>(TraitType.class, DefinedTypes.TAG.name(), ImmutableList.<String>of(), attributeDefinitions);
HierarchicalTypeDefinition<TraitType> traitType =
new HierarchicalTypeDefinition<>(TraitType.class, DefinedTypes.TAG.name(), ImmutableList.<String>of(),
attributeDefinitions);
LOG.debug("Created definition for " + DefinedTypes.TAG.name());
traitTypeDefinitions.add(traitType);
return traitType;
}
private StructTypeDefinition defineClusterLocation() throws MetadataException {
EnumValue values[] = {
new EnumValue("WORKING", 1),
new EnumValue("STAGING", 2),
new EnumValue("TEMP", 3),
};
EnumValue values[] = {new EnumValue("WORKING", 1), new EnumValue("STAGING", 2), new EnumValue("TEMP", 3),};
LOG.debug("Created definition for " + DefinedTypes.CLUSTER_LOCATION_TYPE.name());
EnumTypeDefinition locationType = new EnumTypeDefinition(DefinedTypes.CLUSTER_LOCATION_TYPE.name(), values);
TYPE_SYSTEM.defineEnumType(locationType);
AttributeDefinition[] attributeDefinitions = new AttributeDefinition[]{
new AttributeDefinition("type", DefinedTypes.CLUSTER_LOCATION_TYPE.name(), Multiplicity.REQUIRED, false, null),
new AttributeDefinition("path", DataTypes.STRING_TYPE.getName(), Multiplicity.REQUIRED, false, null),
};
new AttributeDefinition("type", DefinedTypes.CLUSTER_LOCATION_TYPE.name(), Multiplicity.REQUIRED, false,
null),
new AttributeDefinition("path", DataTypes.STRING_TYPE.getName(), Multiplicity.REQUIRED, false, null),};
LOG.debug("Created definition for " + DefinedTypes.CLUSTER_LOCATION.name());
StructTypeDefinition location = new StructTypeDefinition(DefinedTypes.CLUSTER_LOCATION.name(), attributeDefinitions);
StructTypeDefinition location =
new StructTypeDefinition(DefinedTypes.CLUSTER_LOCATION.name(), attributeDefinitions);
structTypeDefinitions.add(location);
return location;
}
private StructTypeDefinition defineClusterInterface() throws MetadataException {
EnumValue values[] = {
new EnumValue("READONLY", 1),
new EnumValue("WRITE", 2),
new EnumValue("EXECUTE", 3),
new EnumValue("WORKFLOW", 4),
new EnumValue("MESSAGING", 5),
new EnumValue("REGISTRY", 6),
};
EnumValue values[] = {new EnumValue("READONLY", 1), new EnumValue("WRITE", 2), new EnumValue("EXECUTE", 3),
new EnumValue("WORKFLOW", 4), new EnumValue("MESSAGING", 5), new EnumValue("REGISTRY", 6),};
LOG.debug("Created definition for " + DefinedTypes.CLUSTER_INTERFACE_TYPE.name());
EnumTypeDefinition interfaceType = new EnumTypeDefinition(DefinedTypes.CLUSTER_INTERFACE_TYPE.name(), values);
TYPE_SYSTEM.defineEnumType(interfaceType);
AttributeDefinition[] attributeDefinitions = new AttributeDefinition[]{
new AttributeDefinition("type", DefinedTypes.CLUSTER_INTERFACE_TYPE.name(), Multiplicity.REQUIRED, false, null),
new AttributeDefinition("endpoint", DataTypes.STRING_TYPE.getName(), Multiplicity.REQUIRED, false, null),
new AttributeDefinition("version", DataTypes.STRING_TYPE.getName(), Multiplicity.REQUIRED, false, null),
};
new AttributeDefinition("type", DefinedTypes.CLUSTER_INTERFACE_TYPE.name(), Multiplicity.REQUIRED,
false, null),
new AttributeDefinition("endpoint", DataTypes.STRING_TYPE.getName(), Multiplicity.REQUIRED, false,
null),
new AttributeDefinition("version", DataTypes.STRING_TYPE.getName(), Multiplicity.REQUIRED, false,
null),};
LOG.debug("Created definition for " + DefinedTypes.CLUSTER_INTERFACE.name());
StructTypeDefinition interfaceEntity = new StructTypeDefinition(DefinedTypes.CLUSTER_INTERFACE.name(), attributeDefinitions);
StructTypeDefinition interfaceEntity =
new StructTypeDefinition(DefinedTypes.CLUSTER_INTERFACE.name(), attributeDefinitions);
structTypeDefinitions.add(interfaceEntity);
return interfaceEntity;
}
......@@ -154,13 +153,10 @@ public class FalconTypeSystem {
private StructTypeDefinition defineACL() {
AttributeDefinition[] attributeDefinitions = new AttributeDefinition[]{
new AttributeDefinition("owner", DataTypes.STRING_TYPE.getName(),
Multiplicity.REQUIRED, false, null),
new AttributeDefinition("group", DataTypes.STRING_TYPE.getName(),
Multiplicity.REQUIRED, false, null),
new AttributeDefinition("permission", DataTypes.STRING_TYPE.getName(),
Multiplicity.OPTIONAL, false, null),
};
new AttributeDefinition("owner", DataTypes.STRING_TYPE.getName(), Multiplicity.REQUIRED, false, null),
new AttributeDefinition("group", DataTypes.STRING_TYPE.getName(), Multiplicity.REQUIRED, false, null),
new AttributeDefinition("permission", DataTypes.STRING_TYPE.getName(), Multiplicity.OPTIONAL, false,
null),};
LOG.debug("Created definition for " + DefinedTypes.ACL.name());
StructTypeDefinition acl = new StructTypeDefinition(DefinedTypes.ACL.name(), attributeDefinitions);
structTypeDefinitions.add(acl);
......
......@@ -48,11 +48,12 @@ public class FalconImporterTest {
FalconTypeSystem.getInstance();
FalconImporter importer = new FalconImporter(client, repo);
when(client.getEntityList(EntityType.CLUSTER.name(), null, null, null, null, null, null,
null)).thenReturn(getEntityList());
when(client.getEntityList(EntityType.CLUSTER.name(), null, null, null, null, null, null, null))
.thenReturn(getEntityList());
//TODO Set other fields in cluster
when(client.getDefinition(anyString(), anyString())).thenReturn(getCluster());
when(repo.createEntity(any(IReferenceableInstance.class), anyString())).thenReturn(UUID.randomUUID().toString());
when(repo.createEntity(any(IReferenceableInstance.class), anyString()))
.thenReturn(UUID.randomUUID().toString());
importer.importClusters();
}
......
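Aside: the when(...).thenReturn(...) calls being re-wrapped in the test above are plain Mockito stubbing. A minimal sketch of the pattern, assuming Mockito 1.x on the classpath (the Repository interface below is a placeholder, not the Atlas repository API):

    import static org.mockito.Matchers.any;
    import static org.mockito.Matchers.anyString;
    import static org.mockito.Mockito.mock;
    import static org.mockito.Mockito.when;

    import java.util.UUID;

    public class StubbingSketch {
        interface Repository {
            String createEntity(Object instance, String typeName);
        }

        public static void main(String[] args) {
            Repository repo = mock(Repository.class);
            // matchers make the stub answer any (instance, name) combination
            when(repo.createEntity(any(), anyString())).thenReturn(UUID.randomUUID().toString());
            System.out.println(repo.createEntity(new Object(), "ClusterType"));
        }
    }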
......@@ -29,7 +29,9 @@ public class FalconTypeSystemTest {
@Test
public void testTypeSystem() throws MetadataException {
FalconTypeSystem.getInstance();
Assert.assertNotNull(TypeSystem.getInstance().getDataType(ClassType.class, FalconTypeSystem.DefinedTypes.CLUSTER.name()));
Assert.assertNotNull(TypeSystem.getInstance().getDataType(TraitType.class, FalconTypeSystem.DefinedTypes.TAG.name()));
Assert.assertNotNull(
TypeSystem.getInstance().getDataType(ClassType.class, FalconTypeSystem.DefinedTypes.CLUSTER.name()));
Assert.assertNotNull(
TypeSystem.getInstance().getDataType(TraitType.class, FalconTypeSystem.DefinedTypes.TAG.name()));
}
}
......@@ -159,9 +159,8 @@ public class HiveMetaStoreBridge {
LOG.debug("Getting reference for database {}", databaseName);
String typeName = HiveDataTypes.HIVE_DB.getName();
String dslQuery = String.format("%s where %s = '%s' and %s = '%s'", typeName,
HiveDataModelGenerator.NAME, databaseName.toLowerCase(), HiveDataModelGenerator.CLUSTER_NAME,
clusterName);
String dslQuery = String.format("%s where %s = '%s' and %s = '%s'", typeName, HiveDataModelGenerator.NAME,
databaseName.toLowerCase(), HiveDataModelGenerator.CLUSTER_NAME, clusterName);
return getEntityReferenceFromDSL(typeName, dslQuery);
}
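Aside: re-wrapped or not, the String.format above is easier to audit for quoting once expanded. A sketch with invented values showing the DSL string it builds (hive_db, sales_db, and primary are hypothetical):

    public class DslQuerySketch {
        public static void main(String[] args) {
            String typeName = "hive_db";    // stand-in for HiveDataTypes.HIVE_DB.getName()
            String dslQuery = String.format("%s where %s = '%s' and %s = '%s'",
                    typeName, "name", "sales_db", "clusterName", "primary");
            System.out.println(dslQuery);
            // prints: hive_db where name = 'sales_db' and clusterName = 'primary'
        }
    }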
......@@ -170,11 +169,12 @@ public class HiveMetaStoreBridge {
String typeName = HiveDataTypes.HIVE_PROCESS.getName();
//todo enable DSL
// String dslQuery = String.format("%s where queryText = \"%s\"", typeName, queryStr);
// return getEntityReferenceFromDSL(typeName, dslQuery);
// String dslQuery = String.format("%s where queryText = \"%s\"", typeName, queryStr);
// return getEntityReferenceFromDSL(typeName, dslQuery);
String gremlinQuery = String.format("g.V.has('__typeName', '%s').has('%s.queryText', \"%s\").toList()",
typeName, typeName, StringEscapeUtils.escapeJava(queryStr));
String gremlinQuery =
String.format("g.V.has('__typeName', '%s').has('%s.queryText', \"%s\").toList()", typeName, typeName,
StringEscapeUtils.escapeJava(queryStr));
return getEntityReferenceFromGremlin(typeName, gremlinQuery);
}
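Aside: the Gremlin fallback above embeds the query text inside a double-quoted string literal, which is why it runs through StringEscapeUtils.escapeJava first — raw query text routinely contains double quotes that would otherwise terminate the literal early. A sketch of just that step, assuming commons-lang on the classpath (the type name and query are invented):

    import org.apache.commons.lang.StringEscapeUtils;

    public class GremlinQuerySketch {
        public static void main(String[] args) {
            String typeName = "hive_process";                       // illustrative
            String queryStr = "select * from t where s = \"x\"";
            // escapeJava rewrites " and newlines as \" and \n so the text
            // survives inside the double-quoted Gremlin literal
            String gremlin = String.format(
                    "g.V.has('__typeName', '%s').has('%s.queryText', \"%s\").toList()",
                    typeName, typeName, StringEscapeUtils.escapeJava(queryStr));
            System.out.println(gremlin);
        }
    }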
......@@ -216,9 +216,8 @@ public class HiveMetaStoreBridge {
return getEntityReferenceFromDSL(typeName, dslQuery);
}
private Referenceable getEntityReferenceFromGremlin(String typeName, String gremlinQuery) throws
AtlasServiceException,
JSONException {
private Referenceable getEntityReferenceFromGremlin(String typeName, String gremlinQuery)
throws AtlasServiceException, JSONException {
AtlasClient client = getAtlasClient();
JSONObject response = client.searchByGremlin(gremlinQuery);
JSONArray results = response.getJSONArray(AtlasClient.RESULTS);
......@@ -236,7 +235,8 @@ public class HiveMetaStoreBridge {
//todo replace gremlin with DSL
// String dslQuery = String.format("%s as p where values = %s, tableName where name = '%s', "
// + "dbName where name = '%s' and clusterName = '%s' select p", typeName, valuesStr, tableName,
// + "dbName where name = '%s' and clusterName = '%s' select p", typeName, valuesStr,
// tableName,
// dbName, clusterName);
String datasetType = AtlasClient.DATA_SET_SUPER_TYPE;
......@@ -373,9 +373,8 @@ public class HiveMetaStoreBridge {
return partRef;
}
private void importIndexes(String db, String table,
Referenceable dbReferenceable,
Referenceable tableReferenceable) throws Exception {
private void importIndexes(String db, String table, Referenceable dbReferenceable, Referenceable tableReferenceable)
throws Exception {
List<Index> indexes = hiveClient.getIndexes(db, table, Short.MAX_VALUE);
if (indexes.size() > 0) {
for (Index index : indexes) {
......@@ -385,9 +384,8 @@ public class HiveMetaStoreBridge {
}
//todo should be idempotent
private void importIndex(Index index,
Referenceable dbReferenceable,
Referenceable tableReferenceable) throws Exception {
private void importIndex(Index index, Referenceable dbReferenceable, Referenceable tableReferenceable)
throws Exception {
LOG.info("Importing index {} for {}.{}", index.getIndexName(), dbReferenceable, tableReferenceable);
Referenceable indexRef = new Referenceable(HiveDataTypes.HIVE_INDEX.getName());
......@@ -411,7 +409,8 @@ public class HiveMetaStoreBridge {
createInstance(indexRef);
}
private Referenceable fillStorageDescStruct(StorageDescriptor storageDesc, List<Referenceable> colList) throws Exception {
private Referenceable fillStorageDescStruct(StorageDescriptor storageDesc, List<Referenceable> colList)
throws Exception {
LOG.debug("Filling storage descriptor information for " + storageDesc);
Referenceable sdReferenceable = new Referenceable(HiveDataTypes.HIVE_STORAGEDESC.getName());
......@@ -429,7 +428,8 @@ public class HiveMetaStoreBridge {
sdReferenceable.set("serdeInfo", serdeInfoStruct);
sdReferenceable.set(HiveDataModelGenerator.STORAGE_NUM_BUCKETS, storageDesc.getNumBuckets());
sdReferenceable.set(HiveDataModelGenerator.STORAGE_IS_STORED_AS_SUB_DIRS, storageDesc.isStoredAsSubDirectories());
sdReferenceable
.set(HiveDataModelGenerator.STORAGE_IS_STORED_AS_SUB_DIRS, storageDesc.isStoredAsSubDirectories());
//Use the passed column list if not null, ex: use same references for table and SD
List<FieldSchema> columns = storageDesc.getCols();
......@@ -469,8 +469,7 @@ public class HiveMetaStoreBridge {
return createInstance(sdReferenceable);
}
private List<Referenceable> getColumns(List<FieldSchema> schemaList) throws Exception
{
private List<Referenceable> getColumns(List<FieldSchema> schemaList) throws Exception {
List<Referenceable> colList = new ArrayList<>();
for (FieldSchema fs : schemaList) {
LOG.debug("Processing field " + fs);
......@@ -489,7 +488,7 @@ public class HiveMetaStoreBridge {
AtlasClient dgiClient = getAtlasClient();
//Register hive data model if its not already registered
if (dgiClient.getType(HiveDataTypes.HIVE_PROCESS.getName()) == null ) {
if (dgiClient.getType(HiveDataTypes.HIVE_PROCESS.getName()) == null) {
LOG.info("Registering Hive data model");
dgiClient.createType(dataModelGenerator.getModelAsJson());
} else {
......
......@@ -104,18 +104,18 @@ public class HiveHook implements ExecuteWithHookContext {
try {
Runtime.getRuntime().addShutdownHook(new Thread() {
@Override
public void run() {
try {
executor.shutdown();
executor.awaitTermination(WAIT_TIME, TimeUnit.SECONDS);
executor = null;
} catch (InterruptedException ie) {
LOG.info("Interrupt received in shutdown.");
}
// shutdown client
}
});
@Override
public void run() {
try {
executor.shutdown();
executor.awaitTermination(WAIT_TIME, TimeUnit.SECONDS);
executor = null;
} catch (InterruptedException ie) {
LOG.info("Interrupt received in shutdown.");
}
// shutdown client
}
});
} catch (IllegalStateException is) {
LOG.info("Attempting to send msg while shutdown in progress.");
}
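Aside: the block being re-indented here is the usual drain-the-executor-on-exit pattern — register a JVM shutdown hook, stop accepting work, and wait briefly for in-flight tasks. A compilable sketch of the same shape (WAIT_TIME and the pool size are stand-ins):

    import java.util.concurrent.ExecutorService;
    import java.util.concurrent.Executors;
    import java.util.concurrent.TimeUnit;

    public class ShutdownHookSketch {
        private static final long WAIT_TIME = 3;   // seconds; illustrative
        private static ExecutorService executor = Executors.newFixedThreadPool(1);

        public static void main(String[] args) {
            try {
                Runtime.getRuntime().addShutdownHook(new Thread() {
                    @Override
                    public void run() {
                        try {
                            executor.shutdown();   // stop accepting new tasks
                            executor.awaitTermination(WAIT_TIME, TimeUnit.SECONDS);
                            executor = null;
                        } catch (InterruptedException ie) {
                            Thread.currentThread().interrupt();
                        }
                    }
                });
            } catch (IllegalStateException is) {
                // addShutdownHook throws this once JVM shutdown has begun
            }
        }
    }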
......@@ -163,15 +163,15 @@ public class HiveHook implements ExecuteWithHookContext {
fireAndForget(event);
} else {
executor.submit(new Runnable() {
@Override
public void run() {
try {
fireAndForget(event);
} catch (Throwable e) {
LOG.info("DGI hook failed", e);
}
}
});
@Override
public void run() {
try {
fireAndForget(event);
} catch (Throwable e) {
LOG.info("DGI hook failed", e);
}
}
});
}
}
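Aside: the anonymous Runnable above predates lambdas; on Java 8+ the same fire-and-forget submission reads more compactly. A sketch (the fireAndForget helper and event payload are stand-ins):

    import java.util.concurrent.ExecutorService;
    import java.util.concurrent.Executors;

    public class SubmitSketch {
        static void fireAndForget(String event) {  // stand-in for the real hook call
            System.out.println("notifying: " + event);
        }

        public static void main(String[] args) {
            ExecutorService executor = Executors.newFixedThreadPool(1);
            final String event = "query-plan";     // illustrative payload
            executor.submit(() -> {
                try {
                    fireAndForget(event);
                } catch (Throwable t) {
                    t.printStackTrace();           // a hook should log, never rethrow
                }
            });
            executor.shutdown();
        }
    }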
......@@ -348,7 +348,7 @@ public class HiveHook implements ExecuteWithHookContext {
explain.initialize(event.conf, event.queryPlan, null);
List<Task<?>> rootTasks = event.queryPlan.getRootTasks();
return explain.getJSONPlan(null, null, rootTasks, event.queryPlan.getFetchTask(), true, false, false);
} catch(Exception e) {
} catch (Exception e) {
LOG.warn("Failed to get queryplan", e);
return new JSONObject();
}
......
......@@ -70,27 +70,22 @@ public class BaseSSLAndKerberosTest extends BaseSecurityTest {
file.delete();
conf.set(CredentialProviderFactory.CREDENTIAL_PROVIDER_PATH, providerUrl);
CredentialProvider provider =
CredentialProviderFactory.getProviders(conf).get(0);
CredentialProvider provider = CredentialProviderFactory.getProviders(conf).get(0);
// create new aliases
try {
char[] storepass = {'k', 'e', 'y', 'p', 'a', 's', 's'};
provider.createCredentialEntry(
KEYSTORE_PASSWORD_KEY, storepass);
provider.createCredentialEntry(KEYSTORE_PASSWORD_KEY, storepass);
char[] trustpass = {'k', 'e', 'y', 'p', 'a', 's', 's'};
provider.createCredentialEntry(
TRUSTSTORE_PASSWORD_KEY, trustpass);
provider.createCredentialEntry(TRUSTSTORE_PASSWORD_KEY, trustpass);
char[] trustpass2 = {'k', 'e', 'y', 'p', 'a', 's', 's'};
provider.createCredentialEntry(
"ssl.client.truststore.password", trustpass2);
provider.createCredentialEntry("ssl.client.truststore.password", trustpass2);
char[] certpass = {'k', 'e', 'y', 'p', 'a', 's', 's'};
provider.createCredentialEntry(
SERVER_CERT_PASSWORD_KEY, certpass);
provider.createCredentialEntry(SERVER_CERT_PASSWORD_KEY, certpass);
// write out so that it can be found in checks
provider.flush();
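Aside: the collapsed createCredentialEntry calls above use Hadoop's credential-provider API — point the configuration at a JCEKS keystore URL, take the first provider, add aliases, then flush so the file exists for later checks. A sketch of the same sequence, assuming hadoop-common on the classpath (the alias and path are invented):

    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.security.alias.CredentialProvider;
    import org.apache.hadoop.security.alias.CredentialProviderFactory;

    public class CredentialSetupSketch {
        public static void main(String[] args) throws Exception {
            String providerUrl = "jceks://file/tmp/test.jceks";   // illustrative
            Configuration conf = new Configuration(false);
            conf.set(CredentialProviderFactory.CREDENTIAL_PROVIDER_PATH, providerUrl);

            CredentialProvider provider = CredentialProviderFactory.getProviders(conf).get(0);
            char[] storepass = {'k', 'e', 'y', 'p', 'a', 's', 's'};
            provider.createCredentialEntry("keystore.password", storepass);
            provider.flush();   // persist so the entry can be found in checks
        }
    }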
......@@ -132,8 +127,7 @@ public class BaseSSLAndKerberosTest extends BaseSecurityTest {
hiveConf.setVar(HiveConf.ConfVars.PREEXECHOOKS, "");
hiveConf.setVar(HiveConf.ConfVars.POSTEXECHOOKS, HiveHook.class.getName());
hiveConf.setBoolVar(HiveConf.ConfVars.HIVE_SUPPORT_CONCURRENCY, false);
hiveConf.setVar(HiveConf.ConfVars.METASTOREWAREHOUSE,
System.getProperty("user.dir") + "/target/atlas");
hiveConf.setVar(HiveConf.ConfVars.METASTOREWAREHOUSE, System.getProperty("user.dir") + "/target/atlas");
hiveConf.set(HiveMetaStoreBridge.DGI_URL_PROPERTY, DGI_URL);
hiveConf.set("javax.jdo.option.ConnectionURL", "jdbc:derby:./target/metastore_db;create=true");
hiveConf.set("hive.hook.dgi.synchronous", "true");
......
......@@ -121,8 +121,8 @@ public class HiveHookIT {
private String createTable(boolean partition) throws Exception {
String tableName = tableName();
runCommand("create table " + tableName + "(id int, name string) comment 'table comment' "
+ (partition ? " partitioned by(dt string)" : ""));
runCommand("create table " + tableName + "(id int, name string) comment 'table comment' " + (partition ?
" partitioned by(dt string)" : ""));
return tableName;
}
......@@ -146,7 +146,7 @@ public class HiveHookIT {
final Id sdId = (Id) tableRef.get("sd");
Referenceable sdRef = dgiCLient.getEntity(sdId.id);
Assert.assertEquals(sdRef.get(HiveDataModelGenerator.STORAGE_IS_STORED_AS_SUB_DIRS),false);
Assert.assertEquals(sdRef.get(HiveDataModelGenerator.STORAGE_IS_STORED_AS_SUB_DIRS), false);
//Create table where database doesn't exist, will create database instance as well
assertDatabaseIsRegistered(DEFAULT_DB);
......@@ -154,7 +154,8 @@ public class HiveHookIT {
private String assertColumnIsRegistered(String colName) throws Exception {
LOG.debug("Searching for column {}", colName);
String query = String.format("%s where name = '%s'", HiveDataTypes.HIVE_COLUMN.getName(), colName.toLowerCase());
String query =
String.format("%s where name = '%s'", HiveDataTypes.HIVE_COLUMN.getName(), colName.toLowerCase());
return assertEntityIsRegistered(query, true);
}
......@@ -196,8 +197,9 @@ public class HiveHookIT {
public void testInsert() throws Exception {
String tableName = createTable();
String insertTableName = createTable();
String query = "insert into " + insertTableName + " partition(dt = '2015-01-01') select id, name from "
+ tableName + " where dt = '2015-01-01'";
String query =
"insert into " + insertTableName + " partition(dt = '2015-01-01') select id, name from " + tableName
+ " where dt = '2015-01-01'";
runCommand(query);
assertProcessIsRegistered(query);
......@@ -278,13 +280,14 @@ public class HiveHookIT {
}
private void assertProcessIsRegistered(String queryStr) throws Exception {
// String dslQuery = String.format("%s where queryText = \"%s\"", HiveDataTypes.HIVE_PROCESS.getName(),
// normalize(queryStr));
// assertEntityIsRegistered(dslQuery, true);
// String dslQuery = String.format("%s where queryText = \"%s\"", HiveDataTypes.HIVE_PROCESS.getName(),
// normalize(queryStr));
// assertEntityIsRegistered(dslQuery, true);
//todo replace with DSL
String typeName = HiveDataTypes.HIVE_PROCESS.getName();
String gremlinQuery = String.format("g.V.has('__typeName', '%s').has('%s.queryText', \"%s\").toList()",
typeName, typeName, normalize(queryStr));
String gremlinQuery =
String.format("g.V.has('__typeName', '%s').has('%s.queryText', \"%s\").toList()", typeName, typeName,
normalize(queryStr));
JSONObject response = dgiCLient.searchByGremlin(gremlinQuery);
JSONArray results = response.getJSONArray(AtlasClient.RESULTS);
Assert.assertEquals(results.length(), 1);
......@@ -307,9 +310,9 @@ public class HiveHookIT {
private String assertTableIsRegistered(String dbName, String tableName, boolean registered) throws Exception {
LOG.debug("Searching for table {}.{}", dbName, tableName);
String query = String.format("%s as t where tableName = '%s', db where name = '%s' and clusterName = '%s'"
+ " select t", HiveDataTypes.HIVE_TABLE.getName(), tableName.toLowerCase(), dbName.toLowerCase(),
CLUSTER_NAME);
String query = String.format(
"%s as t where tableName = '%s', db where name = '%s' and clusterName = '%s'" + " select t",
HiveDataTypes.HIVE_TABLE.getName(), tableName.toLowerCase(), dbName.toLowerCase(), CLUSTER_NAME);
return assertEntityIsRegistered(query, registered);
}
......@@ -336,7 +339,7 @@ public class HiveHookIT {
Assert.assertEquals(results.length(), 1);
}
private String assertEntityIsRegistered(String dslQuery, boolean registered) throws Exception{
private String assertEntityIsRegistered(String dslQuery, boolean registered) throws Exception {
JSONArray results = dgiCLient.searchByDSL(dslQuery);
if (registered) {
Assert.assertEquals(results.length(), 1);
......
......@@ -92,7 +92,8 @@ public class NegativeSSLAndKerberosHiveHookIT extends BaseSSLAndKerberosTest {
configuration.setProperty(KEYSTORE_FILE_KEY, "../../webapp/target/atlas.keystore");
configuration.setProperty(CERT_STORES_CREDENTIAL_PROVIDER_PATH, providerUrl);
configuration.setProperty("atlas.http.authentication.type", "kerberos");
configuration.setProperty(SSLFactory.SSL_HOSTNAME_VERIFIER_KEY, SSLHostnameVerifier.DEFAULT_AND_LOCALHOST.toString());
configuration.setProperty(SSLFactory.SSL_HOSTNAME_VERIFIER_KEY,
SSLHostnameVerifier.DEFAULT_AND_LOCALHOST.toString());
configuration.save(new FileWriter(persistDir + File.separator + "client.properties"));
......
......@@ -18,8 +18,8 @@
package org.apache.atlas.hive.hook;
import org.apache.atlas.AtlasException;
import org.apache.atlas.AtlasClient;
import org.apache.atlas.AtlasException;
import org.apache.atlas.PropertiesUtil;
import org.apache.atlas.hive.model.HiveDataTypes;
import org.apache.atlas.security.SecurityProperties;
......@@ -107,7 +107,8 @@ public class SSLAndKerberosHiveHookIT extends BaseSSLAndKerberosTest {
configuration.setProperty(KEYSTORE_FILE_KEY, "../../webapp/target/atlas.keystore");
configuration.setProperty(CERT_STORES_CREDENTIAL_PROVIDER_PATH, providerUrl);
configuration.setProperty("atlas.http.authentication.type", "kerberos");
configuration.setProperty(SSLFactory.SSL_HOSTNAME_VERIFIER_KEY, SSLHostnameVerifier.DEFAULT_AND_LOCALHOST.toString());
configuration.setProperty(SSLFactory.SSL_HOSTNAME_VERIFIER_KEY,
SSLHostnameVerifier.DEFAULT_AND_LOCALHOST.toString());
configuration.save(new FileWriter(persistDir + File.separator + "client.properties"));
......@@ -215,7 +216,8 @@ public class SSLAndKerberosHiveHookIT extends BaseSSLAndKerberosTest {
assertInstanceIsRegistered(HiveDataTypes.HIVE_DB.getName(), "name", dbName);
}
private void assertInstanceIsRegistered(final String typeName, final String colName, final String colValue) throws Exception {
private void assertInstanceIsRegistered(final String typeName, final String colName, final String colValue)
throws Exception {
Subject.doAs(subject, new PrivilegedExceptionAction<Object>() {
@Override
public Object run() throws Exception {
......
......@@ -18,8 +18,8 @@
package org.apache.atlas.hive.hook;
import org.apache.atlas.AtlasException;
import org.apache.atlas.AtlasClient;
import org.apache.atlas.AtlasException;
import org.apache.atlas.hive.bridge.HiveMetaStoreBridge;
import org.apache.atlas.hive.model.HiveDataTypes;
import org.apache.atlas.security.SecurityProperties;
......@@ -73,7 +73,9 @@ public class SSLHiveHookIT {
super(port, path);
}
public Server getServer () { return server; }
public Server getServer() {
return server;
}
@Override
public PropertiesConfiguration getConfiguration() {
......@@ -113,7 +115,8 @@ public class SSLHiveHookIT {
configuration.setProperty(TRUSTSTORE_FILE_KEY, "../../webapp/target/atlas.keystore");
configuration.setProperty(KEYSTORE_FILE_KEY, "../../webapp/target/atlas.keystore");
configuration.setProperty(CERT_STORES_CREDENTIAL_PROVIDER_PATH, providerUrl);
configuration.setProperty(SSLFactory.SSL_HOSTNAME_VERIFIER_KEY, SSLHostnameVerifier.DEFAULT_AND_LOCALHOST.toString());
configuration.setProperty(SSLFactory.SSL_HOSTNAME_VERIFIER_KEY,
SSLHostnameVerifier.DEFAULT_AND_LOCALHOST.toString());
configuration.save(new FileWriter(persistDir + File.separator + "client.properties"));
......@@ -153,27 +156,22 @@ public class SSLHiveHookIT {
file.delete();
conf.set(CredentialProviderFactory.CREDENTIAL_PROVIDER_PATH, providerUrl);
CredentialProvider provider =
CredentialProviderFactory.getProviders(conf).get(0);
CredentialProvider provider = CredentialProviderFactory.getProviders(conf).get(0);
// create new aliases
try {
char[] storepass = {'k', 'e', 'y', 'p', 'a', 's', 's'};
provider.createCredentialEntry(
KEYSTORE_PASSWORD_KEY, storepass);
provider.createCredentialEntry(KEYSTORE_PASSWORD_KEY, storepass);
char[] trustpass = {'k', 'e', 'y', 'p', 'a', 's', 's'};
provider.createCredentialEntry(
TRUSTSTORE_PASSWORD_KEY, trustpass);
provider.createCredentialEntry(TRUSTSTORE_PASSWORD_KEY, trustpass);
char[] trustpass2 = {'k', 'e', 'y', 'p', 'a', 's', 's'};
provider.createCredentialEntry(
"ssl.client.truststore.password", trustpass2);
provider.createCredentialEntry("ssl.client.truststore.password", trustpass2);
char[] certpass = {'k', 'e', 'y', 'p', 'a', 's', 's'};
provider.createCredentialEntry(
SERVER_CERT_PASSWORD_KEY, certpass);
provider.createCredentialEntry(SERVER_CERT_PASSWORD_KEY, certpass);
// write out so that it can be found in checks
provider.flush();
......@@ -217,7 +215,7 @@ public class SSLHiveHookIT {
assertInstanceIsRegistered(HiveDataTypes.HIVE_DB.getName(), "name", dbName);
}
private void assertInstanceIsRegistered(String typeName, String colName, String colValue) throws Exception{
private void assertInstanceIsRegistered(String typeName, String colName, String colValue) throws Exception {
JSONArray results = dgiCLient.rawSearch(typeName, colName, colValue);
Assert.assertEquals(results.length(), 1);
}
......
......@@ -41,19 +41,15 @@ import java.util.Map.Entry;
public abstract class ABridge implements IBridge {
protected static final Logger LOG = BridgeManager.LOG;
protected ArrayList<Class<? extends AEntityBean>> typeBeanClasses
= new ArrayList<Class<? extends AEntityBean>>();
protected ArrayList<Class<? extends AEntityBean>> typeBeanClasses = new ArrayList<Class<? extends AEntityBean>>();
MetadataRepository repo;
protected ABridge(MetadataRepository repo) {
this.repo = repo;
}
protected HierarchicalTypeDefinition<ClassType> createClassTypeDef(String name,
ImmutableList<String>
superTypes,
AttributeDefinition...
attrDefs) {
protected HierarchicalTypeDefinition<ClassType> createClassTypeDef(String name, ImmutableList<String> superTypes,
AttributeDefinition... attrDefs) {
return new HierarchicalTypeDefinition(ClassType.class, name, superTypes, attrDefs);
}
......@@ -67,11 +63,10 @@ public abstract class ABridge implements IBridge {
// turn into a HiveLineageBean
try {
Class<AEntityBean> c = getTypeBeanInListByName(ref.getTypeName());
return this.convertFromITypedReferenceable(ref,
getTypeBeanInListByName(ref.getTypeName()));
return this.convertFromITypedReferenceable(ref, getTypeBeanInListByName(ref.getTypeName()));
} catch (BridgeException | InstantiationException | IllegalAccessException |
IllegalArgumentException | InvocationTargetException | NoSuchMethodException |
SecurityException e) {
IllegalArgumentException | InvocationTargetException | NoSuchMethodException |
SecurityException e) {
// TODO Auto-generated catch block
e.printStackTrace();
}
......@@ -80,8 +75,7 @@ public abstract class ABridge implements IBridge {
public String create(AEntityBean bean) throws MetadataException {
ClassType type = TypeSystem.getInstance()
.getDataType(ClassType.class, bean.getClass().getSimpleName());
ClassType type = TypeSystem.getInstance().getDataType(ClassType.class, bean.getClass().getSimpleName());
ITypedReferenceableInstance refBean = null;
try {
refBean = type.convert(this.convertToReferencable(bean), Multiplicity.REQUIRED);
......@@ -140,10 +134,10 @@ public abstract class ABridge implements IBridge {
return selfAware;
}
protected final <T extends AEntityBean> T convertFromITypedReferenceable(
ITypedReferenceableInstance instance, Class<? extends AEntityBean> c)
throws InstantiationException, IllegalAccessException, IllegalArgumentException,
InvocationTargetException, NoSuchMethodException, SecurityException, BridgeException {
protected final <T extends AEntityBean> T convertFromITypedReferenceable(ITypedReferenceableInstance instance,
Class<? extends AEntityBean> c)
throws InstantiationException, IllegalAccessException, IllegalArgumentException, InvocationTargetException,
NoSuchMethodException, SecurityException, BridgeException {
if (!instance.getTypeName().equals(c.getSimpleName())) {
throw new BridgeException("ReferenceableInstance type not the same as bean");
}
......@@ -151,10 +145,8 @@ public abstract class ABridge implements IBridge {
for (Entry<String, AttributeInfo> e : instance.fieldMapping().fields.entrySet()) {
try {
String convertedName = e.getKey().substring(0, 1).toUpperCase() +
e.getKey().substring(1);
this.getClass().getMethod("set" + convertedName,
Class.forName(e.getValue().dataType().getName()))
String convertedName = e.getKey().substring(0, 1).toUpperCase() + e.getKey().substring(1);
this.getClass().getMethod("set" + convertedName, Class.forName(e.getValue().dataType().getName()))
.invoke(this, instance.get(e.getKey()));
} catch (MetadataException | ClassNotFoundException e1) {
// TODO Auto-generated catch block
......
......@@ -50,13 +50,12 @@ public class BridgeManager {
@Inject
BridgeManager(MetadataRepository rs)
throws ConfigurationException, ClassNotFoundException, InstantiationException,
IllegalAccessException, IllegalArgumentException, InvocationTargetException,
NoSuchMethodException, SecurityException {
throws ConfigurationException, ClassNotFoundException, InstantiationException, IllegalAccessException,
IllegalArgumentException, InvocationTargetException, NoSuchMethodException, SecurityException {
this.ts = TypeSystem.getInstance();
this.rs = rs;
if (System.getProperty("bridgeManager.propsFile") != null &&
System.getProperty("bridgeManager.propsFile").length() != 0) {
if (System.getProperty("bridgeManager.propsFile") != null
&& System.getProperty("bridgeManager.propsFile").length() != 0) {
setActiveBridges(System.getProperty("bridgeManager.propsFile"));
} else {
setActiveBridges(bridgeFileDefault);
......@@ -73,8 +72,7 @@ public class BridgeManager {
}
public final static HierarchicalTypeDefinition<ClassType>
convertEntityBeanToClassTypeDefinition(
public final static HierarchicalTypeDefinition<ClassType> convertEntityBeanToClassTypeDefinition(
Class<? extends AEntityBean> class1) {
ArrayList<AttributeDefinition> attDefAL = new ArrayList<AttributeDefinition>();
for (Field f : class1.getFields()) {
......@@ -87,18 +85,16 @@ public class BridgeManager {
}
}
HierarchicalTypeDefinition<ClassType> typeDef = new HierarchicalTypeDefinition<>(
ClassType.class, class1.getSimpleName(),
null, (AttributeDefinition[]) attDefAL.toArray(new AttributeDefinition[0]));
HierarchicalTypeDefinition<ClassType> typeDef =
new HierarchicalTypeDefinition<>(ClassType.class, class1.getSimpleName(), null,
(AttributeDefinition[]) attDefAL.toArray(new AttributeDefinition[0]));
return typeDef;
}
public final static AttributeDefinition convertFieldtoAttributeDefiniton(Field f)
throws MetadataException {
public final static AttributeDefinition convertFieldtoAttributeDefiniton(Field f) throws MetadataException {
return new AttributeDefinition(f.getName(), f.getType().getSimpleName(),
Multiplicity.REQUIRED, false, null);
return new AttributeDefinition(f.getName(), f.getType().getSimpleName(), Multiplicity.REQUIRED, false, null);
}
public ArrayList<ABridge> getActiveBridges() {
......@@ -116,8 +112,7 @@ public class BridgeManager {
try {
BridgeManager.LOG.info("Loading : Active Bridge List");
config.load(bridgePropFileName);
String[] activeBridgeList = ((String) config.getProperty("BridgeManager.activeBridges"))
.split(",");
String[] activeBridgeList = ((String) config.getProperty("BridgeManager.activeBridges")).split(",");
BridgeManager.LOG.info("Loaded : Active Bridge List");
BridgeManager.LOG.info("First Loaded :" + activeBridgeList[0]);
......@@ -125,8 +120,7 @@ public class BridgeManager {
Class<?> bridgeCls = (Class<?>) Class.forName(s);
if (ABridge.class.isAssignableFrom(bridgeCls)) {
System.out.println(s + " is able to be instaciated");
aBList.add((ABridge) bridgeCls.getConstructor(MetadataRepository.class)
.newInstance(rs));
aBList.add((ABridge) bridgeCls.getConstructor(MetadataRepository.class).newInstance(rs));
}
}
......
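Aside: the loader being re-wrapped above follows the standard reflective-plugin recipe — Class.forName each configured name, keep only classes assignable to the bridge interface, then instantiate via a constructor lookup. A generic sketch (interface and class names are illustrative; the real code passes a MetadataRepository to the constructor, while this sketch uses a no-arg one):

    import java.util.ArrayList;
    import java.util.List;

    public class PluginLoaderSketch {
        public interface Bridge { }                        // stand-in for IBridge

        public static class NoopBridge implements Bridge {
            public NoopBridge() { }
        }

        static List<Bridge> load(String csv) throws Exception {
            List<Bridge> bridges = new ArrayList<>();
            for (String name : csv.split(",")) {
                Class<?> cls = Class.forName(name.trim());
                if (Bridge.class.isAssignableFrom(cls)) { // skip anything that isn't a bridge
                    bridges.add((Bridge) cls.getConstructor().newInstance());
                }
            }
            return bridges;
        }

        public static void main(String[] args) throws Exception {
            System.out.println(load(NoopBridge.class.getName()).size());   // prints: 1
        }
    }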
......@@ -40,44 +40,39 @@ public class BridgeTypeBootstrapper {
private boolean isSetup = false;
@Inject
BridgeTypeBootstrapper(Map<Class, IBridge> bridges)
throws MetadataException {
BridgeTypeBootstrapper(Map<Class, IBridge> bridges) throws MetadataException {
this.bridges = bridges;
}
public final static HierarchicalTypeDefinition<ClassType>
convertEntityBeanToClassTypeDefinition(
public final static HierarchicalTypeDefinition<ClassType> convertEntityBeanToClassTypeDefinition(
Class<? extends AEntityBean> class1) {
ArrayList<AttributeDefinition> attDefAL = new ArrayList<AttributeDefinition>();
for (Field f : class1.getFields()) {
try {
attDefAL.add(BridgeTypeBootstrapper.convertFieldtoAttributeDefiniton(f));
} catch (MetadataException e) {
BridgeManager.LOG.error("Class " + class1.getName()
+ " cannot be converted to TypeDefinition");
BridgeManager.LOG.error("Class " + class1.getName() + " cannot be converted to TypeDefinition");
e.printStackTrace();
}
}
HierarchicalTypeDefinition<ClassType> typeDef = new HierarchicalTypeDefinition<>(
ClassType.class, class1.getSimpleName(), null,
(AttributeDefinition[]) attDefAL
.toArray(new AttributeDefinition[0]));
HierarchicalTypeDefinition<ClassType> typeDef =
new HierarchicalTypeDefinition<>(ClassType.class, class1.getSimpleName(), null,
(AttributeDefinition[]) attDefAL.toArray(new AttributeDefinition[0]));
return typeDef;
}
public final static AttributeDefinition convertFieldtoAttributeDefiniton(
Field f) throws MetadataException {
public final static AttributeDefinition convertFieldtoAttributeDefiniton(Field f) throws MetadataException {
return new AttributeDefinition(f.getName(),
f.getType().getSimpleName().toLowerCase(), Multiplicity.REQUIRED, false, null);
return new AttributeDefinition(f.getName(), f.getType().getSimpleName().toLowerCase(), Multiplicity.REQUIRED,
false, null);
}
public synchronized boolean bootstrap() throws MetadataException {
if (isSetup)
if (isSetup) {
return false;
else {
} else {
LOG.info("Bootstrapping types");
_bootstrap();
isSetup = true;
......@@ -94,12 +89,10 @@ public class BridgeTypeBootstrapper {
}
}
private final boolean loadTypes(IBridge bridge, TypeSystem ts)
throws MetadataException {
private final boolean loadTypes(IBridge bridge, TypeSystem ts) throws MetadataException {
for (Class<? extends AEntityBean> clazz : bridge.getTypeBeanClasses()) {
LOG.info("Registering %s", clazz.getSimpleName());
ts.defineClassType(BridgeTypeBootstrapper
.convertEntityBeanToClassTypeDefinition(clazz));
ts.defineClassType(BridgeTypeBootstrapper.convertEntityBeanToClassTypeDefinition(clazz));
}
return false;
}
......
......@@ -79,8 +79,7 @@ public class HiveMetaImporter {
public static boolean databasesImport() throws MetaException, RepositoryException {
ClassType classType = null;
try {
classType = TypeSystem.getInstance()
.getDataType(ClassType.class, HiveStructureBridge.DB_CLASS_TYPE);
classType = TypeSystem.getInstance().getDataType(ClassType.class, HiveStructureBridge.DB_CLASS_TYPE);
} catch (MetadataException e1) {
e1.printStackTrace();
}
......@@ -118,8 +117,7 @@ public class HiveMetaImporter {
public static boolean tablesImport(String dbName) throws MetaException, RepositoryException {
ClassType classType = null;
try {
classType = TypeSystem.getInstance()
.getDataType(ClassType.class, HiveStructureBridge.TB_CLASS_TYPE);
classType = TypeSystem.getInstance().getDataType(ClassType.class, HiveStructureBridge.TB_CLASS_TYPE);
} catch (MetadataException e1) {
e1.printStackTrace();
}
......@@ -129,8 +127,7 @@ public class HiveMetaImporter {
return true;
}
public static boolean tableImport(String dbName, String tbName)
throws MetaException, RepositoryException {
public static boolean tableImport(String dbName, String tbName) throws MetaException, RepositoryException {
try {
Table tb = msc.getTable(dbName, tbName);
Referenceable tbRef = new Referenceable(HiveStructureBridge.TB_CLASS_TYPE);
......@@ -157,12 +154,10 @@ public class HiveMetaImporter {
return true;
}
public static boolean fieldsImport(String dbName, String tbName)
throws MetaException, RepositoryException {
public static boolean fieldsImport(String dbName, String tbName) throws MetaException, RepositoryException {
ClassType classType = null;
try {
classType = TypeSystem.getInstance()
.getDataType(ClassType.class, HiveStructureBridge.FD_CLASS_TYPE);
classType = TypeSystem.getInstance().getDataType(ClassType.class, HiveStructureBridge.FD_CLASS_TYPE);
} catch (MetadataException e1) {
e1.printStackTrace();
}
......@@ -190,8 +185,7 @@ public class HiveMetaImporter {
return true;
}
public static boolean fieldImport(String dbName, String tbName, String fdName)
throws MetaException {
public static boolean fieldImport(String dbName, String tbName, String fdName) throws MetaException {
try {
for (FieldSchema fs : msc.getFields(dbName, tbName)) {
if (fs.getName().equals(fs)) {
......
......@@ -45,60 +45,50 @@ public class HiveStructureBridge extends ABridge {
}
public boolean defineBridgeTypes(TypeSystem ts) {
ArrayList<HierarchicalTypeDefinition<?>> al
= new ArrayList<HierarchicalTypeDefinition<?>>();
ArrayList<HierarchicalTypeDefinition<?>> al = new ArrayList<HierarchicalTypeDefinition<?>>();
// TODO
//convert to helper methods
// Add to arrayList
try {
HierarchicalTypeDefinition<ClassType> databaseClassTypeDef
= new HierarchicalTypeDefinition<ClassType>("ClassType", DB_CLASS_TYPE, null,
new AttributeDefinition[]{
new AttributeDefinition("DESC", "STRING_TYPE", Multiplicity.OPTIONAL,
false, null),
new AttributeDefinition("DB_LOCATION_URI", "STRING_TYPE",
Multiplicity.REQUIRED, false, null),
new AttributeDefinition("NAME", "STRING_TYPE", Multiplicity.REQUIRED,
false, null),
new AttributeDefinition("OWNER_TYPE", "STRING_TYPE",
Multiplicity.OPTIONAL, false, null),
new AttributeDefinition("OWNER_NAME", "STRING_TYPE",
Multiplicity.OPTIONAL, false, null)
}
);
HierarchicalTypeDefinition<ClassType> tableClassTypeDef
= new HierarchicalTypeDefinition<ClassType>("ClassType", TB_CLASS_TYPE, null,
new AttributeDefinition[]{
new AttributeDefinition("CREATE_TIME", "LONG_TYPE",
Multiplicity.REQUIRED, false, null),
new AttributeDefinition("LAST_ACCESS_TIME", "LONG_TYPE",
Multiplicity.REQUIRED, false, null),
new AttributeDefinition("OWNER", "STRING_TYPE", Multiplicity.REQUIRED,
false, null),
new AttributeDefinition("TBL_NAME", "STRING_TYPE",
Multiplicity.REQUIRED, false, null),
new AttributeDefinition("TBL_TYPE", "STRING_TYPE",
Multiplicity.REQUIRED, false, null),
new AttributeDefinition("VIEW_EXPANDED_TEXT", "STRING_TYPE",
Multiplicity.OPTIONAL, false, null),
new AttributeDefinition("VIEW_ORIGINAL_TEXT", "STRING_TYPE",
Multiplicity.OPTIONAL, false, null)
}
);
HierarchicalTypeDefinition<ClassType> columnClassTypeDef
= new HierarchicalTypeDefinition<ClassType>("ClassType", FD_CLASS_TYPE, null,
new AttributeDefinition[]{
new AttributeDefinition("COMMENT", "STRING_TYPE", Multiplicity.OPTIONAL,
false, null),
new AttributeDefinition("COLUMN_NAME", "STRING_TYPE",
Multiplicity.REQUIRED, false, null),
new AttributeDefinition("TYPE_NAME", "STRING_TYPE",
Multiplicity.REQUIRED, false, null)
}
);
HierarchicalTypeDefinition<ClassType> databaseClassTypeDef =
new HierarchicalTypeDefinition<ClassType>("ClassType", DB_CLASS_TYPE, null,
new AttributeDefinition[]{
new AttributeDefinition("DESC", "STRING_TYPE", Multiplicity.OPTIONAL, false, null),
new AttributeDefinition("DB_LOCATION_URI", "STRING_TYPE", Multiplicity.REQUIRED,
false, null),
new AttributeDefinition("NAME", "STRING_TYPE", Multiplicity.REQUIRED, false, null),
new AttributeDefinition("OWNER_TYPE", "STRING_TYPE", Multiplicity.OPTIONAL, false,
null),
new AttributeDefinition("OWNER_NAME", "STRING_TYPE", Multiplicity.OPTIONAL, false,
null)});
HierarchicalTypeDefinition<ClassType> tableClassTypeDef =
new HierarchicalTypeDefinition<ClassType>("ClassType", TB_CLASS_TYPE, null,
new AttributeDefinition[]{
new AttributeDefinition("CREATE_TIME", "LONG_TYPE", Multiplicity.REQUIRED, false,
null),
new AttributeDefinition("LAST_ACCESS_TIME", "LONG_TYPE", Multiplicity.REQUIRED,
false, null),
new AttributeDefinition("OWNER", "STRING_TYPE", Multiplicity.REQUIRED, false, null),
new AttributeDefinition("TBL_NAME", "STRING_TYPE", Multiplicity.REQUIRED, false,
null),
new AttributeDefinition("TBL_TYPE", "STRING_TYPE", Multiplicity.REQUIRED, false,
null),
new AttributeDefinition("VIEW_EXPANDED_TEXT", "STRING_TYPE", Multiplicity.OPTIONAL,
false, null),
new AttributeDefinition("VIEW_ORIGINAL_TEXT", "STRING_TYPE", Multiplicity.OPTIONAL,
false, null)});
HierarchicalTypeDefinition<ClassType> columnClassTypeDef =
new HierarchicalTypeDefinition<ClassType>("ClassType", FD_CLASS_TYPE, null,
new AttributeDefinition[]{
new AttributeDefinition("COMMENT", "STRING_TYPE", Multiplicity.OPTIONAL, false,
null),
new AttributeDefinition("COLUMN_NAME", "STRING_TYPE", Multiplicity.REQUIRED, false,
null),
new AttributeDefinition("TYPE_NAME", "STRING_TYPE", Multiplicity.REQUIRED, false,
null)});
} catch (ClassNotFoundException e) {
e.printStackTrace();
......@@ -108,8 +98,7 @@ public class HiveStructureBridge extends ABridge {
try {
ts.defineClassType(htd);
} catch (MetadataException e) {
System.out.println(
htd.hierarchicalMetaTypeName + "could not be added to the type system");
System.out.println(htd.hierarchicalMetaTypeName + "could not be added to the type system");
e.printStackTrace();
}
}
......
......@@ -33,8 +33,7 @@ import java.util.ArrayList;
import java.util.List;
public class BridgeModule extends AbstractModule {
public static final Logger LOG = LoggerFactory
.getLogger(BridgeModule.class);
public static final Logger LOG = LoggerFactory.getLogger(BridgeModule.class);
@Override
protected void configure() {
......@@ -44,11 +43,9 @@ public class BridgeModule extends AbstractModule {
bind(BridgeTypeBootstrapper.class).in(Scopes.SINGLETON);
// Load the configured bridge classes and add them to the map binder
MapBinder<Class, IBridge> mapbinder = MapBinder.newMapBinder(binder(),
Class.class, IBridge.class);
MapBinder<Class, IBridge> mapbinder = MapBinder.newMapBinder(binder(), Class.class, IBridge.class);
String propsURI = System.getProperty("bridgeManager.propsFile",
"bridge-manager.properties");
String propsURI = System.getProperty("bridgeManager.propsFile", "bridge-manager.properties");
List<Class<? extends IBridge>> bridges = getBridgeClasses(propsURI);
for (Class<? extends IBridge> bridgeClass : bridges) {
......@@ -59,8 +56,7 @@ public class BridgeModule extends AbstractModule {
/*
* Get the bridge classes from the configuration file
*/
private List<Class<? extends IBridge>> getBridgeClasses(
String bridgePropFileName) {
private List<Class<? extends IBridge>> getBridgeClasses(String bridgePropFileName) {
List<Class<? extends IBridge>> aBList = new ArrayList<Class<? extends IBridge>>();
PropertiesConfiguration config = new PropertiesConfiguration();
......@@ -68,13 +64,11 @@ public class BridgeModule extends AbstractModule {
try {
LOG.info("Loading : Active Bridge List");
config.load(bridgePropFileName);
String[] activeBridgeList = ((String) config
.getProperty("BridgeManager.activeBridges")).split(",");
String[] activeBridgeList = ((String) config.getProperty("BridgeManager.activeBridges")).split(",");
LOG.info("Loaded : Active Bridge List");
for (String s : activeBridgeList) {
Class<? extends IBridge> bridgeCls = (Class<? extends IBridge>) Class
.forName(s);
Class<? extends IBridge> bridgeCls = (Class<? extends IBridge>) Class.forName(s);
aBList.add(bridgeCls);
}
......
......@@ -38,8 +38,7 @@ public class BridgeManagerTest {
BridgeManager bm = new BridgeManager(repo);
System.out.println(bm.getActiveBridges().size());
Assert.assertEquals(bm.activeBridges.get(0).getClass().getSimpleName(),
"HiveLineageBridge");
Assert.assertEquals(bm.activeBridges.get(0).getClass().getSimpleName(), "HiveLineageBridge");
}
@Test
......
......@@ -50,8 +50,7 @@ public class TestHiveLineageBridge {
String oneId;
private HiveLineage loadHiveLineageBean(String path) throws IOException {
return new Gson().fromJson(new InputStreamReader(this.getClass().getResourceAsStream(path)),
HiveLineage.class);
return new Gson().fromJson(new InputStreamReader(this.getClass().getResourceAsStream(path)), HiveLineage.class);
}
@BeforeClass
......
......@@ -86,7 +86,8 @@ public class AtlasClient {
try {
clientConfig = getClientProperties();
if (clientConfig.getBoolean(TLS_ENABLED, false)) {
// create an SSL properties configuration if one doesn't exist. SSLFactory expects a file, so forced to create a
// create an SSL properties configuration if one doesn't exist. SSLFactory expects a file, so forced
// to create a
// configuration object, persist it, then subsequently pass in an empty configuration to SSLFactory
SecureClientUtils.persistSSLClientConfiguration(clientConfig);
}
......@@ -246,12 +247,12 @@ public class AtlasClient {
* @return result json object
* @throws AtlasServiceException
*/
public JSONArray rawSearch(String typeName, String attributeName, Object attributeValue) throws
AtlasServiceException {
// String gremlinQuery = String.format(
// "g.V.has(\"typeName\",\"%s\").and(_().has(\"%s.%s\", T.eq, \"%s\")).toList()",
// typeName, typeName, attributeName, attributeValue);
// return searchByGremlin(gremlinQuery);
public JSONArray rawSearch(String typeName, String attributeName, Object attributeValue)
throws AtlasServiceException {
// String gremlinQuery = String.format(
// "g.V.has(\"typeName\",\"%s\").and(_().has(\"%s.%s\", T.eq, \"%s\")).toList()",
// typeName, typeName, attributeName, attributeValue);
// return searchByGremlin(gremlinQuery);
String dslQuery = String.format("%s where %s = \"%s\"", typeName, attributeName, attributeValue);
return searchByDSL(dslQuery);
}
......@@ -340,14 +341,12 @@ public class AtlasClient {
}
private JSONObject callAPIWithResource(API api, WebResource resource, Object requestObject)
throws AtlasServiceException {
ClientResponse clientResponse = resource
.accept(JSON_MEDIA_TYPE)
.type(JSON_MEDIA_TYPE)
throws AtlasServiceException {
ClientResponse clientResponse = resource.accept(JSON_MEDIA_TYPE).type(JSON_MEDIA_TYPE)
.method(api.getMethod(), ClientResponse.class, requestObject);
Response.Status expectedStatus = HttpMethod.POST.equals(api.getMethod())
? Response.Status.CREATED : Response.Status.OK;
Response.Status expectedStatus =
HttpMethod.POST.equals(api.getMethod()) ? Response.Status.CREATED : Response.Status.OK;
if (clientResponse.getStatus() == expectedStatus.getStatusCode()) {
String responseAsString = clientResponse.getEntity(String.class);
try {
......@@ -360,8 +359,7 @@ public class AtlasClient {
throw new AtlasServiceException(api, clientResponse);
}
private JSONObject callAPI(API api, Object requestObject,
String... pathParams) throws AtlasServiceException {
private JSONObject callAPI(API api, Object requestObject, String... pathParams) throws AtlasServiceException {
WebResource resource = getResource(api, pathParams);
return callAPIWithResource(api, resource, requestObject);
}
......
......@@ -61,10 +61,8 @@ public class SecureClientUtils {
public static URLConnectionClientHandler getClientConnectionHandler(DefaultClientConfig config,
PropertiesConfiguration clientConfig) {
config.getProperties().put(
URLConnectionClientHandler.PROPERTY_HTTP_URL_CONNECTION_SET_METHOD_WORKAROUND,
true);
PropertiesConfiguration clientConfig) {
config.getProperties().put(URLConnectionClientHandler.PROPERTY_HTTP_URL_CONNECTION_SET_METHOD_WORKAROUND, true);
Configuration conf = new Configuration(false);
conf.addResource(conf.get(SSLFactory.SSL_CLIENT_CONF_KEY, "ssl-client.xml"));
String authType = "simple";
......@@ -95,28 +93,25 @@ public class SecureClientUtils {
return new URLConnectionClientHandler(httpURLConnectionFactory);
}
private final static ConnectionConfigurator DEFAULT_TIMEOUT_CONN_CONFIGURATOR =
new ConnectionConfigurator() {
@Override
public HttpURLConnection configure(HttpURLConnection conn)
throws IOException {
setTimeouts(conn, DEFAULT_SOCKET_TIMEOUT);
return conn;
}
};
private final static ConnectionConfigurator DEFAULT_TIMEOUT_CONN_CONFIGURATOR = new ConnectionConfigurator() {
@Override
public HttpURLConnection configure(HttpURLConnection conn) throws IOException {
setTimeouts(conn, DEFAULT_SOCKET_TIMEOUT);
return conn;
}
};
private static ConnectionConfigurator newConnConfigurator(Configuration conf) {
try {
return newSslConnConfigurator(DEFAULT_SOCKET_TIMEOUT, conf);
} catch (Exception e) {
LOG.debug("Cannot load customized ssl related configuration. " +
"Fallback to system-generic settings.", e);
LOG.debug("Cannot load customized ssl related configuration. " + "Fallback to system-generic settings.", e);
return DEFAULT_TIMEOUT_CONN_CONFIGURATOR;
}
}
private static ConnectionConfigurator newSslConnConfigurator(final int timeout,
Configuration conf) throws IOException, GeneralSecurityException {
private static ConnectionConfigurator newSslConnConfigurator(final int timeout, Configuration conf)
throws IOException, GeneralSecurityException {
final SSLFactory factory;
final SSLSocketFactory sf;
final HostnameVerifier hv;
......@@ -128,8 +123,7 @@ public class SecureClientUtils {
return new ConnectionConfigurator() {
@Override
public HttpURLConnection configure(HttpURLConnection conn)
throws IOException {
public HttpURLConnection configure(HttpURLConnection conn) throws IOException {
if (conn instanceof HttpsURLConnection) {
HttpsURLConnection c = (HttpsURLConnection) conn;
c.setSSLSocketFactory(sf);
......@@ -168,7 +162,8 @@ public class SecureClientUtils {
return new File(sslDir, SecurityProperties.SSL_CLIENT_PROPERTIES);
}
public static void persistSSLClientConfiguration(PropertiesConfiguration clientConfig) throws AtlasException, IOException {
public static void persistSSLClientConfiguration(PropertiesConfiguration clientConfig)
throws AtlasException, IOException {
//trust settings
Configuration configuration = new Configuration(false);
File sslClientFile = getSSLClientFile();
......
......@@ -36,18 +36,10 @@ import java.util.Properties;
*
*/
public class BaseSecurityTest {
private static final String JAAS_ENTRY =
"%s { \n"
+ " %s required\n"
// kerberos module
+ " keyTab=\"%s\"\n"
+ " debug=true\n"
+ " principal=\"%s\"\n"
+ " useKeyTab=true\n"
+ " useTicketCache=false\n"
+ " doNotPrompt=true\n"
+ " storeKey=true;\n"
+ "}; \n";
private static final String JAAS_ENTRY = "%s { \n" + " %s required\n"
// kerberos module
+ " keyTab=\"%s\"\n" + " debug=true\n" + " principal=\"%s\"\n" + " useKeyTab=true\n"
+ " useTicketCache=false\n" + " doNotPrompt=true\n" + " storeKey=true;\n" + "}; \n";
protected MiniKdc kdc;
protected String getWarPath() {
......@@ -56,8 +48,8 @@ public class BaseSecurityTest {
}
protected void generateTestProperties(Properties props) throws ConfigurationException, IOException {
PropertiesConfiguration config = new PropertiesConfiguration(System.getProperty("user.dir") +
"/../src/conf/application.properties");
PropertiesConfiguration config =
new PropertiesConfiguration(System.getProperty("user.dir") + "/../src/conf/application.properties");
for (String propName : props.stringPropertyNames()) {
config.setProperty(propName, props.getProperty(propName));
}
......@@ -88,20 +80,11 @@ public class BaseSecurityTest {
return kdcWorkDir;
}
public String createJAASEntry(
String context,
String principal,
File keytab) {
public String createJAASEntry(String context, String principal, File keytab) {
String keytabpath = keytab.getAbsolutePath();
// fix up for windows; no-op on unix
keytabpath = keytabpath.replace('\\', '/');
return String.format(
Locale.ENGLISH,
JAAS_ENTRY,
context,
getKerberosAuthModuleForJVM(),
keytabpath,
principal);
keytabpath = keytabpath.replace('\\', '/');
return String.format(Locale.ENGLISH, JAAS_ENTRY, context, getKerberosAuthModuleForJVM(), keytabpath, principal);
}
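Aside: expanding the JAAS_ENTRY template makes the collapsed String.format in createJAASEntry concrete. A sketch with invented principal and keytab values (the login-module class shown is the Oracle/OpenJDK Kerberos module, which getKerberosAuthModuleForJVM would presumably select on those JVMs):

    import java.util.Locale;

    public class JaasEntrySketch {
        private static final String JAAS_ENTRY = "%s { \n" + " %s required\n"
                + " keyTab=\"%s\"\n" + " debug=true\n" + " principal=\"%s\"\n" + " useKeyTab=true\n"
                + " useTicketCache=false\n" + " doNotPrompt=true\n" + " storeKey=true;\n" + "}; \n";

        public static void main(String[] args) {
            // prints a complete JAAS login entry for the "Client" context
            System.out.println(String.format(Locale.ENGLISH, JAAS_ENTRY,
                    "Client", "com.sun.security.auth.module.Krb5LoginModule",
                    "/tmp/test.keytab", "dgi/localhost"));
        }
    }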
protected String getKerberosAuthModuleForJVM() {
......@@ -119,10 +102,7 @@ public class BaseSecurityTest {
protected File createKeytab(MiniKdc kdc, File kdcWorkDir, String principal, String filename) throws Exception {
File keytab = new File(kdcWorkDir, filename);
kdc.createPrincipal(keytab,
principal,
principal + "/localhost",
principal + "/127.0.0.1");
kdc.createPrincipal(keytab, principal, principal + "/localhost", principal + "/127.0.0.1");
return keytab;
}
}
......@@ -97,7 +97,7 @@
<MaxPermGen>512m</MaxPermGen>
<SnapshotsId>apache.snapshots.repo</SnapshotsId>
<SnapshotsName>Apache Snapshot Repository</SnapshotsName>
<SnapshotsUrl> https://repository.apache.org/content/groups/snapshots</SnapshotsUrl>
<SnapshotsUrl>https://repository.apache.org/content/groups/snapshots</SnapshotsUrl>
<StagingId>apache-staging</StagingId>
<StagingName>Apache Release Distribution Repository</StagingName>
<StagingUrl>https://repository.apache.org/content/groups/staging</StagingUrl>
......@@ -110,20 +110,28 @@
<profile>
<id>Windows</id>
<activation>
<os><family>windows</family></os>
<os>
<family>windows</family>
</os>
</activation>
<properties>
<python.path.l>${project.basedir}\src\bin;${project.basedir}\src\test\python\scripts;${project.basedir}\src\test\mock</python.path.l>
<python.path.l>
${project.basedir}\src\bin;${project.basedir}\src\test\python\scripts;${project.basedir}\src\test\mock
</python.path.l>
</properties>
</profile>
<profile>
<id>Linux</id>
<activation>
<os><family>!windows</family></os>
<os>
<family>!windows</family>
</os>
</activation>
<properties>
<python.path.l>${project.basedir}/src/bin:${project.basedir}/src/test/mock:${project.basedir}/src/test/python/scripts</python.path.l>
<python.path.l>
${project.basedir}/src/bin:${project.basedir}/src/test/mock:${project.basedir}/src/test/python/scripts
</python.path.l>
</properties>
</profile>
</profiles>
......@@ -192,18 +200,18 @@
<url>http://repo.typesafe.com/typesafe/releases/</url>
</repository>
</repositories>
<distributionManagement>
<repository>
<id>${StagingId}</id>
<name>${StagingName}</name>
<url>${StagingUrl}</url>
</repository>
<snapshotRepository>
<id>${SnapshotsId}</id>
<name>${SnapshotsName}</name>
<url>${SnapshotsUrl}</url>
</snapshotRepository>
</distributionManagement>
<distributionManagement>
<repository>
<id>${StagingId}</id>
<name>${StagingName}</name>
<url>${StagingUrl}</url>
</repository>
<snapshotRepository>
<id>${SnapshotsId}</id>
<name>${SnapshotsName}</name>
<url>${SnapshotsUrl}</url>
</snapshotRepository>
</distributionManagement>
<dependencyManagement>
<dependencies>
<dependency>
......@@ -913,7 +921,8 @@
<redirectTestOutputToFile>true</redirectTestOutputToFile>
<argLine>-Djava.awt.headless=true -Dproject.version=${project.version}
-Dhadoop.tmp.dir=${project.build.directory}/tmp-hadoop-${user.name}
-Xmx1024m -XX:MaxPermSize=512m</argLine>
-Xmx1024m -XX:MaxPermSize=512m
</argLine>
</configuration>
<dependencies>
<dependency>
......
......@@ -22,5 +22,7 @@ import java.lang.annotation.Retention;
import java.lang.annotation.RetentionPolicy;
import java.lang.annotation.Target;
@Retention(RetentionPolicy.RUNTIME) @Target(ElementType.METHOD)
public @interface GraphTransaction {}
@Retention(RetentionPolicy.RUNTIME)
@Target(ElementType.METHOD)
public @interface GraphTransaction {
}
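// A sketch (not the project's actual binding) of how a marker annotation like
// @GraphTransaction is typically wired to the interceptor below via Guice AOP;
// the no-arg interceptor construction here is an assumption:
//
//   bindInterceptor(Matchers.any(),                          // any bound class
//           Matchers.annotatedWith(GraphTransaction.class),  // methods carrying the marker
//           new GraphTransactionInterceptor());              // commit/rollback wraps the call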
......@@ -42,7 +42,7 @@ public class GraphTransactionInterceptor implements MethodInterceptor {
titanGraph.commit();
LOG.debug("graph commit");
return response;
} catch (Throwable t){
} catch (Throwable t) {
titanGraph.rollback();
LOG.error("graph rollback due to exception ", t);
throw t;
......
......@@ -18,8 +18,6 @@
package org.apache.atlas;
import com.google.inject.Provider;
import com.google.inject.TypeLiteral;
import com.google.inject.matcher.Matchers;
import com.google.inject.multibindings.Multibinder;
import com.google.inject.throwingproviders.ThrowingProviderBinder;
......@@ -28,7 +26,6 @@ import org.aopalliance.intercept.MethodInterceptor;
import org.apache.atlas.discovery.DiscoveryService;
import org.apache.atlas.discovery.HiveLineageService;
import org.apache.atlas.discovery.LineageService;
import org.apache.atlas.discovery.SearchIndexer;
import org.apache.atlas.discovery.graph.GraphBackedDiscoveryService;
import org.apache.atlas.listener.TypesChangeListener;
import org.apache.atlas.repository.MetadataRepository;
......@@ -49,9 +46,7 @@ public class RepositoryMetadataModule extends com.google.inject.AbstractModule {
@Override
protected void configure() {
// special wiring for Titan Graph
ThrowingProviderBinder.create(binder())
.bind(GraphProvider.class, TitanGraph.class)
.to(TitanGraphProvider.class)
ThrowingProviderBinder.create(binder()).bind(GraphProvider.class, TitanGraph.class).to(TitanGraphProvider.class)
.asEagerSingleton();
// allow for dynamic binding of the metadata repo & graph service
......@@ -62,7 +57,8 @@ public class RepositoryMetadataModule extends com.google.inject.AbstractModule {
// bind the ITypeStore interface to an implementation
bind(ITypeStore.class).to(GraphBackedTypeStore.class).asEagerSingleton();
Multibinder<TypesChangeListener> typesChangeListenerBinder = Multibinder.newSetBinder(binder(), TypesChangeListener.class);
Multibinder<TypesChangeListener> typesChangeListenerBinder =
Multibinder.newSetBinder(binder(), TypesChangeListener.class);
typesChangeListenerBinder.addBinding().to(GraphBackedSearchIndexer.class);
// bind the MetadataService interface to an implementation
......
......@@ -19,8 +19,8 @@
package org.apache.atlas.discovery;
import com.thinkaurelius.titan.core.TitanGraph;
import org.apache.atlas.GraphTransaction;
import org.apache.atlas.AtlasException;
import org.apache.atlas.GraphTransaction;
import org.apache.atlas.ParamChecker;
import org.apache.atlas.PropertiesUtil;
import org.apache.atlas.discovery.graph.DefaultGraphPersistenceStrategy;
......@@ -65,20 +65,14 @@ public class HiveLineageService implements LineageService {
// todo - externalize this using type system - dog food
try {
PropertiesConfiguration conf = PropertiesUtil.getApplicationProperties();
HIVE_TABLE_TYPE_NAME =
conf.getString("atlas.lineage.hive.table.type.name", "DataSet");
HIVE_PROCESS_TYPE_NAME =
conf.getString("atlas.lineage.hive.process.type.name", "Process");
HIVE_PROCESS_INPUT_ATTRIBUTE_NAME =
conf.getString("atlas.lineage.hive.process.inputs.name", "inputs");
HIVE_PROCESS_OUTPUT_ATTRIBUTE_NAME =
conf.getString("atlas.lineage.hive.process.outputs.name", "outputs");
HIVE_TABLE_SCHEMA_QUERY = conf.getString(
"atlas.lineage.hive.table.schema.query",
"hive_table where name=\"%s\", columns");
HIVE_TABLE_EXISTS_QUERY = conf.getString(
"atlas.lineage.hive.table.exists.query",
HIVE_TABLE_TYPE_NAME = conf.getString("atlas.lineage.hive.table.type.name", "DataSet");
HIVE_PROCESS_TYPE_NAME = conf.getString("atlas.lineage.hive.process.type.name", "Process");
HIVE_PROCESS_INPUT_ATTRIBUTE_NAME = conf.getString("atlas.lineage.hive.process.inputs.name", "inputs");
HIVE_PROCESS_OUTPUT_ATTRIBUTE_NAME = conf.getString("atlas.lineage.hive.process.outputs.name", "outputs");
HIVE_TABLE_SCHEMA_QUERY =
conf.getString("atlas.lineage.hive.table.schema.query", "hive_table where name=\"%s\", columns");
HIVE_TABLE_EXISTS_QUERY = conf.getString("atlas.lineage.hive.table.exists.query",
"from " + HIVE_TABLE_TYPE_NAME + " where name=\"%s\"");
} catch (AtlasException e) {
throw new RuntimeException(e);
......@@ -91,9 +85,8 @@ public class HiveLineageService implements LineageService {
private final GraphBackedDiscoveryService discoveryService;
@Inject
HiveLineageService(GraphProvider<TitanGraph> graphProvider,
MetadataRepository metadataRepository,
GraphBackedDiscoveryService discoveryService) throws DiscoveryException {
HiveLineageService(GraphProvider<TitanGraph> graphProvider, MetadataRepository metadataRepository,
GraphBackedDiscoveryService discoveryService) throws DiscoveryException {
this.titanGraph = graphProvider.get();
this.graphPersistenceStrategy = new DefaultGraphPersistenceStrategy(metadataRepository);
this.discoveryService = discoveryService;
......@@ -112,14 +105,13 @@ public class HiveLineageService implements LineageService {
ParamChecker.notEmpty(tableName, "table name cannot be null");
validateTableExists(tableName);
HiveWhereUsedQuery outputsQuery = new HiveWhereUsedQuery(
HIVE_TABLE_TYPE_NAME, tableName, HIVE_PROCESS_TYPE_NAME,
HIVE_PROCESS_INPUT_ATTRIBUTE_NAME, HIVE_PROCESS_OUTPUT_ATTRIBUTE_NAME,
Option.empty(), SELECT_ATTRIBUTES, true,
graphPersistenceStrategy, titanGraph);
HiveWhereUsedQuery outputsQuery =
new HiveWhereUsedQuery(HIVE_TABLE_TYPE_NAME, tableName, HIVE_PROCESS_TYPE_NAME,
HIVE_PROCESS_INPUT_ATTRIBUTE_NAME, HIVE_PROCESS_OUTPUT_ATTRIBUTE_NAME, Option.empty(),
SELECT_ATTRIBUTES, true, graphPersistenceStrategy, titanGraph);
Expressions.Expression expression = outputsQuery.expr();
LOG.debug("Expression is [" + expression.toString() +"]");
LOG.debug("Expression is [" + expression.toString() + "]");
try {
return discoveryService.evaluate(expression).toJson();
} catch (Exception e) { // unable to catch ExpressionException
......@@ -140,11 +132,10 @@ public class HiveLineageService implements LineageService {
ParamChecker.notEmpty(tableName, "table name cannot be null");
validateTableExists(tableName);
HiveWhereUsedQuery outputsQuery = new HiveWhereUsedQuery(
HIVE_TABLE_TYPE_NAME, tableName, HIVE_PROCESS_TYPE_NAME,
HIVE_PROCESS_INPUT_ATTRIBUTE_NAME, HIVE_PROCESS_OUTPUT_ATTRIBUTE_NAME,
Option.empty(), SELECT_ATTRIBUTES, true,
graphPersistenceStrategy, titanGraph);
HiveWhereUsedQuery outputsQuery =
new HiveWhereUsedQuery(HIVE_TABLE_TYPE_NAME, tableName, HIVE_PROCESS_TYPE_NAME,
HIVE_PROCESS_INPUT_ATTRIBUTE_NAME, HIVE_PROCESS_OUTPUT_ATTRIBUTE_NAME, Option.empty(),
SELECT_ATTRIBUTES, true, graphPersistenceStrategy, titanGraph);
return outputsQuery.graph().toInstanceJson();
}
......@@ -161,14 +152,12 @@ public class HiveLineageService implements LineageService {
ParamChecker.notEmpty(tableName, "table name cannot be null");
validateTableExists(tableName);
HiveLineageQuery inputsQuery = new HiveLineageQuery(
HIVE_TABLE_TYPE_NAME, tableName, HIVE_PROCESS_TYPE_NAME,
HIVE_PROCESS_INPUT_ATTRIBUTE_NAME, HIVE_PROCESS_OUTPUT_ATTRIBUTE_NAME,
Option.empty(), SELECT_ATTRIBUTES, true,
graphPersistenceStrategy, titanGraph);
HiveLineageQuery inputsQuery = new HiveLineageQuery(HIVE_TABLE_TYPE_NAME, tableName, HIVE_PROCESS_TYPE_NAME,
HIVE_PROCESS_INPUT_ATTRIBUTE_NAME, HIVE_PROCESS_OUTPUT_ATTRIBUTE_NAME, Option.empty(),
SELECT_ATTRIBUTES, true, graphPersistenceStrategy, titanGraph);
Expressions.Expression expression = inputsQuery.expr();
LOG.debug("Expression is [" + expression.toString() +"]");
LOG.debug("Expression is [" + expression.toString() + "]");
try {
return discoveryService.evaluate(expression).toJson();
} catch (Exception e) { // unable to catch ExpressionException
......@@ -189,11 +178,9 @@ public class HiveLineageService implements LineageService {
ParamChecker.notEmpty(tableName, "table name cannot be null");
validateTableExists(tableName);
HiveLineageQuery inputsQuery = new HiveLineageQuery(
HIVE_TABLE_TYPE_NAME, tableName, HIVE_PROCESS_TYPE_NAME,
HIVE_PROCESS_INPUT_ATTRIBUTE_NAME, HIVE_PROCESS_OUTPUT_ATTRIBUTE_NAME,
Option.empty(), SELECT_ATTRIBUTES, true,
graphPersistenceStrategy, titanGraph);
HiveLineageQuery inputsQuery = new HiveLineageQuery(HIVE_TABLE_TYPE_NAME, tableName, HIVE_PROCESS_TYPE_NAME,
HIVE_PROCESS_INPUT_ATTRIBUTE_NAME, HIVE_PROCESS_OUTPUT_ATTRIBUTE_NAME, Option.empty(),
SELECT_ATTRIBUTES, true, graphPersistenceStrategy, titanGraph);
return inputsQuery.graph().toInstanceJson();
}
......
......@@ -19,7 +19,6 @@
package org.apache.atlas.discovery;
import org.apache.atlas.listener.TypesChangeListener;
import org.apache.atlas.repository.IndexException;
/**
......
......@@ -47,8 +47,7 @@ import java.util.List;
*/
public class DefaultGraphPersistenceStrategy implements GraphPersistenceStrategies {
private static final Logger LOG = LoggerFactory
.getLogger(DefaultGraphPersistenceStrategy.class);
private static final Logger LOG = LoggerFactory.getLogger(DefaultGraphPersistenceStrategy.class);
private final GraphBackedMetadataRepository metadataRepository;
......@@ -104,60 +103,56 @@ public class DefaultGraphPersistenceStrategy implements GraphPersistenceStrategi
public <U> U constructInstance(IDataType<U> dataType, Object value) {
try {
switch (dataType.getTypeCategory()) {
case PRIMITIVE:
case ENUM:
return dataType.convert(value, Multiplicity.OPTIONAL);
case ARRAY:
// todo
break;
case MAP:
// todo
break;
case STRUCT:
TitanVertex structVertex = (TitanVertex) value;
StructType structType = (StructType) dataType;
ITypedStruct structInstance = structType.createInstance();
TypeSystem.IdType idType = TypeSystem.getInstance().getIdType();
if (dataType.getName().equals(idType.getName())) {
structInstance.set(idType.typeNameAttrName(),
structVertex.getProperty(typeAttributeName()));
structInstance.set(idType.idAttrName(),
structVertex.getProperty(idAttributeName()));
} else {
metadataRepository.getGraphToInstanceMapper().mapVertexToInstance(
structVertex, structInstance, structType.fieldMapping().fields);
}
return dataType.convert(structInstance, Multiplicity.OPTIONAL);
case TRAIT:
TitanVertex traitVertex = (TitanVertex) value;
TraitType traitType = (TraitType) dataType;
ITypedStruct traitInstance = traitType.createInstance();
// todo - this is not right, we should load the Instance associated with this
// trait. for now just loading the trait struct.
// metadataRepository.getGraphToInstanceMapper().mapVertexToTraitInstance(
// traitVertex, dataType.getName(), , traitType, traitInstance);
metadataRepository.getGraphToInstanceMapper().mapVertexToInstance(
traitVertex, traitInstance, traitType.fieldMapping().fields);
break;
case CLASS:
TitanVertex classVertex = (TitanVertex) value;
ITypedReferenceableInstance classInstance =
metadataRepository.getGraphToInstanceMapper().mapGraphToTypedInstance(
classVertex.<String>getProperty(Constants.GUID_PROPERTY_KEY),
classVertex);
return dataType.convert(classInstance, Multiplicity.OPTIONAL);
default:
throw new UnsupportedOperationException(
"Load for type " + dataType + "is not supported");
case PRIMITIVE:
case ENUM:
return dataType.convert(value, Multiplicity.OPTIONAL);
case ARRAY:
// todo
break;
case MAP:
// todo
break;
case STRUCT:
TitanVertex structVertex = (TitanVertex) value;
StructType structType = (StructType) dataType;
ITypedStruct structInstance = structType.createInstance();
TypeSystem.IdType idType = TypeSystem.getInstance().getIdType();
if (dataType.getName().equals(idType.getName())) {
structInstance.set(idType.typeNameAttrName(), structVertex.getProperty(typeAttributeName()));
structInstance.set(idType.idAttrName(), structVertex.getProperty(idAttributeName()));
} else {
metadataRepository.getGraphToInstanceMapper()
.mapVertexToInstance(structVertex, structInstance, structType.fieldMapping().fields);
}
return dataType.convert(structInstance, Multiplicity.OPTIONAL);
case TRAIT:
TitanVertex traitVertex = (TitanVertex) value;
TraitType traitType = (TraitType) dataType;
ITypedStruct traitInstance = traitType.createInstance();
// todo - this is not right, we should load the Instance associated with this
// trait. for now just loading the trait struct.
// metadataRepository.getGraphToInstanceMapper().mapVertexToTraitInstance(
// traitVertex, dataType.getName(), , traitType, traitInstance);
metadataRepository.getGraphToInstanceMapper()
.mapVertexToInstance(traitVertex, traitInstance, traitType.fieldMapping().fields);
break;
case CLASS:
TitanVertex classVertex = (TitanVertex) value;
ITypedReferenceableInstance classInstance = metadataRepository.getGraphToInstanceMapper()
.mapGraphToTypedInstance(classVertex.<String>getProperty(Constants.GUID_PROPERTY_KEY),
classVertex);
return dataType.convert(classInstance, Multiplicity.OPTIONAL);
default:
throw new UnsupportedOperationException("Load for type " + dataType + "is not supported");
}
} catch (AtlasException e) {
LOG.error("error while constructing an instance", e);
......@@ -168,9 +163,8 @@ public class DefaultGraphPersistenceStrategy implements GraphPersistenceStrategi
@Override
public String edgeLabel(TypeUtils.FieldInfo fInfo) {
return fInfo.reverseDataType() == null
? edgeLabel(fInfo.dataType(), fInfo.attrInfo())
: edgeLabel(fInfo.reverseDataType(), fInfo.attrInfo());
return fInfo.reverseDataType() == null ? edgeLabel(fInfo.dataType(), fInfo.attrInfo()) :
edgeLabel(fInfo.reverseDataType(), fInfo.attrInfo());
}
@Override
......@@ -184,13 +178,19 @@ public class DefaultGraphPersistenceStrategy implements GraphPersistenceStrategi
}
@Override
public String instanceToTraitEdgeDirection() { return "out"; }
public String instanceToTraitEdgeDirection() {
return "out";
}
@Override
public String traitToInstanceEdgeDirection() { return "in"; }
public String traitToInstanceEdgeDirection() {
return "in";
}
@Override
public String idAttributeName() { return metadataRepository.getIdAttributeName(); }
public String idAttributeName() {
return metadataRepository.getIdAttributeName();
}
@Override
public scala.collection.Seq<String> typeTestExpression(String typeName, IntSequence intSeq) {
......
......@@ -23,8 +23,8 @@ import com.thinkaurelius.titan.core.TitanIndexQuery;
import com.thinkaurelius.titan.core.TitanProperty;
import com.thinkaurelius.titan.core.TitanVertex;
import com.tinkerpop.blueprints.Vertex;
import org.apache.atlas.GraphTransaction;
import org.apache.atlas.AtlasClient;
import org.apache.atlas.GraphTransaction;
import org.apache.atlas.discovery.DiscoveryException;
import org.apache.atlas.discovery.DiscoveryService;
import org.apache.atlas.query.Expressions;
......@@ -71,8 +71,8 @@ public class GraphBackedDiscoveryService implements DiscoveryService {
public final static String SCORE = "score";
@Inject
GraphBackedDiscoveryService(GraphProvider<TitanGraph> graphProvider,
MetadataRepository metadataRepository) throws DiscoveryException {
GraphBackedDiscoveryService(GraphProvider<TitanGraph> graphProvider, MetadataRepository metadataRepository)
throws DiscoveryException {
this.titanGraph = graphProvider.get();
this.graphPersistenceStrategy = new DefaultGraphPersistenceStrategy(metadataRepository);
}
......@@ -86,7 +86,7 @@ public class GraphBackedDiscoveryService implements DiscoveryService {
String graphQuery = String.format("v.%s:(%s)", Constants.ENTITY_TEXT_PROPERTY_KEY, query);
LOG.debug("Full text query: {}", graphQuery);
Iterator<TitanIndexQuery.Result<Vertex>> results =
titanGraph.indexQuery(Constants.FULLTEXT_INDEX, graphQuery).vertices().iterator();
titanGraph.indexQuery(Constants.FULLTEXT_INDEX, graphQuery).vertices().iterator();
JSONArray response = new JSONArray();
while (results.hasNext()) {
......@@ -143,8 +143,7 @@ public class GraphBackedDiscoveryService implements DiscoveryService {
public GremlinQueryResult evaluate(Expressions.Expression expression) {
Expressions.Expression validatedExpression = QueryProcessor.validate(expression);
GremlinQuery gremlinQuery =
new GremlinTranslator(validatedExpression, graphPersistenceStrategy).translate();
GremlinQuery gremlinQuery = new GremlinTranslator(validatedExpression, graphPersistenceStrategy).translate();
LOG.debug("Query = {}", validatedExpression);
LOG.debug("Expression Tree = {}", validatedExpression.treeString());
LOG.debug("Gremlin Query = {}", gremlinQuery.queryStr());
......@@ -162,8 +161,7 @@ public class GraphBackedDiscoveryService implements DiscoveryService {
*/
@Override
@GraphTransaction
public List<Map<String, String>> searchByGremlin(String gremlinQuery)
throws DiscoveryException {
public List<Map<String, String>> searchByGremlin(String gremlinQuery) throws DiscoveryException {
LOG.info("Executing gremlin query={}", gremlinQuery);
ScriptEngineManager manager = new ScriptEngineManager();
ScriptEngine engine = manager.getEngineByName("gremlin-groovy");
......@@ -189,8 +187,7 @@ public class GraphBackedDiscoveryService implements DiscoveryService {
Map<String, String> oRow = new HashMap<>();
if (r instanceof Map) {
@SuppressWarnings("unchecked")
Map<Object, Object> iRow = (Map) r;
@SuppressWarnings("unchecked") Map<Object, Object> iRow = (Map) r;
for (Map.Entry e : iRow.entrySet()) {
Object k = e.getKey();
Object v = e.getValue();
......
......@@ -23,7 +23,7 @@ public final class Constants {
/**
* Globally Unique identifier property key.
*/
public static final String INTERNAL_PROPERTY_KEY_PREFIX = "__";
public static final String GUID_PROPERTY_KEY = INTERNAL_PROPERTY_KEY_PREFIX + "guid";
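// resolves to "__guid"; the "__" prefix marks property keys internal to the repository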
public static final String GUID_INDEX = "guid_index";
......
......@@ -38,7 +38,7 @@ public class EntityNotFoundException extends RepositoryException {
}
public EntityNotFoundException(String message, Throwable cause, boolean enableSuppression,
boolean writableStackTrace) {
boolean writableStackTrace) {
super(message, cause, enableSuppression, writableStackTrace);
}
}
......@@ -35,7 +35,7 @@ public class IndexCreationException extends IndexException {
}
public IndexCreationException(String message, Throwable cause, boolean enableSuppression,
boolean writableStackTrace) {
boolean writableStackTrace) {
super(message, cause, enableSuppression, writableStackTrace);
}
}
......@@ -37,8 +37,7 @@ public class IndexException extends AtlasException {
super(cause);
}
public IndexException(String message, Throwable cause, boolean enableSuppression,
boolean writableStackTrace) {
public IndexException(String message, Throwable cause, boolean enableSuppression, boolean writableStackTrace) {
super(message, cause, enableSuppression, writableStackTrace);
}
......
......@@ -123,6 +123,7 @@ public interface MetadataRepository {
// Trait management functions
/**
* Gets the list of trait names for a given entity represented by a guid.
*
......@@ -139,8 +140,7 @@ public interface MetadataRepository {
* @param traitInstance trait instance that needs to be added to entity
* @throws RepositoryException
*/
void addTrait(String guid,
ITypedStruct traitInstance) throws RepositoryException;
void addTrait(String guid, ITypedStruct traitInstance) throws RepositoryException;
/**
* Deletes a given trait from an existing entity represented by a guid.
......@@ -149,8 +149,7 @@ public interface MetadataRepository {
* @param traitNameToBeDeleted name of the trait
* @throws RepositoryException
*/
void deleteTrait(String guid,
String traitNameToBeDeleted) throws RepositoryException;
void deleteTrait(String guid, String traitNameToBeDeleted) throws RepositoryException;
/**
* Adds the property to the entity that corresponds to the GUID
......
......@@ -40,8 +40,7 @@ public class RepositoryException extends AtlasException {
super(cause);
}
public RepositoryException(String message, Throwable cause, boolean enableSuppression,
boolean writableStackTrace) {
public RepositoryException(String message, Throwable cause, boolean enableSuppression, boolean writableStackTrace) {
super(message, cause, enableSuppression, writableStackTrace);
}
}
......@@ -45,11 +45,10 @@ public final class GraphHelper {
private GraphHelper() {
}
public static Vertex createVertexWithIdentity(Graph graph,
ITypedReferenceableInstance typedInstance,
Set<String> superTypeNames) {
final Vertex vertexWithIdentity = createVertexWithoutIdentity(
graph, typedInstance.getTypeName(), typedInstance.getId(), superTypeNames);
public static Vertex createVertexWithIdentity(Graph graph, ITypedReferenceableInstance typedInstance,
Set<String> superTypeNames) {
final Vertex vertexWithIdentity =
createVertexWithoutIdentity(graph, typedInstance.getTypeName(), typedInstance.getId(), superTypeNames);
// add identity
final String guid = UUID.randomUUID().toString();
......@@ -58,10 +57,8 @@ public final class GraphHelper {
return vertexWithIdentity;
}
public static Vertex createVertexWithoutIdentity(Graph graph,
String typeName,
Id typedInstanceId,
Set<String> superTypeNames) {
public static Vertex createVertexWithoutIdentity(Graph graph, String typeName, Id typedInstanceId,
Set<String> superTypeNames) {
final Vertex vertexWithoutIdentity = graph.addVertex(null);
// add type information
......@@ -69,34 +66,28 @@ public final class GraphHelper {
// add super types
for (String superTypeName : superTypeNames) {
((TitanVertex) vertexWithoutIdentity).addProperty(
Constants.SUPER_TYPES_PROPERTY_KEY, superTypeName);
((TitanVertex) vertexWithoutIdentity).addProperty(Constants.SUPER_TYPES_PROPERTY_KEY, superTypeName);
}
// add version information
vertexWithoutIdentity.setProperty(Constants.VERSION_PROPERTY_KEY, typedInstanceId.version);
// add timestamp information
vertexWithoutIdentity.setProperty(
Constants.TIMESTAMP_PROPERTY_KEY, System.currentTimeMillis());
vertexWithoutIdentity.setProperty(Constants.TIMESTAMP_PROPERTY_KEY, System.currentTimeMillis());
return vertexWithoutIdentity;
}
public static Edge addEdge(TitanGraph titanGraph, Vertex fromVertex, Vertex toVertex,
String edgeLabel) {
LOG.debug("Adding edge for {} -> label {} -> {}",
fromVertex, edgeLabel, toVertex);
public static Edge addEdge(TitanGraph titanGraph, Vertex fromVertex, Vertex toVertex, String edgeLabel) {
LOG.debug("Adding edge for {} -> label {} -> {}", fromVertex, edgeLabel, toVertex);
return titanGraph.addEdge(null, fromVertex, toVertex, edgeLabel);
}
public static Vertex findVertexByGUID(TitanGraph titanGraph,
String value) {
public static Vertex findVertexByGUID(TitanGraph titanGraph, String value) {
LOG.debug("Finding vertex for key={}, value={}", Constants.GUID_PROPERTY_KEY, value);
GraphQuery query = titanGraph.query()
.has(Constants.GUID_PROPERTY_KEY, value);
GraphQuery query = titanGraph.query().has(Constants.GUID_PROPERTY_KEY, value);
Iterator<Vertex> results = query.vertices().iterator();
// returning one since guid should be unique
return results.hasNext() ? results.next() : null;
......@@ -105,20 +96,15 @@ public final class GraphHelper {
public static String vertexString(final Vertex vertex) {
StringBuilder properties = new StringBuilder();
for (String propertyKey : vertex.getPropertyKeys()) {
properties.append(propertyKey)
.append("=").append(vertex.getProperty(propertyKey))
.append(", ");
properties.append(propertyKey).append("=").append(vertex.getProperty(propertyKey)).append(", ");
}
return "v[" + vertex.getId() + "], Properties[" + properties + "]";
}
public static String edgeString(final Edge edge) {
return "e[" + edge.getLabel() + "], ["
+ edge.getVertex(Direction.OUT)
+ " -> " + edge.getLabel() + " -> "
+ edge.getVertex(Direction.IN)
+ "]";
return "e[" + edge.getLabel() + "], [" + edge.getVertex(Direction.OUT) + " -> " + edge.getLabel() + " -> "
+ edge.getVertex(Direction.IN) + "]";
}
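// Worked example with hypothetical ids and labels — the two helpers above
// render debug strings roughly like:
//
//   vertexString(v): v[256], Properties[__typeName=Person, __guid=..., ]
//   edgeString(e):   e[employees], [v[256] -> employees -> v[512]]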
/*
......
......@@ -32,8 +32,7 @@ public class ClassStore extends HierarchicalTypeStore {
final ArrayList<ImmutableList<String>> traitNamesStore;
final ClassType classType;
public ClassStore(MemRepository repository, ClassType hierarchicalType)
throws RepositoryException {
public ClassStore(MemRepository repository, ClassType hierarchicalType) throws RepositoryException {
super(repository, hierarchicalType);
classType = hierarchicalType;
traitNamesStore = new ArrayList<ImmutableList<String>>();
......@@ -64,8 +63,7 @@ public class ClassStore extends HierarchicalTypeStore {
String typeName = typeNameList.get(pos);
if (typeName != hierarchicalType.getName()) {
throw new RepositoryException(
String.format("Invalid Id (incorrect typeName, type is %s) : %s",
typeName, id));
String.format("Invalid Id (incorrect typeName, type is %s) : %s", typeName, id));
}
return true;
......
......@@ -60,13 +60,11 @@ public abstract class HierarchicalTypeStore {
*/
ReentrantReadWriteLock lock;
HierarchicalTypeStore(MemRepository repository, HierarchicalType hierarchicalType)
throws RepositoryException {
HierarchicalTypeStore(MemRepository repository, HierarchicalType hierarchicalType) throws RepositoryException {
this.hierarchicalType = (IConstructableType) hierarchicalType;
this.repository = repository;
ImmutableMap.Builder<AttributeInfo, IAttributeStore> b
= new ImmutableBiMap.Builder<AttributeInfo,
IAttributeStore>();
ImmutableMap.Builder<AttributeInfo, IAttributeStore> b =
new ImmutableBiMap.Builder<AttributeInfo, IAttributeStore>();
typeNameList = Lists.newArrayList((String) null);
ImmutableList<AttributeInfo> l = hierarchicalType.immediateAttrs;
for (AttributeInfo i : l) {
......@@ -74,8 +72,7 @@ public abstract class HierarchicalTypeStore {
}
attrStores = b.build();
ImmutableList.Builder<HierarchicalTypeStore> b1
= new ImmutableList.Builder<HierarchicalTypeStore>();
ImmutableList.Builder<HierarchicalTypeStore> b1 = new ImmutableList.Builder<HierarchicalTypeStore>();
Set<String> allSuperTypeNames = hierarchicalType.getAllSuperTypeNames();
for (String s : allSuperTypeNames) {
b1.add(repository.getStore(s));
......
......@@ -29,8 +29,7 @@ public interface IAttributeStore {
* @param instance
* @throws RepositoryException
*/
void store(int pos, IConstructableType type, StructInstance instance)
throws RepositoryException;
void store(int pos, IConstructableType type, StructInstance instance) throws RepositoryException;
/**
* load the Instance with the value from position 'pos' for the attribute.
......
......@@ -129,9 +129,8 @@ public class MemRepository implements IRepository {
*/
for (Id oldId : discoverInstances.idToNewIdMap.keySet()) {
if (!discoverInstances.idToInstanceMap.containsKey(oldId)) {
throw new RepositoryException(String.format("Invalid Object Graph: " +
"Encountered an unassignedId %s that is not associated with an Instance",
oldId));
throw new RepositoryException(String.format("Invalid Object Graph: "
+ "Encountered an unassignedId %s that is not associated with an Instance", oldId));
}
}
......@@ -140,18 +139,14 @@ public class MemRepository implements IRepository {
* - create a ITypedReferenceableInstance.
* replace any old References ( ids or object references) with new Ids.
*/
List<ITypedReferenceableInstance> newInstances
= new ArrayList<ITypedReferenceableInstance>();
List<ITypedReferenceableInstance> newInstances = new ArrayList<ITypedReferenceableInstance>();
ITypedReferenceableInstance retInstance = null;
Set<ClassType> classTypes = new TreeSet<ClassType>();
Set<TraitType> traitTypes = new TreeSet<TraitType>();
for (IReferenceableInstance transientInstance : discoverInstances.idToInstanceMap
.values()) {
for (IReferenceableInstance transientInstance : discoverInstances.idToInstanceMap.values()) {
try {
ClassType cT = typeSystem
.getDataType(ClassType.class, transientInstance.getTypeName());
ITypedReferenceableInstance newInstance = cT
.convert(transientInstance, Multiplicity.REQUIRED);
ClassType cT = typeSystem.getDataType(ClassType.class, transientInstance.getTypeName());
ITypedReferenceableInstance newInstance = cT.convert(transientInstance, Multiplicity.REQUIRED);
newInstances.add(newInstance);
classTypes.add(cT);
......@@ -172,8 +167,7 @@ public class MemRepository implements IRepository {
} catch (AtlasException me) {
throw new RepositoryException(
String.format("Failed to create Instance(id = %s",
transientInstance.getId()), me);
String.format("Failed to create Instance(id = %s", transientInstance.getId()), me);
}
}
......@@ -237,8 +231,7 @@ public class MemRepository implements IRepository {
return retInstance;
}
public ITypedReferenceableInstance update(ITypedReferenceableInstance i)
throws RepositoryException {
public ITypedReferenceableInstance update(ITypedReferenceableInstance i) throws RepositoryException {
throw new RepositoryException("not implemented");
}
......@@ -267,8 +260,7 @@ public class MemRepository implements IRepository {
* - load instance traits
* - add to GraphWalker
*/
ITypedReferenceableInstance getDuringWalk(Id id, ObjectGraphWalker walker)
throws RepositoryException {
ITypedReferenceableInstance getDuringWalk(Id id, ObjectGraphWalker walker) throws RepositoryException {
ClassStore cS = getClassStore(id.getTypeName());
if (cS == null) {
throw new RepositoryException(String.format("Unknown Class %s", id.getTypeName()));
......
......@@ -67,22 +67,18 @@ public class ReplaceIdWithInstance implements ObjectGraphWalker.NodeProcessor {
convertToInstances((ImmutableCollection) nd.value, nd.aInfo.multiplicity, aT));
} else if (nd.aInfo.dataType().getTypeCategory() == DataTypes.TypeCategory.MAP) {
DataTypes.MapType mT = (DataTypes.MapType) nd.aInfo.dataType();
nd.instance.set(nd.attributeName,
convertToInstances((ImmutableMap) nd.value, nd.aInfo.multiplicity, mT));
nd.instance.set(nd.attributeName, convertToInstances((ImmutableMap) nd.value, nd.aInfo.multiplicity, mT));
}
}
ImmutableCollection<?> convertToInstances(ImmutableCollection<?> val,
Multiplicity m, DataTypes.ArrayType arrType)
ImmutableCollection<?> convertToInstances(ImmutableCollection<?> val, Multiplicity m, DataTypes.ArrayType arrType)
throws AtlasException {
if (val == null ||
arrType.getElemType().getTypeCategory() != DataTypes.TypeCategory.CLASS) {
if (val == null || arrType.getElemType().getTypeCategory() != DataTypes.TypeCategory.CLASS) {
return val;
}
ImmutableCollection.Builder b = m.isUnique ? ImmutableSet.builder()
: ImmutableList.builder();
ImmutableCollection.Builder b = m.isUnique ? ImmutableSet.builder() : ImmutableList.builder();
Iterator it = val.iterator();
while (it.hasNext()) {
Object elem = it.next();
......@@ -97,13 +93,11 @@ public class ReplaceIdWithInstance implements ObjectGraphWalker.NodeProcessor {
return b.build();
}
ImmutableMap<?, ?> convertToInstances(ImmutableMap val, Multiplicity m,
DataTypes.MapType mapType)
ImmutableMap<?, ?> convertToInstances(ImmutableMap val, Multiplicity m, DataTypes.MapType mapType)
throws AtlasException {
if (val == null ||
(mapType.getKeyType().getTypeCategory() != DataTypes.TypeCategory.CLASS &&
mapType.getValueType().getTypeCategory() != DataTypes.TypeCategory.CLASS)) {
if (val == null || (mapType.getKeyType().getTypeCategory() != DataTypes.TypeCategory.CLASS
&& mapType.getValueType().getTypeCategory() != DataTypes.TypeCategory.CLASS)) {
return val;
}
ImmutableMap.Builder b = ImmutableMap.builder();
......
......@@ -65,8 +65,7 @@ public class StructStore extends AttributeStores.AbstractAttributeStore implemen
}
@Override
protected void store(StructInstance instance, int colPos, String attrName,
Map<String, Object> m) {
protected void store(StructInstance instance, int colPos, String attrName, Map<String, Object> m) {
m.put(attrName, instance.structs[colPos]);
}
......
......@@ -29,8 +29,7 @@ public class TraitStore extends HierarchicalTypeStore {
final ArrayList<String> classNameStore;
public TraitStore(MemRepository repository, TraitType hierarchicalType)
throws RepositoryException {
public TraitStore(MemRepository repository, TraitType hierarchicalType) throws RepositoryException {
super(repository, hierarchicalType);
classNameStore = new ArrayList<>();
}
......
......@@ -25,8 +25,8 @@ import com.thinkaurelius.titan.core.TitanGraph;
import com.tinkerpop.blueprints.Direction;
import com.tinkerpop.blueprints.Edge;
import com.tinkerpop.blueprints.Vertex;
import org.apache.atlas.GraphTransaction;
import org.apache.atlas.AtlasException;
import org.apache.atlas.GraphTransaction;
import org.apache.atlas.repository.Constants;
import org.apache.atlas.repository.graph.GraphProvider;
import org.apache.atlas.typesystem.TypesDef;
......@@ -80,25 +80,25 @@ public class GraphBackedTypeStore implements ITypeStore {
IDataType dataType = typeSystem.getDataType(IDataType.class, typeName);
LOG.debug("Processing {}.{} in type store", dataType.getTypeCategory(), dataType.getName());
switch (dataType.getTypeCategory()) {
case ENUM:
storeInGraph((EnumType)dataType);
break;
case STRUCT:
StructType structType = (StructType) dataType;
storeInGraph(typeSystem, dataType.getTypeCategory(), dataType.getName(),
ImmutableList.copyOf(structType.infoToNameMap.keySet()), ImmutableList.<String>of());
break;
case TRAIT:
case CLASS:
HierarchicalType type = (HierarchicalType) dataType;
storeInGraph(typeSystem, dataType.getTypeCategory(), dataType.getName(),
type.immediateAttrs, type.superTypes);
break;
default: //Ignore primitive/collection types as they are covered under references
break;
case ENUM:
storeInGraph((EnumType) dataType);
break;
case STRUCT:
StructType structType = (StructType) dataType;
storeInGraph(typeSystem, dataType.getTypeCategory(), dataType.getName(),
ImmutableList.copyOf(structType.infoToNameMap.keySet()), ImmutableList.<String>of());
break;
case TRAIT:
case CLASS:
HierarchicalType type = (HierarchicalType) dataType;
storeInGraph(typeSystem, dataType.getTypeCategory(), dataType.getName(), type.immediateAttrs,
type.superTypes);
break;
default: //Ignore primitive/collection types as they are covered under references
break;
}
}
}
......@@ -132,8 +132,7 @@ public class GraphBackedTypeStore implements ITypeStore {
}
private void storeInGraph(TypeSystem typeSystem, DataTypes.TypeCategory category, String typeName,
ImmutableList<AttributeInfo> attributes, ImmutableList<String> superTypes) throws
AtlasException {
ImmutableList<AttributeInfo> attributes, ImmutableList<String> superTypes) throws AtlasException {
Vertex vertex = createVertex(category, typeName);
List<String> attrNames = new ArrayList<>();
if (attributes != null) {
......@@ -161,39 +160,40 @@ public class GraphBackedTypeStore implements ITypeStore {
}
//Add edges for complex attributes
private void addReferencesForAttribute(TypeSystem typeSystem, Vertex vertex, AttributeInfo attribute) throws
AtlasException {
private void addReferencesForAttribute(TypeSystem typeSystem, Vertex vertex, AttributeInfo attribute)
throws AtlasException {
ImmutableList<String> coreTypes = typeSystem.getCoreTypes();
List<IDataType> attrDataTypes = new ArrayList<>();
IDataType attrDataType = attribute.dataType();
String vertexTypeName = vertex.getProperty(Constants.TYPENAME_PROPERTY_KEY);
switch (attrDataType.getTypeCategory()) {
case ARRAY:
String attrType = TypeUtils.parseAsArrayType(attrDataType.getName());
IDataType elementType = typeSystem.getDataType(IDataType.class, attrType);
attrDataTypes.add(elementType);
break;
case MAP:
String[] attrTypes = TypeUtils.parseAsMapType(attrDataType.getName());
IDataType keyType = typeSystem.getDataType(IDataType.class, attrTypes[0]);
IDataType valueType = typeSystem.getDataType(IDataType.class, attrTypes[1]);
attrDataTypes.add(keyType);
attrDataTypes.add(valueType);
break;
case ENUM:
case STRUCT:
case CLASS:
attrDataTypes.add(attrDataType);
break;
case PRIMITIVE: //no vertex for primitive type, hence no edge required
break;
default:
throw new IllegalArgumentException("Attribute cannot reference instances of type : " + attrDataType.getTypeCategory());
case ARRAY:
String attrType = TypeUtils.parseAsArrayType(attrDataType.getName());
IDataType elementType = typeSystem.getDataType(IDataType.class, attrType);
attrDataTypes.add(elementType);
break;
case MAP:
String[] attrTypes = TypeUtils.parseAsMapType(attrDataType.getName());
IDataType keyType = typeSystem.getDataType(IDataType.class, attrTypes[0]);
IDataType valueType = typeSystem.getDataType(IDataType.class, attrTypes[1]);
attrDataTypes.add(keyType);
attrDataTypes.add(valueType);
break;
case ENUM:
case STRUCT:
case CLASS:
attrDataTypes.add(attrDataType);
break;
case PRIMITIVE: //no vertex for primitive type, hence no edge required
break;
default:
throw new IllegalArgumentException(
"Attribute cannot reference instances of type: " + attrDataType.getTypeCategory());
}
for (IDataType attrType : attrDataTypes) {
......
......@@ -94,6 +94,7 @@ public interface MetadataService {
void updateEntity(String guid, String property, String value) throws AtlasException;
// Trait management functions
/**
* Gets the list of trait names for a given entity represented by a guid.
*
......@@ -110,8 +111,7 @@ public interface MetadataService {
* @param traitInstanceDefinition trait instance that needs to be added to entity
* @throws AtlasException
*/
void addTrait(String guid,
String traitInstanceDefinition) throws AtlasException;
void addTrait(String guid, String traitInstanceDefinition) throws AtlasException;
/**
* Deletes a given trait from an existing entity represented by a guid.
......@@ -120,6 +120,5 @@ public interface MetadataService {
* @param traitNameToBeDeleted name of the trait
* @throws AtlasException
*/
void deleteTrait(String guid,
String traitNameToBeDeleted) throws AtlasException;
void deleteTrait(String guid, String traitNameToBeDeleted) throws AtlasException;
}
......@@ -94,44 +94,31 @@ public final class TestUtils {
new EnumTypeDefinition("OrgLevel", new EnumValue("L1", 1), new EnumValue("L2", 2));
ts.defineEnumType(orgLevelEnum);
StructTypeDefinition addressDetails = createStructTypeDef("Address",
createRequiredAttrDef("street", DataTypes.STRING_TYPE),
createRequiredAttrDef("city", DataTypes.STRING_TYPE));
HierarchicalTypeDefinition<ClassType> deptTypeDef =
createClassTypeDef("Department", ImmutableList.<String>of(),
createRequiredAttrDef("name", DataTypes.STRING_TYPE),
new AttributeDefinition("employees",
String.format("array<%s>", "Person"), Multiplicity.COLLECTION, true,
"department")
);
HierarchicalTypeDefinition<ClassType> personTypeDef = createClassTypeDef("Person",
ImmutableList.<String>of(),
StructTypeDefinition addressDetails =
createStructTypeDef("Address", createRequiredAttrDef("street", DataTypes.STRING_TYPE),
createRequiredAttrDef("city", DataTypes.STRING_TYPE));
HierarchicalTypeDefinition<ClassType> deptTypeDef = createClassTypeDef("Department", ImmutableList.<String>of(),
createRequiredAttrDef("name", DataTypes.STRING_TYPE),
new AttributeDefinition("employees", String.format("array<%s>", "Person"), Multiplicity.COLLECTION,
true, "department"));
HierarchicalTypeDefinition<ClassType> personTypeDef = createClassTypeDef("Person", ImmutableList.<String>of(),
createRequiredAttrDef("name", DataTypes.STRING_TYPE),
createOptionalAttrDef("orgLevel", ts.getDataType(EnumType.class, "OrgLevel")),
createOptionalAttrDef("address", "Address"),
new AttributeDefinition("department",
"Department", Multiplicity.REQUIRED, false, "employees"),
new AttributeDefinition("manager",
"Manager", Multiplicity.OPTIONAL, false, "subordinates")
);
HierarchicalTypeDefinition<ClassType> managerTypeDef = createClassTypeDef("Manager",
ImmutableList.of("Person"),
new AttributeDefinition("subordinates",
String.format("array<%s>", "Person"), Multiplicity.COLLECTION, false,
"manager")
);
HierarchicalTypeDefinition<TraitType> securityClearanceTypeDef = createTraitTypeDef(
"SecurityClearance",
ImmutableList.<String>of(),
createRequiredAttrDef("level", DataTypes.INT_TYPE)
);
ts.defineTypes(ImmutableList.of(addressDetails),
ImmutableList.of(securityClearanceTypeDef),
new AttributeDefinition("department", "Department", Multiplicity.REQUIRED, false, "employees"),
new AttributeDefinition("manager", "Manager", Multiplicity.OPTIONAL, false, "subordinates"));
HierarchicalTypeDefinition<ClassType> managerTypeDef = createClassTypeDef("Manager", ImmutableList.of("Person"),
new AttributeDefinition("subordinates", String.format("array<%s>", "Person"), Multiplicity.COLLECTION,
false, "manager"));
HierarchicalTypeDefinition<TraitType> securityClearanceTypeDef =
createTraitTypeDef("SecurityClearance", ImmutableList.<String>of(),
createRequiredAttrDef("level", DataTypes.INT_TYPE));
ts.defineTypes(ImmutableList.of(addressDetails), ImmutableList.of(securityClearanceTypeDef),
ImmutableList.of(deptTypeDef, personTypeDef, managerTypeDef));
}
......
......@@ -95,8 +95,7 @@ public class GraphBackedDiscoveryServiceTest {
Bindings bindings = engine.createBindings();
bindings.put("g", titanGraph);
String hiveGraphFile = FileUtils.getTempDirectory().getPath()
+ File.separator + System.nanoTime() + ".gson";
String hiveGraphFile = FileUtils.getTempDirectory().getPath() + File.separator + System.nanoTime() + ".gson";
System.out.println("hiveGraphFile = " + hiveGraphFile);
HiveTitanSample.writeGson(hiveGraphFile);
bindings.put("hiveGraphFile", hiveGraphFile);
......@@ -164,71 +163,56 @@ public class GraphBackedDiscoveryServiceTest {
System.out.println("search result = " + r);
// Property Query: list all Person names
r = discoveryService
.searchByGremlin("g.V.filter{it.typeName == 'Person'}.'Person.name'.toList()");
r = discoveryService.searchByGremlin("g.V.filter{it.typeName == 'Person'}.'Person.name'.toList()");
System.out.println("search result = " + r);
}
@DataProvider(name = "dslQueriesProvider")
private Object[][] createDSLQueries() {
return new String[][] {
{"from DB"},
{"DB"},
{"DB where DB.name=\"Reporting\""},
{"DB DB.name = \"Reporting\""},
{"DB where DB.name=\"Reporting\" select name, owner"},
{"DB has name"},
{"DB, Table"},
{"DB is JdbcAccess"},
return new String[][]{{"from DB"}, {"DB"}, {"DB where DB.name=\"Reporting\""}, {"DB DB.name = \"Reporting\""},
{"DB where DB.name=\"Reporting\" select name, owner"}, {"DB has name"}, {"DB, Table"},
{"DB is JdbcAccess"},
/*
{"DB, LoadProcess has name"},
{"DB as db1, Table where db1.name = \"Reporting\""},
{"DB where DB.name=\"Reporting\" and DB.createTime < " + System.currentTimeMillis()},
*/
{"from Table"},
{"Table"},
{"Table is Dimension"},
{"Column where Column isa PII"},
{"View is Dimension"},
{"from Table"}, {"Table"}, {"Table is Dimension"}, {"Column where Column isa PII"},
{"View is Dimension"},
/*{"Column where Column isa PII select Column.name"},*/
{"Column select Column.name"},
{"Column select name"},
{"Column where Column.name=\"customer_id\""},
{"from Table select Table.name"},
{"DB where (name = \"Reporting\")"},
{"DB where (name = \"Reporting\") select name as _col_0, owner as _col_1"},
{"DB where DB is JdbcAccess"},
{"DB where DB has name"},
{"DB Table"},
{"DB where DB has name"},
{"DB as db1 Table where (db1.name = \"Reporting\")"},
{"DB where (name = \"Reporting\") select name as _col_0, (createTime + 1) as _col_1 "},
{"Column select Column.name"}, {"Column select name"}, {"Column where Column.name=\"customer_id\""},
{"from Table select Table.name"}, {"DB where (name = \"Reporting\")"},
{"DB where (name = \"Reporting\") select name as _col_0, owner as _col_1"},
{"DB where DB is JdbcAccess"}, {"DB where DB has name"}, {"DB Table"}, {"DB where DB has name"},
{"DB as db1 Table where (db1.name = \"Reporting\")"},
{"DB where (name = \"Reporting\") select name as _col_0, (createTime + 1) as _col_1 "},
/*
todo: does not work
{"DB where (name = \"Reporting\") and ((createTime + 1) > 0)"},
{"DB as db1 Table as tab where ((db1.createTime + 1) > 0) and (db1.name = \"Reporting\") select db1.name as dbName, tab.name as tabName"},
{"DB as db1 Table as tab where ((db1.createTime + 1) > 0) or (db1.name = \"Reporting\") select db1.name as dbName, tab.name as tabName"},
{"DB as db1 Table as tab where ((db1.createTime + 1) > 0) and (db1.name = \"Reporting\") or db1 has owner select db1.name as dbName, tab.name as tabName"},
{"DB as db1 Table as tab where ((db1.createTime + 1) > 0) and (db1.name = \"Reporting\") or db1 has owner select db1.name as dbName, tab.name as tabName"},
{"DB as db1 Table as tab where ((db1.createTime + 1) > 0) and (db1.name = \"Reporting\") select db1.name
as dbName, tab.name as tabName"},
{"DB as db1 Table as tab where ((db1.createTime + 1) > 0) or (db1.name = \"Reporting\") select db1.name
as dbName, tab.name as tabName"},
{"DB as db1 Table as tab where ((db1.createTime + 1) > 0) and (db1.name = \"Reporting\") or db1 has owner
select db1.name as dbName, tab.name as tabName"},
{"DB as db1 Table as tab where ((db1.createTime + 1) > 0) and (db1.name = \"Reporting\") or db1 has owner
select db1.name as dbName, tab.name as tabName"},
*/
// trait searches
{"Dimension"},
// trait searches
{"Dimension"},
/*{"Fact"}, - todo: does not work*/
{"JdbcAccess"},
{"ETL"},
{"Metric"},
{"PII"},
// Lineage
{"Table LoadProcess outputTable"},
{"Table loop (LoadProcess outputTable)"},
{"Table as _loop0 loop (LoadProcess outputTable) withPath"},
{"Table as src loop (LoadProcess outputTable) as dest select src.name as srcTable, dest.name as destTable withPath"},
{"Table as t, sd, Column as c where t.name=\"sales_fact\" select c.name as colName, c.dataType as colType"},
{"Table where name='sales_fact', db where name='Reporting'"}
};
{"JdbcAccess"}, {"ETL"}, {"Metric"}, {"PII"},
// Lineage
{"Table LoadProcess outputTable"}, {"Table loop (LoadProcess outputTable)"},
{"Table as _loop0 loop (LoadProcess outputTable) withPath"},
{"Table as src loop (LoadProcess outputTable) as dest select src.name as srcTable, dest.name as "
+ "destTable withPath"},
{"Table as t, sd, Column as c where t.name=\"sales_fact\" select c.name as colName, c.dataType as "
+ "colType"},
{"Table where name='sales_fact', db where name='Reporting'"}};
}
@Test (dataProvider = "dslQueriesProvider")
@Test(dataProvider = "dslQueriesProvider")
public void testSearchByDSLQueries(String dslQuery) throws Exception {
System.out.println("Executing dslQuery = " + dslQuery);
String jsonResults = discoveryService.searchByDSL(dslQuery);
......@@ -254,14 +238,10 @@ public class GraphBackedDiscoveryServiceTest {
@DataProvider(name = "invalidDslQueriesProvider")
private Object[][] createInvalidDSLQueries() {
return new String[][] {
{"from Unknown"},
{"Unknown"},
{"Unknown is Blah"},
};
return new String[][]{{"from Unknown"}, {"Unknown"}, {"Unknown is Blah"},};
}
@Test (dataProvider = "invalidDslQueriesProvider", expectedExceptions = DiscoveryException.class)
@Test(dataProvider = "invalidDslQueriesProvider", expectedExceptions = DiscoveryException.class)
public void testSearchByDSLInvalidQueries(String dslQuery) throws Exception {
System.out.println("Executing dslQuery = " + dslQuery);
discoveryService.searchByDSL(dslQuery);
......@@ -289,17 +269,16 @@ public class GraphBackedDiscoveryServiceTest {
* D(d) extends C
*/
private void createTypesWithMultiLevelInheritance() throws Exception {
HierarchicalTypeDefinition A = createClassTypeDef("A", null,
createRequiredAttrDef("a", DataTypes.INT_TYPE));
HierarchicalTypeDefinition A = createClassTypeDef("A", null, createRequiredAttrDef("a", DataTypes.INT_TYPE));
HierarchicalTypeDefinition B = createClassTypeDef("B", ImmutableList.of("A"),
createOptionalAttrDef("b", DataTypes.BOOLEAN_TYPE));
HierarchicalTypeDefinition B =
createClassTypeDef("B", ImmutableList.of("A"), createOptionalAttrDef("b", DataTypes.BOOLEAN_TYPE));
HierarchicalTypeDefinition C = createClassTypeDef("C", ImmutableList.of("B"),
createOptionalAttrDef("c", DataTypes.BYTE_TYPE));
HierarchicalTypeDefinition C =
createClassTypeDef("C", ImmutableList.of("B"), createOptionalAttrDef("c", DataTypes.BYTE_TYPE));
HierarchicalTypeDefinition D = createClassTypeDef("D", ImmutableList.of("C"),
createOptionalAttrDef("d", DataTypes.SHORT_TYPE));
HierarchicalTypeDefinition D =
createClassTypeDef("D", ImmutableList.of("C"), createOptionalAttrDef("d", DataTypes.SHORT_TYPE));
TypeSystem.getInstance().defineClassTypes(A, B, C, D);
}
......@@ -312,8 +291,7 @@ public class GraphBackedDiscoveryServiceTest {
instance.set("a", 1);
ClassType deptType = TypeSystem.getInstance().getDataType(ClassType.class, "D");
ITypedReferenceableInstance typedInstance =
deptType.convert(instance, Multiplicity.REQUIRED);
ITypedReferenceableInstance typedInstance = deptType.convert(instance, Multiplicity.REQUIRED);
repositoryService.createEntity(typedInstance);
}
......
......@@ -50,12 +50,11 @@ public abstract class BaseTest {
public static final String STRUCT_TYPE_1 = "t1";
public static final String STRUCT_TYPE_2 = "t2";
public static final String TEST_DATE = "2014-12-11T02:35:58.440Z";
public static final long TEST_DATE_IN_LONG=1418265358440L;
public static final long TEST_DATE_IN_LONG = 1418265358440L;
protected IRepository repo;
public static Struct createStruct() throws AtlasException {
StructType structType = (StructType) TypeSystem.getInstance()
.getDataType(StructType.class, STRUCT_TYPE_1);
StructType structType = (StructType) TypeSystem.getInstance().getDataType(StructType.class, STRUCT_TYPE_1);
Struct s = new Struct(structType.getName());
s.set("a", 1);
s.set("b", true);
......@@ -70,8 +69,7 @@ public abstract class BaseTest {
s.set("k", new BigDecimal(1));
s.set("l", new Date(1418265358440L));
s.set("m", Lists.<Integer>asList(Integer.valueOf(1), new Integer[]{Integer.valueOf(1)}));
s.set("n", Lists.<BigDecimal>asList(BigDecimal.valueOf(1.1),
new BigDecimal[]{BigDecimal.valueOf(1.1)}));
s.set("n", Lists.<BigDecimal>asList(BigDecimal.valueOf(1.1), new BigDecimal[]{BigDecimal.valueOf(1.1)}));
Map<String, Double> hm = Maps.<String, Double>newHashMap();
hm.put("a", 1.0);
hm.put("b", 2.0);
......@@ -94,33 +92,30 @@ public abstract class BaseTest {
ts.reset();
repo = new MemRepository(ts);
StructType structType = ts.defineStructType(STRUCT_TYPE_1,
true,
TypesUtil.createRequiredAttrDef("a", DataTypes.INT_TYPE),
TypesUtil.createOptionalAttrDef("b", DataTypes.BOOLEAN_TYPE),
TypesUtil.createOptionalAttrDef("c", DataTypes.BYTE_TYPE),
TypesUtil.createOptionalAttrDef("d", DataTypes.SHORT_TYPE),
TypesUtil.createOptionalAttrDef("e", DataTypes.INT_TYPE),
TypesUtil.createOptionalAttrDef("f", DataTypes.INT_TYPE),
TypesUtil.createOptionalAttrDef("g", DataTypes.LONG_TYPE),
TypesUtil.createOptionalAttrDef("h", DataTypes.FLOAT_TYPE),
TypesUtil.createOptionalAttrDef("i", DataTypes.DOUBLE_TYPE),
TypesUtil.createOptionalAttrDef("j", DataTypes.BIGINTEGER_TYPE),
TypesUtil.createOptionalAttrDef("k", DataTypes.BIGDECIMAL_TYPE),
TypesUtil.createOptionalAttrDef("l", DataTypes.DATE_TYPE),
TypesUtil.createOptionalAttrDef("m", ts.defineArrayType(DataTypes.INT_TYPE)),
TypesUtil.createOptionalAttrDef("n", ts.defineArrayType(DataTypes.BIGDECIMAL_TYPE)),
TypesUtil.createOptionalAttrDef("o",
ts.defineMapType(DataTypes.STRING_TYPE, DataTypes.DOUBLE_TYPE)));
StructType recursiveStructType = ts.defineStructType(STRUCT_TYPE_2,
true,
TypesUtil.createRequiredAttrDef("a", DataTypes.INT_TYPE),
TypesUtil.createOptionalAttrDef("s", STRUCT_TYPE_2));
StructType structType =
ts.defineStructType(STRUCT_TYPE_1, true, TypesUtil.createRequiredAttrDef("a", DataTypes.INT_TYPE),
TypesUtil.createOptionalAttrDef("b", DataTypes.BOOLEAN_TYPE),
TypesUtil.createOptionalAttrDef("c", DataTypes.BYTE_TYPE),
TypesUtil.createOptionalAttrDef("d", DataTypes.SHORT_TYPE),
TypesUtil.createOptionalAttrDef("e", DataTypes.INT_TYPE),
TypesUtil.createOptionalAttrDef("f", DataTypes.INT_TYPE),
TypesUtil.createOptionalAttrDef("g", DataTypes.LONG_TYPE),
TypesUtil.createOptionalAttrDef("h", DataTypes.FLOAT_TYPE),
TypesUtil.createOptionalAttrDef("i", DataTypes.DOUBLE_TYPE),
TypesUtil.createOptionalAttrDef("j", DataTypes.BIGINTEGER_TYPE),
TypesUtil.createOptionalAttrDef("k", DataTypes.BIGDECIMAL_TYPE),
TypesUtil.createOptionalAttrDef("l", DataTypes.DATE_TYPE),
TypesUtil.createOptionalAttrDef("m", ts.defineArrayType(DataTypes.INT_TYPE)),
TypesUtil.createOptionalAttrDef("n", ts.defineArrayType(DataTypes.BIGDECIMAL_TYPE)), TypesUtil
.createOptionalAttrDef("o",
ts.defineMapType(DataTypes.STRING_TYPE, DataTypes.DOUBLE_TYPE)));
StructType recursiveStructType =
ts.defineStructType(STRUCT_TYPE_2, true, TypesUtil.createRequiredAttrDef("a", DataTypes.INT_TYPE),
TypesUtil.createOptionalAttrDef("s", STRUCT_TYPE_2));
}
protected Map<String, IDataType> defineTraits(HierarchicalTypeDefinition... tDefs)
throws AtlasException {
protected Map<String, IDataType> defineTraits(HierarchicalTypeDefinition... tDefs) throws AtlasException {
return getTypeSystem().defineTraitTypes(tDefs);
}
......@@ -135,45 +130,33 @@ public abstract class BaseTest {
*/
protected void defineDeptEmployeeTypes(TypeSystem ts) throws AtlasException {
HierarchicalTypeDefinition<ClassType> deptTypeDef =
TypesUtil.createClassTypeDef("Department", ImmutableList.<String>of(),
HierarchicalTypeDefinition<ClassType> deptTypeDef = TypesUtil
.createClassTypeDef("Department", ImmutableList.<String>of(),
TypesUtil.createRequiredAttrDef("name", DataTypes.STRING_TYPE),
new AttributeDefinition("employees", String.format("array<%s>", "Person"),
Multiplicity.COLLECTION, true, "department")
);
HierarchicalTypeDefinition<ClassType> personTypeDef =
TypesUtil.createClassTypeDef("Person", ImmutableList.<String>of(),
Multiplicity.COLLECTION, true, "department"));
HierarchicalTypeDefinition<ClassType> personTypeDef = TypesUtil
.createClassTypeDef("Person", ImmutableList.<String>of(),
TypesUtil.createRequiredAttrDef("name", DataTypes.STRING_TYPE),
new AttributeDefinition("department",
"Department", Multiplicity.REQUIRED, false, "employees"),
new AttributeDefinition("manager",
"Manager", Multiplicity.OPTIONAL, false, "subordinates")
);
HierarchicalTypeDefinition<ClassType> managerTypeDef =
TypesUtil.createClassTypeDef("Manager",
ImmutableList.<String>of("Person"),
new AttributeDefinition("subordinates",
String.format("array<%s>", "Person"),
Multiplicity.COLLECTION, false, "manager")
);
HierarchicalTypeDefinition<TraitType> securityClearanceTypeDef =
TypesUtil.createTraitTypeDef("SecurityClearance",
ImmutableList.<String>of(),
TypesUtil.createRequiredAttrDef("level", DataTypes.INT_TYPE)
);
new AttributeDefinition("department", "Department", Multiplicity.REQUIRED, false, "employees"),
new AttributeDefinition("manager", "Manager", Multiplicity.OPTIONAL, false, "subordinates"));
HierarchicalTypeDefinition<ClassType> managerTypeDef = TypesUtil
.createClassTypeDef("Manager", ImmutableList.<String>of("Person"),
new AttributeDefinition("subordinates", String.format("array<%s>", "Person"),
Multiplicity.COLLECTION, false, "manager"));
HierarchicalTypeDefinition<TraitType> securityClearanceTypeDef = TypesUtil
.createTraitTypeDef("SecurityClearance", ImmutableList.<String>of(),
TypesUtil.createRequiredAttrDef("level", DataTypes.INT_TYPE));
ts.defineTypes(ImmutableList.<StructTypeDefinition>of(),
ImmutableList.<HierarchicalTypeDefinition<TraitType>>of(securityClearanceTypeDef),
ImmutableList.<HierarchicalTypeDefinition<ClassType>>of(deptTypeDef, personTypeDef,
managerTypeDef));
ImmutableList<HierarchicalType> types = ImmutableList.of(
ts.getDataType(HierarchicalType.class, "SecurityClearance"),
ts.getDataType(ClassType.class, "Department"),
ts.getDataType(ClassType.class, "Person"),
ts.getDataType(ClassType.class, "Manager")
);
ImmutableList.<HierarchicalTypeDefinition<ClassType>>of(deptTypeDef, personTypeDef, managerTypeDef));
ImmutableList<HierarchicalType> types = ImmutableList
.of(ts.getDataType(HierarchicalType.class, "SecurityClearance"),
ts.getDataType(ClassType.class, "Department"), ts.getDataType(ClassType.class, "Person"),
ts.getDataType(ClassType.class, "Manager"));
repo.defineTypes(types);
......
......@@ -55,26 +55,16 @@ public class EnumTest extends BaseTest {
}
void defineEnums(TypeSystem ts) throws AtlasException {
ts.defineEnumType("HiveObjectType",
new EnumValue("GLOBAL", 1),
new EnumValue("DATABASE", 2),
new EnumValue("TABLE", 3),
new EnumValue("PARTITION", 4),
new EnumValue("COLUMN", 5));
ts.defineEnumType("HiveObjectType", new EnumValue("GLOBAL", 1), new EnumValue("DATABASE", 2),
new EnumValue("TABLE", 3), new EnumValue("PARTITION", 4), new EnumValue("COLUMN", 5));
ts.defineEnumType("PrincipalType",
new EnumValue("USER", 1),
new EnumValue("ROLE", 2),
ts.defineEnumType("PrincipalType", new EnumValue("USER", 1), new EnumValue("ROLE", 2),
new EnumValue("GROUP", 3));
ts.defineEnumType("TxnState",
new EnumValue("COMMITTED", 1),
new EnumValue("ABORTED", 2),
ts.defineEnumType("TxnState", new EnumValue("COMMITTED", 1), new EnumValue("ABORTED", 2),
new EnumValue("OPEN", 3));
ts.defineEnumType("LockLevel",
new EnumValue("DB", 1),
new EnumValue("TABLE", 2),
ts.defineEnumType("LockLevel", new EnumValue("DB", 1), new EnumValue("TABLE", 2),
new EnumValue("PARTITION", 3));
}
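// Usage sketch for the enum types defined above. EnumType.fromValue is the
// name-based lookup used elsewhere in this codebase (e.g. for cluster location
// types); "ts" is assumed to be the surrounding TypeSystem.
EnumType hiveObjectType = ts.getDataType(EnumType.class, "HiveObjectType");
EnumValue table = hiveObjectType.fromValue("TABLE");   // resolves the pair ("TABLE", 3)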
......@@ -93,8 +83,7 @@ public class EnumTest extends BaseTest {
s.set("k", new BigDecimal(1));
s.set("l", new Date(1418265358440L));
s.set("m", Lists.asList(1, new Integer[]{1}));
s.set("n",
Lists.asList(BigDecimal.valueOf(1.1), new BigDecimal[]{BigDecimal.valueOf(1.1)}));
s.set("n", Lists.asList(BigDecimal.valueOf(1.1), new BigDecimal[]{BigDecimal.valueOf(1.1)}));
Map<String, Double> hm = Maps.newHashMap();
hm.put("a", 1.0);
hm.put("b", 2.0);
......@@ -118,65 +107,54 @@ public class EnumTest extends BaseTest {
}
protected ClassType defineClassTypeWithEnum(TypeSystem ts) throws AtlasException {
return ts.defineClassType(TypesUtil.createClassTypeDef("t4",
ImmutableList.<String>of(),
return ts.defineClassType(TypesUtil.createClassTypeDef("t4", ImmutableList.<String>of(),
TypesUtil.createRequiredAttrDef("a", DataTypes.INT_TYPE),
TypesUtil.createOptionalAttrDef("b", DataTypes.BOOLEAN_TYPE),
TypesUtil.createOptionalAttrDef("c", DataTypes.BYTE_TYPE),
TypesUtil.createOptionalAttrDef("d", DataTypes.SHORT_TYPE),
TypesUtil.createOptionalAttrDef("enum1",
ts.getDataType(EnumType.class, "HiveObjectType")),
TypesUtil.createOptionalAttrDef("enum1", ts.getDataType(EnumType.class, "HiveObjectType")),
TypesUtil.createOptionalAttrDef("e", DataTypes.INT_TYPE),
TypesUtil.createOptionalAttrDef("f", DataTypes.INT_TYPE),
TypesUtil.createOptionalAttrDef("g", DataTypes.LONG_TYPE),
TypesUtil.createOptionalAttrDef("enum2",
ts.getDataType(EnumType.class, "PrincipalType")),
TypesUtil.createOptionalAttrDef("enum2", ts.getDataType(EnumType.class, "PrincipalType")),
TypesUtil.createOptionalAttrDef("h", DataTypes.FLOAT_TYPE),
TypesUtil.createOptionalAttrDef("i", DataTypes.DOUBLE_TYPE),
TypesUtil.createOptionalAttrDef("j", DataTypes.BIGINTEGER_TYPE),
TypesUtil.createOptionalAttrDef("k", DataTypes.BIGDECIMAL_TYPE),
TypesUtil
.createOptionalAttrDef("enum3", ts.getDataType(EnumType.class, "TxnState")),
TypesUtil.createOptionalAttrDef("enum3", ts.getDataType(EnumType.class, "TxnState")),
TypesUtil.createOptionalAttrDef("l", DataTypes.DATE_TYPE),
TypesUtil.createOptionalAttrDef("m", ts.defineArrayType(DataTypes.INT_TYPE)),
TypesUtil.createOptionalAttrDef("n", ts.defineArrayType(DataTypes.BIGDECIMAL_TYPE)),
TypesUtil.createOptionalAttrDef("o",
ts.defineMapType(DataTypes.STRING_TYPE, DataTypes.DOUBLE_TYPE)),
TypesUtil.createOptionalAttrDef("enum4",
ts.getDataType(EnumType.class, "LockLevel"))));
TypesUtil.createOptionalAttrDef("o", ts.defineMapType(DataTypes.STRING_TYPE, DataTypes.DOUBLE_TYPE)),
TypesUtil.createOptionalAttrDef("enum4", ts.getDataType(EnumType.class, "LockLevel"))));
}
@Test
public void testStruct() throws AtlasException {
TypeSystem ts = getTypeSystem();
defineEnums(ts);
StructType structType = ts.defineStructType("t3",
true,
TypesUtil.createRequiredAttrDef("a", DataTypes.INT_TYPE),
TypesUtil.createOptionalAttrDef("b", DataTypes.BOOLEAN_TYPE),
TypesUtil.createOptionalAttrDef("c", DataTypes.BYTE_TYPE),
TypesUtil.createOptionalAttrDef("d", DataTypes.SHORT_TYPE),
TypesUtil.createOptionalAttrDef("enum1",
ts.getDataType(EnumType.class, "HiveObjectType")),
TypesUtil.createOptionalAttrDef("e", DataTypes.INT_TYPE),
TypesUtil.createOptionalAttrDef("f", DataTypes.INT_TYPE),
TypesUtil.createOptionalAttrDef("g", DataTypes.LONG_TYPE),
TypesUtil.createOptionalAttrDef("enum2",
ts.getDataType(EnumType.class, "PrincipalType")),
TypesUtil.createOptionalAttrDef("h", DataTypes.FLOAT_TYPE),
TypesUtil.createOptionalAttrDef("i", DataTypes.DOUBLE_TYPE),
TypesUtil.createOptionalAttrDef("j", DataTypes.BIGINTEGER_TYPE),
TypesUtil.createOptionalAttrDef("k", DataTypes.BIGDECIMAL_TYPE),
TypesUtil
.createOptionalAttrDef("enum3", ts.getDataType(EnumType.class, "TxnState")),
StructType structType =
ts.defineStructType("t3", true, TypesUtil.createRequiredAttrDef("a", DataTypes.INT_TYPE),
TypesUtil.createOptionalAttrDef("b", DataTypes.BOOLEAN_TYPE),
TypesUtil.createOptionalAttrDef("c", DataTypes.BYTE_TYPE),
TypesUtil.createOptionalAttrDef("d", DataTypes.SHORT_TYPE),
TypesUtil.createOptionalAttrDef("enum1", ts.getDataType(EnumType.class, "HiveObjectType")),
TypesUtil.createOptionalAttrDef("e", DataTypes.INT_TYPE),
TypesUtil.createOptionalAttrDef("f", DataTypes.INT_TYPE),
TypesUtil.createOptionalAttrDef("g", DataTypes.LONG_TYPE),
TypesUtil.createOptionalAttrDef("enum2", ts.getDataType(EnumType.class, "PrincipalType")),
TypesUtil.createOptionalAttrDef("h", DataTypes.FLOAT_TYPE),
TypesUtil.createOptionalAttrDef("i", DataTypes.DOUBLE_TYPE),
TypesUtil.createOptionalAttrDef("j", DataTypes.BIGINTEGER_TYPE),
TypesUtil.createOptionalAttrDef("k", DataTypes.BIGDECIMAL_TYPE),
TypesUtil.createOptionalAttrDef("enum3", ts.getDataType(EnumType.class, "TxnState")),
TypesUtil.createOptionalAttrDef("l", DataTypes.DATE_TYPE),
TypesUtil.createOptionalAttrDef("m", ts.defineArrayType(DataTypes.INT_TYPE)),
TypesUtil.createOptionalAttrDef("n", ts.defineArrayType(DataTypes.BIGDECIMAL_TYPE)),
TypesUtil.createOptionalAttrDef("o",
ts.defineMapType(DataTypes.STRING_TYPE, DataTypes.DOUBLE_TYPE)),
TypesUtil.createOptionalAttrDef("enum4",
ts.getDataType(EnumType.class, "LockLevel")));
TypesUtil.createOptionalAttrDef("l", DataTypes.DATE_TYPE),
TypesUtil.createOptionalAttrDef("m", ts.defineArrayType(DataTypes.INT_TYPE)),
TypesUtil.createOptionalAttrDef("n", ts.defineArrayType(DataTypes.BIGDECIMAL_TYPE)), TypesUtil
.createOptionalAttrDef("o",
ts.defineMapType(DataTypes.STRING_TYPE, DataTypes.DOUBLE_TYPE)),
TypesUtil.createOptionalAttrDef("enum4", ts.getDataType(EnumType.class, "LockLevel")));
Struct s = createStructWithEnum("t3");
ITypedStruct typedS = structType.convert(s, Multiplicity.REQUIRED);
......
......@@ -44,43 +44,36 @@ import java.util.List;
public class InstanceE2ETest extends BaseTest {
protected List<HierarchicalTypeDefinition> createHiveTypes(TypeSystem typeSystem)
throws AtlasException {
protected List<HierarchicalTypeDefinition> createHiveTypes(TypeSystem typeSystem) throws AtlasException {
ArrayList<HierarchicalTypeDefinition> typeDefinitions = new ArrayList<>();
HierarchicalTypeDefinition<ClassType> databaseTypeDefinition =
TypesUtil.createClassTypeDef("hive_database",
ImmutableList.<String>of(),
HierarchicalTypeDefinition<ClassType> databaseTypeDefinition = TypesUtil
.createClassTypeDef("hive_database", ImmutableList.<String>of(),
TypesUtil.createRequiredAttrDef("name", DataTypes.STRING_TYPE),
TypesUtil.createRequiredAttrDef("description", DataTypes.STRING_TYPE));
typeDefinitions.add(databaseTypeDefinition);
HierarchicalTypeDefinition<ClassType> tableTypeDefinition = TypesUtil.createClassTypeDef(
"hive_table",
ImmutableList.<String>of(),
TypesUtil.createRequiredAttrDef("name", DataTypes.STRING_TYPE),
TypesUtil.createRequiredAttrDef("description", DataTypes.STRING_TYPE),
TypesUtil.createRequiredAttrDef("type", DataTypes.STRING_TYPE),
new AttributeDefinition("hive_database",
"hive_database", Multiplicity.REQUIRED, false, "hive_database"));
HierarchicalTypeDefinition<ClassType> tableTypeDefinition = TypesUtil
.createClassTypeDef("hive_table", ImmutableList.<String>of(),
TypesUtil.createRequiredAttrDef("name", DataTypes.STRING_TYPE),
TypesUtil.createRequiredAttrDef("description", DataTypes.STRING_TYPE),
TypesUtil.createRequiredAttrDef("type", DataTypes.STRING_TYPE),
new AttributeDefinition("hive_database", "hive_database", Multiplicity.REQUIRED, false,
"hive_database"));
typeDefinitions.add(tableTypeDefinition);
HierarchicalTypeDefinition<TraitType> fetlTypeDefinition = TypesUtil.createTraitTypeDef(
"hive_fetl",
ImmutableList.<String>of(),
TypesUtil.createRequiredAttrDef("level", DataTypes.INT_TYPE));
HierarchicalTypeDefinition<TraitType> fetlTypeDefinition = TypesUtil
.createTraitTypeDef("hive_fetl", ImmutableList.<String>of(),
TypesUtil.createRequiredAttrDef("level", DataTypes.INT_TYPE));
typeDefinitions.add(fetlTypeDefinition);
typeSystem.defineTypes(
ImmutableList.<StructTypeDefinition>of(),
ImmutableList.of(fetlTypeDefinition),
typeSystem.defineTypes(ImmutableList.<StructTypeDefinition>of(), ImmutableList.of(fetlTypeDefinition),
ImmutableList.of(databaseTypeDefinition, tableTypeDefinition));
return typeDefinitions;
}
protected Referenceable createHiveTableReferenceable()
throws AtlasException {
protected Referenceable createHiveTableReferenceable() throws AtlasException {
Referenceable databaseInstance = new Referenceable("hive_database");
databaseInstance.set("name", "hive_database");
databaseInstance.set("description", "foo database");
......@@ -99,8 +92,7 @@ public class InstanceE2ETest extends BaseTest {
return tableInstance;
}
protected ITypedReferenceableInstance createHiveTableInstance(TypeSystem typeSystem)
throws AtlasException {
protected ITypedReferenceableInstance createHiveTableInstance(TypeSystem typeSystem) throws AtlasException {
ClassType tableType = typeSystem.getDataType(ClassType.class, "hive_table");
return tableType.convert(createHiveTableReferenceable(), Multiplicity.REQUIRED);
}
......@@ -112,8 +104,7 @@ public class InstanceE2ETest extends BaseTest {
createHiveTypes(ts);
String jsonStr = TypesSerialization$.MODULE$
.toJson(ts, ImmutableList.of("hive_database", "hive_table"));
String jsonStr = TypesSerialization$.MODULE$.toJson(ts, ImmutableList.of("hive_database", "hive_table"));
System.out.println(jsonStr);
TypesDef typesDef1 = TypesSerialization$.MODULE$.fromJson(jsonStr);
......@@ -121,8 +112,7 @@ public class InstanceE2ETest extends BaseTest {
ts.reset();
ts.defineTypes(typesDef1);
jsonStr = TypesSerialization$.MODULE$
.toJson(ts, ImmutableList.of("hive_database", "hive_table"));
jsonStr = TypesSerialization$.MODULE$.toJson(ts, ImmutableList.of("hive_database", "hive_table"));
System.out.println(jsonStr);
}
......@@ -152,7 +142,7 @@ public class InstanceE2ETest extends BaseTest {
Referenceable r = createHiveTableReferenceable();
String jsonStr = InstanceSerialization$.MODULE$.toJson(r, true);
Referenceable r1 = InstanceSerialization$.MODULE$.fromJsonReferenceable(jsonStr, true);
Referenceable r1 = InstanceSerialization$.MODULE$.fromJsonReferenceable(jsonStr, true);
ClassType tableType = ts.getDataType(ClassType.class, "hive_table");
ITypedReferenceableInstance i = tableType.convert(r1, Multiplicity.REQUIRED);
......
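// Round-trip sketch consolidating the serialization pattern exercised above; every
// call appears in this test, only the helper name and its shape are illustrative.
static ITypedReferenceableInstance roundTrip(TypeSystem ts, Referenceable r) throws AtlasException {
    String typesJson = TypesSerialization$.MODULE$.toJson(ts, ImmutableList.of("hive_database", "hive_table"));
    ts.reset();
    ts.defineTypes(TypesSerialization$.MODULE$.fromJson(typesJson));     // restore the type definitions
    String instanceJson = InstanceSerialization$.MODULE$.toJson(r, true);
    Referenceable restored = InstanceSerialization$.MODULE$.fromJsonReferenceable(instanceJson, true);
    return ts.getDataType(ClassType.class, "hive_table").convert(restored, Multiplicity.REQUIRED);
}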
......@@ -38,8 +38,7 @@ public class StructTest extends BaseTest {
public void setup() throws Exception {
super.setup();
structType = (StructType) getTypeSystem().getDataType(StructType.class, STRUCT_TYPE_1);
recursiveStructType = (StructType) getTypeSystem()
.getDataType(StructType.class, STRUCT_TYPE_2);
recursiveStructType = (StructType) getTypeSystem().getDataType(StructType.class, STRUCT_TYPE_2);
}
@Test
......
......@@ -65,15 +65,13 @@ public class TraitTest extends BaseTest {
*/
@Test
public void test1() throws AtlasException {
HierarchicalTypeDefinition A = createTraitTypeDef("A", null,
createRequiredAttrDef("a", DataTypes.INT_TYPE),
createOptionalAttrDef("b", DataTypes.BOOLEAN_TYPE),
createOptionalAttrDef("c", DataTypes.BYTE_TYPE),
HierarchicalTypeDefinition A = createTraitTypeDef("A", null, createRequiredAttrDef("a", DataTypes.INT_TYPE),
createOptionalAttrDef("b", DataTypes.BOOLEAN_TYPE), createOptionalAttrDef("c", DataTypes.BYTE_TYPE),
createOptionalAttrDef("d", DataTypes.SHORT_TYPE));
HierarchicalTypeDefinition B = createTraitTypeDef("B", ImmutableList.<String>of("A"),
createOptionalAttrDef("b", DataTypes.BOOLEAN_TYPE));
HierarchicalTypeDefinition C = createTraitTypeDef("C", ImmutableList.<String>of("A"),
createOptionalAttrDef("c", DataTypes.BYTE_TYPE));
HierarchicalTypeDefinition C =
createTraitTypeDef("C", ImmutableList.<String>of("A"), createOptionalAttrDef("c", DataTypes.BYTE_TYPE));
HierarchicalTypeDefinition D = createTraitTypeDef("D", ImmutableList.<String>of("B", "C"),
createOptionalAttrDef("d", DataTypes.SHORT_TYPE));
......@@ -155,15 +153,13 @@ public class TraitTest extends BaseTest {
@Test
public void testRandomOrder() throws AtlasException {
HierarchicalTypeDefinition A = createTraitTypeDef("A", null,
createRequiredAttrDef("a", DataTypes.INT_TYPE),
createOptionalAttrDef("b", DataTypes.BOOLEAN_TYPE),
createOptionalAttrDef("c", DataTypes.BYTE_TYPE),
HierarchicalTypeDefinition A = createTraitTypeDef("A", null, createRequiredAttrDef("a", DataTypes.INT_TYPE),
createOptionalAttrDef("b", DataTypes.BOOLEAN_TYPE), createOptionalAttrDef("c", DataTypes.BYTE_TYPE),
createOptionalAttrDef("d", DataTypes.SHORT_TYPE));
HierarchicalTypeDefinition B = createTraitTypeDef("B", ImmutableList.<String>of("A"),
createOptionalAttrDef("b", DataTypes.BOOLEAN_TYPE));
HierarchicalTypeDefinition C = createTraitTypeDef("C", ImmutableList.<String>of("A"),
createOptionalAttrDef("c", DataTypes.BYTE_TYPE));
HierarchicalTypeDefinition C =
createTraitTypeDef("C", ImmutableList.<String>of("A"), createOptionalAttrDef("c", DataTypes.BYTE_TYPE));
HierarchicalTypeDefinition D = createTraitTypeDef("D", ImmutableList.<String>of("B", "C"),
createOptionalAttrDef("d", DataTypes.SHORT_TYPE));
......
......@@ -23,8 +23,8 @@ import com.tinkerpop.blueprints.Direction;
import com.tinkerpop.blueprints.Edge;
import com.tinkerpop.blueprints.Vertex;
import junit.framework.Assert;
import org.apache.atlas.GraphTransaction;
import org.apache.atlas.AtlasException;
import org.apache.atlas.GraphTransaction;
import org.apache.atlas.RepositoryMetadataModule;
import org.apache.atlas.TestUtils;
import org.apache.atlas.repository.graph.GraphHelper;
......@@ -80,7 +80,7 @@ public class GraphBackedTypeStoreTest {
}
}
@Test (dependsOnMethods = "testStore")
@Test(dependsOnMethods = "testStore")
public void testRestore() throws Exception {
TypesDef types = typeStore.restore();
......
......@@ -38,7 +38,7 @@ public class TypeNotFoundException extends AtlasException {
}
public TypeNotFoundException(String message, Throwable cause, boolean enableSuppression,
boolean writableStackTrace) {
boolean writableStackTrace) {
super(message, cause, enableSuppression, writableStackTrace);
}
}
......@@ -67,9 +67,8 @@ public class Referenceable extends Struct implements IReferenceableInstance {
* @param values
*/
@InterfaceAudience.Private
public Referenceable(String guid, String typeName, Map<String, Object> values,
List<String> _traitNames,
Map<String, IStruct> _traits) {
public Referenceable(String guid, String typeName, Map<String, Object> values, List<String> _traitNames,
Map<String, IStruct> _traits) {
super(typeName, values);
id = new Id(guid, 0, typeName);
traitNames = ImmutableList.copyOf(_traitNames);
......
......@@ -31,8 +31,7 @@ public class DownCastStructInstance implements IStruct {
public final DownCastFieldMapping fieldMapping;
public final IStruct backingInstance;
public DownCastStructInstance(String typeName, DownCastFieldMapping fieldMapping,
IStruct backingInstance) {
public DownCastStructInstance(String typeName, DownCastFieldMapping fieldMapping, IStruct backingInstance) {
this.typeName = typeName;
this.fieldMapping = fieldMapping;
this.backingInstance = backingInstance;
......@@ -60,7 +59,7 @@ public class DownCastStructInstance implements IStruct {
@Override
public Map<String, Object> getValuesMap() throws AtlasException {
Map<String,Object> m = new HashMap<>();
Map<String, Object> m = new HashMap<>();
for (String attr : fieldMapping.fieldNameMap.keySet()) {
m.put(attr, get(attr));
}
......
......@@ -70,8 +70,7 @@ public class Id implements ITypedReferenceableInstance {
}
public String toString() {
return String
.format("(type: %s, id: %s)", className, isUnassigned() ? "<unassigned>" : "" + id);
return String.format("(type: %s, id: %s)", className, isUnassigned() ? "<unassigned>" : "" + id);
}
public String getClassName() {
......@@ -88,14 +87,24 @@ public class Id implements ITypedReferenceableInstance {
@Override
public boolean equals(Object o) {
if (this == o) return true;
if (o == null || getClass() != o.getClass()) return false;
if (this == o) {
return true;
}
if (o == null || getClass() != o.getClass()) {
return false;
}
Id id1 = (Id) o;
if (version != id1.version) return false;
if (!className.equals(id1.className)) return false;
if (!id.equals(id1.id)) return false;
if (version != id1.version) {
return false;
}
if (!className.equals(id1.className)) {
return false;
}
if (!id.equals(id1.id)) {
return false;
}
return true;
}
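// The equals above compares id, className and version, so a consistent hashCode must
// cover the same three fields. The actual method sits outside this hunk; a sketch,
// assuming version is an int as the direct != comparison suggests:
@Override
public int hashCode() {
    int result = id.hashCode();
    result = 31 * result + className.hashCode();
    result = 31 * result + version;
    return result;
}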
......
......@@ -59,8 +59,7 @@ public class MapIds implements ObjectGraphWalker.NodeProcessor {
}
} else if (nd.aInfo.dataType().getTypeCategory() == DataTypes.TypeCategory.ARRAY) {
DataTypes.ArrayType aT = (DataTypes.ArrayType) nd.aInfo.dataType();
Object v = aT
.mapIds((ImmutableCollection) nd.value, nd.aInfo.multiplicity, idToNewIdMap);
Object v = aT.mapIds((ImmutableCollection) nd.value, nd.aInfo.multiplicity, idToNewIdMap);
nd.instance.set(nd.attributeName, v);
} else if (nd.aInfo.dataType().getTypeCategory() == DataTypes.TypeCategory.MAP) {
DataTypes.MapType mT = (DataTypes.MapType) nd.aInfo.dataType();
......
......@@ -40,20 +40,12 @@ public class ReferenceableInstance extends StructInstance implements ITypedRefer
private Id id;
public ReferenceableInstance(Id id, String dataTypeName, FieldMapping fieldMapping,
boolean[] nullFlags,
boolean[] bools, byte[] bytes, short[] shorts, int[] ints,
long[] longs,
float[] floats, double[] doubles, BigDecimal[] bigDecimals,
BigInteger[] bigIntegers, Date[] dates, String[] strings,
ImmutableList<Object>[] arrays,
ImmutableMap<Object, Object>[] maps,
StructInstance[] structs,
ReferenceableInstance[] referenceableInstances,
Id[] ids,
ImmutableMap<String, ITypedStruct> traits) {
super(dataTypeName, fieldMapping, nullFlags, bools, bytes, shorts, ints, longs, floats,
doubles, bigDecimals,
public ReferenceableInstance(Id id, String dataTypeName, FieldMapping fieldMapping, boolean[] nullFlags,
boolean[] bools, byte[] bytes, short[] shorts, int[] ints, long[] longs, float[] floats, double[] doubles,
BigDecimal[] bigDecimals, BigInteger[] bigIntegers, Date[] dates, String[] strings,
ImmutableList<Object>[] arrays, ImmutableMap<Object, Object>[] maps, StructInstance[] structs,
ReferenceableInstance[] referenceableInstances, Id[] ids, ImmutableMap<String, ITypedStruct> traits) {
super(dataTypeName, fieldMapping, nullFlags, bools, bytes, shorts, ints, longs, floats, doubles, bigDecimals,
bigIntegers, dates, strings, arrays, maps, structs, referenceableInstances, ids);
this.id = id;
this.traits = traits;
......
......@@ -35,15 +35,14 @@ public final class AttributeDefinition {
*/
public final String reverseAttributeName;
public AttributeDefinition(String name, String dataTypeName, Multiplicity multiplicity,
boolean isComposite, String reverseAttributeName) {
public AttributeDefinition(String name, String dataTypeName, Multiplicity multiplicity, boolean isComposite,
String reverseAttributeName) {
this(name, dataTypeName, multiplicity, isComposite, false, true, reverseAttributeName);
}
public AttributeDefinition(String name, String dataTypeName,
Multiplicity multiplicity, boolean isComposite, boolean isUnique,
boolean isIndexable, String reverseAttributeName) {
public AttributeDefinition(String name, String dataTypeName, Multiplicity multiplicity, boolean isComposite,
boolean isUnique, boolean isIndexable, String reverseAttributeName) {
this.name = ParamChecker.notEmpty(name, "Attribute name");
this.dataTypeName = ParamChecker.notEmpty(dataTypeName, "Attribute type");
this.multiplicity = multiplicity;
......@@ -55,21 +54,37 @@ public final class AttributeDefinition {
@Override
public boolean equals(Object o) {
if (this == o) return true;
if (o == null || getClass() != o.getClass()) return false;
if (this == o) {
return true;
}
if (o == null || getClass() != o.getClass()) {
return false;
}
AttributeDefinition that = (AttributeDefinition) o;
if (isComposite != that.isComposite) return false;
if (isUnique != that.isUnique) return false;
if (isIndexable != that.isIndexable) return false;
if (!dataTypeName.equals(that.dataTypeName)) return false;
if (!multiplicity.equals(that.multiplicity)) return false;
if (!name.equals(that.name)) return false;
if (reverseAttributeName != null
? !reverseAttributeName.equals(that.reverseAttributeName)
: that.reverseAttributeName != null)
if (isComposite != that.isComposite) {
return false;
}
if (isUnique != that.isUnique) {
return false;
}
if (isIndexable != that.isIndexable) {
return false;
}
if (!dataTypeName.equals(that.dataTypeName)) {
return false;
}
if (!multiplicity.equals(that.multiplicity)) {
return false;
}
if (!name.equals(that.name)) {
return false;
}
if (reverseAttributeName != null ? !reverseAttributeName.equals(that.reverseAttributeName) :
that.reverseAttributeName != null) {
return false;
}
return true;
}
......
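// Usage sketch: the shorter constructor above delegates with isUnique=false and
// isIndexable=true, so these two definitions are equal under the equals just shown
// (the attribute and type names here are illustrative).
AttributeDefinition a1 = new AttributeDefinition("name", "string", Multiplicity.REQUIRED, false, null);
AttributeDefinition a2 = new AttributeDefinition("name", "string", Multiplicity.REQUIRED, false, false, true, null);
assert a1.equals(a2);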
......@@ -39,8 +39,9 @@ public class AttributeInfo {
AttributeInfo(TypeSystem t, AttributeDefinition def, Map<String, IDataType> tempTypes) throws AtlasException {
this.name = def.name;
this.dataType = (tempTypes != null && tempTypes.containsKey(def.dataTypeName)) ?
tempTypes.get(def.dataTypeName) : t.getDataType(IDataType.class, def.dataTypeName);
this.dataType =
(tempTypes != null && tempTypes.containsKey(def.dataTypeName)) ? tempTypes.get(def.dataTypeName) :
t.getDataType(IDataType.class, def.dataTypeName);
this.multiplicity = def.multiplicity;
this.isComposite = def.isComposite;
this.isUnique = def.isUnique;
......
......@@ -53,8 +53,7 @@ public class ClassType extends HierarchicalType<ClassType, IReferenceableInstanc
infoToNameMap = null;
}
ClassType(TypeSystem typeSystem, String name, ImmutableList<String> superTypes,
AttributeInfo... fields)
ClassType(TypeSystem typeSystem, String name, ImmutableList<String> superTypes, AttributeInfo... fields)
throws AtlasException {
super(typeSystem, ClassType.class, name, superTypes, fields);
infoToNameMap = TypeUtils.buildAttrInfoToNameMap(fieldMapping);
......@@ -71,8 +70,7 @@ public class ClassType extends HierarchicalType<ClassType, IReferenceableInstanc
if (isSubType(cType.getName())) {
return;
}
throw new AtlasException(
String.format("Id %s is not valid for class %s", id, getName()));
throw new AtlasException(String.format("Id %s is not valid for class %s", id, getName()));
}
}
......@@ -84,8 +82,7 @@ public class ClassType extends HierarchicalType<ClassType, IReferenceableInstanc
}
@Override
public ITypedReferenceableInstance convert(Object val, Multiplicity m)
throws AtlasException {
public ITypedReferenceableInstance convert(Object val, Multiplicity m) throws AtlasException {
if (val != null) {
if (val instanceof ITypedReferenceableInstance) {
......@@ -122,9 +119,9 @@ public class ClassType extends HierarchicalType<ClassType, IReferenceableInstanc
id = r.getId();
}
ITypedReferenceableInstance tr = r != null ?
createInstanceWithTraits(id, r, r.getTraits().toArray(new String[0]))
: createInstance(id);
ITypedReferenceableInstance tr =
r != null ? createInstanceWithTraits(id, r, r.getTraits().toArray(new String[0])) :
createInstance(id);
if (id != null && id.isAssigned()) {
return tr;
......@@ -134,8 +131,7 @@ public class ClassType extends HierarchicalType<ClassType, IReferenceableInstanc
String attrKey = e.getKey();
AttributeInfo i = e.getValue();
Object aVal = s.get(attrKey);
if (aVal != null &&
i.dataType().getTypeCategory() == DataTypes.TypeCategory.CLASS) {
if (aVal != null && i.dataType().getTypeCategory() == DataTypes.TypeCategory.CLASS) {
if (!i.isComposite) {
aVal = ((IReferenceableInstance) aVal).getId();
}
......@@ -164,25 +160,21 @@ public class ClassType extends HierarchicalType<ClassType, IReferenceableInstanc
@Override
public ITypedReferenceableInstance createInstance() throws AtlasException {
return createInstance((String[])null);
return createInstance((String[]) null);
}
public ITypedReferenceableInstance createInstance(String... traitNames)
throws AtlasException {
public ITypedReferenceableInstance createInstance(String... traitNames) throws AtlasException {
return createInstance(null, traitNames);
}
public ITypedReferenceableInstance createInstance(Id id, String... traitNames)
throws AtlasException {
public ITypedReferenceableInstance createInstance(Id id, String... traitNames) throws AtlasException {
return createInstanceWithTraits(id, null, traitNames);
}
public ITypedReferenceableInstance createInstanceWithTraits(Id id, Referenceable r,
String... traitNames)
public ITypedReferenceableInstance createInstanceWithTraits(Id id, Referenceable r, String... traitNames)
throws AtlasException {
ImmutableMap.Builder<String, ITypedStruct> b
= new ImmutableBiMap.Builder<String, ITypedStruct>();
ImmutableMap.Builder<String, ITypedStruct> b = new ImmutableBiMap.Builder<String, ITypedStruct>();
if (traitNames != null) {
for (String t : traitNames) {
TraitType tType = typeSystem.getDataType(TraitType.class, t);
......@@ -193,9 +185,7 @@ public class ClassType extends HierarchicalType<ClassType, IReferenceableInstanc
}
}
return new ReferenceableInstance(id == null ? new Id(getName()) : id,
getName(),
fieldMapping,
return new ReferenceableInstance(id == null ? new Id(getName()) : id, getName(), fieldMapping,
new boolean[fieldMapping.fields.size()],
fieldMapping.numBools == 0 ? null : new boolean[fieldMapping.numBools],
fieldMapping.numBytes == 0 ? null : new byte[fieldMapping.numBytes],
......@@ -204,23 +194,19 @@ public class ClassType extends HierarchicalType<ClassType, IReferenceableInstanc
fieldMapping.numLongs == 0 ? null : new long[fieldMapping.numLongs],
fieldMapping.numFloats == 0 ? null : new float[fieldMapping.numFloats],
fieldMapping.numDoubles == 0 ? null : new double[fieldMapping.numDoubles],
fieldMapping.numBigDecimals == 0 ? null
: new BigDecimal[fieldMapping.numBigDecimals],
fieldMapping.numBigDecimals == 0 ? null : new BigDecimal[fieldMapping.numBigDecimals],
fieldMapping.numBigInts == 0 ? null : new BigInteger[fieldMapping.numBigInts],
fieldMapping.numDates == 0 ? null : new Date[fieldMapping.numDates],
fieldMapping.numStrings == 0 ? null : new String[fieldMapping.numStrings],
fieldMapping.numArrays == 0 ? null : new ImmutableList[fieldMapping.numArrays],
fieldMapping.numMaps == 0 ? null : new ImmutableMap[fieldMapping.numMaps],
fieldMapping.numStructs == 0 ? null : new StructInstance[fieldMapping.numStructs],
fieldMapping.numReferenceables == 0 ? null
: new ReferenceableInstance[fieldMapping.numReferenceables],
fieldMapping.numReferenceables == 0 ? null : new Id[fieldMapping.numReferenceables],
b.build());
fieldMapping.numReferenceables == 0 ? null : new ReferenceableInstance[fieldMapping.numReferenceables],
fieldMapping.numReferenceables == 0 ? null : new Id[fieldMapping.numReferenceables], b.build());
}
@Override
public void output(IReferenceableInstance s, Appendable buf, String prefix)
throws AtlasException {
public void output(IReferenceableInstance s, Appendable buf, String prefix) throws AtlasException {
fieldMapping.output(s, buf, prefix);
}
......
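// Usage sketch for the factory methods above: an unassigned instance with a trait
// attached at creation time. The type and trait names are illustrative; "ts" is
// assumed to be a TypeSystem with both types already defined.
ClassType personType = ts.getDataType(ClassType.class, "Person");
ITypedReferenceableInstance p = personType.createInstance("SecurityClearance");   // id defaults to new Id(getName())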
......@@ -62,8 +62,7 @@ public class DataTypes {
}
public static String mapTypeName(String keyTypeName, String valueTypeName) {
return String.format("%s%s,%s%s", MAP_TYPE_PREFIX,
keyTypeName, valueTypeName, MAP_TYPE_SUFFIX);
return String.format("%s%s,%s%s", MAP_TYPE_PREFIX, keyTypeName, valueTypeName, MAP_TYPE_SUFFIX);
}
public static String mapTypeName(IDataType keyType, IDataType valueType) {
......@@ -433,8 +432,8 @@ public class DataTypes {
@Override
public void output(Date val, Appendable buf, String prefix) throws AtlasException {
TypeUtils.outputVal(val == null ? "<null>" :
TypeSystem.getInstance().getDateFormat().format(val), buf, prefix);
TypeUtils.outputVal(val == null ? "<null>" : TypeSystem.getInstance().getDateFormat().format(val), buf,
prefix);
}
public Date nullValue() {
......@@ -502,19 +501,18 @@ public class DataTypes {
it = (Iterator) val;
}
if (it != null) {
ImmutableCollection.Builder b = m.isUnique ? ImmutableSet.builder()
: ImmutableList.builder();
ImmutableCollection.Builder b = m.isUnique ? ImmutableSet.builder() : ImmutableList.builder();
while (it.hasNext()) {
b.add(elemType.convert(it.next(),
TypeSystem.getInstance().allowNullsInCollections()
? Multiplicity.OPTIONAL : Multiplicity.REQUIRED));
TypeSystem.getInstance().allowNullsInCollections() ? Multiplicity.OPTIONAL :
Multiplicity.REQUIRED));
}
return m.isUnique ? b.build().asList() : b.build();
} else {
try {
return ImmutableList.of(elemType.convert(val,
TypeSystem.getInstance().allowNullsInCollections()
? Multiplicity.OPTIONAL : Multiplicity.REQUIRED));
TypeSystem.getInstance().allowNullsInCollections() ? Multiplicity.OPTIONAL :
Multiplicity.REQUIRED));
} catch (Exception e) {
throw new ValueConversionException(this, val, e);
}
......@@ -526,15 +524,13 @@ public class DataTypes {
return null;
}
public ImmutableCollection<?> mapIds(ImmutableCollection<?> val, Multiplicity m,
Map<Id, Id> transientToNewIds)
public ImmutableCollection<?> mapIds(ImmutableCollection<?> val, Multiplicity m, Map<Id, Id> transientToNewIds)
throws AtlasException {
if (val == null || elemType.getTypeCategory() != TypeCategory.CLASS) {
return val;
}
ImmutableCollection.Builder b = m.isUnique ? ImmutableSet.builder()
: ImmutableList.builder();
ImmutableCollection.Builder b = m.isUnique ? ImmutableSet.builder() : ImmutableList.builder();
Iterator it = val.iterator();
while (it.hasNext()) {
Object elem = it.next();
......@@ -600,11 +596,10 @@ public class DataTypes {
while (it.hasNext()) {
Map.Entry e = it.next();
b.put(keyType.convert(e.getKey(),
TypeSystem.getInstance().allowNullsInCollections()
? Multiplicity.OPTIONAL : Multiplicity.REQUIRED),
valueType.convert(e.getValue(),
TypeSystem.getInstance().allowNullsInCollections()
? Multiplicity.OPTIONAL : Multiplicity.REQUIRED));
TypeSystem.getInstance().allowNullsInCollections() ? Multiplicity.OPTIONAL :
Multiplicity.REQUIRED), valueType.convert(e.getValue(),
TypeSystem.getInstance().allowNullsInCollections() ? Multiplicity.OPTIONAL :
Multiplicity.REQUIRED));
}
return b.build();
} else {
......@@ -617,12 +612,11 @@ public class DataTypes {
return null;
}
public ImmutableMap<?, ?> mapIds(ImmutableMap val, Multiplicity m,
Map<Id, Id> transientToNewIds)
public ImmutableMap<?, ?> mapIds(ImmutableMap val, Multiplicity m, Map<Id, Id> transientToNewIds)
throws AtlasException {
if (val == null || (keyType.getTypeCategory() != TypeCategory.CLASS &&
valueType.getTypeCategory() != TypeCategory.CLASS)) {
if (val == null || (keyType.getTypeCategory() != TypeCategory.CLASS
&& valueType.getTypeCategory() != TypeCategory.CLASS)) {
return val;
}
ImmutableMap.Builder b = ImmutableMap.builder();
......
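// Usage sketch for the collection conversion above: convert() walks the input and
// converts each element with a Multiplicity that depends on allowNullsInCollections().
// Assumes defineArrayType returns a DataTypes.ArrayType, as its uses in this patch
// suggest; Arrays is java.util.Arrays.
DataTypes.ArrayType intArrayType = ts.defineArrayType(DataTypes.INT_TYPE);
Object converted = intArrayType.convert(Arrays.asList(1, 2, 3), Multiplicity.COLLECTION);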
......@@ -30,8 +30,7 @@ public class DownCastFieldMapping {
this.fieldNameMap = fieldNameMap;
}
public void set(DownCastStructInstance s, String attrName, Object val)
throws AtlasException {
public void set(DownCastStructInstance s, String attrName, Object val) throws AtlasException {
String mappedNm = fieldNameMap.get(attrName);
if (mappedNm == null) {
......
......@@ -54,13 +54,13 @@ public class EnumType extends AbstractDataType<EnumValue> {
if (val != null) {
EnumValue e = null;
if (val instanceof EnumValue) {
e = valueMap.get(((EnumValue)val).value);
} else if ( val instanceof Integer || val instanceof BigInt) {
e = valueMap.get(((EnumValue) val).value);
} else if (val instanceof Integer || val instanceof BigInt) {
e = ordinalMap.get(val);
} else if ( val instanceof String) {
} else if (val instanceof String) {
e = valueMap.get(val);
} else if ( val instanceof Number ) {
e = ordinalMap.get(((Number)val).intValue());
} else if (val instanceof Number) {
e = ordinalMap.get(((Number) val).intValue());
}
if (e == null) {
......
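// Per the branches above, convert() accepts an EnumValue, an Integer/BigInt ordinal,
// a String name, or any other Number; e.g. for the TxnState enum defined earlier in
// this patch, where "COMMITTED" has ordinal 1:
EnumType txnState = ts.getDataType(EnumType.class, "TxnState");
Object byName = txnState.convert("COMMITTED", Multiplicity.REQUIRED);
Object byOrdinal = txnState.convert(1, Multiplicity.REQUIRED);   // resolves to the same value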
......@@ -34,13 +34,21 @@ public final class EnumTypeDefinition {
@Override
public boolean equals(Object o) {
if (this == o) return true;
if (o == null || getClass() != o.getClass()) return false;
if (this == o) {
return true;
}
if (o == null || getClass() != o.getClass()) {
return false;
}
EnumTypeDefinition that = (EnumTypeDefinition) o;
if (!Arrays.equals(enumValues, that.enumValues)) return false;
if (!name.equals(that.name)) return false;
if (!Arrays.equals(enumValues, that.enumValues)) {
return false;
}
if (!name.equals(that.name)) {
return false;
}
return true;
}
......
......@@ -32,13 +32,21 @@ public class EnumValue {
@Override
public boolean equals(Object o) {
if (this == o) return true;
if (o == null || getClass() != o.getClass()) return false;
if (this == o) {
return true;
}
if (o == null || getClass() != o.getClass()) {
return false;
}
EnumValue enumValue = (EnumValue) o;
if (ordinal != enumValue.ordinal) return false;
if (!value.equals(enumValue.value)) return false;
if (ordinal != enumValue.ordinal) {
return false;
}
if (!value.equals(enumValue.value)) {
return false;
}
return true;
}
......
......@@ -47,12 +47,9 @@ public class FieldMapping {
public final int numReferenceables;
public FieldMapping(Map<String, AttributeInfo> fields, Map<String, Integer> fieldPos,
Map<String, Integer> fieldNullPos, int numBools, int numBytes,
int numShorts,
int numInts, int numLongs, int numFloats, int numDoubles, int numBigInts,
int numBigDecimals,
int numDates, int numStrings, int numArrays, int numMaps, int numStructs,
int numReferenceables) {
Map<String, Integer> fieldNullPos, int numBools, int numBytes, int numShorts, int numInts, int numLongs,
int numFloats, int numDoubles, int numBigInts, int numBigDecimals, int numDates, int numStrings,
int numArrays, int numMaps, int numStructs, int numReferenceables) {
this.fields = fields;
this.fieldPos = fieldPos;
this.fieldNullPos = fieldNullPos;
......@@ -73,8 +70,7 @@ public class FieldMapping {
this.numReferenceables = numReferenceables;
}
protected void outputFields(IStruct s, Appendable buf, String fieldPrefix)
throws AtlasException {
protected void outputFields(IStruct s, Appendable buf, String fieldPrefix) throws AtlasException {
for (Map.Entry<String, AttributeInfo> e : fields.entrySet()) {
String attrName = e.getKey();
AttributeInfo i = e.getValue();
......@@ -104,8 +100,7 @@ public class FieldMapping {
TypeUtils.outputVal("}", buf, prefix);
}
public void output(IReferenceableInstance s, Appendable buf, String prefix)
throws AtlasException {
public void output(IReferenceableInstance s, Appendable buf, String prefix) throws AtlasException {
if (s == null) {
TypeUtils.outputVal("<null>\n", buf, "");
return;
......
......@@ -36,17 +36,14 @@ public class HierarchicalTypeDefinition<T extends HierarchicalType> extends Stru
* @throws ClassNotFoundException
*/
@InterfaceAudience.Private
public HierarchicalTypeDefinition(String hierarchicalMetaTypeName,
String typeName, String[] superTypes,
AttributeDefinition[] attributeDefinitions)
throws ClassNotFoundException {
this((Class<T>) Class.forName(hierarchicalMetaTypeName),
typeName, ImmutableList.copyOf(superTypes), attributeDefinitions);
public HierarchicalTypeDefinition(String hierarchicalMetaTypeName, String typeName, String[] superTypes,
AttributeDefinition[] attributeDefinitions) throws ClassNotFoundException {
this((Class<T>) Class.forName(hierarchicalMetaTypeName), typeName, ImmutableList.copyOf(superTypes),
attributeDefinitions);
}
public HierarchicalTypeDefinition(Class<T> hierarchicalMetaType,
String typeName, ImmutableList<String> superTypes,
AttributeDefinition[] attributeDefinitions) {
public HierarchicalTypeDefinition(Class<T> hierarchicalMetaType, String typeName, ImmutableList<String> superTypes,
AttributeDefinition[] attributeDefinitions) {
super(typeName, false, attributeDefinitions);
hierarchicalMetaTypeName = hierarchicalMetaType.getName();
this.superTypes = superTypes == null ? ImmutableList.<String>of() : superTypes;
......@@ -54,14 +51,24 @@ public class HierarchicalTypeDefinition<T extends HierarchicalType> extends Stru
@Override
public boolean equals(Object o) {
if (this == o) return true;
if (o == null || getClass() != o.getClass()) return false;
if (!super.equals(o)) return false;
if (this == o) {
return true;
}
if (o == null || getClass() != o.getClass()) {
return false;
}
if (!super.equals(o)) {
return false;
}
HierarchicalTypeDefinition that = (HierarchicalTypeDefinition) o;
if (!hierarchicalMetaTypeName.equals(that.hierarchicalMetaTypeName)) return false;
if (!superTypes.equals(that.superTypes)) return false;
if (!hierarchicalMetaTypeName.equals(that.hierarchicalMetaTypeName)) {
return false;
}
if (!superTypes.equals(that.superTypes)) {
return false;
}
return true;
}
......
......@@ -31,6 +31,7 @@ public final class Multiplicity {
public final int lower;
public final int upper;
public final boolean isUnique;
public Multiplicity(int lower, int upper, boolean isUnique) {
assert lower >= 0;
assert upper >= 1;
......@@ -46,14 +47,24 @@ public final class Multiplicity {
@Override
public boolean equals(Object o) {
if (this == o) return true;
if (o == null || getClass() != o.getClass()) return false;
if (this == o) {
return true;
}
if (o == null || getClass() != o.getClass()) {
return false;
}
Multiplicity that = (Multiplicity) o;
if (isUnique != that.isUnique) return false;
if (lower != that.lower) return false;
if (upper != that.upper) return false;
if (isUnique != that.isUnique) {
return false;
}
if (lower != that.lower) {
return false;
}
if (upper != that.upper) {
return false;
}
return true;
}
......
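// The constructor above only enforces lower >= 0 and upper >= 1. The standard
// constants used throughout this patch plausibly map onto it as follows; the exact
// values are an assumption, since their declarations are outside this hunk.
Multiplicity optional = new Multiplicity(0, 1, false);                    // Multiplicity.OPTIONAL
Multiplicity required = new Multiplicity(1, 1, false);                    // Multiplicity.REQUIRED
Multiplicity collection = new Multiplicity(1, Integer.MAX_VALUE, false);  // Multiplicity.COLLECTION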
......@@ -39,8 +39,7 @@ public class ObjectGraphTraversal implements Iterator<ObjectGraphTraversal.Insta
final TypeSystem typeSystem;
Set<Id> processedIds;
public ObjectGraphTraversal(TypeSystem typeSystem, IReferenceableInstance start)
throws AtlasException {
public ObjectGraphTraversal(TypeSystem typeSystem, IReferenceableInstance start) throws AtlasException {
this.typeSystem = typeSystem;
queue = new LinkedList<InstanceTuple>();
processedIds = new HashSet<Id>();
......@@ -56,8 +55,8 @@ public class ObjectGraphTraversal implements Iterator<ObjectGraphTraversal.Insta
IDataType keyType = ((DataTypes.MapType) dT).getKeyType();
IDataType valueType = ((DataTypes.MapType) dT).getValueType();
processMap(keyType, valueType, val);
} else if (dT.getTypeCategory() == DataTypes.TypeCategory.STRUCT ||
dT.getTypeCategory() == DataTypes.TypeCategory.TRAIT) {
} else if (dT.getTypeCategory() == DataTypes.TypeCategory.STRUCT
|| dT.getTypeCategory() == DataTypes.TypeCategory.TRAIT) {
processStruct(val);
} else if (dT.getTypeCategory() == DataTypes.TypeCategory.CLASS) {
processReferenceableInstance(val);
......@@ -66,8 +65,8 @@ public class ObjectGraphTraversal implements Iterator<ObjectGraphTraversal.Insta
}
void processMap(IDataType keyType, IDataType valueType, Object val) throws AtlasException {
if (keyType.getTypeCategory() == DataTypes.TypeCategory.PRIMITIVE &&
valueType.getTypeCategory() == DataTypes.TypeCategory.PRIMITIVE) {
if (keyType.getTypeCategory() == DataTypes.TypeCategory.PRIMITIVE
&& valueType.getTypeCategory() == DataTypes.TypeCategory.PRIMITIVE) {
return;
}
......
......@@ -47,13 +47,11 @@ public class ObjectGraphWalker {
final NodeProcessor nodeProcessor;
Set<Id> processedIds;
public ObjectGraphWalker(TypeSystem typeSystem, NodeProcessor nodeProcessor)
throws AtlasException {
public ObjectGraphWalker(TypeSystem typeSystem, NodeProcessor nodeProcessor) throws AtlasException {
this(typeSystem, nodeProcessor, (IReferenceableInstance) null);
}
public ObjectGraphWalker(TypeSystem typeSystem, NodeProcessor nodeProcessor,
IReferenceableInstance start)
public ObjectGraphWalker(TypeSystem typeSystem, NodeProcessor nodeProcessor, IReferenceableInstance start)
throws AtlasException {
this.typeSystem = typeSystem;
this.nodeProcessor = nodeProcessor;
......@@ -65,8 +63,7 @@ public class ObjectGraphWalker {
}
public ObjectGraphWalker(TypeSystem typeSystem, NodeProcessor nodeProcessor,
List<? extends IReferenceableInstance> roots)
throws AtlasException {
List<? extends IReferenceableInstance> roots) throws AtlasException {
this.typeSystem = typeSystem;
this.nodeProcessor = nodeProcessor;
queue = new LinkedList<IReferenceableInstance>();
......@@ -96,8 +93,8 @@ public class ObjectGraphWalker {
IDataType keyType = ((DataTypes.MapType) dT).getKeyType();
IDataType valueType = ((DataTypes.MapType) dT).getValueType();
visitMap(keyType, valueType, val);
} else if (dT.getTypeCategory() == DataTypes.TypeCategory.STRUCT ||
dT.getTypeCategory() == DataTypes.TypeCategory.TRAIT) {
} else if (dT.getTypeCategory() == DataTypes.TypeCategory.STRUCT
|| dT.getTypeCategory() == DataTypes.TypeCategory.TRAIT) {
visitStruct(val);
} else if (dT.getTypeCategory() == DataTypes.TypeCategory.CLASS) {
visitReferenceableInstance(val);
......@@ -106,8 +103,8 @@ public class ObjectGraphWalker {
}
void visitMap(IDataType keyType, IDataType valueType, Object val) throws AtlasException {
if (keyType.getTypeCategory() == DataTypes.TypeCategory.PRIMITIVE &&
valueType.getTypeCategory() == DataTypes.TypeCategory.PRIMITIVE) {
if (keyType.getTypeCategory() == DataTypes.TypeCategory.PRIMITIVE
&& valueType.getTypeCategory() == DataTypes.TypeCategory.PRIMITIVE) {
return;
}
......
......@@ -28,8 +28,7 @@ import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;
public class StructType extends AbstractDataType<IStruct>
implements IConstructableType<IStruct, ITypedStruct> {
public class StructType extends AbstractDataType<IStruct> implements IConstructableType<IStruct, ITypedStruct> {
public final TypeSystem typeSystem;
public final String name;
......@@ -50,13 +49,11 @@ public class StructType extends AbstractDataType<IStruct>
this.handler = null;
}
protected StructType(TypeSystem typeSystem, String name,
ImmutableList<String> superTypes, AttributeInfo... fields)
protected StructType(TypeSystem typeSystem, String name, ImmutableList<String> superTypes, AttributeInfo... fields)
throws AtlasException {
this.typeSystem = typeSystem;
this.name = name;
this.fieldMapping = constructFieldMapping(superTypes,
fields);
this.fieldMapping = constructFieldMapping(superTypes, fields);
infoToNameMap = TypeUtils.buildAttrInfoToNameMap(this.fieldMapping);
this.numFields = this.fieldMapping.fields.size();
this.handler = new TypedStructHandler(this);
......@@ -71,8 +68,7 @@ public class StructType extends AbstractDataType<IStruct>
return name;
}
protected FieldMapping constructFieldMapping(ImmutableList<String> superTypes,
AttributeInfo... fields)
protected FieldMapping constructFieldMapping(ImmutableList<String> superTypes, AttributeInfo... fields)
throws AtlasException {
Map<String, AttributeInfo> fieldsMap = new LinkedHashMap<String, AttributeInfo>();
......@@ -97,9 +93,7 @@ public class StructType extends AbstractDataType<IStruct>
for (AttributeInfo i : fields) {
if (fieldsMap.containsKey(i.name)) {
throw new AtlasException(
String.format(
"Struct defintion cannot contain multiple fields with the same " +
"name %s",
String.format("Struct defintion cannot contain multiple fields with the same " + "name %s",
i.name));
}
fieldsMap.put(i.name, i);
......@@ -146,8 +140,8 @@ public class StructType extends AbstractDataType<IStruct>
} else if (i.dataType().getTypeCategory() == DataTypes.TypeCategory.MAP) {
fieldPos.put(i.name, numMaps);
numMaps++;
} else if (i.dataType().getTypeCategory() == DataTypes.TypeCategory.STRUCT ||
i.dataType().getTypeCategory() == DataTypes.TypeCategory.TRAIT) {
} else if (i.dataType().getTypeCategory() == DataTypes.TypeCategory.STRUCT
|| i.dataType().getTypeCategory() == DataTypes.TypeCategory.TRAIT) {
fieldPos.put(i.name, numStructs);
numStructs++;
} else if (i.dataType().getTypeCategory() == DataTypes.TypeCategory.CLASS) {
......@@ -158,23 +152,8 @@ public class StructType extends AbstractDataType<IStruct>
}
}
return new FieldMapping(fieldsMap,
fieldPos,
fieldNullPos,
numBools,
numBytes,
numShorts,
numInts,
numLongs,
numFloats,
numDoubles,
numBigInts,
numBigDecimals,
numDates,
numStrings,
numArrays,
numMaps,
numStructs,
return new FieldMapping(fieldsMap, fieldPos, fieldNullPos, numBools, numBytes, numShorts, numInts, numLongs,
numFloats, numDoubles, numBigInts, numBigDecimals, numDates, numStrings, numArrays, numMaps, numStructs,
numReferenceables);
}
......
......@@ -42,13 +42,21 @@ public class StructTypeDefinition {
@Override
public boolean equals(Object o) {
if (this == o) return true;
if (o == null || getClass() != o.getClass()) return false;
if (this == o) {
return true;
}
if (o == null || getClass() != o.getClass()) {
return false;
}
StructTypeDefinition that = (StructTypeDefinition) o;
if (!Arrays.equals(attributeDefinitions, that.attributeDefinitions)) return false;
if (!typeName.equals(that.typeName)) return false;
if (!Arrays.equals(attributeDefinitions, that.attributeDefinitions)) {
return false;
}
if (!typeName.equals(that.typeName)) {
return false;
}
return true;
}
......
......@@ -35,15 +35,13 @@ public class TraitType extends HierarchicalType<TraitType, IStruct>
/**
* Used when creating a TraitType, to support recursive Structs.
*/
TraitType(TypeSystem typeSystem, String name, ImmutableList<String> superTraits,
int numFields) {
TraitType(TypeSystem typeSystem, String name, ImmutableList<String> superTraits, int numFields) {
super(typeSystem, TraitType.class, name, superTraits, numFields);
handler = null;
infoToNameMap = null;
}
TraitType(TypeSystem typeSystem, String name, ImmutableList<String> superTraits,
AttributeInfo... fields)
TraitType(TypeSystem typeSystem, String name, ImmutableList<String> superTraits, AttributeInfo... fields)
throws AtlasException {
super(typeSystem, TraitType.class, name, superTraits, fields);
handler = new TypedStructHandler(this);
......
......@@ -36,13 +36,11 @@ public class TypeUtils {
public static final String NAME_REGEX = "[a-zA-Z][a-zA-Z0-9_]*";
public static final Pattern NAME_PATTERN = Pattern.compile(NAME_REGEX);
public static final Pattern ARRAY_TYPE_NAME_PATTERN = Pattern
.compile(String.format("array<(%s)>", NAME_REGEX));
public static final Pattern ARRAY_TYPE_NAME_PATTERN = Pattern.compile(String.format("array<(%s)>", NAME_REGEX));
public static final Pattern MAP_TYPE_NAME_PATTERN =
Pattern.compile(String.format("map<(%s),(%s)>", NAME_REGEX, NAME_REGEX));
public static void outputVal(String val, Appendable buf, String prefix)
throws AtlasException {
public static void outputVal(String val, Appendable buf, String prefix) throws AtlasException {
try {
buf.append(prefix).append(val);
} catch (IOException ie) {
......@@ -73,14 +71,14 @@ public class TypeUtils {
return ImmutableMap.copyOf(b);
}
public static TypesDef getTypesDef(ImmutableList<EnumTypeDefinition> enums, ImmutableList<StructTypeDefinition> structs,
ImmutableList<HierarchicalTypeDefinition<TraitType>> traits,
ImmutableList<HierarchicalTypeDefinition<ClassType>> classes) {
public static TypesDef getTypesDef(ImmutableList<EnumTypeDefinition> enums,
ImmutableList<StructTypeDefinition> structs, ImmutableList<HierarchicalTypeDefinition<TraitType>> traits,
ImmutableList<HierarchicalTypeDefinition<ClassType>> classes) {
return new TypesDef(JavaConversions.asScalaBuffer(enums), JavaConversions.asScalaBuffer(structs),
JavaConversions.asScalaBuffer(traits), JavaConversions.asScalaBuffer(classes));
}
protected static class Pair<L,R> {
protected static class Pair<L, R> {
protected L left;
protected R right;
......
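// Usage sketch for the patterns above: pulling the element type name out of an
// array type name with java.util.regex.Matcher (the type name is illustrative).
Matcher m = TypeUtils.ARRAY_TYPE_NAME_PATTERN.matcher("array<hive_table>");
if (m.matches()) {
    String elemTypeName = m.group(1);   // "hive_table"
}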
......@@ -68,8 +68,7 @@ public class TypedStructHandler {
}
}
return ts;
} else if (val instanceof StructInstance &&
((StructInstance) val).getTypeName() == structType.getName()) {
} else if (val instanceof StructInstance && ((StructInstance) val).getTypeName() == structType.getName()) {
return (StructInstance) val;
} else {
throw new ValueConversionException(structType, val);
......@@ -86,9 +85,7 @@ public class TypedStructHandler {
}
public ITypedStruct createInstance() {
return new StructInstance(structType.getName(),
fieldMapping,
new boolean[fieldMapping.fields.size()],
return new StructInstance(structType.getName(), fieldMapping, new boolean[fieldMapping.fields.size()],
fieldMapping.numBools == 0 ? null : new boolean[fieldMapping.numBools],
fieldMapping.numBytes == 0 ? null : new byte[fieldMapping.numBytes],
fieldMapping.numShorts == 0 ? null : new short[fieldMapping.numShorts],
......@@ -96,18 +93,15 @@ public class TypedStructHandler {
fieldMapping.numLongs == 0 ? null : new long[fieldMapping.numLongs],
fieldMapping.numFloats == 0 ? null : new float[fieldMapping.numFloats],
fieldMapping.numDoubles == 0 ? null : new double[fieldMapping.numDoubles],
fieldMapping.numBigDecimals == 0 ? null
: new BigDecimal[fieldMapping.numBigDecimals],
fieldMapping.numBigDecimals == 0 ? null : new BigDecimal[fieldMapping.numBigDecimals],
fieldMapping.numBigInts == 0 ? null : new BigInteger[fieldMapping.numBigInts],
fieldMapping.numDates == 0 ? null : new Date[fieldMapping.numDates],
fieldMapping.numStrings == 0 ? null : new String[fieldMapping.numStrings],
fieldMapping.numArrays == 0 ? null : new ImmutableList[fieldMapping.numArrays],
fieldMapping.numMaps == 0 ? null : new ImmutableMap[fieldMapping.numMaps],
fieldMapping.numStructs == 0 ? null : new StructInstance[fieldMapping.numStructs],
fieldMapping.numReferenceables == 0 ? null
: new ReferenceableInstance[fieldMapping.numReferenceables],
fieldMapping.numReferenceables == 0 ? null
: new Id[fieldMapping.numReferenceables]);
fieldMapping.numReferenceables == 0 ? null : new ReferenceableInstance[fieldMapping.numReferenceables],
fieldMapping.numReferenceables == 0 ? null : new Id[fieldMapping.numReferenceables]);
}
public void output(IStruct s, Appendable buf, String prefix) throws AtlasException {
......
......@@ -27,19 +27,16 @@ public class ValueConversionException extends AtlasException {
}
public ValueConversionException(IDataType typ, Object val, Throwable t) {
super(String
.format("Cannot convert value '%s' to datatype %s", val.toString(), typ.getName()),
t);
super(String.format("Cannot convert value '%s' to datatype %s", val.toString(), typ.getName()), t);
}
public ValueConversionException(IDataType typ, Object val, String msg) {
super(String.format("Cannot convert value '%s' to datatype %s because: %s",
val.toString(), typ.getName(), msg));
super(String
.format("Cannot convert value '%s' to datatype %s because: %s", val.toString(), typ.getName(), msg));
}
public ValueConversionException(String typeName, Object val, String msg) {
super(String.format("Cannot convert value '%s' to datatype %s because: %s",
val.toString(), typeName, msg));
super(String.format("Cannot convert value '%s' to datatype %s because: %s", val.toString(), typeName, msg));
}
protected ValueConversionException(String msg) {
......
......@@ -37,40 +37,32 @@ public class TypesUtil {
private TypesUtil() {
}
public static AttributeDefinition createOptionalAttrDef(String name,
IDataType dataType) {
return new AttributeDefinition(name, dataType.getName(),
Multiplicity.OPTIONAL, false, null);
public static AttributeDefinition createOptionalAttrDef(String name, IDataType dataType) {
return new AttributeDefinition(name, dataType.getName(), Multiplicity.OPTIONAL, false, null);
}
public static AttributeDefinition createOptionalAttrDef(String name,
String dataType) {
public static AttributeDefinition createOptionalAttrDef(String name, String dataType) {
return new AttributeDefinition(name, dataType, Multiplicity.OPTIONAL, false, null);
}
public static AttributeDefinition createRequiredAttrDef(String name,
String dataType) {
public static AttributeDefinition createRequiredAttrDef(String name, String dataType) {
return new AttributeDefinition(name, dataType, Multiplicity.REQUIRED, false, null);
}
public static AttributeDefinition createUniqueRequiredAttrDef(String name,
IDataType dataType) {
return new AttributeDefinition(name, dataType.getName(),
Multiplicity.REQUIRED, false, true, true, null);
public static AttributeDefinition createUniqueRequiredAttrDef(String name, IDataType dataType) {
return new AttributeDefinition(name, dataType.getName(), Multiplicity.REQUIRED, false, true, true, null);
}
public static AttributeDefinition createRequiredAttrDef(String name,
IDataType dataType) {
return new AttributeDefinition(name, dataType.getName(),
Multiplicity.REQUIRED, false, null);
public static AttributeDefinition createRequiredAttrDef(String name, IDataType dataType) {
return new AttributeDefinition(name, dataType.getName(), Multiplicity.REQUIRED, false, null);
}
public static EnumTypeDefinition createEnumTypeDef(String name, EnumValue... enumValues) {
return new EnumTypeDefinition(name, enumValues);
}
public static HierarchicalTypeDefinition<TraitType> createTraitTypeDef(
String name, ImmutableList<String> superTypes, AttributeDefinition... attrDefs) {
public static HierarchicalTypeDefinition<TraitType> createTraitTypeDef(String name,
ImmutableList<String> superTypes, AttributeDefinition... attrDefs) {
return new HierarchicalTypeDefinition<>(TraitType.class, name, superTypes, attrDefs);
}
......@@ -78,8 +70,8 @@ public class TypesUtil {
return new StructTypeDefinition(name, attrDefs);
}
public static HierarchicalTypeDefinition<ClassType> createClassTypeDef(
String name, ImmutableList<String> superTypes, AttributeDefinition... attrDefs) {
public static HierarchicalTypeDefinition<ClassType> createClassTypeDef(String name,
ImmutableList<String> superTypes, AttributeDefinition... attrDefs) {
return new HierarchicalTypeDefinition<>(ClassType.class, name, superTypes, attrDefs);
}
}
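// Usage sketch for the helpers above (the attribute, trait and class names are illustrative):
AttributeDefinition nameAttr = TypesUtil.createRequiredAttrDef("name", DataTypes.STRING_TYPE);
HierarchicalTypeDefinition<TraitType> piiDef =
        TypesUtil.createTraitTypeDef("PII", ImmutableList.<String>of(), nameAttr);
HierarchicalTypeDefinition<ClassType> tableDef =
        TypesUtil.createClassTypeDef("Table", ImmutableList.<String>of(), nameAttr);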
......@@ -62,38 +62,26 @@ public class SerializationJavaTest extends BaseTest {
TypeSystem ts = getTypeSystem();
HierarchicalTypeDefinition<ClassType> deptTypeDef = createClassTypeDef("Department",
ImmutableList.<String>of(),
HierarchicalTypeDefinition<ClassType> deptTypeDef = createClassTypeDef("Department", ImmutableList.<String>of(),
createRequiredAttrDef("name", DataTypes.STRING_TYPE),
new AttributeDefinition("employees",
String.format("array<%s>", "Person"), Multiplicity.COLLECTION, true,
"department")
);
HierarchicalTypeDefinition<ClassType> personTypeDef = createClassTypeDef("Person",
ImmutableList.<String>of(),
new AttributeDefinition("employees", String.format("array<%s>", "Person"), Multiplicity.COLLECTION,
true, "department"));
HierarchicalTypeDefinition<ClassType> personTypeDef = createClassTypeDef("Person", ImmutableList.<String>of(),
createRequiredAttrDef("name", DataTypes.STRING_TYPE),
new AttributeDefinition("department",
"Department", Multiplicity.REQUIRED, false, "employees"),
new AttributeDefinition("manager",
"Manager", Multiplicity.OPTIONAL, false, "subordinates")
);
HierarchicalTypeDefinition<ClassType> managerTypeDef = createClassTypeDef("Manager",
ImmutableList.<String>of("Person"),
new AttributeDefinition("subordinates",
String.format("array<%s>", "Person"), Multiplicity.COLLECTION, false,
"manager")
);
HierarchicalTypeDefinition<TraitType> securityClearanceTypeDef = createTraitTypeDef(
"SecurityClearance",
ImmutableList.<String>of(),
createRequiredAttrDef("level", DataTypes.INT_TYPE)
);
new AttributeDefinition("department", "Department", Multiplicity.REQUIRED, false, "employees"),
new AttributeDefinition("manager", "Manager", Multiplicity.OPTIONAL, false, "subordinates"));
HierarchicalTypeDefinition<ClassType> managerTypeDef =
createClassTypeDef("Manager", ImmutableList.<String>of("Person"),
new AttributeDefinition("subordinates", String.format("array<%s>", "Person"),
Multiplicity.COLLECTION, false, "manager"));
HierarchicalTypeDefinition<TraitType> securityClearanceTypeDef =
createTraitTypeDef("SecurityClearance", ImmutableList.<String>of(),
createRequiredAttrDef("level", DataTypes.INT_TYPE));
ts.defineTypes(ImmutableList.<StructTypeDefinition>of(),
ImmutableList.<HierarchicalTypeDefinition<TraitType>>of(securityClearanceTypeDef),
ImmutableList.<HierarchicalTypeDefinition<ClassType>>of(deptTypeDef, personTypeDef,
managerTypeDef));
ImmutableList.<HierarchicalTypeDefinition<ClassType>>of(deptTypeDef, personTypeDef, managerTypeDef));
Referenceable hrDept = new Referenceable("Department");
Referenceable john = new Referenceable("Person");
@@ -155,11 +143,9 @@ public class SerializationJavaTest extends BaseTest {
public void testTrait() throws AtlasException {
TypeSystem ts = getTypeSystem();
HierarchicalTypeDefinition<TraitType> securityClearanceTypeDef = createTraitTypeDef(
"SecurityClearance2",
ImmutableList.<String>of(),
createRequiredAttrDef("level", DataTypes.INT_TYPE)
);
HierarchicalTypeDefinition<TraitType> securityClearanceTypeDef =
createTraitTypeDef("SecurityClearance2", ImmutableList.<String>of(),
createRequiredAttrDef("level", DataTypes.INT_TYPE));
ts.defineTypes(ImmutableList.<StructTypeDefinition>of(),
ImmutableList.<HierarchicalTypeDefinition<TraitType>>of(securityClearanceTypeDef),
@@ -37,11 +37,10 @@ public abstract class BaseTest {
public static final String STRUCT_TYPE_1 = "t1";
public static final String STRUCT_TYPE_2 = "t2";
public static final String TEST_DATE = "2014-12-11T02:35:58.440Z";
public static final long TEST_DATE_IN_LONG=1418265358440L;
public static final long TEST_DATE_IN_LONG = 1418265358440L;
public static Struct createStruct() throws AtlasException {
StructType structType = TypeSystem.getInstance().getDataType(
StructType.class, STRUCT_TYPE_1);
StructType structType = TypeSystem.getInstance().getDataType(StructType.class, STRUCT_TYPE_1);
Struct s = new Struct(structType.getName());
s.set("a", 1);
s.set("b", true);
@@ -56,8 +55,7 @@ public abstract class BaseTest {
s.set("k", new BigDecimal(1));
s.set("l", new Date(1418265358440L));
s.set("m", Lists.asList(1, new Integer[]{1}));
s.set("n", Lists.asList(BigDecimal.valueOf(1.1),
new BigDecimal[]{BigDecimal.valueOf(1.1)}));
s.set("n", Lists.asList(BigDecimal.valueOf(1.1), new BigDecimal[]{BigDecimal.valueOf(1.1)}));
Map<String, Double> hm = Maps.newHashMap();
hm.put("a", 1.0);
hm.put("b", 2.0);
@@ -74,41 +72,38 @@ public abstract class BaseTest {
TypeSystem ts = TypeSystem.getInstance();
ts.reset();
StructType structType = ts.defineStructType(STRUCT_TYPE_1,
true,
TypesUtil.createRequiredAttrDef("a", DataTypes.INT_TYPE),
TypesUtil.createOptionalAttrDef("b", DataTypes.BOOLEAN_TYPE),
TypesUtil.createOptionalAttrDef("c", DataTypes.BYTE_TYPE),
TypesUtil.createOptionalAttrDef("d", DataTypes.SHORT_TYPE),
TypesUtil.createOptionalAttrDef("e", DataTypes.INT_TYPE),
TypesUtil.createOptionalAttrDef("f", DataTypes.INT_TYPE),
TypesUtil.createOptionalAttrDef("g", DataTypes.LONG_TYPE),
TypesUtil.createOptionalAttrDef("h", DataTypes.FLOAT_TYPE),
TypesUtil.createOptionalAttrDef("i", DataTypes.DOUBLE_TYPE),
TypesUtil.createOptionalAttrDef("j", DataTypes.BIGINTEGER_TYPE),
TypesUtil.createOptionalAttrDef("k", DataTypes.BIGDECIMAL_TYPE),
TypesUtil.createOptionalAttrDef("l", DataTypes.DATE_TYPE),
TypesUtil.createOptionalAttrDef("m", ts.defineArrayType(DataTypes.INT_TYPE)),
TypesUtil.createOptionalAttrDef("n", ts.defineArrayType(DataTypes.BIGDECIMAL_TYPE)),
TypesUtil.createOptionalAttrDef("o",
ts.defineMapType(DataTypes.STRING_TYPE, DataTypes.DOUBLE_TYPE)));
StructType structType =
ts.defineStructType(STRUCT_TYPE_1, true, TypesUtil.createRequiredAttrDef("a", DataTypes.INT_TYPE),
TypesUtil.createOptionalAttrDef("b", DataTypes.BOOLEAN_TYPE),
TypesUtil.createOptionalAttrDef("c", DataTypes.BYTE_TYPE),
TypesUtil.createOptionalAttrDef("d", DataTypes.SHORT_TYPE),
TypesUtil.createOptionalAttrDef("e", DataTypes.INT_TYPE),
TypesUtil.createOptionalAttrDef("f", DataTypes.INT_TYPE),
TypesUtil.createOptionalAttrDef("g", DataTypes.LONG_TYPE),
TypesUtil.createOptionalAttrDef("h", DataTypes.FLOAT_TYPE),
TypesUtil.createOptionalAttrDef("i", DataTypes.DOUBLE_TYPE),
TypesUtil.createOptionalAttrDef("j", DataTypes.BIGINTEGER_TYPE),
TypesUtil.createOptionalAttrDef("k", DataTypes.BIGDECIMAL_TYPE),
TypesUtil.createOptionalAttrDef("l", DataTypes.DATE_TYPE),
TypesUtil.createOptionalAttrDef("m", ts.defineArrayType(DataTypes.INT_TYPE)),
TypesUtil.createOptionalAttrDef("n", ts.defineArrayType(DataTypes.BIGDECIMAL_TYPE)), TypesUtil
.createOptionalAttrDef("o",
ts.defineMapType(DataTypes.STRING_TYPE, DataTypes.DOUBLE_TYPE)));
System.out.println("defined structType = " + structType);
StructType recursiveStructType = ts.defineStructType(STRUCT_TYPE_2,
true,
TypesUtil.createRequiredAttrDef("a", DataTypes.INT_TYPE),
TypesUtil.createOptionalAttrDef("s", STRUCT_TYPE_2));
StructType recursiveStructType =
ts.defineStructType(STRUCT_TYPE_2, true, TypesUtil.createRequiredAttrDef("a", DataTypes.INT_TYPE),
TypesUtil.createOptionalAttrDef("s", STRUCT_TYPE_2));
System.out.println("defined recursiveStructType = " + recursiveStructType);
}
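As an aside (not from the commit): STRUCT_TYPE_2 above is recursive, since its optional attribute "s" is of its own type, so instances can nest. A minimal sketch, assuming the Struct API used throughout these tests:
Struct outer = new Struct(STRUCT_TYPE_2);
outer.set("a", 1);                       // required int attribute
Struct inner = new Struct(STRUCT_TYPE_2);
inner.set("a", 2);
outer.set("s", inner);                   // optional attribute of the struct's own type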
protected Map<String, IDataType> defineTraits(HierarchicalTypeDefinition... tDefs)
throws AtlasException {
protected Map<String, IDataType> defineTraits(HierarchicalTypeDefinition... tDefs) throws AtlasException {
return getTypeSystem().defineTraitTypes(tDefs);
}
protected Map<String, IDataType> defineClasses(
HierarchicalTypeDefinition<ClassType>... classDefs) throws AtlasException {
protected Map<String, IDataType> defineClasses(HierarchicalTypeDefinition<ClassType>... classDefs)
throws AtlasException {
return getTypeSystem().defineClassTypes(classDefs);
}
@@ -123,47 +118,30 @@ public abstract class BaseTest {
protected void defineDeptEmployeeTypes(TypeSystem ts) throws AtlasException {
HierarchicalTypeDefinition<ClassType> deptTypeDef = TypesUtil
.createClassTypeDef("Department",
ImmutableList.<String>of(),
.createClassTypeDef("Department", ImmutableList.<String>of(),
TypesUtil.createRequiredAttrDef("name", DataTypes.STRING_TYPE),
new AttributeDefinition("employees",
String.format("array<%s>", "Person"), Multiplicity.COLLECTION, true,
"department")
);
HierarchicalTypeDefinition<ClassType> personTypeDef = TypesUtil.createClassTypeDef("Person",
ImmutableList.<String>of(),
TypesUtil.createRequiredAttrDef("name", DataTypes.STRING_TYPE),
new AttributeDefinition("department",
"Department", Multiplicity.REQUIRED, false, "employees"),
new AttributeDefinition("manager",
"Manager", Multiplicity.OPTIONAL, false, "subordinates")
);
HierarchicalTypeDefinition<ClassType> managerTypeDef =
TypesUtil.createClassTypeDef("Manager",
ImmutableList.of("Person"),
new AttributeDefinition("subordinates",
String.format("array<%s>", "Person"),
Multiplicity.COLLECTION, false, "manager")
);
HierarchicalTypeDefinition<TraitType> securityClearanceTypeDef =
TypesUtil.createTraitTypeDef(
"SecurityClearance",
ImmutableList.<String>of(),
TypesUtil.createRequiredAttrDef("level", DataTypes.INT_TYPE)
);
ts.defineTypes(ImmutableList.<StructTypeDefinition>of(),
ImmutableList.of(securityClearanceTypeDef),
ImmutableList.of(deptTypeDef, personTypeDef,
managerTypeDef));
ImmutableList.of(
ts.getDataType(HierarchicalType.class, "SecurityClearance"),
ts.getDataType(ClassType.class, "Department"),
ts.getDataType(ClassType.class, "Person"),
ts.getDataType(ClassType.class, "Manager")
);
new AttributeDefinition("employees", String.format("array<%s>", "Person"),
Multiplicity.COLLECTION, true, "department"));
HierarchicalTypeDefinition<ClassType> personTypeDef = TypesUtil
.createClassTypeDef("Person", ImmutableList.<String>of(),
TypesUtil.createRequiredAttrDef("name", DataTypes.STRING_TYPE),
new AttributeDefinition("department", "Department", Multiplicity.REQUIRED, false, "employees"),
new AttributeDefinition("manager", "Manager", Multiplicity.OPTIONAL, false, "subordinates"));
HierarchicalTypeDefinition<ClassType> managerTypeDef = TypesUtil
.createClassTypeDef("Manager", ImmutableList.of("Person"),
new AttributeDefinition("subordinates", String.format("array<%s>", "Person"),
Multiplicity.COLLECTION, false, "manager"));
HierarchicalTypeDefinition<TraitType> securityClearanceTypeDef = TypesUtil
.createTraitTypeDef("SecurityClearance", ImmutableList.<String>of(),
TypesUtil.createRequiredAttrDef("level", DataTypes.INT_TYPE));
ts.defineTypes(ImmutableList.<StructTypeDefinition>of(), ImmutableList.of(securityClearanceTypeDef),
ImmutableList.of(deptTypeDef, personTypeDef, managerTypeDef));
ImmutableList.of(ts.getDataType(HierarchicalType.class, "SecurityClearance"),
ts.getDataType(ClassType.class, "Department"), ts.getDataType(ClassType.class, "Person"),
ts.getDataType(ClassType.class, "Manager"));
}
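For context (not part of the commit), "employees" and "department" above are declared as reverse references of each other. A minimal wiring sketch, assuming Referenceable accepts set(...) like Struct does, as in the tests here:
Referenceable dept = new Referenceable("Department");
dept.set("name", "hr");
Referenceable person = new Referenceable("Person");
person.set("name", "Jane");
person.set("department", dept);                  // reverse of Department.employees
dept.set("employees", ImmutableList.of(person)); // COLLECTION multiplicity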
protected Referenceable createDeptEg1(TypeSystem ts) throws AtlasException {
@@ -48,26 +48,16 @@ public class EnumTest extends BaseTest {
}
void defineEnums(TypeSystem ts) throws AtlasException {
ts.defineEnumType("HiveObjectType",
new EnumValue("GLOBAL", 1),
new EnumValue("DATABASE", 2),
new EnumValue("TABLE", 3),
new EnumValue("PARTITION", 4),
new EnumValue("COLUMN", 5));
ts.defineEnumType("PrincipalType",
new EnumValue("USER", 1),
new EnumValue("ROLE", 2),
ts.defineEnumType("HiveObjectType", new EnumValue("GLOBAL", 1), new EnumValue("DATABASE", 2),
new EnumValue("TABLE", 3), new EnumValue("PARTITION", 4), new EnumValue("COLUMN", 5));
ts.defineEnumType("PrincipalType", new EnumValue("USER", 1), new EnumValue("ROLE", 2),
new EnumValue("GROUP", 3));
ts.defineEnumType("TxnState",
new EnumValue("COMMITTED", 1),
new EnumValue("ABORTED", 2),
ts.defineEnumType("TxnState", new EnumValue("COMMITTED", 1), new EnumValue("ABORTED", 2),
new EnumValue("OPEN", 3));
ts.defineEnumType("LockLevel",
new EnumValue("DB", 1),
new EnumValue("TABLE", 2),
ts.defineEnumType("LockLevel", new EnumValue("DB", 1), new EnumValue("TABLE", 2),
new EnumValue("PARTITION", 3));
}
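A brief aside (not in the commit): a defined enum type can be fetched back from the TypeSystem and a value resolved by name; fromValue is assumed here based on its use elsewhere in this codebase:
EnumType lockLevel = ts.getDataType(EnumType.class, "LockLevel");
Object db = lockLevel.fromValue("DB");   // assumed to resolve the EnumValue ("DB", 1)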
@@ -86,8 +76,7 @@ public class EnumTest extends BaseTest {
s.set("k", new BigDecimal(1));
s.set("l", new Date(1418265358440L));
s.set("m", Lists.asList(1, new Integer[]{1}));
s.set("n",
Lists.asList(BigDecimal.valueOf(1.1), new BigDecimal[]{BigDecimal.valueOf(1.1)}));
s.set("n", Lists.asList(BigDecimal.valueOf(1.1), new BigDecimal[]{BigDecimal.valueOf(1.1)}));
Map<String, Double> hm = Maps.newHashMap();
hm.put("a", 1.0);
hm.put("b", 2.0);
@@ -111,55 +100,46 @@ public class EnumTest extends BaseTest {
}
protected ClassType defineClassTypeWithEnum(TypeSystem ts) throws AtlasException {
return ts.defineClassType(createClassTypeDef("t4",
ImmutableList.<String>of(),
createRequiredAttrDef("a", DataTypes.INT_TYPE),
createOptionalAttrDef("b", DataTypes.BOOLEAN_TYPE),
createOptionalAttrDef("c", DataTypes.BYTE_TYPE),
createOptionalAttrDef("d", DataTypes.SHORT_TYPE),
createOptionalAttrDef("enum1", ts.getDataType(EnumType.class, "HiveObjectType")),
createOptionalAttrDef("e", DataTypes.INT_TYPE),
createOptionalAttrDef("f", DataTypes.INT_TYPE),
createOptionalAttrDef("g", DataTypes.LONG_TYPE),
createOptionalAttrDef("enum2", ts.getDataType(EnumType.class, "PrincipalType")),
createOptionalAttrDef("h", DataTypes.FLOAT_TYPE),
createOptionalAttrDef("i", DataTypes.DOUBLE_TYPE),
createOptionalAttrDef("j", DataTypes.BIGINTEGER_TYPE),
createOptionalAttrDef("k", DataTypes.BIGDECIMAL_TYPE),
createOptionalAttrDef("enum3", ts.getDataType(EnumType.class, "TxnState")),
createOptionalAttrDef("l", DataTypes.DATE_TYPE),
createOptionalAttrDef("m", ts.defineArrayType(DataTypes.INT_TYPE)),
createOptionalAttrDef("n", ts.defineArrayType(DataTypes.BIGDECIMAL_TYPE)),
createOptionalAttrDef("o",
ts.defineMapType(DataTypes.STRING_TYPE, DataTypes.DOUBLE_TYPE)),
createOptionalAttrDef("enum4", ts.getDataType(EnumType.class, "LockLevel"))));
return ts.defineClassType(
createClassTypeDef("t4", ImmutableList.<String>of(), createRequiredAttrDef("a", DataTypes.INT_TYPE),
createOptionalAttrDef("b", DataTypes.BOOLEAN_TYPE),
createOptionalAttrDef("c", DataTypes.BYTE_TYPE),
createOptionalAttrDef("d", DataTypes.SHORT_TYPE),
createOptionalAttrDef("enum1", ts.getDataType(EnumType.class, "HiveObjectType")),
createOptionalAttrDef("e", DataTypes.INT_TYPE), createOptionalAttrDef("f", DataTypes.INT_TYPE),
createOptionalAttrDef("g", DataTypes.LONG_TYPE),
createOptionalAttrDef("enum2", ts.getDataType(EnumType.class, "PrincipalType")),
createOptionalAttrDef("h", DataTypes.FLOAT_TYPE),
createOptionalAttrDef("i", DataTypes.DOUBLE_TYPE),
createOptionalAttrDef("j", DataTypes.BIGINTEGER_TYPE),
createOptionalAttrDef("k", DataTypes.BIGDECIMAL_TYPE),
createOptionalAttrDef("enum3", ts.getDataType(EnumType.class, "TxnState")),
createOptionalAttrDef("l", DataTypes.DATE_TYPE),
createOptionalAttrDef("m", ts.defineArrayType(DataTypes.INT_TYPE)),
createOptionalAttrDef("n", ts.defineArrayType(DataTypes.BIGDECIMAL_TYPE)),
createOptionalAttrDef("o", ts.defineMapType(DataTypes.STRING_TYPE, DataTypes.DOUBLE_TYPE)),
createOptionalAttrDef("enum4", ts.getDataType(EnumType.class, "LockLevel"))));
}
@Test
public void testStruct() throws AtlasException {
TypeSystem ts = getTypeSystem();
defineEnums(ts);
StructType structType = ts.defineStructType("t3",
true,
createRequiredAttrDef("a", DataTypes.INT_TYPE),
createOptionalAttrDef("b", DataTypes.BOOLEAN_TYPE),
createOptionalAttrDef("c", DataTypes.BYTE_TYPE),
StructType structType = ts.defineStructType("t3", true, createRequiredAttrDef("a", DataTypes.INT_TYPE),
createOptionalAttrDef("b", DataTypes.BOOLEAN_TYPE), createOptionalAttrDef("c", DataTypes.BYTE_TYPE),
createOptionalAttrDef("d", DataTypes.SHORT_TYPE),
createOptionalAttrDef("enum1", ts.getDataType(EnumType.class, "HiveObjectType")),
createOptionalAttrDef("e", DataTypes.INT_TYPE),
createOptionalAttrDef("f", DataTypes.INT_TYPE),
createOptionalAttrDef("e", DataTypes.INT_TYPE), createOptionalAttrDef("f", DataTypes.INT_TYPE),
createOptionalAttrDef("g", DataTypes.LONG_TYPE),
createOptionalAttrDef("enum2", ts.getDataType(EnumType.class, "PrincipalType")),
createOptionalAttrDef("h", DataTypes.FLOAT_TYPE),
createOptionalAttrDef("i", DataTypes.DOUBLE_TYPE),
createOptionalAttrDef("h", DataTypes.FLOAT_TYPE), createOptionalAttrDef("i", DataTypes.DOUBLE_TYPE),
createOptionalAttrDef("j", DataTypes.BIGINTEGER_TYPE),
createOptionalAttrDef("k", DataTypes.BIGDECIMAL_TYPE),
createOptionalAttrDef("enum3", ts.getDataType(EnumType.class, "TxnState")),
createOptionalAttrDef("l", DataTypes.DATE_TYPE),
createOptionalAttrDef("m", ts.defineArrayType(DataTypes.INT_TYPE)),
createOptionalAttrDef("n", ts.defineArrayType(DataTypes.BIGDECIMAL_TYPE)),
createOptionalAttrDef("o",
ts.defineMapType(DataTypes.STRING_TYPE, DataTypes.DOUBLE_TYPE)),
createOptionalAttrDef("o", ts.defineMapType(DataTypes.STRING_TYPE, DataTypes.DOUBLE_TYPE)),
createOptionalAttrDef("enum4", ts.getDataType(EnumType.class, "LockLevel")));
Struct s = createStructWithEnum("t3");
@@ -34,8 +34,7 @@ public class StructTest extends BaseTest {
public void setup() throws Exception {
super.setup();
structType = getTypeSystem().getDataType(StructType.class, STRUCT_TYPE_1);
recursiveStructType = getTypeSystem()
.getDataType(StructType.class, STRUCT_TYPE_2);
recursiveStructType = getTypeSystem().getDataType(StructType.class, STRUCT_TYPE_2);
}
@Test
@@ -63,15 +63,13 @@ public class TraitTest extends BaseTest {
*/
@Test
public void test1() throws AtlasException {
HierarchicalTypeDefinition A = createTraitTypeDef("A", null,
createRequiredAttrDef("a", DataTypes.INT_TYPE),
createOptionalAttrDef("b", DataTypes.BOOLEAN_TYPE),
createOptionalAttrDef("c", DataTypes.BYTE_TYPE),
HierarchicalTypeDefinition A = createTraitTypeDef("A", null, createRequiredAttrDef("a", DataTypes.INT_TYPE),
createOptionalAttrDef("b", DataTypes.BOOLEAN_TYPE), createOptionalAttrDef("c", DataTypes.BYTE_TYPE),
createOptionalAttrDef("d", DataTypes.SHORT_TYPE));
HierarchicalTypeDefinition B = createTraitTypeDef("B", ImmutableList.<String>of("A"),
createOptionalAttrDef("b", DataTypes.BOOLEAN_TYPE));
HierarchicalTypeDefinition C = createTraitTypeDef("C", ImmutableList.<String>of("A"),
createOptionalAttrDef("c", DataTypes.BYTE_TYPE));
HierarchicalTypeDefinition C =
createTraitTypeDef("C", ImmutableList.<String>of("A"), createOptionalAttrDef("c", DataTypes.BYTE_TYPE));
HierarchicalTypeDefinition D = createTraitTypeDef("D", ImmutableList.<String>of("B", "C"),
createOptionalAttrDef("d", DataTypes.SHORT_TYPE));
@@ -79,11 +77,12 @@ public class TraitTest extends BaseTest {
TraitType DType = (TraitType) getTypeSystem().getDataType(TraitType.class, "D");
// for(String aName : DType.fieldMapping().fields.keySet()) {
// System.out.println(String.format("nameToQualifiedName.put(\"%s\", \"%s\");", aName, DType.getQualifiedName(aName)));
// }
// for(String aName : DType.fieldMapping().fields.keySet()) {
// System.out.println(String.format("nameToQualifiedName.put(\"%s\", \"%s\");", aName, DType
// .getQualifiedName(aName)));
// }
Map<String,String> nameToQualifiedName = new HashMap();
Map<String, String> nameToQualifiedName = new HashMap();
{
nameToQualifiedName.put("d", "D.d");
nameToQualifiedName.put("b", "B.b");
@@ -172,15 +171,13 @@ public class TraitTest extends BaseTest {
@Test
public void testRandomOrder() throws AtlasException {
HierarchicalTypeDefinition A = createTraitTypeDef("A", null,
createRequiredAttrDef("a", DataTypes.INT_TYPE),
createOptionalAttrDef("b", DataTypes.BOOLEAN_TYPE),
createOptionalAttrDef("c", DataTypes.BYTE_TYPE),
HierarchicalTypeDefinition A = createTraitTypeDef("A", null, createRequiredAttrDef("a", DataTypes.INT_TYPE),
createOptionalAttrDef("b", DataTypes.BOOLEAN_TYPE), createOptionalAttrDef("c", DataTypes.BYTE_TYPE),
createOptionalAttrDef("d", DataTypes.SHORT_TYPE));
HierarchicalTypeDefinition B = createTraitTypeDef("B", ImmutableList.<String>of("A"),
createOptionalAttrDef("b", DataTypes.BOOLEAN_TYPE));
HierarchicalTypeDefinition C = createTraitTypeDef("C", ImmutableList.<String>of("A"),
createOptionalAttrDef("c", DataTypes.BYTE_TYPE));
HierarchicalTypeDefinition C =
createTraitTypeDef("C", ImmutableList.<String>of("A"), createOptionalAttrDef("c", DataTypes.BYTE_TYPE));
HierarchicalTypeDefinition D = createTraitTypeDef("D", ImmutableList.<String>of("B", "C"),
createOptionalAttrDef("d", DataTypes.SHORT_TYPE));
@@ -51,11 +51,10 @@ public class TypeInheritanceTest extends BaseTest {
*/
@Test
public void testSimpleInheritance() throws AtlasException {
HierarchicalTypeDefinition A = createClassTypeDef("A", null,
createRequiredAttrDef("a", DataTypes.INT_TYPE));
HierarchicalTypeDefinition A = createClassTypeDef("A", null, createRequiredAttrDef("a", DataTypes.INT_TYPE));
HierarchicalTypeDefinition B = createClassTypeDef("B", ImmutableList.of("A"),
createOptionalAttrDef("b", DataTypes.BOOLEAN_TYPE));
HierarchicalTypeDefinition B =
createClassTypeDef("B", ImmutableList.of("A"), createOptionalAttrDef("b", DataTypes.BOOLEAN_TYPE));
defineClasses(A, B);
@@ -80,12 +79,11 @@ public class TypeInheritanceTest extends BaseTest {
*/
@Test
public void testSimpleInheritanceWithOverrides() throws AtlasException {
HierarchicalTypeDefinition A = createClassTypeDef("A", null,
createRequiredAttrDef("a", DataTypes.INT_TYPE),
HierarchicalTypeDefinition A = createClassTypeDef("A", null, createRequiredAttrDef("a", DataTypes.INT_TYPE),
createRequiredAttrDef("b", DataTypes.BOOLEAN_TYPE));
HierarchicalTypeDefinition B = createClassTypeDef("B", ImmutableList.of("A"),
createOptionalAttrDef("b", DataTypes.BOOLEAN_TYPE));
HierarchicalTypeDefinition B =
createClassTypeDef("B", ImmutableList.of("A"), createOptionalAttrDef("b", DataTypes.BOOLEAN_TYPE));
defineClasses(A, B);
@@ -114,17 +112,16 @@ public class TypeInheritanceTest extends BaseTest {
*/
@Test
public void testMultiLevelInheritance() throws AtlasException {
HierarchicalTypeDefinition A = createClassTypeDef("A", null,
createRequiredAttrDef("a", DataTypes.INT_TYPE));
HierarchicalTypeDefinition A = createClassTypeDef("A", null, createRequiredAttrDef("a", DataTypes.INT_TYPE));
HierarchicalTypeDefinition B = createClassTypeDef("B", ImmutableList.of("A"),
createOptionalAttrDef("b", DataTypes.BOOLEAN_TYPE));
HierarchicalTypeDefinition B =
createClassTypeDef("B", ImmutableList.of("A"), createOptionalAttrDef("b", DataTypes.BOOLEAN_TYPE));
HierarchicalTypeDefinition C = createClassTypeDef("C", ImmutableList.of("B"),
createOptionalAttrDef("c", DataTypes.BYTE_TYPE));
HierarchicalTypeDefinition C =
createClassTypeDef("C", ImmutableList.of("B"), createOptionalAttrDef("c", DataTypes.BYTE_TYPE));
HierarchicalTypeDefinition D = createClassTypeDef("D", ImmutableList.of("C"),
createOptionalAttrDef("d", DataTypes.SHORT_TYPE));
HierarchicalTypeDefinition D =
createClassTypeDef("D", ImmutableList.of("C"), createOptionalAttrDef("d", DataTypes.SHORT_TYPE));
defineClasses(A, B, C, D);
@@ -167,17 +164,15 @@ public class TypeInheritanceTest extends BaseTest {
*/
@Test
public void testDiamondInheritance() throws AtlasException {
HierarchicalTypeDefinition A = createTraitTypeDef("A", null,
createRequiredAttrDef("a", DataTypes.INT_TYPE),
createOptionalAttrDef("b", DataTypes.BOOLEAN_TYPE),
createOptionalAttrDef("c", DataTypes.BYTE_TYPE),
createOptionalAttrDef("d", DataTypes.SHORT_TYPE));
HierarchicalTypeDefinition B = createTraitTypeDef("B", ImmutableList.of("A"),
createOptionalAttrDef("b", DataTypes.BOOLEAN_TYPE));
HierarchicalTypeDefinition C = createTraitTypeDef("C", ImmutableList.of("A"),
createOptionalAttrDef("c", DataTypes.BYTE_TYPE));
HierarchicalTypeDefinition D = createTraitTypeDef("D", ImmutableList.of("B", "C"),
HierarchicalTypeDefinition A = createTraitTypeDef("A", null, createRequiredAttrDef("a", DataTypes.INT_TYPE),
createOptionalAttrDef("b", DataTypes.BOOLEAN_TYPE), createOptionalAttrDef("c", DataTypes.BYTE_TYPE),
createOptionalAttrDef("d", DataTypes.SHORT_TYPE));
HierarchicalTypeDefinition B =
createTraitTypeDef("B", ImmutableList.of("A"), createOptionalAttrDef("b", DataTypes.BOOLEAN_TYPE));
HierarchicalTypeDefinition C =
createTraitTypeDef("C", ImmutableList.of("A"), createOptionalAttrDef("c", DataTypes.BYTE_TYPE));
HierarchicalTypeDefinition D =
createTraitTypeDef("D", ImmutableList.of("B", "C"), createOptionalAttrDef("d", DataTypes.SHORT_TYPE));
defineTraits(A, B, C, D);
@@ -49,29 +49,22 @@ public class TypeSystemTest extends BaseTest {
@Test
public void testGetTypeNames() throws Exception {
getTypeSystem().defineEnumType("enum_test",
new EnumValue("0", 0),
new EnumValue("1", 1),
new EnumValue("2", 2),
getTypeSystem().defineEnumType("enum_test", new EnumValue("0", 0), new EnumValue("1", 1), new EnumValue("2", 2),
new EnumValue("3", 3));
Assert.assertTrue(getTypeSystem().getTypeNames().contains("enum_test"));
}
@Test
public void testIsRegistered() throws Exception {
getTypeSystem().defineEnumType("enum_test",
new EnumValue("0", 0),
new EnumValue("1", 1),
new EnumValue("2", 2),
getTypeSystem().defineEnumType("enum_test", new EnumValue("0", 0), new EnumValue("1", 1), new EnumValue("2", 2),
new EnumValue("3", 3));
Assert.assertTrue(getTypeSystem().isRegistered("enum_test"));
}
@Test
public void testGetTraitsNames() throws Exception {
HierarchicalTypeDefinition<TraitType> classificationTraitDefinition =
TypesUtil.createTraitTypeDef("Classification",
ImmutableList.<String>of(),
HierarchicalTypeDefinition<TraitType> classificationTraitDefinition = TypesUtil
.createTraitTypeDef("Classification", ImmutableList.<String>of(),
TypesUtil.createRequiredAttrDef("tag", DataTypes.STRING_TYPE));
HierarchicalTypeDefinition<TraitType> piiTrait =
TypesUtil.createTraitTypeDef("PII", ImmutableList.<String>of());
@@ -86,23 +79,13 @@ public class TypeSystemTest extends BaseTest {
HierarchicalTypeDefinition<TraitType> financeTrait =
TypesUtil.createTraitTypeDef("Finance", ImmutableList.<String>of());
getTypeSystem().defineTypes(
ImmutableList.<StructTypeDefinition>of(),
ImmutableList.of(classificationTraitDefinition, piiTrait, phiTrait, pciTrait,
soxTrait, secTrait, financeTrait),
ImmutableList.<HierarchicalTypeDefinition<ClassType>>of());
getTypeSystem().defineTypes(ImmutableList.<StructTypeDefinition>of(), ImmutableList
.of(classificationTraitDefinition, piiTrait, phiTrait, pciTrait, soxTrait, secTrait,
financeTrait), ImmutableList.<HierarchicalTypeDefinition<ClassType>>of());
final ImmutableList<String> traitsNames = getTypeSystem().getTypeNamesByCategory(DataTypes.TypeCategory.TRAIT);
Assert.assertEquals(traitsNames.size(), 7);
List traits = Arrays.asList(new String[]{
"Classification",
"PII",
"PHI",
"PCI",
"SOX",
"SEC",
"Finance",
});
List traits = Arrays.asList(new String[]{"Classification", "PII", "PHI", "PCI", "SOX", "SEC", "Finance",});
Assert.assertFalse(Collections.disjoint(traitsNames, traits));
}
@@ -122,17 +105,16 @@ public class TypeSystemTest extends BaseTest {
String structName = random();
String attrType = random();
StructTypeDefinition structType = createStructTypeDef(structName,
createRequiredAttrDef(attrType, DataTypes.STRING_TYPE));
StructTypeDefinition structType =
createStructTypeDef(structName, createRequiredAttrDef(attrType, DataTypes.STRING_TYPE));
String className = random();
HierarchicalTypeDefinition<ClassType> classType =
createClassTypeDef(className, ImmutableList.<String>of(),
createRequiredAttrDef(attrType, DataTypes.STRING_TYPE));
HierarchicalTypeDefinition<ClassType> classType = createClassTypeDef(className, ImmutableList.<String>of(),
createRequiredAttrDef(attrType, DataTypes.STRING_TYPE));
String traitName = random();
HierarchicalTypeDefinition<TraitType> traitType = createTraitTypeDef(traitName,
ImmutableList.<String>of(), createRequiredAttrDef(attrType, DataTypes.INT_TYPE));
HierarchicalTypeDefinition<TraitType> traitType = createTraitTypeDef(traitName, ImmutableList.<String>of(),
createRequiredAttrDef(attrType, DataTypes.INT_TYPE));
ts.defineTypes(ImmutableList.of(structType), ImmutableList.of(traitType), ImmutableList.of(classType));
}
@@ -25,11 +25,10 @@ import org.testng.annotations.Test;
public class ValidationTest {
@DataProvider(name = "attributeData")
private Object[][] createAttributeData() {
return new String[][]{
{null, "type"}, {"", "type"}, {"name", null}, {"name", ""}};
return new String[][]{{null, "type"}, {"", "type"}, {"name", null}, {"name", ""}};
}
@Test (dataProvider = "attributeData", expectedExceptions = {IllegalArgumentException.class})
@Test(dataProvider = "attributeData", expectedExceptions = {IllegalArgumentException.class})
public void testAttributes(String name, String type) {
TypesUtil.createRequiredAttrDef(name, type);
}
@@ -39,7 +38,7 @@ public class ValidationTest {
return new String[][]{{null}, {""}};
}
@Test (dataProvider = "enumValueData", expectedExceptions = {IllegalArgumentException.class})
@Test(dataProvider = "enumValueData", expectedExceptions = {IllegalArgumentException.class})
public void testEnumValue(String name) {
new EnumValue(name, 1);
}
@@ -50,7 +49,7 @@ public class ValidationTest {
return new Object[][]{{null, value}, {"", value}, {"name"}};
}
@Test (dataProvider = "enumTypeData", expectedExceptions = {IllegalArgumentException.class})
@Test(dataProvider = "enumTypeData", expectedExceptions = {IllegalArgumentException.class})
public void testEnumType(String name, EnumValue... values) {
new EnumTypeDefinition(name, values);
}
@@ -61,7 +60,7 @@ public class ValidationTest {
return new Object[][]{{null, value}, {"", value}, {"name"}};
}
@Test (dataProvider = "structTypeData", expectedExceptions = {IllegalArgumentException.class})
@Test(dataProvider = "structTypeData", expectedExceptions = {IllegalArgumentException.class})
public void testStructType(String name, AttributeDefinition... values) {
new StructTypeDefinition(name, values);
}
@@ -71,15 +70,17 @@ public class ValidationTest {
return new Object[][]{{null}, {""}};
}
@Test (dataProvider = "classTypeData", expectedExceptions = {IllegalArgumentException.class})
@Test(dataProvider = "classTypeData", expectedExceptions = {IllegalArgumentException.class})
public void testClassType(String name) {
AttributeDefinition value = TypesUtil.createRequiredAttrDef("name", "type");;
AttributeDefinition value = TypesUtil.createRequiredAttrDef("name", "type");
;
TypesUtil.createClassTypeDef(name, ImmutableList.of("super"), value);
}
@Test (dataProvider = "classTypeData", expectedExceptions = {IllegalArgumentException.class})
@Test(dataProvider = "classTypeData", expectedExceptions = {IllegalArgumentException.class})
public void testTraitType(String name) {
AttributeDefinition value = TypesUtil.createRequiredAttrDef("name", "type");;
AttributeDefinition value = TypesUtil.createRequiredAttrDef("name", "type");
;
TypesUtil.createTraitTypeDef(name, ImmutableList.of("super"), value);
}
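To make the negative cases concrete, a sketch (not part of the commit) of what each provider row exercises:
try {
    TypesUtil.createRequiredAttrDef("", "type"); // blank-name row from attributeData
} catch (IllegalArgumentException expected) {
    // definition-time validation rejects null/empty names, as these tests assert
}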