Commit d85002a2 by Suma S

Merge pull request #106 from hortonworks/master

Merging from master to DAL
parents 466338d9 9728c6ad
...@@ -40,25 +40,6 @@
<dependencies>
<dependency>
<groupId>org.apache.hadoop.metadata</groupId>
<artifactId>metadata-client</artifactId>
<version>${version}</version>
<exclusions>
<exclusion>
<groupId>com.google.guava</groupId>
<artifactId>guava</artifactId>
</exclusion>
</exclusions>
<scope>runtime</scope>
<type>test-jar</type>
</dependency>
<dependency>
<groupId>org.apache.hadoop.metadata</groupId>
<artifactId>metadata-typesystem</artifactId>
</dependency>
<dependency>
<groupId>org.apache.hadoop</groupId>
<artifactId>hadoop-minikdc</artifactId>
<version>${hadoop.version}</version>
...@@ -99,6 +80,25 @@
</dependency>
<dependency>
<groupId>org.apache.hadoop.metadata</groupId>
<artifactId>metadata-client</artifactId>
<version>${version}</version>
<exclusions>
<exclusion>
<groupId>com.google.guava</groupId>
<artifactId>guava</artifactId>
</exclusion>
</exclusions>
<scope>runtime</scope>
<type>test-jar</type>
</dependency>
<dependency>
<groupId>org.apache.hadoop.metadata</groupId>
<artifactId>metadata-typesystem</artifactId>
</dependency>
<dependency>
<groupId>org.apache.hadoop</groupId>
<artifactId>hadoop-client</artifactId>
<version>${hadoop.version}</version>
......
...@@ -30,6 +30,7 @@ import org.apache.hadoop.hive.ql.metadata.Hive;
import org.apache.hadoop.hive.ql.metadata.Partition;
import org.apache.hadoop.hive.ql.metadata.Table;
import org.apache.hadoop.metadata.MetadataServiceClient;
import org.apache.hadoop.metadata.MetadataServiceException;
import org.apache.hadoop.metadata.hive.model.HiveDataModelGenerator;
import org.apache.hadoop.metadata.hive.model.HiveDataTypes;
import org.apache.hadoop.metadata.typesystem.Referenceable;
...@@ -91,31 +92,6 @@ public class HiveMetaStoreBridge {
}
}
/**
* Gets reference for the database
*
*
* @param databaseName
* @param clusterName cluster name
* @return Reference for database if exists, else null
* @throws Exception
*/
private Referenceable getDatabaseReference(String databaseName, String clusterName) throws Exception {
LOG.debug("Getting reference for database {}", databaseName);
String typeName = HiveDataTypes.HIVE_DB.getName();
MetadataServiceClient dgiClient = getMetadataServiceClient();
String dslQuery = String.format("%s where name = '%s' and clusterName = '%s'",
HiveDataTypes.HIVE_DB.getName(), databaseName, clusterName);
JSONArray results = dgiClient.searchByDSL(dslQuery);
if (results.length() == 0) {
return null;
} else {
String guid = getGuidFromDSLResponse(results.getJSONObject(0));
return new Referenceable(guid, typeName, null);
}
}
public Referenceable registerDatabase(String databaseName) throws Exception {
Referenceable dbRef = getDatabaseReference(databaseName, clusterName);
if (dbRef == null) {
...@@ -169,6 +145,35 @@ public class HiveMetaStoreBridge {
}
}
/**
* Gets reference for the database
*
*
* @param databaseName
* @param clusterName cluster name
* @return Reference for database if exists, else null
* @throws Exception
*/
private Referenceable getDatabaseReference(String databaseName, String clusterName) throws Exception {
LOG.debug("Getting reference for database {}", databaseName);
String typeName = HiveDataTypes.HIVE_DB.getName();
String dslQuery = String.format("%s where name = '%s' and clusterName = '%s'", HiveDataTypes.HIVE_DB.getName(),
databaseName, clusterName);
return getEntityReferenceFromDSL(typeName, dslQuery);
}
private Referenceable getEntityReferenceFromDSL(String typeName, String dslQuery) throws Exception {
MetadataServiceClient dgiClient = getMetadataServiceClient();
JSONArray results = dgiClient.searchByDSL(dslQuery);
if (results.length() == 0) {
return null;
} else {
String guid = getGuidFromDSLResponse(results.getJSONObject(0));
return new Referenceable(guid, typeName, null);
}
}
/**
* Gets reference for the table
*
* @param dbName
...@@ -180,19 +185,47 @@ public class HiveMetaStoreBridge {
LOG.debug("Getting reference for table {}.{}", dbName, tableName);
String typeName = HiveDataTypes.HIVE_TABLE.getName();
MetadataServiceClient dgiClient = getMetadataServiceClient();
String query = String.format("%s where name = '%s', dbName where name = '%s' and clusterName = '%s'",
HiveDataTypes.HIVE_TABLE.getName(), tableName, dbName, clusterName);
JSONArray results = dgiClient.searchByDSL(query);
// String dslQuery = String.format("%s as t where name = '%s' dbName where name = '%s' and "
// + "clusterName = '%s' select t",
// HiveDataTypes.HIVE_TABLE.getName(), tableName, dbName, clusterName);
String dbType = HiveDataTypes.HIVE_DB.getName();
String gremlinQuery = String.format("g.V.has('__typeName', '%s').has('%s.name', '%s').as('t').out"
+ "('__%s.dbName').has('%s.name', '%s').has('%s.clusterName', '%s').back('t').toList()",
typeName, typeName, tableName, typeName, dbType, dbName, dbType, clusterName);
return getEntityReferenceFromGremlin(typeName, gremlinQuery);
}
private Referenceable getEntityReferenceFromGremlin(String typeName, String gremlinQuery) throws MetadataServiceException,
JSONException {
MetadataServiceClient client = getMetadataServiceClient();
JSONObject response = client.searchByGremlin(gremlinQuery);
JSONArray results = response.getJSONArray(MetadataServiceClient.RESULTS);
if (results.length() == 0) {
return null;
} else {
//There should be just one instance with the given name
String guid = getGuidFromDSLResponse(results.getJSONObject(0));
LOG.debug("Got reference for table {}.{} = {}", dbName, tableName, guid);
}
String guid = results.getJSONObject(0).getString("__guid");
return new Referenceable(guid, typeName, null);
}
private Referenceable getPartitionReference(String dbName, String tableName, List<String> values) throws Exception {
String valuesStr = "['" + StringUtils.join(values, "', '") + "']";
LOG.debug("Getting reference for partition for {}.{} with values {}", dbName, tableName, valuesStr);
String typeName = HiveDataTypes.HIVE_PARTITION.getName();
// String dslQuery = String.format("%s as p where values = %s, tableName where name = '%s', "
// + "dbName where name = '%s' and clusterName = '%s' select p", typeName, valuesStr, tableName,
// dbName, clusterName);
String dbType = HiveDataTypes.HIVE_DB.getName();
String tableType = HiveDataTypes.HIVE_TABLE.getName();
String gremlinQuery = String.format("g.V.has('__typeName', '%s').has('%s.values', %s).as('p')."
+ "out('__%s.tableName').has('%s.name', '%s').out('__%s.dbName').has('%s.name', '%s')"
+ ".has('%s.clusterName', '%s').back('p').toList()", typeName, typeName, valuesStr, typeName,
tableType, tableName, tableType, dbType, dbName, dbType, clusterName);
return getEntityReferenceFromGremlin(typeName, gremlinQuery);
}
private String getGuidFromDSLResponse(JSONObject jsonObject) throws JSONException {
...@@ -292,14 +325,27 @@ public class HiveMetaStoreBridge {
}
}
//todo should be idempotent
public Referenceable registerPartition(Partition partition) throws Exception {
String dbName = partition.getTable().getDbName();
String tableName = partition.getTable().getTableName();
Referenceable dbRef = registerDatabase(dbName);
Referenceable tableRef = registerTable(dbName, tableName);
Referenceable sdRef = getSDForTable(dbName, tableName);
return importPartition(partition, dbRef, tableRef, sdRef);
}
private Referenceable importPartition(Partition hivePart,
Referenceable dbReferenceable,
Referenceable tableReferenceable,
Referenceable sdReferenceable) throws Exception {
LOG.info("Importing partition for {}.{} with values {}", dbReferenceable, tableReferenceable,
StringUtils.join(hivePart.getValues(), ","));
Referenceable partRef = new Referenceable(HiveDataTypes.HIVE_PARTITION.getName());
String dbName = hivePart.getTable().getDbName();
String tableName = hivePart.getTable().getTableName();
Referenceable partRef = getPartitionReference(dbName, tableName, hivePart.getValues());
if (partRef == null) {
partRef = new Referenceable(HiveDataTypes.HIVE_PARTITION.getName());
partRef.set("values", hivePart.getValues()); partRef.set("values", hivePart.getValues());
partRef.set("dbName", dbReferenceable); partRef.set("dbName", dbReferenceable);
...@@ -315,8 +361,12 @@ public class HiveMetaStoreBridge { ...@@ -315,8 +361,12 @@ public class HiveMetaStoreBridge {
partRef.set("sd", sdReferenceable); partRef.set("sd", sdReferenceable);
partRef.set("parameters", hivePart.getParameters()); partRef.set("parameters", hivePart.getParameters());
partRef = createInstance(partRef);
return createInstance(partRef); } else {
LOG.info("Partition {}.{} with values {} is already registered with id {}", dbName, tableName,
StringUtils.join(hivePart.getValues(), ","), partRef.getId().id);
}
return partRef;
}
private void importIndexes(String db, String table,
......
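Note on the Gremlin lookups introduced above: the queries are assembled with String.format, which is hard to read in diff form. The following is a minimal, illustrative sketch (not part of this commit) of what the table lookup expands to; the type names "hive_table" and "hive_db" and the sample table, database and cluster values are assumptions made only for this example.

public class GremlinQuerySketch {
    public static void main(String[] args) {
        String typeName = "hive_table";   // assumed result of HiveDataTypes.HIVE_TABLE.getName()
        String dbType = "hive_db";        // assumed result of HiveDataTypes.HIVE_DB.getName()
        String tableName = "sales";
        String dbName = "default";
        String clusterName = "test";
        // Same format string as the table lookup above
        String gremlinQuery = String.format("g.V.has('__typeName', '%s').has('%s.name', '%s').as('t').out"
                + "('__%s.dbName').has('%s.name', '%s').has('%s.clusterName', '%s').back('t').toList()",
                typeName, typeName, tableName, typeName, dbType, dbName, dbType, clusterName);
        // Prints (single line, wrapped here for readability):
        // g.V.has('__typeName', 'hive_table').has('hive_table.name', 'sales').as('t')
        //   .out('__hive_table.dbName').has('hive_db.name', 'default')
        //   .has('hive_db.clusterName', 'test').back('t').toList()
        System.out.println(gremlinQuery);
    }
}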
...@@ -189,17 +189,28 @@ public class HiveHook implements ExecuteWithHookContext {
switch (event.operation) {
case CREATEDATABASE:
Set<WriteEntity> outputs = event.outputs;
for (WriteEntity entity : outputs) {
if (entity.getType() == Entity.Type.DATABASE) {
dgiBridge.registerDatabase(entity.getDatabase().getName());
}
}
handleCreateDB(dgiBridge, event);
break;
case CREATETABLE:
outputs = event.outputs;
for (WriteEntity entity : outputs) {
handleCreateTable(dgiBridge, event);
break;
case CREATETABLE_AS_SELECT:
case CREATEVIEW:
case LOAD:
case EXPORT:
case IMPORT:
case QUERY:
registerProcess(dgiBridge, event);
break;
default:
}
}
private void handleCreateTable(HiveMetaStoreBridge dgiBridge, HiveEvent event) throws Exception {
for (WriteEntity entity : event.outputs) {
if (entity.getType() == Entity.Type.TABLE) {
Table table = entity.getTable();
...@@ -209,17 +220,17 @@ public class HiveHook implements ExecuteWithHookContext {
dgiBridge.registerTable(dbReferenceable, table.getDbName(), table.getTableName());
}
}
break;
case CREATETABLE_AS_SELECT:
registerCTAS(dgiBridge, event);
break;
default:
}
private void handleCreateDB(HiveMetaStoreBridge dgiBridge, HiveEvent event) throws Exception {
for (WriteEntity entity : event.outputs) {
if (entity.getType() == Entity.Type.DATABASE) {
dgiBridge.registerDatabase(entity.getDatabase().getName());
}
}
}
private void registerCTAS(HiveMetaStoreBridge dgiBridge, HiveEvent event) throws Exception {
private void registerProcess(HiveMetaStoreBridge dgiBridge, HiveEvent event) throws Exception {
Set<ReadEntity> inputs = event.inputs;
Set<WriteEntity> outputs = event.outputs;
...@@ -243,7 +254,7 @@ public class HiveHook implements ExecuteWithHookContext {
processReferenceable.set("userName", event.user);
List<Referenceable> source = new ArrayList<>();
for (ReadEntity readEntity : inputs) {
if (readEntity.getTyp() == Entity.Type.TABLE) {
if (readEntity.getType() == Entity.Type.TABLE) {
Table table = readEntity.getTable();
String dbName = table.getDbName().toLowerCase();
source.add(dgiBridge.registerTable(dbName, table.getTableName()));
...@@ -252,11 +263,14 @@ public class HiveHook implements ExecuteWithHookContext {
processReferenceable.set("inputTables", source);
List<Referenceable> target = new ArrayList<>();
for (WriteEntity writeEntity : outputs) {
if (writeEntity.getTyp() == Entity.Type.TABLE) {
if (writeEntity.getType() == Entity.Type.TABLE || writeEntity.getType() == Entity.Type.PARTITION) {
Table table = writeEntity.getTable();
String dbName = table.getDbName().toLowerCase();
target.add(dgiBridge.registerTable(dbName, table.getTableName()));
}
if (writeEntity.getType() == Entity.Type.PARTITION) {
dgiBridge.registerPartition(writeEntity.getPartition());
}
}
processReferenceable.set("outputTables", target);
processReferenceable.set("queryText", queryStr);
......
...@@ -26,10 +26,13 @@ import org.apache.hadoop.metadata.MetadataServiceClient;
import org.apache.hadoop.metadata.hive.bridge.HiveMetaStoreBridge;
import org.apache.hadoop.metadata.hive.model.HiveDataTypes;
import org.codehaus.jettison.json.JSONArray;
import org.codehaus.jettison.json.JSONObject;
import org.testng.Assert;
import org.testng.annotations.BeforeClass;
import org.testng.annotations.Test;
import java.io.File;
public class HiveHookIT {
private static final String DGI_URL = "http://localhost:21000/";
private static final String CLUSTER_NAME = "test";
...@@ -59,6 +62,9 @@ public class HiveHookIT {
hiveConf.set("javax.jdo.option.ConnectionURL", "jdbc:derby:./target/metastore_db;create=true");
hiveConf.set("hive.hook.dgi.synchronous", "true");
hiveConf.set(HiveMetaStoreBridge.HIVE_CLUSTER_NAME, CLUSTER_NAME);
//weird, hive prepends test_ to table name
hiveConf.set("hive.test.mode", "true");
hiveConf.set("fs.pfile.impl", "org.apache.hadoop.fs.ProxyLocalFileSystem");
return hiveConf;
}
...@@ -69,7 +75,7 @@
@Test
public void testCreateDatabase() throws Exception {
String dbName = "db" + RandomStringUtils.randomAlphanumeric(5).toLowerCase();
String dbName = "db" + random();
runCommand("create database " + dbName);
assertDatabaseIsRegistered(dbName);
...@@ -77,15 +83,15 @@
@Test
public void testCreateTable() throws Exception {
String dbName = "db" + RandomStringUtils.randomAlphanumeric(5).toLowerCase();
String dbName = "db" + random();
runCommand("create database " + dbName);
String tableName = "table" + RandomStringUtils.randomAlphanumeric(5).toLowerCase();
String tableName = "table" + random();
runCommand("create table " + dbName + "." + tableName + "(id int, name string)");
assertTableIsRegistered(dbName, tableName);
tableName = "table" + RandomStringUtils.randomAlphanumeric(5).toLowerCase();
tableName = "table" + random();
runCommand("create table " + tableName + "(id int, name string)");
runCommand("create table " + tableName + "(id int, name string) partitioned by(dt string)");
assertTableIsRegistered("default", tableName);
//Create table where database doesn't exist, will create database instance as well
...@@ -94,10 +100,10 @@
@Test
public void testCTAS() throws Exception {
String tableName = "table" + RandomStringUtils.randomAlphanumeric(5).toLowerCase();
String tableName = "table" + random();
runCommand("create table " + tableName + "(id int, name string)");
String ctasTableName = "table" + RandomStringUtils.randomAlphanumeric(5).toLowerCase();
String ctasTableName = "table" + random();
String query = "create table " + ctasTableName + " as select * from " + tableName;
runCommand(query);
...@@ -105,24 +111,125 @@
assertProcessIsRegistered(query);
}
@Test
public void testCreateView() throws Exception {
String tableName = "table" + random();
runCommand("create table " + tableName + "(id int, name string)");
String viewName = "table" + random();
String query = "create view " + viewName + " as select * from " + tableName;
runCommand(query);
assertTableIsRegistered("default", viewName);
assertProcessIsRegistered(query);
}
@Test
public void testLoadData() throws Exception {
String tableName = "table" + random();
runCommand("create table test_" + tableName + "(id int, name string)");
String loadFile = file("load");
String query = "load data local inpath 'file://" + loadFile + "' into table " + tableName;
runCommand(query);
assertProcessIsRegistered(query);
}
@Test
public void testInsert() throws Exception {
String tableName = "table" + random();
runCommand("create table " + tableName + "(id int, name string) partitioned by(dt string)");
String insertTableName = "table" + random();
runCommand("create table test_" + insertTableName + "(name string) partitioned by(dt string)");
String query = "insert into " + insertTableName + " partition(dt = '2015-01-01') select name from "
+ tableName + " where dt = '2015-01-01'";
runCommand(query);
assertProcessIsRegistered(query);
assertPartitionIsRegistered("default", "test_" + insertTableName, "2015-01-01");
}
private String random() {
return RandomStringUtils.randomAlphanumeric(5).toLowerCase();
}
private String file(String tag) throws Exception {
String filename = "./target/" + tag + "-data-" + random();
File file = new File(filename);
file.createNewFile();
return file.getAbsolutePath();
}
private String mkdir(String tag) throws Exception {
String filename = "./target/" + tag + "-data-" + random();
File file = new File(filename);
file.mkdirs();
return file.getAbsolutePath();
}
@Test
public void testExportImport() throws Exception {
String tableName = "table" + random();
runCommand("create table test_" + tableName + "(name string)");
String filename = "pfile://" + mkdir("export");
String query = "export table " + tableName + " to '" + filename + "'";
runCommand(query);
assertProcessIsRegistered(query);
tableName = "table" + random();
runCommand("create table " + tableName + "(name string)");
query = "import table " + tableName + " from '" + filename + "'";
runCommand(query);
assertProcessIsRegistered(query);
}
@Test
public void testSelect() throws Exception {
String tableName = "table" + random();
runCommand("create table " + tableName + "(id int, name string)");
String query = "select * from " + tableName;
runCommand(query);
assertProcessIsRegistered(query);
}
private void assertProcessIsRegistered(String queryStr) throws Exception {
String dslQuery = String.format("%s where queryText = '%s'", HiveDataTypes.HIVE_PROCESS.getName(), queryStr);
String dslQuery = String.format("%s where queryText = \"%s\"", HiveDataTypes.HIVE_PROCESS.getName(), queryStr);
assertInstanceIsRegistered(dslQuery);
assertEntityIsRegistered(dslQuery);
}
private void assertTableIsRegistered(String dbName, String tableName) throws Exception {
String query = String.format("%s where name = '%s', dbName where name = '%s' and clusterName = '%s'",
HiveDataTypes.HIVE_TABLE.getName(), tableName, dbName, CLUSTER_NAME);
assertInstanceIsRegistered(query);
assertEntityIsRegistered(query);
}
private void assertDatabaseIsRegistered(String dbName) throws Exception {
String query = String.format("%s where name = '%s' and clusterName = '%s'", HiveDataTypes.HIVE_DB.getName(),
dbName, CLUSTER_NAME);
assertInstanceIsRegistered(query);
assertEntityIsRegistered(query);
}
private void assertPartitionIsRegistered(String dbName, String tableName, String value) throws Exception {
String typeName = HiveDataTypes.HIVE_PARTITION.getName();
String dbType = HiveDataTypes.HIVE_DB.getName();
String tableType = HiveDataTypes.HIVE_TABLE.getName();
String gremlinQuery = String.format("g.V.has('__typeName', '%s').has('%s.values', ['%s']).as('p')."
+ "out('__%s.tableName').has('%s.name', '%s').out('__%s.dbName').has('%s.name', '%s')"
+ ".has('%s.clusterName', '%s').back('p').toList()", typeName, typeName, value, typeName,
tableType, tableName, tableType, dbType, dbName, dbType, CLUSTER_NAME);
JSONObject response = dgiCLient.searchByGremlin(gremlinQuery);
JSONArray results = response.getJSONArray(MetadataServiceClient.RESULTS);
Assert.assertEquals(results.length(), 1);
}
private void assertInstanceIsRegistered(String dslQuery) throws Exception{
private void assertEntityIsRegistered(String dslQuery) throws Exception{
JSONArray results = dgiCLient.searchByDSL(dslQuery);
Assert.assertEquals(results.length(), 1);
}
......
...@@ -200,7 +200,7 @@ public class MetadataServiceClient {
public Referenceable getEntity(String guid) throws MetadataServiceException {
JSONObject jsonResponse = callAPI(API.GET_ENTITY, null, guid);
try {
String entityInstanceDefinition = jsonResponse.getString(MetadataServiceClient.GUID);
String entityInstanceDefinition = jsonResponse.getString(MetadataServiceClient.DEFINITION);
return InstanceSerialization.fromJsonReferenceable(entityInstanceDefinition, true);
} catch (JSONException e) {
throw new MetadataServiceException(e);
......
...@@ -101,6 +101,9 @@
<StagingId>apache-staging</StagingId>
<StagingName>Apache Release Distribution Repository</StagingName>
<StagingUrl>https://repository.apache.org/content/groups/staging</StagingUrl>
<!-- skips checkstyle and find bugs -->
<skipCheck>false</skipCheck>
</properties>
<profiles>
...@@ -611,6 +614,12 @@
<version>1.8.5</version>
<scope>provided</scope>
</dependency>
<dependency>
<groupId>commons-lang</groupId>
<artifactId>commons-lang</artifactId>
<version>2.4</version>
</dependency>
</dependencies>
</dependencyManagement>
...@@ -971,6 +980,7 @@
</goals>
<phase>verify</phase>
<configuration>
<skip>${skipCheck}</skip>
<consoleOutput>true</consoleOutput>
<includeTestSourceDirectory>true</includeTestSourceDirectory>
<configLocation>src/build/checkstyle.xml</configLocation>
...@@ -988,6 +998,7 @@
<!--debug>true</debug -->
<xmlOutput>true</xmlOutput>
<failOnError>false</failOnError>
<skip>${skipCheck}</skip>
</configuration>
<executions>
<execution>
......
...@@ -23,6 +23,8 @@ import com.thinkaurelius.titan.core.TitanIndexQuery;
import com.thinkaurelius.titan.core.TitanProperty;
import com.thinkaurelius.titan.core.TitanVertex;
import com.tinkerpop.blueprints.Vertex;
import com.tinkerpop.gremlin.groovy.Gremlin;
import com.tinkerpop.gremlin.java.GremlinPipeline;
import org.apache.hadoop.metadata.MetadataServiceClient;
import org.apache.hadoop.metadata.discovery.DiscoveryException;
import org.apache.hadoop.metadata.discovery.DiscoveryService;
......
...@@ -199,7 +199,7 @@ public class GraphBackedSearchIndexer implements SearchIndexer {
break;
case ENUM:
createVertexMixedIndex(propertyName, Integer.class);
createVertexMixedIndex(propertyName, String.class);
break;
case ARRAY:
......
...@@ -23,6 +23,7 @@ import com.thinkaurelius.titan.core.TitanGraph;
import com.tinkerpop.blueprints.Compare;
import com.tinkerpop.blueprints.GraphQuery;
import com.tinkerpop.blueprints.Vertex;
import org.apache.commons.lang.RandomStringUtils;
import org.apache.hadoop.metadata.RepositoryMetadataModule;
import org.apache.hadoop.metadata.TestUtils;
import org.apache.hadoop.metadata.discovery.graph.GraphBackedDiscoveryService;
...@@ -59,6 +60,9 @@ import java.util.Iterator;
import java.util.List;
import java.util.UUID;
import static org.apache.hadoop.metadata.typesystem.types.utils.TypesUtil.createRequiredAttrDef;
import static org.apache.hadoop.metadata.typesystem.types.utils.TypesUtil.createStructTypeDef;
/**
* GraphBackedMetadataRepository test
*
...@@ -588,4 +592,25 @@ public class GraphBackedMetadataRepositoryTest {
ClassType tableType = typeSystem.getDataType(ClassType.class, TABLE_TYPE);
return tableType.convert(tableInstance, Multiplicity.REQUIRED);
}
private String random() {
return RandomStringUtils.random(10);
}
@Test
public void testUTFValues() throws Exception {
Referenceable hrDept = new Referenceable("Department");
Referenceable john = new Referenceable("Person");
john.set("name", random());
john.set("department", hrDept);
hrDept.set("name", random());
hrDept.set("employees", ImmutableList.of(john));
ClassType deptType = typeSystem.getDataType(ClassType.class, "Department");
ITypedReferenceableInstance hrDept2 = deptType.convert(hrDept, Multiplicity.REQUIRED);
guid = repositoryService.createEntity(hrDept2);
Assert.assertNotNull(guid);
}
}
...@@ -124,6 +124,11 @@
<groupId>com.google.inject</groupId>
<artifactId>guice</artifactId>
</dependency>
<dependency>
<groupId>commons-lang</groupId>
<artifactId>commons-lang</artifactId>
</dependency>
</dependencies>
<build>
......
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.metadata;
import java.util.Arrays;
import java.util.Collection;
public class ParamChecker {
/**
* Check that a value is not null. If null throws an IllegalArgumentException.
*
* @param obj value.
* @param name parameter name for the exception message.
* @return the given value.
*/
public static <T> T notNull(T obj, String name) {
if (obj == null) {
throw new IllegalArgumentException(name + " cannot be null");
}
return obj;
}
/**
* Check that a list is not null and that none of its elements is null. If the list is null or empty, or has null
* elements, throws an IllegalArgumentException.
* @param list the list of T.
* @param name parameter name for the exception message.
*/
public static <T> Collection<T> notNullElements(Collection<T> list, String name) {
notEmpty(list, name);
for (T ele : list) {
notNull(ele, String.format("Collection %s element %s", name, ele));
}
return list;
}
/**
* Check that an array is not null and that none of its elements is null. If the array is null or empty, or has null
* elements, throws an IllegalArgumentException.
* @param array the array of T.
* @param name parameter name for the exception message.
*/
public static <T> T[] notNullElements(T[] array, String name) {
notEmpty(Arrays.asList(array), name);
for (T ele : array) {
notNull(ele, String.format("Collection %s element %s", name, ele));
}
return array;
}
/**
* Check that a list is not null and not empty.
* @param list the list of T.
* @param name parameter name for the exception message.
*/
public static <T> Collection<T> notEmpty(Collection<T> list, String name) {
notNull(list, name);
if (list.isEmpty()) {
throw new IllegalArgumentException(String.format("Collection %s is empty", name));
}
return list;
}
/**
* Check that a string is not null and not empty. If null or empty, throws an IllegalArgumentException.
*
* @param value value.
* @param name parameter name for the exception message.
* @return the given value.
*/
public static String notEmpty(String value, String name) {
return notEmpty(value, name, null);
}
/**
* Check that a string is not empty if it is not null.
*
* @param value value.
* @param name parameter name for the exception message.
* @return the given value.
*/
public static String notEmptyIfNotNull(String value, String name) {
return notEmptyIfNotNull(value, name, null);
}
/**
* Check that a string is not empty if it is not null.
*
* @param value value.
* @param name parameter name for the exception message.
* @return the given value.
*/
public static String notEmptyIfNotNull(String value, String name, String info) {
if (value == null) {
return value;
}
if (value.trim().length() == 0) {
throw new IllegalArgumentException(name + " cannot be empty" + (info == null ? "" : ", " + info));
}
return value.trim();
}
/**
* Check that a string is not null and not empty. If null or empty, throws an IllegalArgumentException.
*
* @param value value.
* @param name parameter name for the exception message.
* @param info additional information to be printed with the exception message
* @return the given value.
*/
public static String notEmpty(String value, String name, String info) {
if (value == null) {
throw new IllegalArgumentException(name + " cannot be null" + (info == null ? "" : ", " + info));
}
return notEmptyIfNotNull(value, name, info);
}
/**
* Check that a list is not null and that none of its elements is empty. If the list is null or empty, or has empty
* elements, throws an IllegalArgumentException.
* @param list the list of strings.
* @param name parameter name for the exception message.
*/
public static Collection<String> notEmptyElements(Collection<String> list, String name) {
notEmpty(list, name);
for (String ele : list) {
notEmpty(ele, String.format("list %s element %s", name, ele));
}
return list;
}
}
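A minimal usage sketch (not part of this commit) of the new ParamChecker helpers, in the way the typesystem constructors in the following hunks use them; the argument values below are made up for illustration only.

import java.util.Arrays;
import org.apache.hadoop.metadata.ParamChecker;

public class ParamCheckerSketch {
    public static void main(String[] args) {
        // Returns the trimmed value when it is non-null and non-blank
        String typeName = ParamChecker.notEmpty("hive_db", "Struct type name");
        // Passes: the collection is non-empty and has no null elements
        ParamChecker.notNullElements(Arrays.asList("name", "clusterName"), "Attribute names");
        try {
            ParamChecker.notEmpty("   ", "Enum value");   // blank string is rejected
        } catch (IllegalArgumentException expected) {
            // message: "Enum value cannot be empty"
            System.out.println(expected.getMessage());
        }
        System.out.println(typeName);
    }
}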
...@@ -18,7 +18,7 @@
package org.apache.hadoop.metadata.typesystem.types;
import com.google.common.base.Preconditions;
import org.apache.hadoop.metadata.ParamChecker;
public final class AttributeDefinition {
...@@ -44,16 +44,13 @@ public final class AttributeDefinition {
public AttributeDefinition(String name, String dataTypeName,
Multiplicity multiplicity, boolean isComposite, boolean isUnique,
boolean isIndexable, String reverseAttributeName) {
Preconditions.checkNotNull(name);
Preconditions.checkNotNull(dataTypeName);
this.name = name;
this.dataTypeName = dataTypeName;
this.name = ParamChecker.notEmpty(name, "Attribute name");
this.dataTypeName = ParamChecker.notEmpty(dataTypeName, "Attribute type");
this.multiplicity = multiplicity;
this.isComposite = isComposite;
this.isUnique = isUnique;
this.isIndexable = isIndexable;
this.reverseAttributeName = reverseAttributeName;
this.reverseAttributeName = ParamChecker.notEmptyIfNotNull(reverseAttributeName, "Reverse attribute name");
}
@Override
......
...@@ -38,7 +38,6 @@ public class AttributeInfo {
private IDataType dataType;
AttributeInfo(TypeSystem t, AttributeDefinition def, Map<String, IDataType> tempTypes) throws MetadataException {
TypeUtils.validateName(def.name);
this.name = def.name;
this.dataType = (tempTypes != null && tempTypes.containsKey(def.dataTypeName)) ?
tempTypes.get(def.dataTypeName) : t.getDataType(IDataType.class, def.dataTypeName);
......
...@@ -18,6 +18,8 @@
package org.apache.hadoop.metadata.typesystem.types;
import org.apache.hadoop.metadata.ParamChecker;
import java.util.Arrays;
public final class EnumTypeDefinition {
...@@ -26,8 +28,8 @@ public final class EnumTypeDefinition {
public final EnumValue[] enumValues;
public EnumTypeDefinition(String name, EnumValue... enumValues) {
this.name = name;
this.enumValues = enumValues;
this.name = ParamChecker.notEmpty(name, "Enum type name");
this.enumValues = ParamChecker.notNullElements(enumValues, "Enum values");
}
@Override
......
...@@ -18,13 +18,15 @@
package org.apache.hadoop.metadata.typesystem.types;
import org.apache.hadoop.metadata.ParamChecker;
public class EnumValue {
public final String value;
public final int ordinal;
public EnumValue(String value, int ordinal) {
this.value = value;
this.value = ParamChecker.notEmpty(value, "Enum value");
this.ordinal = ordinal;
}
......
...@@ -47,7 +47,7 @@ public class HierarchicalTypeDefinition<T extends HierarchicalType> extends Stru
public HierarchicalTypeDefinition(Class<T> hierarchicalMetaType,
String typeName, ImmutableList<String> superTypes,
AttributeDefinition[] attributeDefinitions) {
super(typeName, attributeDefinitions);
super(typeName, false, attributeDefinitions);
hierarchicalMetaTypeName = hierarchicalMetaType.getName();
this.superTypes = superTypes == null ? ImmutableList.<String>of() : superTypes;
}
......
...@@ -18,6 +18,8 @@
package org.apache.hadoop.metadata.typesystem.types;
import org.apache.hadoop.metadata.ParamChecker;
import java.util.Arrays;
public class StructTypeDefinition {
...@@ -25,12 +27,19 @@ public class StructTypeDefinition {
public final String typeName;
public final AttributeDefinition[] attributeDefinitions;
public StructTypeDefinition(String typeName,
AttributeDefinition[] attributeDefinitions) {
this.typeName = typeName;
protected StructTypeDefinition(String typeName, boolean validate, AttributeDefinition... attributeDefinitions) {
this.typeName = ParamChecker.notEmpty(typeName, "Struct type name");
if (attributeDefinitions != null && attributeDefinitions.length != 0) {
ParamChecker.notNullElements(attributeDefinitions, "Attribute definitions");
}
this.attributeDefinitions = attributeDefinitions;
}
public StructTypeDefinition(String typeName, AttributeDefinition[] attributeDefinitions) {
this.typeName = ParamChecker.notEmpty(typeName, "Struct type name");
this.attributeDefinitions = ParamChecker.notNullElements(attributeDefinitions, "Attribute definitions");
}
@Override
public boolean equals(Object o) {
if (this == o) return true;
......
...@@ -291,6 +291,7 @@ public class TypeSystem {
throw new MetadataException(
String.format("Redefinition of type %s not supported", eDef.name));
}
EnumType eT = new EnumType(this, eDef.name, eDef.enumValues);
types.put(eDef.name, eT);
typeCategoriesToTypeNamesMap.put(DataTypes.TypeCategory.ENUM, eDef.name);
...@@ -352,7 +353,6 @@
private void step1() throws MetadataException {
for (StructTypeDefinition sDef : structDefs) {
assert sDef.typeName != null;
TypeUtils.validateName(sDef.typeName);
if (dataType(sDef.typeName) != null) {
throw new MetadataException(
String.format("Cannot redefine type %s", sDef.typeName));
...@@ -365,7 +365,6 @@
for (HierarchicalTypeDefinition<TraitType> traitDef : traitDefs) {
assert traitDef.typeName != null;
TypeUtils.validateName(traitDef.typeName);
if (types.containsKey(traitDef.typeName)) {
throw new MetadataException(
String.format("Cannot redefine type %s", traitDef.typeName));
...@@ -380,7 +379,6 @@
for (HierarchicalTypeDefinition<ClassType> classDef : classDefs) {
assert classDef.typeName != null;
TypeUtils.validateName(classDef.typeName);
if (types.containsKey(classDef.typeName)) {
throw new MetadataException(
String.format("Cannot redefine type %s", classDef.typeName));
......
...@@ -50,13 +50,6 @@ public class TypeUtils {
}
}
public static void validateName(String name) throws MetadataException {
if (!NAME_PATTERN.matcher(name).matches()) {
throw new MetadataException(
String.format("Unsupported name for an attribute '%s'", name));
}
}
public static String parseAsArrayType(String typeName) {
Matcher m = ARRAY_TYPE_NAME_PATTERN.matcher(typeName);
return m.matches() ? m.group(1) : null;
......
...@@ -85,7 +85,7 @@
</logger>
<root>
<priority value="debug"/>
<priority value="info"/>
<appender-ref ref="console"/>
</root>
......
...@@ -19,6 +19,7 @@
package org.apache.hadoop.metadata.typesystem.types;
import com.google.common.collect.ImmutableList;
import org.apache.commons.lang.RandomStringUtils;
import org.apache.hadoop.metadata.typesystem.types.utils.TypesUtil;
import org.testng.Assert;
import org.testng.annotations.AfterMethod;
...@@ -29,6 +30,11 @@ import scala.actors.threadpool.Arrays;
import java.util.Collections;
import java.util.List;
import static org.apache.hadoop.metadata.typesystem.types.utils.TypesUtil.createClassTypeDef;
import static org.apache.hadoop.metadata.typesystem.types.utils.TypesUtil.createRequiredAttrDef;
import static org.apache.hadoop.metadata.typesystem.types.utils.TypesUtil.createStructTypeDef;
import static org.apache.hadoop.metadata.typesystem.types.utils.TypesUtil.createTraitTypeDef;
public class TypeSystemTest extends BaseTest {
@BeforeClass
...@@ -100,4 +106,34 @@ public class TypeSystemTest extends BaseTest {
Assert.assertFalse(Collections.disjoint(traitsNames, traits));
}
private String random() {
return RandomStringUtils.random(10);
}
@Test
public void testUTFNames() throws Exception {
TypeSystem ts = getTypeSystem();
String enumType = random();
EnumTypeDefinition orgLevelEnum =
new EnumTypeDefinition(enumType, new EnumValue(random(), 1), new EnumValue(random(), 2));
ts.defineEnumType(orgLevelEnum);
String structName = random();
String attrType = random();
StructTypeDefinition structType = createStructTypeDef(structName,
createRequiredAttrDef(attrType, DataTypes.STRING_TYPE));
String className = random();
HierarchicalTypeDefinition<ClassType> classType =
createClassTypeDef(className, ImmutableList.<String>of(),
createRequiredAttrDef(attrType, DataTypes.STRING_TYPE));
String traitName = random();
HierarchicalTypeDefinition<TraitType> traitType = createTraitTypeDef(traitName,
ImmutableList.<String>of(), createRequiredAttrDef(attrType, DataTypes.INT_TYPE));
ts.defineTypes(ImmutableList.of(structType), ImmutableList.of(traitType), ImmutableList.of(classType));
}
}
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.metadata.typesystem.types;
import com.google.common.collect.ImmutableList;
import org.apache.hadoop.metadata.typesystem.types.utils.TypesUtil;
import org.testng.annotations.DataProvider;
import org.testng.annotations.Test;
public class ValidationTest {
@DataProvider(name = "attributeData")
private Object[][] createAttributeData() {
return new String[][]{
{null, "type"}, {"", "type"}, {"name", null}, {"name", ""}};
}
@Test (dataProvider = "attributeData", expectedExceptions = {IllegalArgumentException.class})
public void testAttributes(String name, String type) {
TypesUtil.createRequiredAttrDef(name, type);
}
@DataProvider(name = "enumValueData")
private Object[][] createEnumValueData() {
return new String[][]{{null}, {""}};
}
@Test (dataProvider = "enumValueData", expectedExceptions = {IllegalArgumentException.class})
public void testEnumValue(String name) {
new EnumValue(name, 1);
}
@DataProvider(name = "enumTypeData")
private Object[][] createEnumTypeData() {
EnumValue value = new EnumValue("name", 1);
return new Object[][]{{null, value}, {"", value}, {"name"}};
}
@Test (dataProvider = "enumTypeData", expectedExceptions = {IllegalArgumentException.class})
public void testEnumType(String name, EnumValue... values) {
new EnumTypeDefinition(name, values);
}
@DataProvider(name = "structTypeData")
private Object[][] createStructTypeData() {
AttributeDefinition value = TypesUtil.createRequiredAttrDef("name", "type");
return new Object[][]{{null, value}, {"", value}, {"name"}};
}
@Test (dataProvider = "structTypeData", expectedExceptions = {IllegalArgumentException.class})
public void testStructType(String name, AttributeDefinition... values) {
new StructTypeDefinition(name, values);
}
@DataProvider(name = "classTypeData")
private Object[][] createClassTypeData() {
return new Object[][]{{null}, {""}};
}
@Test (dataProvider = "classTypeData", expectedExceptions = {IllegalArgumentException.class})
public void testClassType(String name) {
AttributeDefinition value = TypesUtil.createRequiredAttrDef("name", "type");
TypesUtil.createClassTypeDef(name, ImmutableList.of("super"), value);
}
@Test (dataProvider = "classTypeData", expectedExceptions = {IllegalArgumentException.class})
public void testTraitType(String name) {
AttributeDefinition value = TypesUtil.createRequiredAttrDef("name", "type");
TypesUtil.createTraitTypeDef(name, ImmutableList.of("super"), value);
}
@Test
public void testValidTypes() {
AttributeDefinition attribute = TypesUtil.createRequiredAttrDef("name", "type");
//class with no attributes
TypesUtil.createClassTypeDef("name", ImmutableList.of("super"));
//class with no super types
TypesUtil.createClassTypeDef("name", ImmutableList.<String>of(), attribute);
//trait with no attributes
TypesUtil.createTraitTypeDef("name", ImmutableList.of("super"));
//trait with no super types
TypesUtil.createTraitTypeDef("name", ImmutableList.<String>of(), attribute);
}
}
...@@ -22,6 +22,7 @@ import com.google.common.collect.ImmutableList;
import com.sun.jersey.api.client.ClientResponse;
import com.sun.jersey.api.client.WebResource;
import org.apache.hadoop.metadata.MetadataServiceClient;
import org.apache.hadoop.metadata.typesystem.TypesDef;
import org.apache.hadoop.metadata.typesystem.json.TypesSerialization;
import org.apache.hadoop.metadata.typesystem.json.TypesSerialization$;
import org.apache.hadoop.metadata.typesystem.types.AttributeDefinition;
...@@ -82,7 +83,7 @@ public class TypesJerseyResourceIT extends BaseResourceIT {
Assert.assertNotNull(responseAsString);
JSONObject response = new JSONObject(responseAsString);
Assert.assertNotNull(response.get("types"));
Assert.assertNotNull(response.get(MetadataServiceClient.TYPES));
Assert.assertNotNull(response.get(MetadataServiceClient.REQUEST_ID));
}
}
...@@ -104,10 +105,21 @@ public class TypesJerseyResourceIT extends BaseResourceIT {
String responseAsString = clientResponse.getEntity(String.class);
Assert.assertNotNull(responseAsString);
JSONObject response = new JSONObject(responseAsString);
Assert.assertNotNull(response.get("definition"));
Assert.assertNotNull(response.get(MetadataServiceClient.DEFINITION));
Assert.assertNotNull(response.get(MetadataServiceClient.REQUEST_ID));
String typesJson = response.getString(MetadataServiceClient.DEFINITION);
final TypesDef typesDef = TypesSerialization.fromJson(typesJson);
List<HierarchicalTypeDefinition<ClassType>> hierarchicalTypeDefinitions = typesDef.classTypesAsJavaList();
for(HierarchicalTypeDefinition<ClassType> classType : hierarchicalTypeDefinitions) {
for(AttributeDefinition attrDef : classType.attributeDefinitions) {
if("name".equals(attrDef.name)) {
Assert.assertEquals(attrDef.isIndexable, true);
Assert.assertEquals(attrDef.isUnique, true);
}
}
}
}
}
...@@ -196,14 +208,14 @@ public class TypesJerseyResourceIT extends BaseResourceIT {
HierarchicalTypeDefinition<ClassType> databaseTypeDefinition =
TypesUtil.createClassTypeDef("database",
ImmutableList.<String>of(),
TypesUtil.createRequiredAttrDef("name", DataTypes.STRING_TYPE),
TypesUtil.createUniqueRequiredAttrDef("name", DataTypes.STRING_TYPE),
TypesUtil.createRequiredAttrDef("description", DataTypes.STRING_TYPE));
typeDefinitions.add(databaseTypeDefinition);
HierarchicalTypeDefinition<ClassType> tableTypeDefinition = TypesUtil.createClassTypeDef(
"table",
ImmutableList.<String>of(),
TypesUtil.createRequiredAttrDef("name", DataTypes.STRING_TYPE),
TypesUtil.createUniqueRequiredAttrDef("name", DataTypes.STRING_TYPE),
TypesUtil.createRequiredAttrDef("description", DataTypes.STRING_TYPE),
TypesUtil.createRequiredAttrDef("type", DataTypes.STRING_TYPE),
new AttributeDefinition("database",
......