Commit 5e815267 by Shwetha GS

Merge branch 'master' into dal

parents 77797892 2f8fc054
......@@ -87,11 +87,11 @@ c. Using DGI
{"Version":"v0.1"}
* List the types in the repository
curl -v http://localhost:21000/api/metadata/types/list
curl -v http://localhost:21000/api/metadata/types
{"list":["biginteger","short","byte","int","string","bigdecimal","boolean","date","double","long","float"],"requestId":"902580786@qtp-1479771328-0"}
* List the instances for a given type
curl -v http://localhost:21000/api/metadata/entities/list/hive_table
curl -v http://localhost:21000/api/metadata/entities?type=hive_table
{"requestId":"788558007@qtp-44808654-5","list":["cb9b5513-c672-42cb-8477-b8f3e537a162","ec985719-a794-4c98-b98f-0509bd23aac0","48998f81-f1d3-45a2-989a-223af5c1ed6e","a54b386e-c759-4651-8779-a099294244c4"]}
curl -v http://localhost:21000/api/metadata/entities/list/hive_db
......
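Note: the doc hunk above tracks this commit's REST refactoring, where list-style endpoints move from verb paths (types/list, entities/list/{type}) to plain resource paths with query parameters. A minimal Jersey 1.x client sketch of the new entity listing; the base URL and a running server are assumptions:

    import com.sun.jersey.api.client.Client;
    import com.sun.jersey.api.client.WebResource;

    public class ListEntitiesExample {
        public static void main(String[] args) {
            // Assumes a DGI server listening locally; adjust the base URL as needed.
            WebResource service = Client.create().resource("http://localhost:21000/");

            // New style: the entity type is a query parameter on the entities resource.
            String response = service.path("api/metadata/entities")
                    .queryParam("type", "hive_table")
                    .accept("application/json")
                    .get(String.class);
            System.out.println(response);
        }
    }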
......@@ -138,7 +138,7 @@
<version>2.10</version>
<executions>
<execution>
<id>copy-dependencies</id>
<id>copy-bridge-dependencies</id>
<phase>package</phase>
<goals>
<goal>copy-dependencies</goal>
......@@ -152,48 +152,84 @@
</configuration>
</execution>
<execution>
<id>copy-hook-dependencies</id>
<id>copy</id>
<phase>package</phase>
<goals>
<goal>copy-dependencies</goal>
<goal>copy</goal>
</goals>
<configuration>
<artifactItems>
<artifactItem>
<groupId>${project.groupId}</groupId>
<artifactId>${project.artifactId}</artifactId>
<version>${project.version}</version>
<overWrite>true</overWrite>
<outputDirectory>${project.build.directory}/dependency/hook/hive</outputDirectory>
<includeScope>runtime</includeScope>
<overWriteReleases>false</overWriteReleases>
<overWriteSnapshots>false</overWriteSnapshots>
<overWriteIfNewer>true</overWriteIfNewer>
</artifactItem>
</artifactItems>
</configuration>
</execution>
</executions>
</plugin>
<plugin>
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-dependency-plugin</artifactId>
<version>2.10</version>
<executions>
<execution>
<id>copy</id>
<id>copy-hook-dependencies</id>
<phase>package</phase>
<goals>
<goal>copy</goal>
</goals>
<configuration>
<outputDirectory>${project.build.directory}/dependency/hook/hive</outputDirectory>
<overWriteReleases>false</overWriteReleases>
<overWriteSnapshots>false</overWriteSnapshots>
<overWriteIfNewer>true</overWriteIfNewer>
<artifactItems>
<artifactItem>
<groupId>${project.groupId}</groupId>
<artifactId>${project.artifactId}</artifactId>
<version>${project.version}</version>
<overWrite>true</overWrite>
<outputDirectory>${project.build.directory}/dependency/bridge/hive</outputDirectory>
</artifactItem>
<artifactItem>
<groupId>org.json4s</groupId>
<artifactId>json4s-native_2.10</artifactId>
<version>${json.version}</version>
</artifactItem>
<artifactItem>
<groupId>org.json4s</groupId>
<artifactId>json4s-core_2.10</artifactId>
<version>${json.version}</version>
</artifactItem>
<artifactItem>
<groupId>org.json4s</groupId>
<artifactId>json4s-ast_2.10</artifactId>
<version>${json.version}</version>
</artifactItem>
<artifactItem>
<groupId>${project.groupId}</groupId>
<artifactId>${project.artifactId}</artifactId>
<artifactId>metadata-client</artifactId>
<version>${project.version}</version>
<overWrite>true</overWrite>
<outputDirectory>${project.build.directory}/dependency/hook/hive</outputDirectory>
</artifactItem>
<artifactItem>
<groupId>${project.groupId}</groupId>
<artifactId>metadata-typesystem</artifactId>
<version>${project.version}</version>
</artifactItem>
<artifactItem>
<groupId>org.scala-lang</groupId>
<artifactId>scala-compiler</artifactId>
<version>${scala.version}</version>
</artifactItem>
<artifactItem>
<groupId>org.scala-lang</groupId>
<artifactId>scala-reflect</artifactId>
<version>${scala.version}</version>
</artifactItem>
<artifactItem>
<groupId>org.scala-lang</groupId>
<artifactId>scala-library</artifactId>
<version>${scala.version}</version>
</artifactItem>
<artifactItem>
<groupId>org.scala-lang</groupId>
<artifactId>scalap</artifactId>
<version>${scala.version}</version>
</artifactItem>
</artifactItems>
</configuration>
......
......@@ -85,6 +85,7 @@ else
fi
export HIVE_CP
echo Using Hive configuration directory [$HIVE_CP]
echo "Logs for import are in $METADATA_LOG_DIR/import-hive.log"
${JAVA_BIN} ${JAVA_PROPERTIES} -cp ${HIVE_CP}:${METADATACPPATH} org.apache.hadoop.metadata.hive.bridge.HiveMetaStoreBridge
......
......@@ -40,6 +40,7 @@ import org.apache.hadoop.metadata.typesystem.json.Serialization;
import org.apache.hadoop.metadata.typesystem.persistence.Id;
import org.apache.hadoop.metadata.typesystem.types.TypeSystem;
import org.codehaus.jettison.json.JSONArray;
import org.codehaus.jettison.json.JSONException;
import org.codehaus.jettison.json.JSONObject;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
......@@ -119,8 +120,8 @@ public class HiveMetaStoreBridge {
if (results.length() == 0) {
return null;
} else {
ITypedReferenceableInstance reference = Serialization.fromJson(results.get(0).toString());
return new Referenceable(reference.getId().id, typeName, null);
String guid = getGuidFromDSLResponse(results.getJSONObject(0));
return new Referenceable(guid, typeName, null);
}
}
......@@ -192,19 +193,22 @@ public class HiveMetaStoreBridge {
//todo DSL support for reference doesn't work. is the usage right?
// String query = String.format("%s where dbName = \"%s\" and tableName = \"%s\"", typeName, dbRef.getId().id,
// tableName);
String query = String.format("%s where tableName = \"%s\"", typeName, tableName);
String query = String.format("%s where name = \"%s\"", typeName, tableName);
JSONArray results = dgiClient.searchByDSL(query);
if (results.length() == 0) {
return null;
} else {
//There should be just one instance with the given name
ITypedReferenceableInstance reference = Serialization.fromJson(results.get(0).toString());
String guid = reference.getId().id;
String guid = getGuidFromDSLResponse(results.getJSONObject(0));
LOG.debug("Got reference for table {}.{} = {}", dbRef, tableName, guid);
return new Referenceable(guid, typeName, null);
}
}
private String getGuidFromDSLResponse(JSONObject jsonObject) throws JSONException {
return jsonObject.getJSONObject("$id$").getString("id");
}
private Referenceable getSDForTable(Referenceable dbRef, String tableName) throws Exception {
Referenceable tableRef = getTableReference(dbRef, tableName);
if (tableRef == null) {
......@@ -212,7 +216,7 @@ public class HiveMetaStoreBridge {
}
MetadataServiceClient dgiClient = getMetadataServiceClient();
ITypedReferenceableInstance tableInstance = dgiClient.getEntity(tableRef.getId().id);
Referenceable tableInstance = dgiClient.getEntity(tableRef.getId().id);
Id sdId = (Id) tableInstance.get("sd");
return new Referenceable(sdId.id, sdId.getTypeName(), null);
}
......@@ -223,6 +227,7 @@ public class HiveMetaStoreBridge {
}
public Referenceable registerTable(Referenceable dbReference, String dbName, String tableName) throws Exception {
LOG.info("Attempting to register table [" + tableName + "]");
Referenceable tableRef = getTableReference(dbReference, tableName);
if (tableRef == null) {
LOG.info("Importing objects from " + dbName + "." + tableName);
......@@ -230,7 +235,7 @@ public class HiveMetaStoreBridge {
Table hiveTable = hiveClient.getTable(dbName, tableName);
tableRef = new Referenceable(HiveDataTypes.HIVE_TABLE.getName());
tableRef.set("tableName", hiveTable.getTableName());
tableRef.set("name", hiveTable.getTableName());
tableRef.set("owner", hiveTable.getOwner());
//todo fix
tableRef.set("createTime", hiveTable.getLastAccessTime());
......@@ -274,8 +279,8 @@ public class HiveMetaStoreBridge {
tableRef.set("tableType", hiveTable.getTableType());
tableRef.set("temporary", hiveTable.isTemporary());
// List<Referenceable> fieldsList = getColumns(storageDesc);
// tableRef.set("columns", fieldsList);
List<Referenceable> colList = getColumns(hiveTable.getAllCols());
tableRef.set("columns", colList);
tableRef = createInstance(tableRef);
} else {
......@@ -397,7 +402,7 @@ public class HiveMetaStoreBridge {
}
*/
List<Referenceable> fieldsList = getColumns(storageDesc);
List<Referenceable> fieldsList = getColumns(storageDesc.getCols());
sdReferenceable.set("cols", fieldsList);
List<Struct> sortColsStruct = new ArrayList<>();
......@@ -428,19 +433,19 @@ public class HiveMetaStoreBridge {
return createInstance(sdReferenceable);
}
private List<Referenceable> getColumns(StorageDescriptor storageDesc) throws Exception {
List<Referenceable> fieldsList = new ArrayList<>();
Referenceable colReferenceable;
for (FieldSchema fs : storageDesc.getCols()) {
private List<Referenceable> getColumns(List<FieldSchema> schemaList) throws Exception {
List<Referenceable> colList = new ArrayList<>();
for (FieldSchema fs : schemaList) {
LOG.debug("Processing field " + fs);
colReferenceable = new Referenceable(HiveDataTypes.HIVE_COLUMN.getName());
Referenceable colReferenceable = new Referenceable(HiveDataTypes.HIVE_COLUMN.getName());
colReferenceable.set("name", fs.getName());
colReferenceable.set("type", fs.getType());
colReferenceable.set("comment", fs.getComment());
fieldsList.add(createInstance(colReferenceable));
colList.add(createInstance(colReferenceable));
}
return fieldsList;
return colList;
}
public synchronized void registerHiveDataModel() throws Exception {
......@@ -454,10 +459,6 @@ public class HiveMetaStoreBridge {
} else {
LOG.info("Hive data model is already registered!");
}
//todo remove when fromJson(entityJson) is supported on client
dataModelGenerator.createDataModel();
TypeSystem.getInstance().defineTypes(dataModelGenerator.getTypesDef());
}
public static void main(String[] argv) throws Exception {
......
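The bridge changes above replace full typed-instance deserialization with guid extraction from the DSL search result: each result row carries its instance id under the $id$ key, which is all the bridge needs to build a Referenceable handle. A small Jettison sketch against a hand-written row (the row contents are illustrative):

    import org.codehaus.jettison.json.JSONException;
    import org.codehaus.jettison.json.JSONObject;

    public class DslGuidExample {
        public static void main(String[] args) throws JSONException {
            // Illustrative DSL result row; only the "$id$" envelope is relied on.
            JSONObject row = new JSONObject(
                    "{\"$typeName$\":\"hive_table\",\"name\":\"sales_fact\","
                    + "\"$id$\":{\"id\":\"cb9b5513-c672-42cb-8477-b8f3e537a162\","
                    + "\"$typeName$\":\"hive_table\"}}");
            String guid = row.getJSONObject("$id$").getString("id");
            System.out.println(guid); // cb9b5513-c672-42cb-8477-b8f3e537a162
        }
    }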
......@@ -246,7 +246,7 @@ public class HiveHook implements ExecuteWithHookContext, HiveSemanticAnalyzerHoo
LOG.debug("Registering CTAS query: {}", queryStr);
Referenceable processReferenceable = new Referenceable(HiveDataTypes.HIVE_PROCESS.getName());
processReferenceable.set("processName", operation.getOperationName());
processReferenceable.set("name", operation.getOperationName());
processReferenceable.set("startTime", queryStartTime);
processReferenceable.set("userName", user);
List<Referenceable> source = new ArrayList<>();
......
......@@ -367,7 +367,7 @@ public class HiveDataModelGenerator {
private void createTableClass() throws MetadataException {
AttributeDefinition[] attributeDefinitions = new AttributeDefinition[]{
new AttributeDefinition("tableName", DataTypes.STRING_TYPE.getName(),
new AttributeDefinition("name", DataTypes.STRING_TYPE.getName(),
Multiplicity.REQUIRED, false, null),
new AttributeDefinition("dbName", HiveDataTypes.HIVE_DB.getName(),
Multiplicity.REQUIRED, false, null),
......@@ -384,9 +384,9 @@ public class HiveDataModelGenerator {
new AttributeDefinition("partitionKeys",
DataTypes.arrayTypeName(HiveDataTypes.HIVE_COLUMN.getName()),
Multiplicity.OPTIONAL, false, null),
// new AttributeDefinition("columns",
// DataTypes.arrayTypeName(HiveDataTypes.HIVE_COLUMN.getName()),
// Multiplicity.COLLECTION, true, null),
new AttributeDefinition("columns",
DataTypes.arrayTypeName(HiveDataTypes.HIVE_COLUMN.getName()),
Multiplicity.OPTIONAL, true, null),
new AttributeDefinition("parameters", STRING_MAP_TYPE.getName(),
Multiplicity.OPTIONAL, false, null),
new AttributeDefinition("viewOriginalText", DataTypes.STRING_TYPE.getName(),
......@@ -480,7 +480,7 @@ public class HiveDataModelGenerator {
private void createProcessClass() throws MetadataException {
AttributeDefinition[] attributeDefinitions = new AttributeDefinition[]{
new AttributeDefinition("processName", DataTypes.STRING_TYPE.getName(),
new AttributeDefinition("name", DataTypes.STRING_TYPE.getName(),
Multiplicity.REQUIRED, false, null),
new AttributeDefinition("startTime", DataTypes.INT_TYPE.getName(),
Multiplicity.REQUIRED, false, null),
......
......@@ -7,7 +7,7 @@ Hive metadata can be modelled in DGI using its Type System. The default modellin
* hive_order(StructType) - [col, order]
* hive_resourceuri(StructType) - [resourceType, uri]
* hive_serde(StructType) - [name, serializationLib, parameters]
* hive_process(ClassType) - [processName, startTime, endTime, userName, sourceTableNames, targetTableNames, queryText, queryPlan, queryId, queryGraph]
* hive_process(ClassType) - [name, startTime, endTime, userName, sourceTableNames, targetTableNames, queryText, queryPlan, queryId, queryGraph]
* hive_function(ClassType) - [functionName, dbName, className, ownerName, ownerType, createTime, functionType, resourceUris]
* hive_type(ClassType) - [name, type1, type2, fields]
* hive_partition(ClassType) - [values, dbName, tableName, createTime, lastAccessTime, sd, parameters]
......@@ -16,7 +16,7 @@ Hive metadata can be modelled in DGI using its Type System. The default modellin
* hive_role(ClassType) - [roleName, createTime, ownerName]
* hive_column(ClassType) - [name, type, comment]
* hive_db(ClassType) - [name, description, locationUri, parameters, ownerName, ownerType]
* hive_table(ClassType) - [tableName, dbName, owner, createTime, lastAccessTime, retention, sd, partitionKeys, parameters, viewOriginalText, viewExpandedText, tableType, temporary]
* hive_table(ClassType) - [name, dbName, owner, createTime, lastAccessTime, retention, sd, partitionKeys, columns, parameters, viewOriginalText, viewExpandedText, tableType, temporary]
---++ Importing Hive Metadata
......@@ -31,7 +31,7 @@ hive conf directory:
</property>
</verbatim>
Usage: <dgi package>/bin/import-hive.sh
Usage: <dgi package>/bin/import-hive.sh. Logs are written to <dgi package>/logs/import-hive.log
---++ Hive Hook
......
......@@ -106,7 +106,7 @@ public class HiveHookIT {
}
private void assertTableIsRegistered(String tableName) throws Exception {
assertInstanceIsRegistered(HiveDataTypes.HIVE_TABLE.getName(), "tableName", tableName);
assertInstanceIsRegistered(HiveDataTypes.HIVE_TABLE.getName(), "name", tableName);
}
private void assertDatabaseIsRegistered(String dbName) throws Exception {
......
......@@ -236,7 +236,7 @@ public class SSLAndKerberosHiveHookIT extends BaseSSLAndKerberosTest {
}
private void assertTableIsRegistered(String tableName) throws Exception {
assertInstanceIsRegistered(HiveDataTypes.HIVE_TABLE.getName(), "tableName", tableName);
assertInstanceIsRegistered(HiveDataTypes.HIVE_TABLE.getName(), "name", tableName);
}
private void assertDatabaseIsRegistered(String dbName) throws Exception {
......
......@@ -239,7 +239,7 @@ public class SSLHiveHookIT {
}
private void assertTableIsRegistered(String tableName) throws Exception {
assertInstanceIsRegistered(HiveDataTypes.HIVE_TABLE.getName(), "tableName", tableName);
assertInstanceIsRegistered(HiveDataTypes.HIVE_TABLE.getName(), "name", tableName);
}
private void assertDatabaseIsRegistered(String dbName) throws Exception {
......
......@@ -26,6 +26,8 @@ import com.sun.jersey.client.urlconnection.URLConnectionClientHandler;
import org.apache.commons.configuration.PropertiesConfiguration;
import org.apache.hadoop.metadata.security.SecureClientUtils;
import org.apache.hadoop.metadata.typesystem.ITypedReferenceableInstance;
import org.apache.hadoop.metadata.typesystem.Referenceable;
import org.apache.hadoop.metadata.typesystem.json.InstanceSerialization;
import org.apache.hadoop.metadata.typesystem.json.Serialization;
import org.codehaus.jettison.json.JSONArray;
import org.codehaus.jettison.json.JSONException;
......@@ -50,7 +52,11 @@ public class MetadataServiceClient {
public static final String REQUEST_ID = "requestId";
public static final String RESULTS = "results";
public static final String TOTAL_SIZE = "totalSize";
private static final String BASE_URI = "api/metadata/";
private static final String URI_TYPES = "types";
private static final String URI_ENTITIES = "entities";
private static final String URI_TRAITS = "traits";
private static final String URI_SEARCH = "discovery/search";
private WebResource service;
......@@ -81,27 +87,27 @@ public class MetadataServiceClient {
static enum API {
//Type operations
CREATE_TYPE("api/metadata/types/submit", HttpMethod.POST),
GET_TYPE("api/metadata/types/definition", HttpMethod.GET),
LIST_TYPES("api/metadata/types/list", HttpMethod.GET),
LIST_TRAIT_TYPES("api/metadata/types/traits/list", HttpMethod.GET),
CREATE_TYPE(BASE_URI + URI_TYPES, HttpMethod.POST),
GET_TYPE(BASE_URI + URI_TYPES, HttpMethod.GET),
LIST_TYPES(BASE_URI + URI_TYPES, HttpMethod.GET),
LIST_TRAIT_TYPES(BASE_URI + URI_TYPES + "?type=trait", HttpMethod.GET),
//Entity operations
CREATE_ENTITY("api/metadata/entities/submit", HttpMethod.POST),
GET_ENTITY("api/metadata/entities/definition", HttpMethod.GET),
UPDATE_ENTITY("api/metadata/entities/update", HttpMethod.PUT),
LIST_ENTITY("api/metadata/entities/list", HttpMethod.GET),
CREATE_ENTITY(BASE_URI + URI_ENTITIES, HttpMethod.POST),
GET_ENTITY(BASE_URI + URI_ENTITIES, HttpMethod.GET),
UPDATE_ENTITY(BASE_URI + URI_ENTITIES, HttpMethod.PUT),
LIST_ENTITY(BASE_URI + URI_ENTITIES + "?type=", HttpMethod.GET),
//Trait operations
ADD_TRAITS("api/metadata/traits/add", HttpMethod.POST),
DELETE_TRAITS("api/metadata/traits/delete", HttpMethod.PUT),
LIST_TRAITS("api/metadata/traits/list", HttpMethod.GET),
ADD_TRAITS(BASE_URI + URI_TRAITS, HttpMethod.POST),
DELETE_TRAITS(BASE_URI + URI_TRAITS, HttpMethod.DELETE),
LIST_TRAITS(BASE_URI + URI_TRAITS, HttpMethod.GET),
//Search operations
SEARCH("api/metadata/discovery/search", HttpMethod.GET),
SEARCH_DSL("api/metadata/discovery/search/dsl", HttpMethod.GET),
SEARCH_GREMLIN("api/metadata/discovery/search/gremlin", HttpMethod.GET),
SEARCH_FULL_TEXT("api/metadata/discovery/search/fulltext", HttpMethod.GET);
SEARCH(BASE_URI + URI_SEARCH, HttpMethod.GET),
SEARCH_DSL(BASE_URI + URI_SEARCH + "/dsl", HttpMethod.GET),
SEARCH_GREMLIN(BASE_URI + URI_SEARCH + "/gremlin", HttpMethod.GET),
SEARCH_FULL_TEXT(BASE_URI + URI_SEARCH + "/fulltext", HttpMethod.GET);
private final String method;
private final String path;
......@@ -176,11 +182,11 @@ public class MetadataServiceClient {
* @return result json object
* @throws MetadataServiceException
*/
public ITypedReferenceableInstance getEntity(String guid) throws MetadataServiceException {
public Referenceable getEntity(String guid) throws MetadataServiceException {
JSONObject jsonResponse = callAPI(API.GET_ENTITY, null, guid);
try {
String entityInstanceDefinition = jsonResponse.getString(MetadataServiceClient.RESULTS);
return Serialization.fromJson(entityInstanceDefinition);
return InstanceSerialization.fromJsonReferenceable(entityInstanceDefinition, true);
} catch (JSONException e) {
throw new MetadataServiceException(e);
}
......
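With getEntity now returning a Referenceable built via InstanceSerialization, callers can read attributes without the server-side type system on the classpath. A hedged usage sketch; the constructor argument and guid are assumptions for illustration:

    import org.apache.hadoop.metadata.MetadataServiceClient;
    import org.apache.hadoop.metadata.typesystem.Referenceable;
    import org.apache.hadoop.metadata.typesystem.persistence.Id;

    public class GetEntityExample {
        public static void main(String[] args) throws Exception {
            // Assumes the client is constructed with the service base URL.
            MetadataServiceClient client = new MetadataServiceClient("http://localhost:21000/");

            Referenceable table = client.getEntity("cb9b5513-c672-42cb-8477-b8f3e537a162");
            System.out.println(table.get("name"));

            // Reference-valued attributes come back as Ids that can be fetched in turn,
            // as HiveMetaStoreBridge.getSDForTable does for the "sd" attribute.
            Id sdId = (Id) table.get("sd");
            System.out.println(sdId.id);
        }
    }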
......@@ -86,6 +86,7 @@
<scala.version>2.10.4</scala.version>
<scala.binary.version>2.10</scala.binary.version>
<scala.macros.version>2.0.1</scala.macros.version>
<json.version>3.2.11</json.version>
<log4j.version>1.2.17</log4j.version>
<akka.version>2.3.7</akka.version>
<spray.version>1.3.1</spray.version>
......@@ -506,7 +507,7 @@
<dependency>
<groupId>org.json4s</groupId>
<artifactId>json4s-native_2.10</artifactId>
<version>3.2.11</version>
<version>${json.version}</version>
</dependency>
<dependency>
......@@ -945,6 +946,7 @@
<exclude>**/maven-eclipse.xml</exclude>
<exclude>**/.externalToolBuilders/**</exclude>
<exclude>dashboard/**</exclude>
<exclude>**/build.log</exclude>
</excludes>
</configuration>
<executions>
......
......@@ -100,7 +100,6 @@ public class HiveLineageService implements LineageService {
public String getOutputs(String tableName) throws DiscoveryException {
LOG.info("Fetching lineage outputs for tableName={}", tableName);
try {
HiveWhereUsedQuery outputsQuery = new HiveWhereUsedQuery(
HIVE_TABLE_TYPE_NAME, tableName, HIVE_PROCESS_TYPE_NAME,
HIVE_PROCESS_INPUT_ATTRIBUTE_NAME, HIVE_PROCESS_OUTPUT_ATTRIBUTE_NAME,
......@@ -108,9 +107,11 @@ public class HiveLineageService implements LineageService {
graphPersistenceStrategy, titanGraph);
Expressions.Expression expression = outputsQuery.expr();
LOG.debug("Expression is [" + expression.toString() +"]");
try {
return discoveryService.evaluate(expression).toJson();
} catch (Exception e) { // unable to catch ExpressionException
throw new DiscoveryException("Invalid expression", e);
throw new DiscoveryException("Invalid expression [" + expression.toString() + "]", e);
}
}
......@@ -124,7 +125,6 @@ public class HiveLineageService implements LineageService {
public String getInputs(String tableName) throws DiscoveryException {
LOG.info("Fetching lineage inputs for tableName={}", tableName);
try {
HiveLineageQuery inputsQuery = new HiveLineageQuery(
HIVE_TABLE_TYPE_NAME, tableName, HIVE_PROCESS_TYPE_NAME,
HIVE_PROCESS_INPUT_ATTRIBUTE_NAME, HIVE_PROCESS_OUTPUT_ATTRIBUTE_NAME,
......@@ -132,9 +132,11 @@ public class HiveLineageService implements LineageService {
graphPersistenceStrategy, titanGraph);
Expressions.Expression expression = inputsQuery.expr();
LOG.debug("Expression is [" + expression.toString() +"]");
try {
return discoveryService.evaluate(expression).toJson();
} catch (Exception e) { // unable to catch ExpressionException
throw new DiscoveryException("Invalid expression", e);
throw new DiscoveryException("Invalid expression [" + expression.toString() + "]", e);
}
}
......@@ -148,9 +150,10 @@ public class HiveLineageService implements LineageService {
public String getSchema(String tableName) throws DiscoveryException {
// todo - validate if indeed this is a table type and exists
String schemaQuery = HIVE_TABLE_TYPE_NAME
+ " where name=\"" + tableName + "\", "
+ HIVE_TABLE_COLUMNS_ATTRIBUTE_NAME;
// + " as column select column.name, column.dataType, column.comment";
+ " where name=\"" + tableName + "\""
+ ", " + HIVE_TABLE_COLUMNS_ATTRIBUTE_NAME
// + " as column select column.name, column.dataType, column.comment"
;
return discoveryService.searchByDSL(schemaQuery);
}
}
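With the default configuration added later in this commit (metadata.lineage.hive.table.type.name=hive_table, metadata.lineage.hive.table.column.name=columns), the schema query assembled above comes out, for a hypothetical table sales_fact, as:

    hive_table where name="sales_fact", columns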
......@@ -73,7 +73,7 @@ public class GraphBackedDiscoveryService implements DiscoveryService {
this.graphPersistenceStrategy = new DefaultGraphPersistenceStrategy(metadataRepository);
}
//Refer http://s3.thinkaurelius.com/docs/titan/0.5.0/index-backends.html for indexed query
//Refer http://s3.thinkaurelius.com/docs/titan/0.5.4/index-backends.html for indexed query
//http://www.elastic.co/guide/en/elasticsearch/reference/current/query-dsl-query-string-query
// .html#query-string-syntax for query syntax
@Override
......
......@@ -128,7 +128,16 @@ public class GraphBackedMetadataRepository implements MetadataRepository {
@Override
public String getEdgeLabel(IDataType<?> dataType, AttributeInfo aInfo) {
return EDGE_LABEL_PREFIX + dataType.getName() + "." + aInfo.name;
return getEdgeLabel(dataType.getName(), aInfo.name);
}
public String getEdgeLabel(String typeName, String attrName) {
return EDGE_LABEL_PREFIX + typeName + "." + attrName;
}
public String getEdgeLabel(ITypedInstance typedInstance, AttributeInfo aInfo) throws MetadataException {
IDataType dataType = typeSystem.getDataType(IDataType.class, typedInstance.getTypeName());
return getEdgeLabel(dataType, aInfo);
}
@Override
......@@ -275,7 +284,7 @@ public class GraphBackedMetadataRepository implements MetadataRepository {
}
final String entityTypeName = getTypeName(instanceVertex);
String relationshipLabel = entityTypeName + "." + traitNameToBeDeleted;
String relationshipLabel = getEdgeLabel(entityTypeName, traitNameToBeDeleted);
Iterator<Edge> results = instanceVertex.getEdges(
Direction.OUT, relationshipLabel).iterator();
if (results.hasNext()) { // there should only be one edge for this label
......@@ -673,6 +682,7 @@ public class GraphBackedMetadataRepository implements MetadataRepository {
Object attrValue = typedInstance.get(attributeInfo.name);
LOG.debug("mapping attribute {} = {}", attributeInfo.name, attrValue);
final String propertyName = getQualifiedName(typedInstance, attributeInfo);
String edgeLabel = getEdgeLabel(typedInstance, attributeInfo);
if (attrValue == null) {
return;
}
......@@ -698,11 +708,10 @@ public class GraphBackedMetadataRepository implements MetadataRepository {
case STRUCT:
Vertex structInstanceVertex = mapStructInstanceToVertex(id,
(ITypedStruct) typedInstance.get(attributeInfo.name),
attributeInfo, idToVertexMap);
(ITypedStruct) typedInstance.get(attributeInfo.name), attributeInfo, idToVertexMap);
// add an edge to the newly created vertex from the parent
GraphHelper.addEdge(
titanGraph, instanceVertex, structInstanceVertex, propertyName);
titanGraph, instanceVertex, structInstanceVertex, edgeLabel);
break;
case TRAIT:
......@@ -712,7 +721,7 @@ public class GraphBackedMetadataRepository implements MetadataRepository {
case CLASS:
Id referenceId = (Id) typedInstance.get(attributeInfo.name);
mapClassReferenceAsEdge(
instanceVertex, idToVertexMap, propertyName, referenceId);
instanceVertex, idToVertexMap, edgeLabel, referenceId);
break;
default:
......@@ -886,7 +895,7 @@ public class GraphBackedMetadataRepository implements MetadataRepository {
traitInstance.fieldMapping().fields, idToVertexMap);
// add an edge to the newly created vertex from the parent
String relationshipLabel = typedInstanceTypeName + "." + traitName;
String relationshipLabel = getEdgeLabel(typedInstanceTypeName, traitName);
GraphHelper.addEdge(
titanGraph, parentInstanceVertex, traitInstanceVertex, relationshipLabel);
}
......@@ -1017,7 +1026,7 @@ public class GraphBackedMetadataRepository implements MetadataRepository {
break;
case CLASS:
String relationshipLabel = getQualifiedName(typedInstance, attributeInfo);
String relationshipLabel = getEdgeLabel(typedInstance, attributeInfo);
Object idOrInstance = mapClassReferenceToVertex(instanceVertex,
attributeInfo, relationshipLabel, attributeInfo.dataType());
typedInstance.set(attributeInfo.name, idOrInstance);
......@@ -1221,7 +1230,7 @@ public class GraphBackedMetadataRepository implements MetadataRepository {
ITypedStruct structInstance = structType.createInstance();
typedInstance.set(attributeInfo.name, structInstance);
String relationshipLabel = getQualifiedName(typedInstance, attributeInfo);
String relationshipLabel = getEdgeLabel(typedInstance, attributeInfo);
LOG.debug("Finding edge for {} -> label {} ", instanceVertex, relationshipLabel);
for (Edge edge : instanceVertex.getEdges(Direction.OUT, relationshipLabel)) {
final Vertex structInstanceVertex = edge.getVertex(Direction.IN);
......
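The repository changes above route struct, trait and class-reference edges through a single label builder, so the write path (instance to vertex) and the read path (vertex back to instance) can no longer drift apart. A compact sketch of the scheme; the prefix constant here is a stand-in, the real value is defined in GraphBackedMetadataRepository:

    public class EdgeLabelExample {
        // Stand-in for GraphBackedMetadataRepository.EDGE_LABEL_PREFIX.
        private static final String EDGE_LABEL_PREFIX = "__";

        static String getEdgeLabel(String typeName, String attrName) {
            return EDGE_LABEL_PREFIX + typeName + "." + attrName;
        }

        public static void main(String[] args) {
            // The same label is used when adding an edge and when traversing it back.
            System.out.println(getEdgeLabel("hive_table", "columns")); // class-reference edge
            System.out.println(getEdgeLabel("hive_table", "sd"));      // struct edge
        }
    }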
......@@ -219,7 +219,8 @@ public class GraphBackedSearchIndexer implements SearchIndexer {
case CLASS:
// this is only A reference, index the attribute for edge
createEdgeMixedIndex(propertyName);
// Commenting this out since we do not need an index for edge here
//createEdgeMixedIndex(propertyName);
break;
default:
......@@ -314,15 +315,23 @@ public class GraphBackedSearchIndexer implements SearchIndexer {
.dataType(propertyClass)
.make();
if (propertyClass == Boolean.class) {
//Use standard index as backing index only supports string, int and geo types
management.buildIndex(propertyName, Vertex.class).addKey(propertyKey).buildCompositeIndex();
management.commit();
} else {
//Use backing index
TitanGraphIndex vertexIndex = management.getGraphIndex(Constants.VERTEX_INDEX);
management.addIndexKey(vertexIndex, propertyKey);
management.commit();
}
LOG.info("Created mixed vertex index for property {}", propertyName);
}
return propertyKey;
}
/* Commenting this out since we do not need an index for edge label here
private void createEdgeMixedIndex(String propertyName) {
TitanManagement management = titanGraph.getManagementSystem();
EdgeLabel edgeLabel = management.getEdgeLabel(propertyName);
......@@ -332,5 +341,5 @@ public class GraphBackedSearchIndexer implements SearchIndexer {
management.commit();
LOG.info("Created index for edge label {}", propertyName);
}
}
} */
}
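The indexer change above special-cases booleans: the Elasticsearch-backed mixed index only handles string, numeric and geo types, so boolean properties get a Titan composite index instead. A minimal Titan 0.5.x sketch of that path; the in-memory backend and property name are illustrative:

    import com.thinkaurelius.titan.core.PropertyKey;
    import com.thinkaurelius.titan.core.TitanFactory;
    import com.thinkaurelius.titan.core.TitanGraph;
    import com.thinkaurelius.titan.core.schema.TitanManagement;
    import com.tinkerpop.blueprints.Vertex;
    import org.apache.commons.configuration.BaseConfiguration;

    public class BooleanIndexExample {
        public static void main(String[] args) {
            BaseConfiguration config = new BaseConfiguration();
            config.setProperty("storage.backend", "inmemory"); // illustrative backend
            TitanGraph graph = TitanFactory.open(config);

            TitanManagement management = graph.getManagementSystem();
            PropertyKey temporary = management.makePropertyKey("hive_table.temporary")
                    .dataType(Boolean.class).make();
            // Composite (standard) index, since the mixed index cannot take a boolean key.
            management.buildIndex("hive_table.temporary", Vertex.class)
                    .addKey(temporary).buildCompositeIndex();
            management.commit();

            graph.shutdown();
        }
    }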
......@@ -25,6 +25,8 @@ import org.apache.commons.configuration.ConfigurationException;
import org.apache.commons.configuration.PropertiesConfiguration;
import org.apache.hadoop.metadata.MetadataException;
import org.apache.hadoop.metadata.PropertiesUtil;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import javax.inject.Singleton;
import java.util.Iterator;
......@@ -34,6 +36,8 @@ import java.util.Iterator;
*/
public class TitanGraphProvider implements GraphProvider<TitanGraph> {
private static final Logger LOG = LoggerFactory.getLogger(TitanGraphProvider.class);
/**
* Constant for the configuration property that indicates the prefix.
*/
......@@ -51,6 +55,7 @@ public class TitanGraphProvider implements GraphProvider<TitanGraph> {
String value = (String) configProperties.getProperty(key);
key = key.substring(METADATA_PREFIX.length());
graphConfig.setProperty(key, value);
LOG.info("Using graph property {}={}", key, value);
}
}
......
......@@ -207,8 +207,11 @@ public class GraphBackedTypeStore implements ITypeStore {
@Override
public TypesDef restore() throws MetadataException {
try {
titanGraph.rollback(); //Cleanup previous state
//Get all vertices for type system
Iterator vertices = titanGraph.query().has(Constants.VERTEX_TYPE_PROPERTY_KEY, VERTEX_TYPE).vertices().iterator();
Iterator vertices =
titanGraph.query().has(Constants.VERTEX_TYPE_PROPERTY_KEY, VERTEX_TYPE).vertices().iterator();
ImmutableList.Builder<EnumTypeDefinition> enums = ImmutableList.builder();
ImmutableList.Builder<StructTypeDefinition> structs = ImmutableList.builder();
......@@ -220,7 +223,7 @@ public class GraphBackedTypeStore implements ITypeStore {
DataTypes.TypeCategory typeCategory = vertex.getProperty(Constants.TYPE_CATEGORY_PROPERTY_KEY);
String typeName = vertex.getProperty(Constants.TYPENAME_PROPERTY_KEY);
LOG.info("Restoring type {}.{}", typeCategory, typeName);
switch(typeCategory) {
switch (typeCategory) {
case ENUM:
enums.add(getEnumType(vertex));
break;
......@@ -246,7 +249,11 @@ public class GraphBackedTypeStore implements ITypeStore {
throw new IllegalArgumentException("Unhandled type category " + typeCategory);
}
}
titanGraph.commit();
return TypeUtils.getTypesDef(enums.build(), structs.build(), traits.build(), classTypes.build());
} finally {
titanGraph.rollback();
}
}
private EnumTypeDefinition getEnumType(Vertex vertex) {
......
......@@ -20,6 +20,7 @@ package org.apache.hadoop.metadata.services;
import com.google.common.base.Preconditions;
import com.google.common.collect.ImmutableList;
import com.google.inject.Injector;
import org.apache.hadoop.metadata.MetadataException;
import org.apache.hadoop.metadata.discovery.SearchIndexer;
import org.apache.hadoop.metadata.listener.EntityChangeListener;
......@@ -34,11 +35,7 @@ import org.apache.hadoop.metadata.typesystem.TypesDef;
import org.apache.hadoop.metadata.typesystem.json.InstanceSerialization;
import org.apache.hadoop.metadata.typesystem.json.Serialization$;
import org.apache.hadoop.metadata.typesystem.json.TypesSerialization;
import org.apache.hadoop.metadata.typesystem.types.ClassType;
import org.apache.hadoop.metadata.typesystem.types.IDataType;
import org.apache.hadoop.metadata.typesystem.types.Multiplicity;
import org.apache.hadoop.metadata.typesystem.types.TraitType;
import org.apache.hadoop.metadata.typesystem.types.TypeSystem;
import org.apache.hadoop.metadata.typesystem.types.*;
import org.codehaus.jettison.json.JSONException;
import org.codehaus.jettison.json.JSONObject;
import org.slf4j.Logger;
......@@ -77,9 +74,22 @@ public class DefaultMetadataService implements MetadataService {
this.typeSystem = TypeSystem.getInstance();
this.repository = repository;
restoreTypeSystem();
registerListener(searchIndexer);
}
private void restoreTypeSystem() {
LOG.info("Restoring type system from the store");
try {
TypesDef typesDef = typeStore.restore();
typeSystem.defineTypes(typesDef);
} catch (MetadataException e) {
throw new RuntimeException(e);
}
LOG.info("Restored type system from the store");
}
/**
* Creates a new type based on the type system to enable adding
* entities (instances for types).
......@@ -144,8 +154,8 @@ public class DefaultMetadataService implements MetadataService {
* @return list of trait type names in the type system
*/
@Override
public List<String> getTraitNamesList() throws MetadataException {
return typeSystem.getTraitsNames();
public List<String> getTypeNamesByCategory(DataTypes.TypeCategory typeCategory) throws MetadataException {
return typeSystem.getTypeNamesByCategory(typeCategory);
}
/**
......@@ -195,7 +205,7 @@ public class DefaultMetadataService implements MetadataService {
Preconditions.checkNotNull(guid, "guid cannot be null");
final ITypedReferenceableInstance instance = repository.getEntityDefinition(guid);
return Serialization$.MODULE$.toJson(instance);
return InstanceSerialization.toJson(instance, true);
}
/**
......
......@@ -19,6 +19,7 @@
package org.apache.hadoop.metadata.services;
import org.apache.hadoop.metadata.MetadataException;
import org.apache.hadoop.metadata.typesystem.types.DataTypes;
import org.codehaus.jettison.json.JSONObject;
import java.util.List;
......@@ -57,7 +58,7 @@ public interface MetadataService {
*
* @return list of trait type names in the type system
*/
List<String> getTraitNamesList() throws MetadataException;
List<String> getTypeNamesByCategory(DataTypes.TypeCategory typeCategory) throws MetadataException;
/**
* Creates an entity, instance of the type.
......
......@@ -29,6 +29,7 @@ import org.apache.hadoop.metadata.discovery.graph.GraphBackedDiscoveryService;
import org.apache.hadoop.metadata.query.HiveTitanSample;
import org.apache.hadoop.metadata.query.QueryTestsUtils;
import org.apache.hadoop.metadata.repository.graph.GraphBackedMetadataRepository;
import org.apache.hadoop.metadata.repository.graph.GraphBackedSearchIndexer;
import org.apache.hadoop.metadata.repository.graph.GraphHelper;
import org.apache.hadoop.metadata.repository.graph.GraphProvider;
import org.apache.hadoop.metadata.typesystem.ITypedReferenceableInstance;
......@@ -224,6 +225,7 @@ public class GraphBackedDiscoveryServiceTest {
{"Table as _loop0 loop (LoadProcess outputTable) withPath"},
{"Table as src loop (LoadProcess outputTable) as dest select src.name as srcTable, dest.name as destTable withPath"},
{"Table as t, sd, Column as c where t.name=\"sales_fact\" select c.name as colName, c.dataType as colType"},
{"Table where name='sales_fact', db where name='Reporting'"}
};
}
......@@ -268,39 +270,6 @@ public class GraphBackedDiscoveryServiceTest {
}
@Test
public void testSearchByDSLQuery() throws Exception {
String dslQuery = "Column as PII";
System.out.println("Executing dslQuery = " + dslQuery);
String jsonResults = discoveryService.searchByDSL(dslQuery);
Assert.assertNotNull(jsonResults);
JSONObject results = new JSONObject(jsonResults);
Assert.assertEquals(results.length(), 3);
System.out.println("results = " + results);
Object query = results.get("query");
Assert.assertNotNull(query);
JSONObject dataType = results.getJSONObject("dataType");
Assert.assertNotNull(dataType);
String typeName = dataType.getString("typeName");
Assert.assertNotNull(typeName);
JSONArray rows = results.getJSONArray("rows");
Assert.assertNotNull(rows);
Assert.assertTrue(rows.length() > 0);
for (int index = 0; index < rows.length(); index++) {
JSONObject row = rows.getJSONObject(index);
String type = row.getString("$typeName$");
Assert.assertEquals(type, "Column");
String name = row.getString("name");
Assert.assertNotEquals(name, "null");
}
}
@Test
public void testSearchForTypeInheritance() throws Exception {
createTypesWithMultiLevelInheritance();
createInstances();
......
......@@ -145,8 +145,7 @@ public class GraphBackedMetadataRepositoryTest {
@Test (dependsOnMethods = "testSubmitEntity")
public void testGetTraitLabel() throws Exception {
Assert.assertEquals(repositoryService.getTraitLabel(
typeSystem.getDataType(ClassType.class, TABLE_TYPE),
Assert.assertEquals(repositoryService.getTraitLabel(typeSystem.getDataType(ClassType.class, TABLE_TYPE),
CLASSIFICATION), TABLE_TYPE + "." + CLASSIFICATION);
}
......@@ -317,6 +316,39 @@ public class GraphBackedMetadataRepositoryTest {
Assert.assertEquals(repositoryService.getTypeName(tableVertex), TABLE_TYPE);
}
@Test(dependsOnMethods = "testCreateEntity")
public void testSearchByDSLQuery() throws Exception {
String dslQuery = "hive_database as PII";
System.out.println("Executing dslQuery = " + dslQuery);
String jsonResults = discoveryService.searchByDSL(dslQuery);
Assert.assertNotNull(jsonResults);
JSONObject results = new JSONObject(jsonResults);
Assert.assertEquals(results.length(), 3);
System.out.println("results = " + results);
Object query = results.get("query");
Assert.assertNotNull(query);
JSONObject dataType = results.getJSONObject("dataType");
Assert.assertNotNull(dataType);
String typeName = dataType.getString("typeName");
Assert.assertNotNull(typeName);
JSONArray rows = results.getJSONArray("rows");
Assert.assertNotNull(rows);
Assert.assertTrue(rows.length() > 0);
for (int index = 0; index < rows.length(); index++) {
JSONObject row = rows.getJSONObject(index);
String type = row.getString("$typeName$");
Assert.assertEquals(type, "hive_database");
String name = row.getString("name");
Assert.assertEquals(name, DATABASE_NAME);
}
}
/**
* Full text search requires GraphBackedSearchIndexer, and GraphBackedSearchIndexer can't be enabled in
* GraphBackedDiscoveryServiceTest because of its test data. So, test for full text search is in
......
......@@ -30,12 +30,13 @@ metadata.graph.index.search.elasticsearch.create.sleep=2000
######### Hive Lineage Configs #########
# This models follows the quick-start guide
metadata.lineage.hive.table.type.name=Table
metadata.lineage.hive.column.type.name=Column
metadata.lineage.hive.table.type.name=hive_table
metadata.lineage.hive.table.column.name=columns
metadata.lineage.hive.process.type.name=LoadProcess
metadata.lineage.hive.process.type.name=hive_process
metadata.lineage.hive.process.inputs.name=inputTables
metadata.lineage.hive.process.outputs.name=outputTables
#Currently unused
#metadata.lineage.hive.column.type.name=Column
######### Security Properties #########
......
......@@ -18,7 +18,9 @@
package org.apache.hadoop.metadata.typesystem.types;
import com.google.common.collect.ArrayListMultimap;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.Multimap;
import org.apache.hadoop.metadata.MetadataException;
import org.apache.hadoop.metadata.classification.InterfaceAudience;
import org.apache.hadoop.metadata.typesystem.TypesDef;
......@@ -55,9 +57,9 @@ public class TypeSystem {
private IdType idType;
/**
* An in-memory copy of list of traits for convenience.
* An in-memory copy of type categories vs types for convenience.
*/
private List<String> traitTypes;
private Multimap<DataTypes.TypeCategory, String> typeCategoriesToTypeNamesMap;
private ImmutableList<String> coreTypes;
......@@ -79,7 +81,7 @@ public class TypeSystem {
private void initialize() {
types = new ConcurrentHashMap<>();
traitTypes = new ArrayList<>();
typeCategoriesToTypeNamesMap = ArrayListMultimap.create(DataTypes.TypeCategory.values().length, 10);
registerPrimitiveTypes();
registerCoreTypes();
......@@ -94,12 +96,8 @@ public class TypeSystem {
return ImmutableList.copyOf(types.keySet());
}
public ImmutableList<String> getTraitsNames() {
return ImmutableList.copyOf(traitTypes);
}
private void addTraitName(String traitName) {
traitTypes.add(traitName);
public ImmutableList<String> getTypeNamesByCategory(DataTypes.TypeCategory typeCategory) {
return ImmutableList.copyOf(typeCategoriesToTypeNamesMap.get(typeCategory));
}
private void registerPrimitiveTypes() {
......@@ -114,6 +112,8 @@ public class TypeSystem {
types.put(DataTypes.BIGDECIMAL_TYPE.getName(), DataTypes.BIGDECIMAL_TYPE);
types.put(DataTypes.DATE_TYPE.getName(), DataTypes.DATE_TYPE);
types.put(DataTypes.STRING_TYPE.getName(), DataTypes.STRING_TYPE);
typeCategoriesToTypeNamesMap.putAll(DataTypes.TypeCategory.PRIMITIVE, types.keySet());
}
......@@ -267,6 +267,7 @@ public class TypeSystem {
assert elemType != null;
DataTypes.ArrayType dT = new DataTypes.ArrayType(elemType);
types.put(dT.getName(), dT);
typeCategoriesToTypeNamesMap.put(DataTypes.TypeCategory.ARRAY, dT.getName());
return dT;
}
......@@ -276,6 +277,7 @@ public class TypeSystem {
assert valueType != null;
DataTypes.MapType dT = new DataTypes.MapType(keyType, valueType);
types.put(dT.getName(), dT);
typeCategoriesToTypeNamesMap.put(DataTypes.TypeCategory.MAP, dT.getName());
return dT;
}
......@@ -291,6 +293,7 @@ public class TypeSystem {
}
EnumType eT = new EnumType(this, eDef.name, eDef.enumValues);
types.put(eDef.name, eT);
typeCategoriesToTypeNamesMap.put(DataTypes.TypeCategory.ENUM, eDef.name);
return eT;
}
......@@ -520,17 +523,19 @@ public class TypeSystem {
for (StructTypeDefinition structDef : structDefs) {
constructStructureType(structDef);
typeCategoriesToTypeNamesMap.put(DataTypes.TypeCategory.CLASS, structDef.typeName);
}
for (TraitType traitType : traitTypes) {
constructHierarchicalType(TraitType.class,
traitNameToDefMap.get(traitType.getName()));
addTraitName(traitType.getName());
typeCategoriesToTypeNamesMap.put(DataTypes.TypeCategory.TRAIT, traitType.getName());
}
for (ClassType classType : classTypes) {
constructHierarchicalType(ClassType.class,
classNameToDefMap.get(classType.getName()));
typeCategoriesToTypeNamesMap.put(DataTypes.TypeCategory.CLASS, classType.getName());
}
}
......
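Replacing the flat trait-name list with a Guava Multimap keyed by category is what lets one structure back getTypeNamesByCategory for every category. A self-contained sketch with a stand-in enum for DataTypes.TypeCategory:

    import com.google.common.collect.ArrayListMultimap;
    import com.google.common.collect.ImmutableList;
    import com.google.common.collect.Multimap;

    public class TypeCategoryMapExample {
        // Stand-in for DataTypes.TypeCategory.
        enum TypeCategory { PRIMITIVE, ENUM, ARRAY, MAP, STRUCT, TRAIT, CLASS }

        public static void main(String[] args) {
            Multimap<TypeCategory, String> byCategory = ArrayListMultimap.create();
            byCategory.put(TypeCategory.TRAIT, "Classification");
            byCategory.put(TypeCategory.CLASS, "hive_table");
            byCategory.put(TypeCategory.CLASS, "hive_db");

            // getTypeNamesByCategory reduces to an immutable copy of one key's values.
            ImmutableList<String> classes =
                    ImmutableList.copyOf(byCategory.get(TypeCategory.CLASS));
            System.out.println(classes); // [hive_table, hive_db]
        }
    }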
......@@ -86,7 +86,7 @@ public class TypeSystemTest extends BaseTest {
soxTrait, secTrait, financeTrait),
ImmutableList.<HierarchicalTypeDefinition<ClassType>>of());
final ImmutableList<String> traitsNames = getTypeSystem().getTraitsNames();
final ImmutableList<String> traitsNames = getTypeSystem().getTypeNamesByCategory(DataTypes.TypeCategory.TRAIT);
Assert.assertEquals(traitsNames.size(), 7);
List traits = Arrays.asList(new String[]{
"Classification",
......
......@@ -20,7 +20,11 @@ package org.apache.hadoop.metadata.web.listeners;
import com.google.inject.Guice;
import com.google.inject.Injector;
import com.google.inject.TypeLiteral;
import com.google.inject.matcher.Matchers;
import com.google.inject.servlet.GuiceServletContextListener;
import com.google.inject.spi.TypeEncounter;
import com.google.inject.spi.TypeListener;
import com.sun.jersey.api.core.PackagesResourceConfig;
import com.sun.jersey.guice.JerseyServletModule;
import com.sun.jersey.guice.spi.container.servlet.GuiceContainer;
......@@ -41,6 +45,8 @@ import javax.servlet.ServletContextEvent;
import java.util.HashMap;
import java.util.Map;
import static com.google.inject.matcher.Matchers.*;
public class GuiceServletConfig extends GuiceServletContextListener {
private static final Logger LOG = LoggerFactory.getLogger(GuiceServletConfig.class);
......@@ -105,22 +111,6 @@ public class GuiceServletConfig extends GuiceServletContextListener {
// perform login operations
LoginProcessor loginProcessor = new LoginProcessor();
loginProcessor.login();
restoreTypeSystem();
}
private void restoreTypeSystem() {
LOG.info("Restoring type system from the store");
Injector injector = getInjector();
ITypeStore typeStore = injector.getInstance(ITypeStore.class);
try {
TypesDef typesDef = typeStore.restore();
TypeSystem typeSystem = injector.getInstance(TypeSystem.class);
typeSystem.defineTypes(typesDef);
} catch (MetadataException e) {
throw new RuntimeException(e);
}
LOG.info("Restored type system from the store");
}
@Override
......
......@@ -32,16 +32,7 @@ import org.slf4j.LoggerFactory;
import javax.inject.Inject;
import javax.inject.Singleton;
import javax.servlet.http.HttpServletRequest;
import javax.ws.rs.Consumes;
import javax.ws.rs.DefaultValue;
import javax.ws.rs.GET;
import javax.ws.rs.POST;
import javax.ws.rs.PUT;
import javax.ws.rs.Path;
import javax.ws.rs.PathParam;
import javax.ws.rs.Produces;
import javax.ws.rs.QueryParam;
import javax.ws.rs.WebApplicationException;
import javax.ws.rs.*;
import javax.ws.rs.core.Context;
import javax.ws.rs.core.MediaType;
import javax.ws.rs.core.Response;
......@@ -80,7 +71,6 @@ public class EntityResource {
* Submits an entity definition (instance) corresponding to a given type.
*/
@POST
@Path("submit")
@Consumes(MediaType.APPLICATION_JSON)
@Produces(MediaType.APPLICATION_JSON)
public Response submit(@Context HttpServletRequest request) {
......@@ -111,7 +101,7 @@ public class EntityResource {
* @param guid GUID for the entity
*/
@GET
@Path("definition/{guid}")
@Path("{guid}")
@Produces(MediaType.APPLICATION_JSON)
public Response getEntityDefinition(@PathParam("guid") String guid) {
Preconditions.checkNotNull(guid, "Entity GUID cannot be null");
......@@ -157,9 +147,8 @@ public class EntityResource {
* @param resultsPerPage number of results for pagination
*/
@GET
@Path("list/{entityType}")
@Produces(MediaType.APPLICATION_JSON)
public Response getEntityList(@PathParam("entityType") String entityType,
public Response getEntityListByType(@QueryParam("type") String entityType,
@DefaultValue("0") @QueryParam("offset") Integer offset,
@QueryParam("numResults") Integer resultsPerPage) {
Preconditions.checkNotNull(entityType, "Entity type cannot be null");
......@@ -193,7 +182,7 @@ public class EntityResource {
* @return response payload as json
*/
@PUT
@Path("update/{guid}")
@Path("{guid}")
@Produces(MediaType.APPLICATION_JSON)
public Response update(@PathParam("guid") String guid,
@QueryParam("property") String property,
......@@ -223,7 +212,7 @@ public class EntityResource {
* @return a list of trait names for the given entity guid
*/
@GET
@Path("traits/list/{guid}")
@Path("{guid}/traits")
@Produces(MediaType.APPLICATION_JSON)
public Response getTraitNames(@PathParam("guid") String guid) {
Preconditions.checkNotNull(guid, "Entity GUID cannot be null");
......@@ -256,7 +245,7 @@ public class EntityResource {
* @param guid globally unique identifier for the entity
*/
@POST
@Path("traits/add/{guid}")
@Path("{guid}/traits")
@Consumes(MediaType.APPLICATION_JSON)
@Produces(MediaType.APPLICATION_JSON)
public Response addTrait(@Context HttpServletRequest request,
......@@ -291,8 +280,8 @@ public class EntityResource {
* @param guid globally unique identifier for the entity
* @param traitName name of the trait
*/
@PUT
@Path("traits/delete/{guid}/{traitName}")
@DELETE
@Path("{guid}/traits/{traitName}")
@Consumes(MediaType.APPLICATION_JSON)
@Produces(MediaType.APPLICATION_JSON)
public Response deleteTrait(@Context HttpServletRequest request,
......@@ -312,11 +301,11 @@ public class EntityResource {
return Response.ok(response).build();
} catch (MetadataException | IllegalArgumentException e) {
LOG.error("Unable to add trait name={} for entity={}", traitName, guid, e);
LOG.error("Unable to delete trait name={} for entity={}", traitName, guid, e);
throw new WebApplicationException(
Servlets.getErrorResponse(e, Response.Status.BAD_REQUEST));
} catch (JSONException e) {
LOG.error("Unable to add trait name={} for entity={}", traitName, guid, e);
LOG.error("Unable to delete trait name={} for entity={}", traitName, guid, e);
throw new WebApplicationException(
Servlets.getErrorResponse(e, Response.Status.INTERNAL_SERVER_ERROR));
}
......
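Taken together, the EntityResource changes make the guid the resource path, traits a sub-resource, and trait removal an HTTP DELETE. A Jersey 1.x sketch of the resulting routes; the guid and trait name are illustrative:

    import com.sun.jersey.api.client.Client;
    import com.sun.jersey.api.client.WebResource;

    public class EntityRoutesExample {
        public static void main(String[] args) {
            WebResource service = Client.create().resource("http://localhost:21000/");
            String guid = "cb9b5513-c672-42cb-8477-b8f3e537a162"; // illustrative

            // GET api/metadata/entities/{guid} (was entities/definition/{guid})
            service.path("api/metadata/entities").path(guid)
                    .accept("application/json").get(String.class);

            // GET api/metadata/entities/{guid}/traits (was entities/traits/list/{guid})
            service.path("api/metadata/entities").path(guid).path("traits")
                    .accept("application/json").get(String.class);

            // DELETE api/metadata/entities/{guid}/traits/{traitName} (was a PUT to traits/delete)
            service.path("api/metadata/entities").path(guid).path("traits").path("PII").delete();
        }
    }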
......@@ -31,12 +31,7 @@ import org.slf4j.LoggerFactory;
import javax.inject.Inject;
import javax.inject.Singleton;
import javax.servlet.http.HttpServletRequest;
import javax.ws.rs.Consumes;
import javax.ws.rs.GET;
import javax.ws.rs.Path;
import javax.ws.rs.PathParam;
import javax.ws.rs.Produces;
import javax.ws.rs.WebApplicationException;
import javax.ws.rs.*;
import javax.ws.rs.core.Context;
import javax.ws.rs.core.MediaType;
import javax.ws.rs.core.Response;
......@@ -69,7 +64,7 @@ public class HiveLineageResource {
* @param tableName table name
*/
@GET
@Path("inputs/{tableName}")
@Path("table/{tableName}/inputs")
@Consumes(MediaType.APPLICATION_JSON)
@Produces(MediaType.APPLICATION_JSON)
public Response inputs(@Context HttpServletRequest request,
......@@ -103,11 +98,12 @@ public class HiveLineageResource {
* @param tableName table name
*/
@GET
@Path("outputs/{tableName}")
@Path("table/{tableName}/outputs")
@Consumes(MediaType.APPLICATION_JSON)
@Produces(MediaType.APPLICATION_JSON)
public Response outputs(@Context HttpServletRequest request,
@PathParam("tableName") String tableName) {
Preconditions.checkNotNull(tableName, "table name cannot be null");
LOG.info("Fetching lineage outputs for tableName={}", tableName);
......@@ -137,11 +133,12 @@ public class HiveLineageResource {
* @param tableName table name
*/
@GET
@Path("schema/{tableName}")
@Path("table/{tableName}/schema")
@Consumes(MediaType.APPLICATION_JSON)
@Produces(MediaType.APPLICATION_JSON)
public Response schema(@Context HttpServletRequest request,
@PathParam("tableName") String tableName) {
Preconditions.checkNotNull(tableName, "table name cannot be null");
LOG.info("Fetching schema for tableName={}", tableName);
......
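The lineage endpoints are re-rooted the same way, with the table as the resource. For a hypothetical table sales_fact the three calls become:

    GET api/metadata/lineage/hive/table/sales_fact/inputs
    GET api/metadata/lineage/hive/table/sales_fact/outputs
    GET api/metadata/lineage/hive/table/sales_fact/schema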
......@@ -18,9 +18,11 @@
package org.apache.hadoop.metadata.web.resources;
import com.google.common.base.Preconditions;
import org.apache.hadoop.metadata.MetadataException;
import org.apache.hadoop.metadata.MetadataServiceClient;
import org.apache.hadoop.metadata.services.MetadataService;
import org.apache.hadoop.metadata.typesystem.types.DataTypes;
import org.apache.hadoop.metadata.web.util.Servlets;
import org.codehaus.jettison.json.JSONArray;
import org.codehaus.jettison.json.JSONException;
......@@ -31,13 +33,7 @@ import org.slf4j.LoggerFactory;
import javax.inject.Inject;
import javax.inject.Singleton;
import javax.servlet.http.HttpServletRequest;
import javax.ws.rs.Consumes;
import javax.ws.rs.GET;
import javax.ws.rs.POST;
import javax.ws.rs.Path;
import javax.ws.rs.PathParam;
import javax.ws.rs.Produces;
import javax.ws.rs.WebApplicationException;
import javax.ws.rs.*;
import javax.ws.rs.core.Context;
import javax.ws.rs.core.MediaType;
import javax.ws.rs.core.Response;
......@@ -59,6 +55,8 @@ public class TypesResource {
private final MetadataService metadataService;
static final String TYPE_ALL = "all";
@Inject
public TypesResource(MetadataService metadataService) {
this.metadataService = metadataService;
......@@ -69,7 +67,6 @@ public class TypesResource {
* domain. Could represent things like Hive Database, Hive Table, etc.
*/
@POST
@Path("submit")
@Consumes(MediaType.APPLICATION_JSON)
@Produces(MediaType.APPLICATION_JSON)
public Response submit(@Context HttpServletRequest request) {
......@@ -97,7 +94,7 @@ public class TypesResource {
* @param typeName name of a type which is unique.
*/
@GET
@Path("definition/{typeName}")
@Path("{typeName}")
@Produces(MediaType.APPLICATION_JSON)
public Response getDefinition(@Context HttpServletRequest request,
@PathParam("typeName") String typeName) {
......@@ -122,44 +119,31 @@ public class TypesResource {
}
/**
* Gets the list of type names registered in the type system.
*/
@GET
@Path("list")
@Produces(MediaType.APPLICATION_JSON)
public Response getTypeNames(@Context HttpServletRequest request) {
try {
final List<String> typeNamesList = metadataService.getTypeNamesList();
JSONObject response = new JSONObject();
response.put(MetadataServiceClient.RESULTS, new JSONArray(typeNamesList));
response.put(MetadataServiceClient.TOTAL_SIZE, typeNamesList.size());
response.put(MetadataServiceClient.REQUEST_ID, Servlets.getRequestId());
return Response.ok(response).build();
} catch (Exception e) {
LOG.error("Unable to get types list", e);
throw new WebApplicationException(
Servlets.getErrorResponse(e, Response.Status.BAD_REQUEST));
}
}
/**
* Gets the list of trait type names registered in the type system.
*/
@GET
@Path("traits/list")
@Produces(MediaType.APPLICATION_JSON)
public Response getTraitNames(@Context HttpServletRequest request) {
public Response getTypesByFilter(@Context HttpServletRequest request,
@DefaultValue(TYPE_ALL) @QueryParam("type") String type) {
try {
final List<String> traitNamesList = metadataService.getTraitNamesList();
List<String> result = null;
if (TYPE_ALL.equals(type)) {
result = metadataService.getTypeNamesList();
} else {
DataTypes.TypeCategory typeCategory = DataTypes.TypeCategory.valueOf(type);
result = metadataService.getTypeNamesByCategory(typeCategory);
}
JSONObject response = new JSONObject();
response.put(MetadataServiceClient.RESULTS, new JSONArray(traitNamesList));
response.put(MetadataServiceClient.TOTAL_SIZE, traitNamesList.size());
response.put(MetadataServiceClient.RESULTS, new JSONArray(result));
response.put(MetadataServiceClient.TOTAL_SIZE, result.size());
response.put(MetadataServiceClient.REQUEST_ID, Servlets.getRequestId());
return Response.ok(response).build();
} catch(IllegalArgumentException ie) {
LOG.error("Unsupported typeName while retrieving type list {}", type);
throw new WebApplicationException(
Servlets.getErrorResponse("Unsupported type " + type, Response.Status.BAD_REQUEST));
} catch (Exception e) {
LOG.error("Unable to get types list", e);
throw new WebApplicationException(
......
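A single GET on the types resource now serves both listings: with no query parameter it returns all type names (the "all" default), and with ?type=<category> it filters by DataTypes.TypeCategory, mapping an unknown category to 400 BAD_REQUEST. For example:

    GET api/metadata/types            (all registered type names)
    GET api/metadata/types?type=TRAIT (trait type names only)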
......@@ -28,6 +28,8 @@ import org.apache.hadoop.metadata.typesystem.TypesDef;
import org.apache.hadoop.metadata.typesystem.json.InstanceSerialization;
import org.apache.hadoop.metadata.typesystem.json.TypesSerialization;
import org.apache.hadoop.metadata.typesystem.persistence.Id;
import org.apache.hadoop.metadata.typesystem.types.ClassType;
import org.apache.hadoop.metadata.typesystem.types.HierarchicalTypeDefinition;
import org.codehaus.jettison.json.JSONObject;
import org.testng.Assert;
import org.testng.annotations.BeforeClass;
......@@ -59,13 +61,16 @@ public abstract class BaseResourceIT {
}
protected void createType(TypesDef typesDef) throws Exception {
HierarchicalTypeDefinition<ClassType> sampleType = typesDef.classTypesAsJavaList().get(0);
if (serviceClient.getType(sampleType.typeName) == null ) {
String typesAsJSON = TypesSerialization.toJson(typesDef);
createType(typesAsJSON);
}
}
protected void createType(String typesAsJSON) throws Exception {
WebResource resource = service
.path("api/metadata/types/submit");
.path("api/metadata/types");
ClientResponse clientResponse = resource
.accept(MediaType.APPLICATION_JSON)
......
......@@ -66,6 +66,8 @@ public class EntityJerseyResourceIT extends BaseResourceIT {
private static final String DATABASE_NAME = "foo";
private static final String TABLE_TYPE = "hive_table_type";
private static final String TABLE_NAME = "bar";
private static final String TRAITS = "traits";
private static final String TRAIT = "trait";
private Referenceable tableInstance;
private Id tableId;
......@@ -148,11 +150,12 @@ public class EntityJerseyResourceIT extends BaseResourceIT {
final String definition = response.getString(MetadataServiceClient.RESULTS);
Assert.assertNotNull(definition);
LOG.debug("tableInstanceAfterGet = " + definition);
InstanceSerialization.fromJsonReferenceable(definition, true);
}
private ClientResponse addProperty(String guid, String property, String value) {
WebResource resource = service
.path("api/metadata/entities/update")
.path("api/metadata/entities")
.path(guid);
return resource.queryParam("property", property).queryParam("value", value)
......@@ -163,7 +166,7 @@ public class EntityJerseyResourceIT extends BaseResourceIT {
private ClientResponse getEntityDefinition(String guid) {
WebResource resource = service
.path("api/metadata/entities/definition")
.path("api/metadata/entities")
.path(guid);
return resource.accept(MediaType.APPLICATION_JSON)
.type(MediaType.APPLICATION_JSON)
......@@ -182,7 +185,7 @@ public class EntityJerseyResourceIT extends BaseResourceIT {
@Test
public void testGetInvalidEntityDefinition() throws Exception {
WebResource resource = service
.path("api/metadata/entities/definition")
.path("api/metadata/entities")
.path("blah");
ClientResponse clientResponse = resource
......@@ -198,8 +201,8 @@ public class EntityJerseyResourceIT extends BaseResourceIT {
@Test(dependsOnMethods = "testSubmitEntity")
public void testGetEntityList() throws Exception {
ClientResponse clientResponse = service
.path("api/metadata/entities/list/")
.path(TABLE_TYPE)
.path("api/metadata/entities")
.queryParam("type", TABLE_TYPE)
.accept(MediaType.APPLICATION_JSON)
.type(MediaType.APPLICATION_JSON)
.method(HttpMethod.GET, ClientResponse.class);
......@@ -219,7 +222,8 @@ public class EntityJerseyResourceIT extends BaseResourceIT {
@Test
public void testGetEntityListForBadEntityType() throws Exception {
ClientResponse clientResponse = service
.path("api/metadata/entities/list/blah")
.path("api/metadata/entities")
.queryParam("type", "blah")
.accept(MediaType.APPLICATION_JSON)
.type(MediaType.APPLICATION_JSON)
.method(HttpMethod.GET, ClientResponse.class);
......@@ -235,7 +239,8 @@ public class EntityJerseyResourceIT extends BaseResourceIT {
addNewType();
ClientResponse clientResponse = service
.path("api/metadata/entities/list/test")
.path("api/metadata/entities")
.queryParam("type", "test")
.accept(MediaType.APPLICATION_JSON)
.type(MediaType.APPLICATION_JSON)
.method(HttpMethod.GET, ClientResponse.class);
......@@ -266,8 +271,9 @@ public class EntityJerseyResourceIT extends BaseResourceIT {
public void testGetTraitNames() throws Exception {
final String guid = tableId._getId();
ClientResponse clientResponse = service
.path("api/metadata/entities/traits/list")
.path("api/metadata/entities")
.path(guid)
.path(TRAITS)
.accept(MediaType.APPLICATION_JSON)
.type(MediaType.APPLICATION_JSON)
.method(HttpMethod.GET, ClientResponse.class);
......@@ -299,8 +305,9 @@ public class EntityJerseyResourceIT extends BaseResourceIT {
final String guid = tableId._getId();
ClientResponse clientResponse = service
.path("api/metadata/entities/traits/add")
.path("api/metadata/entities")
.path(guid)
.path(TRAITS)
.accept(MediaType.APPLICATION_JSON)
.type(MediaType.APPLICATION_JSON)
.method(HttpMethod.POST, ClientResponse.class, traitInstanceAsJSON);
......@@ -328,8 +335,9 @@ public class EntityJerseyResourceIT extends BaseResourceIT {
LOG.debug("traitInstanceAsJSON = " + traitInstanceAsJSON);
ClientResponse clientResponse = service
.path("api/metadata/entities/traits/add")
.path("api/metadata/entities")
.path("random")
.path(TRAITS)
.accept(MediaType.APPLICATION_JSON)
.type(MediaType.APPLICATION_JSON)
.method(HttpMethod.POST, ClientResponse.class, traitInstanceAsJSON);
......@@ -343,12 +351,13 @@ public class EntityJerseyResourceIT extends BaseResourceIT {
final String guid = tableId._getId();
ClientResponse clientResponse = service
.path("api/metadata/entities/traits/delete")
.path("api/metadata/entities")
.path(guid)
.path(TRAITS)
.path(traitName)
.accept(MediaType.APPLICATION_JSON)
.type(MediaType.APPLICATION_JSON)
.method(HttpMethod.PUT, ClientResponse.class);
.method(HttpMethod.DELETE, ClientResponse.class);
Assert.assertEquals(clientResponse.getStatus(), Response.Status.OK.getStatusCode());
String responseAsString = clientResponse.getEntity(String.class);
......@@ -365,12 +374,13 @@ public class EntityJerseyResourceIT extends BaseResourceIT {
final String traitName = "blah_trait";
ClientResponse clientResponse = service
.path("api/metadata/entities/traits/delete")
.path("api/metadata/entities")
.path("random")
.path(TRAITS)
.path(traitName)
.accept(MediaType.APPLICATION_JSON)
.type(MediaType.APPLICATION_JSON)
.method(HttpMethod.PUT, ClientResponse.class);
.method(HttpMethod.DELETE, ClientResponse.class);
Assert.assertEquals(clientResponse.getStatus(),
Response.Status.BAD_REQUEST.getStatusCode());
}
......@@ -410,7 +420,9 @@ public class EntityJerseyResourceIT extends BaseResourceIT {
new AttributeDefinition("serde2",
"serdeType", Multiplicity.REQUIRED, false, null),
new AttributeDefinition("database",
DATABASE_TYPE, Multiplicity.REQUIRED, true, null));
DATABASE_TYPE, Multiplicity.REQUIRED, true, null),
new AttributeDefinition("compressed",
DataTypes.BOOLEAN_TYPE.getName(), Multiplicity.OPTIONAL, true, null));
HierarchicalTypeDefinition<TraitType> classificationTraitDefinition =
TypesUtil.createTraitTypeDef("classification",
......@@ -451,6 +463,7 @@ public class EntityJerseyResourceIT extends BaseResourceIT {
tableInstance.set("level", 2);
tableInstance.set("tableType", 1); // enum
tableInstance.set("database", databaseInstance);
tableInstance.set("compressed", false);
Struct traitInstance = (Struct) tableInstance.getTrait("classification");
traitInstance.set("tag", "foundation_etl");
......
......@@ -53,6 +53,8 @@ import java.util.List;
*/
public class HiveLineageJerseyResourceIT extends BaseResourceIT {
private static final String BASE_URI = "api/metadata/lineage/hive/table/";
@BeforeClass
public void setUp() throws Exception {
super.setUp();
......@@ -64,8 +66,9 @@ public class HiveLineageJerseyResourceIT extends BaseResourceIT {
@Test
public void testInputs() throws Exception {
WebResource resource = service
.path("api/metadata/lineage/hive/inputs")
.path("sales_fact_monthly_mv");
.path(BASE_URI)
.path("sales_fact_monthly_mv")
.path("inputs");
ClientResponse clientResponse = resource
.accept(MediaType.APPLICATION_JSON)
......@@ -94,8 +97,9 @@ public class HiveLineageJerseyResourceIT extends BaseResourceIT {
@Test
public void testOutputs() throws Exception {
WebResource resource = service
.path("api/metadata/lineage/hive/outputs")
.path("sales_fact");
.path(BASE_URI)
.path("sales_fact")
.path("outputs");
ClientResponse clientResponse = resource
.accept(MediaType.APPLICATION_JSON)
......@@ -124,8 +128,9 @@ public class HiveLineageJerseyResourceIT extends BaseResourceIT {
@Test
public void testSchema() throws Exception {
WebResource resource = service
.path("api/metadata/lineage/hive/schema")
.path("sales_fact");
.path(BASE_URI)
.path("sales_fact")
.path("schema");
ClientResponse clientResponse = resource
.accept(MediaType.APPLICATION_JSON)
......
......@@ -70,7 +70,7 @@ public class TypesJerseyResourceIT extends BaseResourceIT {
System.out.println("typesAsJSON = " + typesAsJSON);
WebResource resource = service
.path("api/metadata/types/submit");
.path("api/metadata/types");
ClientResponse clientResponse = resource
.accept(MediaType.APPLICATION_JSON)
......@@ -93,7 +93,7 @@ public class TypesJerseyResourceIT extends BaseResourceIT {
System.out.println("typeName = " + typeDefinition.typeName);
WebResource resource = service
.path("api/metadata/types/definition")
.path("api/metadata/types")
.path(typeDefinition.typeName);
ClientResponse clientResponse = resource
......@@ -114,7 +114,7 @@ public class TypesJerseyResourceIT extends BaseResourceIT {
@Test
public void testGetDefinitionForNonexistentType() throws Exception {
WebResource resource = service
.path("api/metadata/types/definition")
.path("api/metadata/types")
.path("blah");
ClientResponse clientResponse = resource
......@@ -127,7 +127,7 @@ public class TypesJerseyResourceIT extends BaseResourceIT {
@Test(dependsOnMethods = "testSubmit")
public void testGetTypeNames() throws Exception {
WebResource resource = service
.path("api/metadata/types/list");
.path("api/metadata/types");
ClientResponse clientResponse = resource
.accept(MediaType.APPLICATION_JSON)
......@@ -150,9 +150,10 @@ public class TypesJerseyResourceIT extends BaseResourceIT {
String[] traitsAdded = addTraits();
WebResource resource = service
.path("api/metadata/types/traits/list");
.path("api/metadata/types");
ClientResponse clientResponse = resource
.queryParam("type", DataTypes.TypeCategory.TRAIT.name())
.accept(MediaType.APPLICATION_JSON)
.type(MediaType.APPLICATION_JSON)
.method(HttpMethod.GET, ClientResponse.class);
......