Commit ea38da10 by Venkatesh Seetharam

ISSUE-38 Map type to graph with type prefixes to enable search. Contributed by Venkatesh Seetharam

parent 386d8d38
...@@ -29,13 +29,13 @@ import com.google.inject.Scopes; ...@@ -29,13 +29,13 @@ import com.google.inject.Scopes;
import com.google.inject.throwingproviders.ThrowingProviderBinder; import com.google.inject.throwingproviders.ThrowingProviderBinder;
import com.thinkaurelius.titan.core.TitanGraph; import com.thinkaurelius.titan.core.TitanGraph;
import org.apache.hadoop.metadata.services.DefaultMetadataService; import org.apache.hadoop.metadata.services.DefaultMetadataService;
import org.apache.hadoop.metadata.services.GraphBackedMetadataRepository; import org.apache.hadoop.metadata.repository.graph.GraphBackedMetadataRepository;
import org.apache.hadoop.metadata.services.GraphProvider; import org.apache.hadoop.metadata.repository.graph.GraphProvider;
import org.apache.hadoop.metadata.services.GraphService; import org.apache.hadoop.metadata.repository.graph.GraphService;
import org.apache.hadoop.metadata.services.GraphServiceConfigurator; import org.apache.hadoop.metadata.repository.graph.GraphServiceConfigurator;
import org.apache.hadoop.metadata.services.MetadataRepository; import org.apache.hadoop.metadata.repository.MetadataRepository;
import org.apache.hadoop.metadata.services.MetadataService; import org.apache.hadoop.metadata.services.MetadataService;
import org.apache.hadoop.metadata.services.TitanGraphProvider; import org.apache.hadoop.metadata.repository.graph.TitanGraphProvider;
/** /**
* Guice module for Repository module. * Guice module for Repository module.
......
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.metadata.listener;
import org.apache.hadoop.metadata.ITypedReferenceableInstance;
import org.apache.hadoop.metadata.MetadataException;
import org.apache.hadoop.metadata.types.IDataType;
/**
 * Typed instance change notification listener.
 *
 * Implementations are invoked by the metadata service after a typed
 * instance has been added to the repository.
 */
public interface TypedInstanceChangeListener {
/**
 * Invoked upon adding a new typed instance to the repository.
 *
 * @param typeName name of the type of the added instance
 * @param typedInstance the typed instance that was added
 * @throws org.apache.hadoop.metadata.MetadataException if the listener
 *         cannot process the notification
 */
void onAdd(String typeName,
ITypedReferenceableInstance typedInstance) throws MetadataException;
}
...@@ -16,7 +16,7 @@ ...@@ -16,7 +16,7 @@
* limitations under the License. * limitations under the License.
*/ */
package org.apache.hadoop.metadata.services; package org.apache.hadoop.metadata.repository;
import org.apache.hadoop.metadata.IReferenceableInstance; import org.apache.hadoop.metadata.IReferenceableInstance;
import org.apache.hadoop.metadata.ITypedReferenceableInstance; import org.apache.hadoop.metadata.ITypedReferenceableInstance;
......
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.metadata.repository.graph;
/**
 * Property-key constants stored on vertices/edges by the graph-backed
 * metadata repository.
 */
final class Constants {
// non-instantiable constant holder
private Constants() {
}
// globally unique id assigned to an entity vertex
// NOTE(review): value is upper-case "GUID" while the other keys are camelCase — confirm intended
static final String GUID_PROPERTY_KEY = "GUID";
// name of the entity's type, stored on each entity vertex
static final String ENTITY_TYPE_PROPERTY_KEY = "typeName";
// version stamp of the stored element
static final String VERSION_PROPERTY_KEY = "version";
// timestamp property; set on edges when a timestamp is supplied (see GraphUtils.addEdge)
static final String TIMESTAMP_PROPERTY_KEY = "timestamp";
}
package org.apache.hadoop.metadata.services; package org.apache.hadoop.metadata.repository.graph;
import org.apache.commons.configuration.ConfigurationException; import org.apache.commons.configuration.ConfigurationException;
......
...@@ -16,7 +16,7 @@ ...@@ -16,7 +16,7 @@
* limitations under the License. * limitations under the License.
*/ */
package org.apache.hadoop.metadata.services; package org.apache.hadoop.metadata.repository.graph;
import java.util.Set; import java.util.Set;
......
package org.apache.hadoop.metadata.services; package org.apache.hadoop.metadata.repository.graph;
import com.thinkaurelius.titan.core.TitanGraph; import com.thinkaurelius.titan.core.TitanGraph;
......
package org.apache.hadoop.metadata.util; /**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.metadata.repository.graph;
import com.tinkerpop.blueprints.Direction; import com.tinkerpop.blueprints.Direction;
import com.tinkerpop.blueprints.Edge; import com.tinkerpop.blueprints.Edge;
import com.tinkerpop.blueprints.Graph; import com.tinkerpop.blueprints.Graph;
import com.tinkerpop.blueprints.GraphQuery; import com.tinkerpop.blueprints.GraphQuery;
import com.tinkerpop.blueprints.Vertex; import com.tinkerpop.blueprints.Vertex;
import com.tinkerpop.blueprints.util.io.graphson.GraphSONMode;
import com.tinkerpop.blueprints.util.io.graphson.GraphSONUtility;
import org.codehaus.jettison.json.JSONException;
import org.codehaus.jettison.json.JSONObject;
import org.slf4j.Logger; import org.slf4j.Logger;
import org.slf4j.LoggerFactory; import org.slf4j.LoggerFactory;
import java.util.HashMap;
import java.util.Iterator; import java.util.Iterator;
import java.util.Map;
/** /**
* Utility class for graph operations. * Utility class for graph operations.
...@@ -23,37 +35,37 @@ public final class GraphUtils { ...@@ -23,37 +35,37 @@ public final class GraphUtils {
private static final Logger LOG = LoggerFactory.getLogger(GraphUtils.class); private static final Logger LOG = LoggerFactory.getLogger(GraphUtils.class);
private static final String GUID_PROPERTY_KEY = "guid";
private static final String TIMESTAMP_PROPERTY_KEY = "timestamp";
private GraphUtils() { private GraphUtils() {
} }
public static Edge addEdge(Vertex fromVertex, Vertex toVertex, String edgeLabel) { public static Edge addEdge(Vertex fromVertex, Vertex toVertex,
return addEdge(fromVertex, toVertex, edgeLabel, null); String vertexPropertyKey, String edgeLabel) {
return addEdge(fromVertex, toVertex, vertexPropertyKey, edgeLabel, null);
} }
public static Edge addEdge(Vertex fromVertex, Vertex toVertex, public static Edge addEdge(Vertex fromVertex, Vertex toVertex,
String edgeLabel, String timestamp) { String vertexPropertyKey, String edgeLabel, String timestamp) {
Edge edge = findEdge(fromVertex, toVertex, edgeLabel); Edge edge = findEdge(fromVertex, toVertex, vertexPropertyKey, edgeLabel);
Edge edgeToVertex = edge != null ? edge : fromVertex.addEdge(edgeLabel, toVertex); Edge edgeToVertex = edge != null ? edge : fromVertex.addEdge(edgeLabel, toVertex);
if (timestamp != null) { if (timestamp != null) {
edgeToVertex.setProperty(TIMESTAMP_PROPERTY_KEY, timestamp); edgeToVertex.setProperty(Constants.TIMESTAMP_PROPERTY_KEY, timestamp);
} }
return edgeToVertex; return edgeToVertex;
} }
public static Edge findEdge(Vertex fromVertex, Vertex toVertex, String edgeLabel) { public static Edge findEdge(Vertex fromVertex, Vertex toVertex,
return findEdge(fromVertex, toVertex.getProperty(GUID_PROPERTY_KEY), edgeLabel); String vertexPropertyKey, String edgeLabel) {
return findEdge(fromVertex, toVertex.getProperty(vertexPropertyKey),
vertexPropertyKey, edgeLabel);
} }
public static Edge findEdge(Vertex fromVertex, Object toVertexName, String edgeLabel) { public static Edge findEdge(Vertex fromVertex, Object toVertexName,
String vertexPropertyKey, String edgeLabel) {
Edge edgeToFind = null; Edge edgeToFind = null;
for (Edge edge : fromVertex.getEdges(Direction.OUT, edgeLabel)) { for (Edge edge : fromVertex.getEdges(Direction.OUT, edgeLabel)) {
if (edge.getVertex(Direction.IN).getProperty( if (edge.getVertex(Direction.IN).getProperty(vertexPropertyKey).equals(toVertexName)) {
GUID_PROPERTY_KEY).equals(toVertexName)) {
edgeToFind = edge; edgeToFind = edge;
break; break;
} }
...@@ -63,36 +75,15 @@ public final class GraphUtils { ...@@ -63,36 +75,15 @@ public final class GraphUtils {
} }
public static Vertex findVertex(Graph blueprintsGraph, public static Vertex findVertex(Graph blueprintsGraph,
String guid) { String key, String value) {
LOG.debug("Finding vertex for: guid={}", guid); LOG.debug("Finding vertex for key={}, value={}", key, value);
GraphQuery query = blueprintsGraph.query().has("guid", guid);
Iterator<Vertex> results = query.vertices().iterator();
// returning one since name/type is unique
return results.hasNext() ? results.next() : null;
}
public static Vertex findVertex(Graph blueprintsGraph,
String entityName, String entityType) {
LOG.debug("Finding vertex for: name={}, type={}", entityName, entityType);
GraphQuery query = blueprintsGraph.query() GraphQuery query = blueprintsGraph.query().has(key, value);
.has("entityName", entityName)
.has("entityType", entityType);
Iterator<Vertex> results = query.vertices().iterator(); Iterator<Vertex> results = query.vertices().iterator();
// returning one since name/type is unique // returning one since name/type is unique
return results.hasNext() ? results.next() : null; return results.hasNext() ? results.next() : null;
} }
/**
 * Copies every property of the given vertex into a fresh map, rendering
 * each value with {@link String#valueOf(Object)}.
 *
 * @param entityVertex vertex whose properties are read
 * @return a new map of property key to stringified property value
 */
public static Map<String, String> extractProperties(Vertex entityVertex) {
    final Map<String, String> propertyMap = new HashMap<>();
    for (String propertyKey : entityVertex.getPropertyKeys()) {
        final Object rawValue = entityVertex.getProperty(propertyKey);
        propertyMap.put(propertyKey, String.valueOf(rawValue));
    }
    return propertyMap;
}
public static String vertexString(final Vertex vertex) { public static String vertexString(final Vertex vertex) {
StringBuilder properties = new StringBuilder(); StringBuilder properties = new StringBuilder();
for (String propertyKey : vertex.getPropertyKeys()) { for (String propertyKey : vertex.getPropertyKeys()) {
...@@ -104,10 +95,6 @@ public final class GraphUtils { ...@@ -104,10 +95,6 @@ public final class GraphUtils {
return "v[" + vertex.getId() + "], Properties[" + properties + "]"; return "v[" + vertex.getId() + "], Properties[" + properties + "]";
} }
/**
 * Serializes the given vertex to GraphSON using {@code GraphSONMode.NORMAL}.
 * The {@code null} second argument presumably means "include all property
 * keys" — confirm against the GraphSONUtility contract.
 *
 * @param vertex vertex to serialize
 * @return the GraphSON JSON representation of the vertex
 * @throws JSONException if serialization fails
 */
public static JSONObject vertexJSON(final Vertex vertex) throws JSONException {
    final JSONObject graphson =
            GraphSONUtility.jsonFromElement(vertex, null, GraphSONMode.NORMAL);
    return graphson;
}
public static String edgeString(final Edge edge) { public static String edgeString(final Edge edge) {
return "e[" + edge.getLabel() + "], [" return "e[" + edge.getLabel() + "], ["
+ edge.getVertex(Direction.OUT).getProperty("name") + edge.getVertex(Direction.OUT).getProperty("name")
...@@ -115,4 +102,18 @@ public final class GraphUtils { ...@@ -115,4 +102,18 @@ public final class GraphUtils {
+ edge.getVertex(Direction.IN).getProperty("name") + edge.getVertex(Direction.IN).getProperty("name")
+ "]"; + "]";
} }
/**
 * Dumps every vertex and edge of the given graph to the debug log.
 * Purely a diagnostic aid; does nothing when debug logging is disabled.
 *
 * Fixes: the original also wrote each line to {@code System.out.println},
 * duplicating output to stdout from library code, and it built the
 * vertex/edge strings even when debug logging was off.
 *
 * @param graph graph whose vertices and edges are logged
 */
public static void dumpToLog(final Graph graph) {
    if (!LOG.isDebugEnabled()) {
        return; // avoid building vertex/edge strings when they would be discarded
    }

    LOG.debug("Vertices of {}", graph);
    for (Vertex vertex : graph.getVertices()) {
        LOG.debug(vertexString(vertex));
    }

    LOG.debug("Edges of {}", graph);
    for (Edge edge : graph.getEdges()) {
        LOG.debug(edgeString(edge));
    }
}
} }
\ No newline at end of file
package org.apache.hadoop.metadata.services; package org.apache.hadoop.metadata.repository.graph;
import java.util.Properties; import java.util.Properties;
......
package org.apache.hadoop.metadata.services; package org.apache.hadoop.metadata.repository.graph;
import javax.inject.Singleton; import javax.inject.Singleton;
......
...@@ -16,7 +16,7 @@ ...@@ -16,7 +16,7 @@
* limitations under the License. * limitations under the License.
*/ */
package org.apache.hadoop.metadata.services; package org.apache.hadoop.metadata.repository.graph;
import java.io.IOException; import java.io.IOException;
import java.util.ArrayList; import java.util.ArrayList;
......
...@@ -24,7 +24,9 @@ import org.apache.hadoop.metadata.MetadataException; ...@@ -24,7 +24,9 @@ import org.apache.hadoop.metadata.MetadataException;
import org.apache.hadoop.metadata.TypesDef; import org.apache.hadoop.metadata.TypesDef;
import org.apache.hadoop.metadata.json.Serialization$; import org.apache.hadoop.metadata.json.Serialization$;
import org.apache.hadoop.metadata.json.TypesSerialization; import org.apache.hadoop.metadata.json.TypesSerialization;
import org.apache.hadoop.metadata.listener.TypedInstanceChangeListener;
import org.apache.hadoop.metadata.listener.TypesChangeListener; import org.apache.hadoop.metadata.listener.TypesChangeListener;
import org.apache.hadoop.metadata.repository.MetadataRepository;
import org.apache.hadoop.metadata.types.IDataType; import org.apache.hadoop.metadata.types.IDataType;
import org.apache.hadoop.metadata.types.TypeSystem; import org.apache.hadoop.metadata.types.TypeSystem;
import org.codehaus.jettison.json.JSONException; import org.codehaus.jettison.json.JSONException;
...@@ -47,6 +49,8 @@ public class DefaultMetadataService implements MetadataService { ...@@ -47,6 +49,8 @@ public class DefaultMetadataService implements MetadataService {
LoggerFactory.getLogger(DefaultMetadataService.class); LoggerFactory.getLogger(DefaultMetadataService.class);
private final Set<TypesChangeListener> typesChangeListeners = new LinkedHashSet<>(); private final Set<TypesChangeListener> typesChangeListeners = new LinkedHashSet<>();
private final Set<TypedInstanceChangeListener> typedInstanceChangeListeners
= new LinkedHashSet<>();
private final TypeSystem typeSystem; private final TypeSystem typeSystem;
private final MetadataRepository repository; private final MetadataRepository repository;
...@@ -147,7 +151,11 @@ public class DefaultMetadataService implements MetadataService { ...@@ -147,7 +151,11 @@ public class DefaultMetadataService implements MetadataService {
ITypedReferenceableInstance entityInstance = ITypedReferenceableInstance entityInstance =
Serialization$.MODULE$.fromJson(entityDefinition); Serialization$.MODULE$.fromJson(entityDefinition);
return repository.createEntity(entityInstance, entityType); final String guid = repository.createEntity(entityInstance, entityType);
onAdd(entityType, entityInstance);
return guid;
} catch (ParseException e) { } catch (ParseException e) {
LOG.error("Unable to parse JSON {} for type {}", entityDefinition, entityType, e); LOG.error("Unable to parse JSON {} for type {}", entityDefinition, entityType, e);
throw new MetadataException("validation failed for: " + entityType); throw new MetadataException("validation failed for: " + entityType);
...@@ -204,6 +212,21 @@ public class DefaultMetadataService implements MetadataService { ...@@ -204,6 +212,21 @@ public class DefaultMetadataService implements MetadataService {
typesChangeListeners.remove(listener); typesChangeListeners.remove(listener);
} }
/**
 * Notifies every registered {@link TypedInstanceChangeListener} that an
 * instance of the given type was added.
 *
 * @param typeName name of the type of the added instance
 * @param typedInstance the instance that was added
 * @throws MetadataException if a listener fails; listeners after the
 *         failing one are not invoked
 */
private void onAdd(String typeName,
                   ITypedReferenceableInstance typedInstance) throws MetadataException {
    for (TypedInstanceChangeListener changeListener : typedInstanceChangeListeners) {
        changeListener.onAdd(typeName, typedInstance);
    }
}
/**
 * Registers a listener to be notified when typed instances are added.
 *
 * @param listener the listener to register
 */
public void registerListener(TypedInstanceChangeListener listener) {
typedInstanceChangeListeners.add(listener);
}
/**
 * Removes a previously registered typed-instance change listener.
 * A no-op if the listener was never registered.
 *
 * @param listener the listener to remove
 */
public void unregisterListener(TypedInstanceChangeListener listener) {
typedInstanceChangeListeners.remove(listener);
}
/** /**
* Starts the service. This method blocks until the service has completely started. * Starts the service. This method blocks until the service has completely started.
* *
......
...@@ -2,7 +2,7 @@ package org.apache.hadoop.metadata; ...@@ -2,7 +2,7 @@ package org.apache.hadoop.metadata;
import junit.framework.Assert; import junit.framework.Assert;
import org.apache.hadoop.metadata.services.GraphService; import org.apache.hadoop.metadata.repository.graph.GraphService;
import org.testng.annotations.AfterClass; import org.testng.annotations.AfterClass;
import org.testng.annotations.BeforeClass; import org.testng.annotations.BeforeClass;
import org.testng.annotations.Test; import org.testng.annotations.Test;
......
...@@ -6,6 +6,8 @@ import org.apache.hadoop.metadata.ITypedReferenceableInstance; ...@@ -6,6 +6,8 @@ import org.apache.hadoop.metadata.ITypedReferenceableInstance;
import org.apache.hadoop.metadata.MetadataException; import org.apache.hadoop.metadata.MetadataException;
import org.apache.hadoop.metadata.Referenceable; import org.apache.hadoop.metadata.Referenceable;
import org.apache.hadoop.metadata.RepositoryModuleBaseTest; import org.apache.hadoop.metadata.RepositoryModuleBaseTest;
import org.apache.hadoop.metadata.repository.graph.GraphBackedMetadataRepository;
import org.apache.hadoop.metadata.repository.graph.TitanGraphService;
import org.apache.hadoop.metadata.storage.IRepository; import org.apache.hadoop.metadata.storage.IRepository;
import org.apache.hadoop.metadata.storage.memory.MemRepository; import org.apache.hadoop.metadata.storage.memory.MemRepository;
import org.apache.hadoop.metadata.types.AttributeDefinition; import org.apache.hadoop.metadata.types.AttributeDefinition;
...@@ -18,7 +20,7 @@ import org.apache.hadoop.metadata.types.Multiplicity; ...@@ -18,7 +20,7 @@ import org.apache.hadoop.metadata.types.Multiplicity;
import org.apache.hadoop.metadata.types.StructTypeDefinition; import org.apache.hadoop.metadata.types.StructTypeDefinition;
import org.apache.hadoop.metadata.types.TraitType; import org.apache.hadoop.metadata.types.TraitType;
import org.apache.hadoop.metadata.types.TypeSystem; import org.apache.hadoop.metadata.types.TypeSystem;
import org.apache.hadoop.metadata.util.GraphUtils; import org.apache.hadoop.metadata.repository.graph.GraphUtils;
import org.testng.Assert; import org.testng.Assert;
import org.testng.annotations.AfterClass; import org.testng.annotations.AfterClass;
import org.testng.annotations.BeforeClass; import org.testng.annotations.BeforeClass;
......
package org.apache.hadoop.metadata.services; package org.apache.hadoop.metadata.services;
import org.apache.hadoop.metadata.RepositoryModuleBaseTest; import org.apache.hadoop.metadata.RepositoryModuleBaseTest;
import org.apache.hadoop.metadata.repository.graph.TitanGraphService;
import org.testng.Assert; import org.testng.Assert;
import org.testng.annotations.AfterClass; import org.testng.annotations.AfterClass;
import org.testng.annotations.BeforeClass; import org.testng.annotations.BeforeClass;
......
...@@ -17,7 +17,7 @@ ...@@ -17,7 +17,7 @@
# #
# GraphService implementation # GraphService implementation
metadata.graph.impl.class=org.apache.hadoop.metadata.services.TitanGraphService metadata.graph.impl.class=org.apache.hadoop.metadata.repository.graph.TitanGraphService
# Graph implementation # Graph implementation
......
...@@ -4,7 +4,7 @@ import java.util.HashMap; ...@@ -4,7 +4,7 @@ import java.util.HashMap;
import java.util.Map; import java.util.Map;
import org.apache.hadoop.metadata.RepositoryMetadataModule; import org.apache.hadoop.metadata.RepositoryMetadataModule;
import org.apache.hadoop.metadata.services.GraphService; import org.apache.hadoop.metadata.repository.graph.GraphService;
import org.slf4j.Logger; import org.slf4j.Logger;
import org.slf4j.LoggerFactory; import org.slf4j.LoggerFactory;
......
...@@ -35,7 +35,7 @@ import javax.ws.rs.core.MediaType; ...@@ -35,7 +35,7 @@ import javax.ws.rs.core.MediaType;
import javax.ws.rs.core.Response; import javax.ws.rs.core.Response;
import org.apache.commons.lang.StringUtils; import org.apache.commons.lang.StringUtils;
import org.apache.hadoop.metadata.services.GraphService; import org.apache.hadoop.metadata.repository.graph.GraphService;
import org.apache.hadoop.metadata.web.util.Servlets; import org.apache.hadoop.metadata.web.util.Servlets;
import org.codehaus.jettison.json.JSONArray; import org.codehaus.jettison.json.JSONArray;
import org.codehaus.jettison.json.JSONException; import org.codehaus.jettison.json.JSONException;
......
...@@ -18,7 +18,7 @@ ...@@ -18,7 +18,7 @@
######### Graph Database Configs ######### ######### Graph Database Configs #########
# Graph implementation # Graph implementation
metadata.graph.impl.class=org.apache.hadoop.metadata.services.TitanGraphService metadata.graph.impl.class=org.apache.hadoop.metadata.repository.graph.TitanGraphService
# Graph Storage # Graph Storage
metadata.graph.storage.backend=berkeleyje metadata.graph.storage.backend=berkeleyje
......
...@@ -56,6 +56,7 @@ public class EntityJerseyResourceIT extends BaseResourceIT { ...@@ -56,6 +56,7 @@ public class EntityJerseyResourceIT extends BaseResourceIT {
private static final String TABLE_NAME = "bar"; private static final String TABLE_NAME = "bar";
private static final String TRAIT_TYPE = "hive_fetl"; private static final String TRAIT_TYPE = "hive_fetl";
private String tableInstanceAsJSON;
private String guid; private String guid;
@BeforeClass @BeforeClass
...@@ -70,7 +71,7 @@ public class EntityJerseyResourceIT extends BaseResourceIT { ...@@ -70,7 +71,7 @@ public class EntityJerseyResourceIT extends BaseResourceIT {
public void testSubmitEntity() throws Exception { public void testSubmitEntity() throws Exception {
ITypedReferenceableInstance tableInstance = createHiveTableInstance(); ITypedReferenceableInstance tableInstance = createHiveTableInstance();
String instanceAsJSON = Serialization$.MODULE$.toJson(tableInstance); tableInstanceAsJSON = Serialization$.MODULE$.toJson(tableInstance);
WebResource resource = service WebResource resource = service
.path("api/metadata/entities/submit") .path("api/metadata/entities/submit")
...@@ -79,7 +80,7 @@ public class EntityJerseyResourceIT extends BaseResourceIT { ...@@ -79,7 +80,7 @@ public class EntityJerseyResourceIT extends BaseResourceIT {
ClientResponse clientResponse = resource ClientResponse clientResponse = resource
.accept(MediaType.APPLICATION_JSON) .accept(MediaType.APPLICATION_JSON)
.type(MediaType.APPLICATION_JSON) .type(MediaType.APPLICATION_JSON)
.method(HttpMethod.POST, ClientResponse.class, instanceAsJSON); .method(HttpMethod.POST, ClientResponse.class, tableInstanceAsJSON);
Assert.assertEquals(clientResponse.getStatus(), Response.Status.OK.getStatusCode()); Assert.assertEquals(clientResponse.getStatus(), Response.Status.OK.getStatusCode());
String responseAsString = clientResponse.getEntity(String.class); String responseAsString = clientResponse.getEntity(String.class);
...@@ -99,7 +100,7 @@ public class EntityJerseyResourceIT extends BaseResourceIT { ...@@ -99,7 +100,7 @@ public class EntityJerseyResourceIT extends BaseResourceIT {
} }
@Test (dependsOnMethods = "testSubmitEntity") @Test (dependsOnMethods = "testSubmitEntity")
public void testGetEntityDefinition() { public void testGetEntityDefinition() throws Exception {
WebResource resource = service WebResource resource = service
.path("api/metadata/entities/definition") .path("api/metadata/entities/definition")
.path(guid); .path(guid);
...@@ -109,8 +110,19 @@ public class EntityJerseyResourceIT extends BaseResourceIT { ...@@ -109,8 +110,19 @@ public class EntityJerseyResourceIT extends BaseResourceIT {
.type(MediaType.APPLICATION_JSON) .type(MediaType.APPLICATION_JSON)
.method(HttpMethod.GET, ClientResponse.class); .method(HttpMethod.GET, ClientResponse.class);
Assert.assertEquals(clientResponse.getStatus(), Response.Status.OK.getStatusCode()); Assert.assertEquals(clientResponse.getStatus(), Response.Status.OK.getStatusCode());
String response = clientResponse.getEntity(String.class);
System.out.println("response = " + response); String responseAsString = clientResponse.getEntity(String.class);
Assert.assertNotNull(responseAsString);
JSONObject response = new JSONObject(responseAsString);
Assert.assertNotNull(response.get("requestId"));
final String definition = response.getString("definition");
Assert.assertNotNull(definition);
System.out.println("definition = " + definition);
System.out.println("tableInstanceAsJSON = " + tableInstanceAsJSON);
// Assert.assertEquals(definition, tableInstanceAsJSON);
} }
@Test @Test
......
...@@ -17,7 +17,7 @@ ...@@ -17,7 +17,7 @@
# #
# GraphService implementation # GraphService implementation
metadata.graph.impl.class=org.apache.hadoop.metadata.services.TitanGraphService metadata.graph.impl.class=org.apache.hadoop.metadata.repository.graph.TitanGraphService
# Graph Storage # Graph Storage
metadata.graph.storage.backend=inmemory metadata.graph.storage.backend=inmemory
......
Markdown is supported
0% or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or to comment