Commit 39c84bd6 by Venkatesh Seetharam

Move search to Discovery interface from repository. Contributed by Venkatesh Seetharam

parent 18981168
......@@ -18,78 +18,257 @@
package org.apache.hadoop.metadata.discovery;
import com.google.common.base.Preconditions;
import com.thinkaurelius.titan.core.TitanGraph;
import com.thinkaurelius.titan.core.TitanProperty;
import com.thinkaurelius.titan.core.TitanVertex;
import com.tinkerpop.blueprints.Direction;
import com.tinkerpop.blueprints.Edge;
import com.tinkerpop.blueprints.GraphQuery;
import com.tinkerpop.blueprints.Vertex;
import org.apache.commons.collections.iterators.IteratorChain;
import org.apache.hadoop.metadata.MetadataException;
import org.apache.hadoop.metadata.repository.MetadataRepository;
import org.apache.hadoop.metadata.repository.graph.GraphHelper;
import org.apache.hadoop.metadata.repository.graph.TitanGraphService;
import org.apache.hadoop.metadata.storage.RepositoryException;
import org.codehaus.jettison.json.JSONObject;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import javax.inject.Inject;
import javax.script.Bindings;
import javax.script.ScriptEngine;
import javax.script.ScriptEngineManager;
import javax.script.ScriptException;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.Set;
public class GraphBackedDiscoveryService implements DiscoveryService {
private final MetadataRepository repository;
private static final Logger LOG = LoggerFactory.getLogger(GraphBackedDiscoveryService.class);
private final TitanGraph titanGraph;
@Inject
GraphBackedDiscoveryService(MetadataRepository repository) throws MetadataException {
this.repository = repository;
GraphBackedDiscoveryService(TitanGraphService graphService) throws MetadataException {
this.titanGraph = graphService.getTitanGraph();
}
/**
* Assumes the User is familiar with the persistence structure of the Repository.
* The given query is run uninterpreted against the underlying Graph Store.
* The results are returned as a List of Rows. each row is a Map of Key,Value pairs.
*
* @param gremlinQuery query in gremlin dsl format
* @return List of Maps
* @throws org.apache.hadoop.metadata.MetadataException
*/
@Override
public List<Map<String, String>> searchByGremlin(String gremlinQuery) throws MetadataException {
Preconditions.checkNotNull(gremlinQuery, "gremlin query name cannot be null");
// simple pass-through
return repository.searchByGremlin(gremlinQuery);
/**
 * Recursive graph walker shared by textSearch and relationshipWalk.
 * Visits vtx, records it (and every edge it traverses) as JSON into the
 * supplied accumulator maps, then recurses to the vertex on the far side of
 * each edge until the depth limit is reached.
 *
 * @param vtx           vertex to start (or continue) the walk from
 * @param max           maximum traversal depth
 * @param counter       current depth; incremented on entry, recursion stops once counter > max
 * @param e             accumulator of edge-id -> JSON edge description (mutated in place)
 * @param v             accumulator of vertex-id -> JSON vertex description (mutated in place)
 * @param edgesToFollow comma-separated list of edge labels to restrict the walk to,
 *                      or null to follow all edges
 */
private static void searchWalker (Vertex vtx, final int max, int counter,
HashMap<String,JSONObject> e,
HashMap<String,JSONObject> v, String edgesToFollow) {
counter++;
if (counter <= max) {
Map<String,String> jsonVertexMap = new HashMap<>();
Iterator<Edge> edgeIterator;
// If we're doing a lineage traversal, only follow the edges specified by the query.
// Otherwise return them all.
if (edgesToFollow != null) {
IteratorChain ic = new IteratorChain();
for (String iterateOn: edgesToFollow.split(",")){
ic.addIterator(vtx.query().labels(iterateOn).edges().iterator());
}
edgeIterator = ic;
} else {
edgeIterator = vtx.query().edges().iterator();
}
//Iterator<Edge> edgeIterator = vtx.query().labels("Fathered").edges().iterator();
// Record whether this vertex has any (followed) edges before the iterator is consumed.
jsonVertexMap.put("HasRelationships", ((Boolean)edgeIterator.hasNext()).toString());
for (String pKey: vtx.getPropertyKeys()) {
jsonVertexMap.put(pKey, vtx.getProperty(pKey).toString());
}
// Add to the Vertex map.
v.put(vtx.getId().toString(), new JSONObject(jsonVertexMap));
// Follow this Vertex's edges
while (edgeIterator != null && edgeIterator.hasNext()) {
Edge edge = edgeIterator.next();
String label = edge.getLabel();
Map<String,String> jsonEdgeMap = new HashMap<>();
String tail = edge.getVertex(Direction.OUT).getId().toString();
String head = edge.getVertex(Direction.IN).getId().toString();
jsonEdgeMap.put("tail", tail);
jsonEdgeMap.put("head", head);
jsonEdgeMap.put("label", label);
// Walk away from the current vertex: if we are the tail, continue to the
// head (IN) vertex; otherwise continue to the tail (OUT) vertex.
Direction d;
if (tail.equals(vtx.getId().toString())) {
d = Direction.IN;
} else {
d = Direction.OUT;
}
/* If we want an Edge's property keys, uncomment here. Or we can parameterize it.
 * Code is here now for reference/memory-jogging.
for (String pKey: edge.getPropertyKeys()) {
jsonEdgeMap.put(pKey, edge.getProperty(pKey).toString());
}
*/
e.put(edge.getId().toString(), new JSONObject(jsonEdgeMap));
searchWalker (edge.getVertex(d), max, counter, e, v, edgesToFollow);
}
}
}
/**
* Simple direct graph search and depth traversal.
* @param searchText is plain text
* @param prop is the Vertex property to search.
*/
@Override
public Map<String, HashMap<String,JSONObject>> textSearch(String searchText, int depth, String prop) {
Preconditions.checkNotNull(searchText, "Invalid argument: \"text\" cannot be null.");
Preconditions.checkNotNull(prop, "Invalid argument: \"prop\" cannot be null.");
return repository.textSearch(searchText, depth, prop);
/**
 * Simple direct graph search with depth traversal: finds vertices whose
 * property {@code prop} equals {@code searchText}, then walks (all edges)
 * from each hit up to {@code depth} hops.
 *
 * @param searchText plain text to match against the given vertex property
 * @param depth      maximum traversal depth from each matching vertex
 * @param prop       the Vertex property to search
 * @return a map with two entries, "vertices" and "edges", each mapping
 *         element ids to their JSON descriptions
 */
public Map<String,HashMap<String,JSONObject>> textSearch(String searchText,
int depth, String prop) {
HashMap<String,HashMap<String,JSONObject>> result = new HashMap<>();
// HashMaps, which contain sub JSON Objects to be relayed back to the parent.
HashMap<String,JSONObject> vertices = new HashMap<>();
HashMap<String,JSONObject> edges = new HashMap<>();
/* todo: Later - when we allow search limitation by "type".
ArrayList<String> typesList = new ArrayList<String>();
for (String s: types.split(",")) {
// Types validity check.
if (typesList.contains(s)) {
LOG.error("Specified type is not a member of the Type System= {}", s);
throw new WebApplicationException(
Servlets.getErrorResponse("Invalid type specified in query.", Response.Status.INTERNAL_SERVER_ERROR));
}
typesList.add(s);
}*/
int resultCount = 0;
//for (Result<Vertex> v: g.indexQuery(Constants.VERTEX_INDEX, "v." + prop + ":(" + searchText + ")").vertices()) {
// Exact-match property query; each hit seeds a walk following all edge labels.
for (Vertex v: ((GraphQuery) titanGraph.query().has(prop, searchText)).vertices()) {
//searchWalker(v.getElement(), depth, 0, edges, vertices, null);
searchWalker(v, depth, 0, edges, vertices, null);
resultCount++;
}
LOG.debug("Search for {} returned {} results.", searchText ,resultCount);
result.put("vertices", vertices);
result.put("edges",edges);
return result;
}
/**
* Simple graph walker for search interface, which allows following of specific edges only.
* @param edgesToFollow is a comma-separated-list of edges to follow.
*/
@Override
public Map<String, HashMap<String,JSONObject>> relationshipWalk(String guid, int depth, String edgesToFollow) {
Preconditions.checkNotNull(guid, "Invalid argument: \"guid\" cannot be null.");
Preconditions.checkNotNull(edgesToFollow, "Invalid argument: \"edgesToFollow\" cannot be null.");
return repository.relationshipWalk(guid, depth, edgesToFollow);
/**
 * Graph walk for the search interface that follows only the given edge
 * labels, starting from the vertex identified by {@code guid}, up to
 * {@code depth} hops.
 *
 * @param guid          GUID of the starting vertex
 * @param depth         maximum traversal depth
 * @param edgesToFollow comma-separated list of edge labels to follow
 * @return a map with two entries, "vertices" and "edges"; both maps are
 *         empty when no vertex matches the guid
 */
public Map<String,HashMap<String,JSONObject>> relationshipWalk(String guid, int depth,
String edgesToFollow) {
HashMap<String,HashMap<String,JSONObject>> result = new HashMap<>();
// HashMaps, which contain sub JSON Objects to be relayed back to the parent.
HashMap<String,JSONObject> vertices = new HashMap<>();
HashMap<String,JSONObject> edges = new HashMap<>();
// Get the Vertex with the specified GUID.
Vertex v = GraphHelper.findVertexByGUID(titanGraph, guid);
if (v != null) {
searchWalker(v, depth, 0, edges, vertices, edgesToFollow);
LOG.debug("Vertex {} found for guid {}", v, guid);
} else {
LOG.debug("Vertex not found for guid {}", guid);
}
result.put("vertices", vertices);
result.put("edges",edges);
return result;
}
/**
* Assumes the User is familiar with the persistence structure of the Repository.
* The given query is run uninterpreted against the underlying Graph Store.
* The results are returned as a List of Rows. each row is a Map of Key,Value pairs.
*
* @param gremlinQuery query in gremlin dsl format
* @return List of Maps
* @throws org.apache.hadoop.metadata.MetadataException
*/
@Override
public List<Map<String, String>> searchByGremlin(String gremlinQuery) throws MetadataException {
    // Evaluate the caller-supplied gremlin query with the graph bound as "g".
    ScriptEngineManager manager = new ScriptEngineManager();
    ScriptEngine engine = manager.getEngineByName("gremlin-groovy");
    Bindings bindings = engine.createBindings();
    bindings.put("g", titanGraph);
    try {
        Object o = engine.eval(gremlinQuery, bindings);
        if (!(o instanceof List)) {
            throw new RepositoryException(
                    String.format("Cannot process gremlin result %s", o.toString()));
        }

        List l = (List) o;
        List<Map<String, String>> result = new ArrayList<>();
        for (Object r : l) {
            // Each row becomes a flat String->String map.
            Map<String, String> oRow = new HashMap<>();
            if (r instanceof Map) {
                @SuppressWarnings("unchecked")
                Map<Object, Object> iRow = (Map) r;
                for (Map.Entry e : iRow.entrySet()) {
                    Object k = e.getKey();
                    Object v = e.getValue();
                    oRow.put(k.toString(), v.toString());
                }
            } else if (r instanceof TitanVertex) {
                // Flatten all non-null vertex properties into the row.
                Iterable<TitanProperty> ps = ((TitanVertex) r).getProperties();
                for (TitanProperty tP : ps) {
                    String pName = tP.getPropertyKey().getName();
                    Object pValue = ((TitanVertex) r).getProperty(pName);
                    if (pValue != null) {
                        oRow.put(pName, pValue.toString());
                    }
                }
            } else if (r instanceof String) {
                oRow.put("", r.toString());
            } else {
                // BUG FIX: report the offending row element 'r', not the whole
                // result list 'o', so the error pinpoints the bad element.
                throw new RepositoryException(
                        String.format("Cannot process gremlin result %s", r.toString()));
            }

            result.add(oRow);
        }
        return result;
    } catch (ScriptException se) {
        throw new RepositoryException(se);
    }
}
/**
* Return a Set of indexed properties in the graph.
* No parameters.
*/
@Override
public Set<String> getGraphIndexedFields() {
// NOTE(review): the two consecutive return statements below look like
// unmerged diff residue (old delegation to 'repository' vs. new direct
// Titan call). The first return makes the second unreachable, so this
// will not compile as-is — the titanGraph line is the intended one.
return repository.getGraphIndexedFields();
return titanGraph.getIndexedKeys(Vertex.class);
}
}
......@@ -20,15 +20,10 @@ package org.apache.hadoop.metadata.repository;
import org.apache.hadoop.metadata.IReferenceableInstance;
import org.apache.hadoop.metadata.ITypedReferenceableInstance;
import org.apache.hadoop.metadata.MetadataException;
import org.apache.hadoop.metadata.service.Service;
import org.apache.hadoop.metadata.storage.RepositoryException;
import org.codehaus.jettison.json.JSONObject;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Set;
/**
* An interface for persisting metadata into a blueprints enabled graph db.
......@@ -41,34 +36,4 @@ public interface MetadataRepository extends Service {
ITypedReferenceableInstance getEntityDefinition(String guid) throws RepositoryException;
List<String> getEntityList(String entityType) throws RepositoryException;
/**
* Assumes the User is familiar with the persistence structure of the Repository.
* The given query is run uninterpreted against the underlying Graph Store.
* The results are returned as a List of Rows. each row is a Map of Key,Value pairs.
*
* @param gremlinQuery query in gremlin dsl format
* @return List of Maps
* @throws org.apache.hadoop.metadata.MetadataException
*/
List<Map<String,String>> searchByGremlin(String gremlinQuery) throws MetadataException;
/**
* Simple direct graph search and depth traversal.
* @param searchText is plain text
* @param prop is the Vertex property to search.
*/
Map<String, HashMap<String,JSONObject>> textSearch(String searchText, int depth, String prop);
/**
* Simple graph walker for search interface, which allows following of specific edges only.
* @param edgesToFollow is a comma-separated-list of edges to follow.
*/
Map<String, HashMap<String,JSONObject>> relationshipWalk(String guid, int depth, String edgesToFollow);
/**
* Return a Set of indexed properties in the graph.
* No parameters.
*/
Set<String> getGraphIndexedFields();
}
......@@ -19,15 +19,12 @@
package org.apache.hadoop.metadata.repository.graph;
import com.thinkaurelius.titan.core.TitanGraph;
import com.thinkaurelius.titan.core.TitanIndexQuery.Result;
import com.thinkaurelius.titan.core.TitanProperty;
import com.thinkaurelius.titan.core.TitanVertex;
import com.tinkerpop.blueprints.Direction;
import com.tinkerpop.blueprints.Edge;
import com.tinkerpop.blueprints.GraphQuery;
import com.tinkerpop.blueprints.Vertex;
import org.apache.commons.collections.iterators.IteratorChain;
import org.apache.hadoop.metadata.IReferenceableInstance;
import org.apache.hadoop.metadata.ITypedInstance;
import org.apache.hadoop.metadata.ITypedReferenceableInstance;
......@@ -46,16 +43,10 @@ import org.apache.hadoop.metadata.types.ObjectGraphWalker;
import org.apache.hadoop.metadata.types.StructType;
import org.apache.hadoop.metadata.types.TraitType;
import org.apache.hadoop.metadata.types.TypeSystem;
import org.codehaus.jettison.json.JSONObject;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import javax.inject.Inject;
import javax.script.Bindings;
import javax.script.ScriptEngine;
import javax.script.ScriptEngineManager;
import javax.script.ScriptException;
import java.io.IOException;
import java.math.BigDecimal;
import java.math.BigInteger;
......@@ -65,7 +56,6 @@ import java.util.HashMap;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.concurrent.atomic.AtomicInteger;
/**
......@@ -143,15 +133,6 @@ public class GraphBackedMetadataRepository implements MetadataRepository {
}
}
/**
* Return a Set of indexed properties in the graph.
* No parameters.
*/
@Override
public Set<String> getGraphIndexedFields() {
// Delegates to the indexable graph's key index for Vertex elements.
return graphService.getIndexableGraph().getIndexedKeys(Vertex.class);
}
@Override
public ITypedReferenceableInstance getEntityDefinition(String guid) throws RepositoryException {
LOG.info("Retrieving entity with guid={}", guid);
......@@ -209,213 +190,6 @@ public class GraphBackedMetadataRepository implements MetadataRepository {
*/
}
/**
 * Recursive graph walker used by textSearch/relationshipWalk: records the
 * visited vertex and each traversed edge as JSON into the accumulator maps,
 * recursing across edges until the depth limit {@code max} is reached.
 * When {@code edgesToFollow} is non-null, only those edge labels are walked.
 */
private static void searchWalker (Vertex vtx, final int max, int counter,
HashMap<String,JSONObject> e,
HashMap<String,JSONObject> v, String edgesToFollow) {
counter++;
if (counter <= max) {
Map<String,String> jsonVertexMap = new HashMap<>();
Iterator<Edge> edgeIterator;
// If we're doing a lineage traversal, only follow the edges specified by the query.
// Otherwise return them all.
if (edgesToFollow != null) {
IteratorChain ic = new IteratorChain();
for (String iterateOn: edgesToFollow.split(",")){
ic.addIterator(vtx.query().labels(iterateOn).edges().iterator());
}
edgeIterator = ic;
} else {
edgeIterator = vtx.query().edges().iterator();
}
//Iterator<Edge> edgeIterator = vtx.query().labels("Fathered").edges().iterator();
// Record edge presence before consuming the iterator.
jsonVertexMap.put("HasRelationships", ((Boolean)edgeIterator.hasNext()).toString());
for (String pKey: vtx.getPropertyKeys()) {
jsonVertexMap.put(pKey, vtx.getProperty(pKey).toString());
}
// Add to the Vertex map.
v.put(vtx.getId().toString(), new JSONObject(jsonVertexMap));
// Follow this Vertex's edges
while (edgeIterator != null && edgeIterator.hasNext()) {
Edge edge = edgeIterator.next();
String label = edge.getLabel();
Map<String,String> jsonEdgeMap = new HashMap<>();
String tail = edge.getVertex(Direction.OUT).getId().toString();
String head = edge.getVertex(Direction.IN).getId().toString();
jsonEdgeMap.put("tail", tail);
jsonEdgeMap.put("head", head);
jsonEdgeMap.put("label", label);
// Continue the walk away from the current vertex.
Direction d;
if (tail.equals(vtx.getId().toString())) {
d = Direction.IN;
} else {
d = Direction.OUT;
}
/* If we want an Edge's property keys, uncomment here. Or we can parameterize it.
 * Code is here now for reference/memory-jogging.
for (String pKey: edge.getPropertyKeys()) {
jsonEdgeMap.put(pKey, edge.getProperty(pKey).toString());
}
*/
e.put(edge.getId().toString(), new JSONObject(jsonEdgeMap));
searchWalker (edge.getVertex(d), max, counter, e, v, edgesToFollow);
}
}
}
/*
* Simple direct graph search and depth traversal.
* @param searchText is plain text
* @param prop is the Vertex property to search.
*/
// NOTE(review): the local 'g' below is never used — the loop re-fetches the
// graph via graphService.getBlueprintsGraph(); one of the two is redundant.
@Override
public Map<String,HashMap<String,JSONObject>> textSearch(String searchText,
int depth, String prop) {
TitanGraph g = (TitanGraph)graphService.getBlueprintsGraph();
HashMap<String,HashMap<String,JSONObject>> result = new HashMap<>();
// HashMaps, which contain sub JSON Objects to be relayed back to the parent.
HashMap<String,JSONObject> vertices = new HashMap<>();
HashMap<String,JSONObject> edges = new HashMap<>();
/* todo: Later - when we allow search limitation by "type".
ArrayList<String> typesList = new ArrayList<String>();
for (String s: types.split(",")) {
// Types validity check.
if (typesList.contains(s)) {
LOG.error("Specified type is not a member of the Type System= {}", s);
throw new WebApplicationException(
Servlets.getErrorResponse("Invalid type specified in query.", Response.Status.INTERNAL_SERVER_ERROR));
}
typesList.add(s);
}*/
int resultCount = 0;
//for (Result<Vertex> v: g.indexQuery(Constants.VERTEX_INDEX, "v." + prop + ":(" + searchText + ")").vertices()) {
// Exact-match property query; each hit seeds a depth-limited walk of all edges.
for (Vertex v: graphService.getBlueprintsGraph().query().has(prop,searchText).vertices()) {
//searchWalker(v.getElement(), depth, 0, edges, vertices, null);
searchWalker(v, depth, 0, edges, vertices, null);
resultCount++;
}
LOG.debug("Search for {} returned {} results.", searchText ,resultCount);
result.put("vertices", vertices);
result.put("edges",edges);
return result;
}
/*
* Simple graph walker for search interface, which allows following of specific edges only.
* @param edgesToFollow is a comma-separated-list of edges to follow.
*/
/**
 * Graph walk that follows only the given edge labels, starting from the
 * vertex identified by {@code guid}, up to {@code depth} hops. Returns a map
 * with "vertices" and "edges" entries (both empty when the guid is unknown).
 */
@Override
public Map<String,HashMap<String,JSONObject>> relationshipWalk(String guid, int depth,
String edgesToFollow) {
HashMap<String,HashMap<String,JSONObject>> result = new HashMap<>();
// HashMaps, which contain sub JSON Objects to be relayed back to the parent.
HashMap<String,JSONObject> vertices = new HashMap<>();
HashMap<String,JSONObject> edges = new HashMap<>();
// Get the Vertex with the specified GUID.
// NOTE(review): 'titanGraph' is not visibly a field of this repository class
// (sibling methods use graphService) — likely diff residue; verify it resolves.
Vertex v = GraphHelper.findVertexByGUID(titanGraph, guid);
if (v != null) {
searchWalker(v, depth, 0, edges, vertices, edgesToFollow);
LOG.debug("Vertex {} found for guid {}", v, guid);
} else {
LOG.debug("Vertex not found for guid {}", guid);
}
result.put("vertices", vertices);
result.put("edges",edges);
return result;
}
/**
* Assumes the User is familiar with the persistence structure of the Repository.
* The given query is run uninterpreted against the underlying Graph Store.
* The results are returned as a List of Rows. each row is a Map of Key,Value pairs.
*
* @param gremlinQuery query in gremlin dsl format
* @return List of Maps
* @throws org.apache.hadoop.metadata.MetadataException
*/
@Override
public List<Map<String, String>> searchByGremlin(String gremlinQuery) throws MetadataException {
    // Evaluate the caller-supplied gremlin query with the transactional graph bound as "g".
    ScriptEngineManager manager = new ScriptEngineManager();
    ScriptEngine engine = manager.getEngineByName("gremlin-groovy");
    Bindings bindings = engine.createBindings();
    bindings.put("g", graphService.getTransactionalGraph());
    try {
        Object o = engine.eval(gremlinQuery, bindings);
        if (!(o instanceof List)) {
            throw new RepositoryException(
                    String.format("Cannot process gremlin result %s", o.toString()));
        }

        List l = (List) o;
        List<Map<String, String>> result = new ArrayList<>();
        for (Object r : l) {
            // Each row becomes a flat String->String map.
            Map<String, String> oRow = new HashMap<>();
            if (r instanceof Map) {
                @SuppressWarnings("unchecked")
                Map<Object, Object> iRow = (Map) r;
                for (Map.Entry e : iRow.entrySet()) {
                    Object k = e.getKey();
                    Object v = e.getValue();
                    oRow.put(k.toString(), v.toString());
                }
            } else if (r instanceof TitanVertex) {
                // Flatten all non-null vertex properties into the row.
                Iterable<TitanProperty> ps = ((TitanVertex) r).getProperties();
                for (TitanProperty tP : ps) {
                    String pName = tP.getPropertyKey().getName();
                    Object pValue = ((TitanVertex) r).getProperty(pName);
                    if (pValue != null) {
                        oRow.put(pName, pValue.toString());
                    }
                }
            } else if (r instanceof String) {
                oRow.put("", r.toString());
            } else {
                // BUG FIX: report the offending row element 'r', not the whole
                // result list 'o', so the error pinpoints the bad element.
                throw new RepositoryException(
                        String.format("Cannot process gremlin result %s", r.toString()));
            }

            result.add(oRow);
        }
        return result;
    } catch (ScriptException se) {
        throw new RepositoryException(se);
    }
}
private final class EntityProcessor implements ObjectGraphWalker.NodeProcessor {
public final Map<Id, Id> idToNewIdMap;
......
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.metadata;
import com.google.common.collect.ImmutableList;
import org.apache.hadoop.metadata.types.AttributeDefinition;
import org.apache.hadoop.metadata.types.ClassType;
import org.apache.hadoop.metadata.types.DataTypes;
import org.apache.hadoop.metadata.types.HierarchicalTypeDefinition;
import org.apache.hadoop.metadata.types.IDataType;
import org.apache.hadoop.metadata.types.Multiplicity;
import org.apache.hadoop.metadata.types.StructTypeDefinition;
import org.apache.hadoop.metadata.types.TraitType;
import org.apache.hadoop.metadata.types.TypeSystem;
import org.testng.Assert;
/**
* Test utility class.
*/
/**
 * Test utility class: builds the Department/Person/Manager sample type
 * hierarchy and example instances shared by repository/discovery tests.
 */
public final class TestUtils {
// Utility class — not instantiable.
private TestUtils() {
}
/**
 * Class Hierarchy is:
 * Department(name : String, employees : Array[Person])
 * Person(name : String, department : Department, manager : Manager)
 * Manager(subordinates : Array[Person]) extends Person
 * <p/>
 * Persons can have SecurityClearance(level : Int) clearance.
 */
public static void defineDeptEmployeeTypes(TypeSystem ts) throws MetadataException {
HierarchicalTypeDefinition<ClassType> deptTypeDef =
createClassTypeDef("Department", ImmutableList.<String>of(),
createRequiredAttrDef("name", DataTypes.STRING_TYPE),
new AttributeDefinition("employees",
String.format("array<%s>", "Person"), Multiplicity.COLLECTION, true,
"department")
);
HierarchicalTypeDefinition<ClassType> personTypeDef = createClassTypeDef("Person",
ImmutableList.<String>of(),
createRequiredAttrDef("name", DataTypes.STRING_TYPE),
new AttributeDefinition("department",
"Department", Multiplicity.REQUIRED, false, "employees"),
new AttributeDefinition("manager",
"Manager", Multiplicity.OPTIONAL, false, "subordinates")
);
HierarchicalTypeDefinition<ClassType> managerTypeDef = createClassTypeDef("Manager",
ImmutableList.of("Person"),
new AttributeDefinition("subordinates",
String.format("array<%s>", "Person"), Multiplicity.COLLECTION, false,
"manager")
);
HierarchicalTypeDefinition<TraitType> securityClearanceTypeDef = createTraitTypeDef(
"SecurityClearance",
ImmutableList.<String>of(),
createRequiredAttrDef("level", DataTypes.INT_TYPE)
);
// Register the trait and class definitions with the type system in one call.
ts.defineTypes(ImmutableList.<StructTypeDefinition>of(),
ImmutableList.of(securityClearanceTypeDef),
ImmutableList.of(deptTypeDef, personTypeDef, managerTypeDef));
}
/**
 * Builds the example "hr" Department instance graph (John reporting to Jane,
 * Jane carrying a SecurityClearance trait) and sanity-checks that it converts
 * to a typed instance before returning the untyped referenceable.
 *
 * @param ts a TypeSystem in which defineDeptEmployeeTypes has been applied
 * @return the untyped "hr" Department referenceable
 */
public static Referenceable createDeptEg1(TypeSystem ts) throws MetadataException {
Referenceable hrDept = new Referenceable("Department");
Referenceable john = new Referenceable("Person");
Referenceable jane = new Referenceable("Manager", "SecurityClearance");
hrDept.set("name", "hr");
john.set("name", "John");
john.set("department", hrDept);
jane.set("name", "Jane");
jane.set("department", hrDept);
john.set("manager", jane);
hrDept.set("employees", ImmutableList.of(john, jane));
jane.set("subordinates", ImmutableList.of(john));
jane.getTrait("SecurityClearance").set("level", 1);
// Sanity-check that the instance graph satisfies the Department class type.
ClassType deptType = ts.getDataType(ClassType.class, "Department");
ITypedReferenceableInstance hrDept2 = deptType.convert(hrDept, Multiplicity.REQUIRED);
Assert.assertNotNull(hrDept2);
return hrDept;
}
/** Shorthand for a required, non-composite attribute definition. */
public static AttributeDefinition createRequiredAttrDef(String name,
IDataType dataType) {
return new AttributeDefinition(name, dataType.getName(), Multiplicity.REQUIRED, false,
null);
}
/** Shorthand for a trait type definition. */
@SuppressWarnings("unchecked")
public static HierarchicalTypeDefinition<TraitType> createTraitTypeDef(
String name, ImmutableList<String> superTypes, AttributeDefinition... attrDefs) {
return new HierarchicalTypeDefinition(TraitType.class, name, superTypes, attrDefs);
}
/** Shorthand for a class type definition. */
@SuppressWarnings("unchecked")
public static HierarchicalTypeDefinition<ClassType> createClassTypeDef(
String name, ImmutableList<String> superTypes, AttributeDefinition... attrDefs) {
return new HierarchicalTypeDefinition(ClassType.class, name, superTypes, attrDefs);
}
}
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.metadata.discovery;
import org.apache.hadoop.metadata.ITypedReferenceableInstance;
import org.apache.hadoop.metadata.Referenceable;
import org.apache.hadoop.metadata.RepositoryMetadataModule;
import org.apache.hadoop.metadata.TestUtils;
import org.apache.hadoop.metadata.repository.graph.GraphBackedMetadataRepository;
import org.apache.hadoop.metadata.types.ClassType;
import org.apache.hadoop.metadata.types.Multiplicity;
import org.apache.hadoop.metadata.types.TypeSystem;
import org.testng.annotations.AfterMethod;
import org.testng.annotations.BeforeMethod;
import org.testng.annotations.Guice;
import org.testng.annotations.Test;
import javax.inject.Inject;
@Guice(modules = RepositoryMetadataModule.class)
public class GraphBackedDiscoveryServiceTest {
private TypeSystem typeSystem;
// Repository used to create the entities that the discovery service queries.
@Inject
private GraphBackedMetadataRepository repositoryService;
@Inject
private GraphBackedDiscoveryService discoveryService;
@BeforeMethod
public void setUp() throws Exception {
// TypeSystem is a process-wide singleton shared across tests.
typeSystem = TypeSystem.getInstance();
}
@AfterMethod
public void tearDown() throws Exception {
}
// Smoke test: raw gremlin queries execute and print results
// (no assertions on the returned content).
@Test
public void testRawSearch1() throws Exception {
Referenceable hrDept = TestUtils.createDeptEg1(typeSystem);
ClassType deptType = typeSystem.getDataType(ClassType.class, "Department");
ITypedReferenceableInstance hrDept2 = deptType.convert(hrDept, Multiplicity.REQUIRED);
repositoryService.createEntity(hrDept2, "Department");
// Query for all Vertices in Graph
Object r = discoveryService.searchByGremlin("g.V.toList()");
System.out.println("search result = " + r);
// Query for all Vertices of a Type
r = discoveryService.searchByGremlin("g.V.filter{it.typeName == 'Department'}.toList()");
System.out.println("search result = " + r);
// Property Query: list all Person names
r = discoveryService.searchByGremlin("g.V.filter{it.typeName == 'Person'}.'Person.name'.toList()");
System.out.println("search result = " + r);
}
// TODO: placeholder — textSearch has no coverage yet.
@Test
public void testTextSearch() throws Exception {
}
// TODO: placeholder — relationshipWalk has no coverage yet.
@Test
public void testRelationshipWalk() throws Exception {
}
}
\ No newline at end of file
......@@ -18,23 +18,16 @@
package org.apache.hadoop.metadata.repository.graph;
import com.google.common.collect.ImmutableList;
import com.thinkaurelius.titan.core.TitanGraph;
import com.tinkerpop.blueprints.Direction;
import com.tinkerpop.blueprints.Edge;
import com.tinkerpop.blueprints.Vertex;
import org.apache.hadoop.metadata.ITypedReferenceableInstance;
import org.apache.hadoop.metadata.MetadataException;
import org.apache.hadoop.metadata.Referenceable;
import org.apache.hadoop.metadata.RepositoryMetadataModule;
import org.apache.hadoop.metadata.types.AttributeDefinition;
import org.apache.hadoop.metadata.TestUtils;
import org.apache.hadoop.metadata.types.ClassType;
import org.apache.hadoop.metadata.types.DataTypes;
import org.apache.hadoop.metadata.types.HierarchicalTypeDefinition;
import org.apache.hadoop.metadata.types.IDataType;
import org.apache.hadoop.metadata.types.Multiplicity;
import org.apache.hadoop.metadata.types.StructTypeDefinition;
import org.apache.hadoop.metadata.types.TraitType;
import org.apache.hadoop.metadata.types.TypeSystem;
import org.testng.Assert;
import org.testng.annotations.BeforeClass;
......@@ -74,12 +67,12 @@ public class GraphBackedMetadataRepositoryTest {
ts = TypeSystem.getInstance();
defineDeptEmployeeTypes(ts);
TestUtils.defineDeptEmployeeTypes(ts);
}
@Test
public void testSubmitEntity() throws Exception {
Referenceable hrDept = createDeptEg1(ts);
Referenceable hrDept = TestUtils.createDeptEg1(ts);
ClassType deptType = ts.getDataType(ClassType.class, "Department");
ITypedReferenceableInstance hrDept2 = deptType.convert(hrDept, Multiplicity.REQUIRED);
......@@ -118,113 +111,4 @@ public class GraphBackedMetadataRepositoryTest {
Assert.assertNotNull(entityList);
Assert.assertEquals(entityList.size(), 1); // one department
}
// Smoke test: raw gremlin queries execute against the repository and print
// results (no assertions on the returned content).
@Test
public void testRawSearch1() throws Exception {
Referenceable hrDept = createDeptEg1(ts);
ClassType deptType = ts.getDataType(ClassType.class, "Department");
ITypedReferenceableInstance hrDept2 = deptType.convert(hrDept, Multiplicity.REQUIRED);
guid = repositoryService.createEntity(hrDept2, ENTITY_TYPE);
// Query for all Vertices in Graph
Object r = repositoryService.searchByGremlin("g.V.toList()");
System.out.println("search result = " + r);
// Query for all Vertices of a Type
r = repositoryService.searchByGremlin("g.V.filter{it.typeName == 'Department'}.toList()");
System.out.println("search result = " + r);
// Property Query: list all Person names
r = repositoryService.searchByGremlin("g.V.filter{it.typeName == 'Person'}.'Person.name'.toList()");
System.out.println("search result = " + r);
}
/*
* Class Hierarchy is:
* Department(name : String, employees : Array[Person])
* Person(name : String, department : Department, manager : Manager)
* Manager(subordinates : Array[Person]) extends Person
*
* Persons can have SecurityClearance(level : Int) clearance.
*/
// Registers the Department/Person/Manager class types and the
// SecurityClearance trait described in the comment above.
protected void defineDeptEmployeeTypes(TypeSystem ts) throws MetadataException {
HierarchicalTypeDefinition<ClassType> deptTypeDef =
createClassTypeDef("Department", ImmutableList.<String>of(),
createRequiredAttrDef("name", DataTypes.STRING_TYPE),
new AttributeDefinition("employees",
String.format("array<%s>", "Person"), Multiplicity.COLLECTION, true,
"department")
);
HierarchicalTypeDefinition<ClassType> personTypeDef = createClassTypeDef("Person",
ImmutableList.<String>of(),
createRequiredAttrDef("name", DataTypes.STRING_TYPE),
new AttributeDefinition("department",
"Department", Multiplicity.REQUIRED, false, "employees"),
new AttributeDefinition("manager",
"Manager", Multiplicity.OPTIONAL, false, "subordinates")
);
HierarchicalTypeDefinition<ClassType> managerTypeDef = createClassTypeDef("Manager",
ImmutableList.of("Person"),
new AttributeDefinition("subordinates",
String.format("array<%s>", "Person"), Multiplicity.COLLECTION, false,
"manager")
);
HierarchicalTypeDefinition<TraitType> securityClearanceTypeDef = createTraitTypeDef(
"SecurityClearance",
ImmutableList.<String>of(),
createRequiredAttrDef("level", DataTypes.INT_TYPE)
);
// Register the trait and class definitions with the type system in one call.
ts.defineTypes(ImmutableList.<StructTypeDefinition>of(),
ImmutableList.of(securityClearanceTypeDef),
ImmutableList.of(deptTypeDef, personTypeDef, managerTypeDef));
}
/**
 * Builds the example "hr" Department instance graph (John reporting to Jane)
 * and sanity-checks that it converts to a typed Department instance before
 * returning the untyped referenceable.
 */
protected Referenceable createDeptEg1(TypeSystem ts) throws MetadataException {
Referenceable hrDept = new Referenceable("Department");
Referenceable john = new Referenceable("Person");
Referenceable jane = new Referenceable("Manager", "SecurityClearance");
hrDept.set("name", "hr");
john.set("name", "John");
john.set("department", hrDept);
jane.set("name", "Jane");
jane.set("department", hrDept);
john.set("manager", jane);
hrDept.set("employees", ImmutableList.of(john, jane));
jane.set("subordinates", ImmutableList.of(john));
jane.getTrait("SecurityClearance").set("level", 1);
// Sanity-check that the instance graph satisfies the Department class type.
ClassType deptType = ts.getDataType(ClassType.class, "Department");
ITypedReferenceableInstance hrDept2 = deptType.convert(hrDept, Multiplicity.REQUIRED);
Assert.assertNotNull(hrDept2);
return hrDept;
}
/** Shorthand for a required, non-composite attribute definition. */
public static AttributeDefinition createRequiredAttrDef(String name,
IDataType dataType) {
return new AttributeDefinition(name, dataType.getName(), Multiplicity.REQUIRED, false, null);
}
/** Shorthand for a trait type definition. */
@SuppressWarnings("unchecked")
protected HierarchicalTypeDefinition<TraitType> createTraitTypeDef(
String name, ImmutableList<String> superTypes, AttributeDefinition... attrDefs) {
return new HierarchicalTypeDefinition(TraitType.class, name, superTypes, attrDefs);
}
/** Shorthand for a class type definition. */
@SuppressWarnings("unchecked")
protected HierarchicalTypeDefinition<ClassType> createClassTypeDef(
String name, ImmutableList<String> superTypes, AttributeDefinition... attrDefs) {
return new HierarchicalTypeDefinition(ClassType.class, name, superTypes, attrDefs);
}
}
Markdown is supported
0% or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment