Commit 5ffad328 by Venkatesh Seetharam

Fix bug with mapping existing references, minor refactoring. Contributed by Venkatesh Seetharam

parent dde33712
@@ -74,6 +74,25 @@
         <groupId>log4j</groupId>
         <artifactId>log4j</artifactId>
     </dependency>
+    <dependency>
+        <groupId>com.sun.jersey</groupId>
+        <artifactId>jersey-core</artifactId>
+    </dependency>
+    <dependency>
+        <groupId>com.sun.jersey</groupId>
+        <artifactId>jersey-server</artifactId>
+    </dependency>
+    <dependency>
+        <groupId>com.sun.jersey</groupId>
+        <artifactId>jersey-client</artifactId>
+    </dependency>
+    <dependency>
+        <groupId>com.sun.jersey</groupId>
+        <artifactId>jersey-json</artifactId>
+    </dependency>
 </dependencies>
</project>
\ No newline at end of file
@@ -19,8 +19,19 @@
 package org.apache.hadoop.metadata.hivetypes;

 import org.apache.hadoop.hive.metastore.HiveMetaStoreClient;
-import org.apache.hadoop.hive.metastore.api.*;
-import org.apache.hadoop.metadata.*;
+import org.apache.hadoop.hive.metastore.api.Database;
+import org.apache.hadoop.hive.metastore.api.FieldSchema;
+import org.apache.hadoop.hive.metastore.api.MetaException;
+import org.apache.hadoop.hive.metastore.api.Order;
+import org.apache.hadoop.hive.metastore.api.Partition;
+import org.apache.hadoop.hive.metastore.api.SerDeInfo;
+import org.apache.hadoop.hive.metastore.api.StorageDescriptor;
+import org.apache.hadoop.hive.metastore.api.Table;
+import org.apache.hadoop.metadata.ITypedReferenceableInstance;
+import org.apache.hadoop.metadata.ITypedStruct;
+import org.apache.hadoop.metadata.MetadataException;
+import org.apache.hadoop.metadata.Referenceable;
+import org.apache.hadoop.metadata.Struct;
 import org.apache.hadoop.metadata.repository.MetadataRepository;
 import org.apache.hadoop.metadata.storage.IRepository;
 import org.apache.hadoop.metadata.storage.Id;
@@ -28,23 +39,18 @@ import org.apache.hadoop.metadata.storage.RepositoryException;
 import org.apache.hadoop.metadata.types.IDataType;
 import org.apache.hadoop.metadata.types.Multiplicity;
 import org.apache.hadoop.metadata.types.StructType;
-import org.apache.hadoop.metadata.types.TypeSystem;
-import org.apache.thrift.TException;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;

 import java.util.ArrayList;
 import java.util.List;
-;

 public class HiveImporter {

     private final HiveMetaStoreClient hiveMetastoreClient;

     private static final Logger LOG =
             LoggerFactory.getLogger(HiveImporter.class);

-    private TypeSystem typeSystem;
     private IRepository repository;
     private MetadataRepository graphRepository;
     private HiveTypeSystem hiveTypeSystem;
@@ -68,7 +74,6 @@ public class HiveImporter {
     public HiveImporter(IRepository repo, HiveTypeSystem hts, HiveMetaStoreClient hmc) throws RepositoryException {
         this(hts, hmc);

         if (repo == null) {
             LOG.error("repository is null");
             throw new RuntimeException("repository is null");
@@ -77,13 +82,11 @@ public class HiveImporter {
         repository = repo;
         repository.defineTypes(hts.getHierarchicalTypeDefinitions());
     }

     private HiveImporter(HiveTypeSystem hts, HiveMetaStoreClient hmc) {
         this.hiveMetastoreClient = hmc;
         this.hiveTypeSystem = hts;
-        typeSystem = TypeSystem.getInstance();

         dbInstances = new ArrayList<>();
         tableInstances = new ArrayList<>();
         partitionInstances = new ArrayList<>();
@@ -119,18 +122,21 @@ public class HiveImporter {
         }
     }

-    private ITypedReferenceableInstance createInstance(Referenceable ref)
+    private Referenceable createInstance(Referenceable ref)
             throws MetadataException {
         if (repository != null) {
-            return repository.create(ref);
+            return (Referenceable) repository.create(ref);
         } else {
             String typeName = ref.getTypeName();
             IDataType dataType = hiveTypeSystem.getDataType(typeName);
             LOG.debug("creating instance of type " + typeName + " dataType " + dataType);
             ITypedReferenceableInstance instance =
                     (ITypedReferenceableInstance) dataType.convert(ref, Multiplicity.OPTIONAL);
-            graphRepository.createEntity(instance, typeName);
-            return instance;
+            String guid = graphRepository.createEntity(instance, typeName);
+            System.out.println("creating instance of type " + typeName + " dataType " + dataType
+                    + ", guid: " + guid);
+            return new Referenceable(guid, ref.getTypeName(), ref.getValuesMap());
         }
     }
@@ -146,17 +152,15 @@
             dbRef.set("parameters", hiveDB.getParameters());
             dbRef.set("ownerName", hiveDB.getOwnerName());
             dbRef.set("ownerType", hiveDB.getOwnerType().getValue());
-            ITypedReferenceableInstance dbRefTyped = createInstance(dbRef);
+            Referenceable dbRefTyped = createInstance(dbRef);
             dbInstances.add(dbRefTyped.getId());
             importTables(db, dbRefTyped);
-        } catch (NoSuchObjectException nsoe) {
-            throw new MetadataException(nsoe);
-        } catch (TException te) {
-            throw new MetadataException(te);
+        } catch (Exception e) {
+            throw new MetadataException(e);
         }
     }

-    private void importTables(String db, ITypedReferenceableInstance dbRefTyped) throws MetadataException {
+    private void importTables(String db, Referenceable dbRefTyped) throws MetadataException {
         try {
             List<String> hiveTables = hiveMetastoreClient.getAllTables(db);
@@ -177,7 +181,7 @@
                 ITypedStruct sdStruct = fillStorageDescStruct(storageDesc);
                 tableRef.set("sd", sdStruct);
                 tableRef.set("columns", sdStruct.get("cols"));
-                List<ITypedReferenceableInstance> partKeys = new ArrayList<>();
+                List<Referenceable> partKeys = new ArrayList<>();
                 Referenceable colRef;
                 if (hiveTable.getPartitionKeysSize() > 0) {
                     for (FieldSchema fs : hiveTable.getPartitionKeys()) {
@@ -185,7 +189,7 @@
                         colRef.set("name", fs.getName());
                         colRef.set("type", fs.getType());
                         colRef.set("comment", fs.getComment());
-                        ITypedReferenceableInstance colRefTyped = createInstance(colRef);
+                        Referenceable colRefTyped = createInstance(colRef);
                         partKeys.add(colRefTyped);
                     }
                     tableRef.set("partitionKeys", partKeys);
@@ -200,7 +204,7 @@
                 tableRef.set("tableType", hiveTable.getTableType());
                 tableRef.set("temporary", hiveTable.isTemporary());

-                ITypedReferenceableInstance tableRefTyped = createInstance(tableRef);
+                Referenceable tableRefTyped = createInstance(tableRef);
                 tableInstances.add(tableRefTyped.getId());

@@ -218,25 +222,22 @@
                         partRef.set("sd", sdStruct);
                         partRef.set("columns", sdStruct.get("cols"));
                         partRef.set("parameters", hivePart.getParameters());
-                        ITypedReferenceableInstance partRefTyped = createInstance(partRef);
+                        Referenceable partRefTyped = createInstance(partRef);
                         partitionInstances.add(partRefTyped.getId());
                     }
                 }
             }
-        } catch (NoSuchObjectException nsoe) {
-            throw new MetadataException(nsoe);
-        } catch (TException te) {
+        } catch (Exception te) {
             throw new MetadataException(te);
         }
     }
-    private ITypedStruct fillStorageDescStruct(StorageDescriptor storageDesc) throws MetadataException {
+    private ITypedStruct fillStorageDescStruct(StorageDescriptor storageDesc) throws Exception {
         String storageDescName = HiveTypeSystem.DefinedTypes.HIVE_STORAGEDESC.name();

         SerDeInfo serdeInfo = storageDesc.getSerdeInfo();
-        SkewedInfo skewedInfo = storageDesc.getSkewedInfo();
+        // SkewedInfo skewedInfo = storageDesc.getSkewedInfo();

         Struct sdStruct = new Struct(storageDescName);
@@ -275,7 +276,7 @@
-        List<ITypedReferenceableInstance> fieldsList = new ArrayList<>();
+        List<Referenceable> fieldsList = new ArrayList<>();
         Referenceable colRef;
         for (FieldSchema fs : storageDesc.getCols()) {
             LOG.debug("Processing field " + fs);
@@ -283,7 +284,7 @@
             colRef.set("name", fs.getName());
             colRef.set("type", fs.getType());
             colRef.set("comment", fs.getComment());
-            ITypedReferenceableInstance colRefTyped = createInstance(colRef);
+            Referenceable colRefTyped = createInstance(colRef);
             fieldsList.add(colRefTyped);
             columnInstances.add(colRefTyped.getId());
         }
@@ -315,8 +316,6 @@
         sdStruct.set("parameters", storageDesc.getParameters());
         sdStruct.set("storedAsSubDirectories", storageDesc.isStoredAsSubDirectories());
         StructType storageDesctype = (StructType) hiveTypeSystem.getDataType(storageDescName);
-        ITypedStruct sdStructTyped =
-                storageDesctype.convert(sdStruct, Multiplicity.OPTIONAL);
-        return sdStructTyped;
+        return storageDesctype.convert(sdStruct, Multiplicity.OPTIONAL);
     }
 }
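The heart of the mapping fix is the createInstance() rewrite above: it now returns a plain Referenceable carrying the repository-assigned guid, so an instance that refers to an already-imported entity resolves to the existing vertex instead of producing a duplicate. A minimal sketch of the resulting call pattern inside the importer; the type and attribute names here are illustrative, not taken from this diff:

```java
// Hypothetical import sequence built on the createInstance() shown above.
Referenceable dbRef = new Referenceable("hive_db");        // illustrative type name
dbRef.set("name", "default");
Referenceable dbCreated = createInstance(dbRef);           // persisted; now carries its guid

Referenceable tableRef = new Referenceable("hive_table");  // illustrative type name
tableRef.set("name", "orders");
tableRef.set("database", dbCreated);                       // maps to the existing vertex via the guid
createInstance(tableRef);
```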
@@ -19,10 +19,19 @@
 package org.apache.hadoop.metadata.hivetypes;

 import com.google.common.collect.ImmutableList;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.metadata.MetadataException;
-import org.apache.hadoop.metadata.types.*;
+import org.apache.hadoop.metadata.types.AttributeDefinition;
+import org.apache.hadoop.metadata.types.ClassType;
+import org.apache.hadoop.metadata.types.DataTypes;
+import org.apache.hadoop.metadata.types.EnumTypeDefinition;
+import org.apache.hadoop.metadata.types.EnumValue;
+import org.apache.hadoop.metadata.types.HierarchicalType;
+import org.apache.hadoop.metadata.types.HierarchicalTypeDefinition;
+import org.apache.hadoop.metadata.types.IDataType;
+import org.apache.hadoop.metadata.types.Multiplicity;
+import org.apache.hadoop.metadata.types.StructTypeDefinition;
+import org.apache.hadoop.metadata.types.TraitType;
+import org.apache.hadoop.metadata.types.TypeSystem;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
@@ -40,8 +49,6 @@ public class HiveTypeSystem {
         public static final HiveTypeSystem instance = new HiveTypeSystem();
     }

-    private TypeSystem typeSystem;
     private boolean valid = false;

     public enum DefinedTypes {
@@ -85,7 +92,7 @@
     private List<IDataType> enumTypes;

-    private static Multiplicity ZeroOrMore = new Multiplicity(0, Integer.MAX_VALUE, true);
+    // private static Multiplicity ZeroOrMore = new Multiplicity(0, Integer.MAX_VALUE, true);

     private HiveTypeSystem() {
         classTypeDefinitions = new HashMap<>();
@@ -98,7 +105,7 @@
     private void initialize() throws MetadataException {
         LOG.info("Initializing the Hive Typesystem");

-        typeSystem = TypeSystem.getInstance();
+        TypeSystem typeSystem = TypeSystem.getInstance();

         mapStrToStrMap =
                 typeSystem.defineMapType(DataTypes.STRING_TYPE, DataTypes.STRING_TYPE);
@@ -131,10 +138,16 @@
         }

         typeMap.putAll(
-                typeSystem.defineTypes(getStructTypeDefinitions(), getTraitTypeDefinitions(), getClassTypeDefinitions()));
+                typeSystem.defineTypes(getStructTypeDefinitions(), getTraitTypeDefinitions(),
+                        getClassTypeDefinitions()));

         valid = true;
     }

+    Map<String, IDataType> getTypeMap() {
+        return typeMap;
+    }
+
     public synchronized static HiveTypeSystem getInstance() throws MetadataException {
         HiveTypeSystem hs = Holder.instance;
...
@@ -108,7 +108,7 @@
     <property>
         <name>hive.metastore.uris</name>
-        <value>thrift://localhost:9083</value>
+        <value>thrift://10.10.11.207:9083</value>
     </property>

     <property>
...
@@ -19,19 +19,20 @@
 package org.apache.hadoop.metadata.hivetypes;

+import com.tinkerpop.blueprints.Edge;
+import com.tinkerpop.blueprints.Graph;
+import com.tinkerpop.blueprints.Vertex;
 import org.apache.commons.configuration.ConfigurationException;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.hive.conf.HiveConf;
 import org.apache.hadoop.hive.metastore.HiveMetaStoreClient;
-import org.apache.hadoop.hive.metastore.api.MetaException;
 import org.apache.hadoop.metadata.ITypedReferenceableInstance;
 import org.apache.hadoop.metadata.MetadataException;
 import org.apache.hadoop.metadata.repository.graph.GraphBackedMetadataRepository;
+import org.apache.hadoop.metadata.repository.graph.GraphHelper;
 import org.apache.hadoop.metadata.repository.graph.GraphService;
 import org.apache.hadoop.metadata.repository.graph.TitanGraphProvider;
 import org.apache.hadoop.metadata.repository.graph.TitanGraphService;
-import org.apache.hadoop.metadata.types.TypeSystem;
+import org.junit.After;
 import org.junit.Before;
 import org.junit.Ignore;
 import org.junit.Test;
@@ -40,14 +41,14 @@ import org.slf4j.LoggerFactory;
 import java.io.BufferedWriter;
 import java.io.FileWriter;
-import java.io.IOException;
 import java.util.List;

 @Ignore
 public class HiveGraphRepositoryTest {

     protected HiveTypeSystem hts;
-    GraphBackedMetadataRepository repository;
+    private GraphBackedMetadataRepository repository;
+    private GraphService gs;

     private static final Logger LOG =
             LoggerFactory.getLogger(HiveGraphRepositoryTest.class);
@@ -55,14 +56,29 @@ public class HiveGraphRepositoryTest {
     @Before
     public void setup() throws ConfigurationException, MetadataException {
-        TypeSystem ts = TypeSystem.getInstance();
-        GraphService gs = new TitanGraphService(new TitanGraphProvider());
+        gs = new TitanGraphService(new TitanGraphProvider());
         repository = new GraphBackedMetadataRepository(gs);
         hts = HiveTypeSystem.getInstance();
     }

+    @After
+    public void tearDown() {
+        Graph graph = gs.getBlueprintsGraph();
+        System.out.println("*******************Graph Dump****************************");
+        System.out.println("Vertices of " + graph);
+        for (Vertex vertex : graph.getVertices()) {
+            System.out.println(GraphHelper.vertexString(vertex));
+        }
+
+        System.out.println("Edges of " + graph);
+        for (Edge edge : graph.getEdges()) {
+            System.out.println(GraphHelper.edgeString(edge));
+        }
+        System.out.println("*******************Graph Dump****************************");
+    }
+
     @Test
-    public void testHiveImport() throws MetaException, MetadataException, IOException {
+    public void testHiveImport() throws Exception {
         HiveImporter hImporter = new HiveImporter(repository, hts, new HiveMetaStoreClient(new HiveConf()));
         hImporter.importHiveMetadata();
...
@@ -23,8 +23,11 @@ final class Constants {
     private Constants() {
     }

-    static final String GUID_PROPERTY_KEY = "GUID";
-    static final String ENTITY_TYPE_PROPERTY_KEY = "typeName";
+    static final String GUID_PROPERTY_KEY = "guid";
+    static final String ENTITY_TYPE_PROPERTY_KEY = "type";
     static final String VERSION_PROPERTY_KEY = "version";
     static final String TIMESTAMP_PROPERTY_KEY = "timestamp";
+
+    static final String BACKING_INDEX = "search";
+    static final String INDEX_NAME = "metadata";
 }
@@ -18,6 +18,7 @@
 package org.apache.hadoop.metadata.repository.graph;

+import com.tinkerpop.blueprints.Compare;
 import com.tinkerpop.blueprints.Direction;
 import com.tinkerpop.blueprints.Edge;
 import com.tinkerpop.blueprints.Graph;
@@ -35,11 +36,11 @@ import java.util.UUID;
 /**
  * Utility class for graph operations.
  */
-public final class GraphUtils {
+public final class GraphHelper {

-    private static final Logger LOG = LoggerFactory.getLogger(GraphUtils.class);
+    private static final Logger LOG = LoggerFactory.getLogger(GraphHelper.class);

-    private GraphUtils() {
+    private GraphHelper() {
     }

     public static Vertex createVertex(Graph graph,
@@ -50,9 +51,15 @@ public final class GraphUtils {
     public static Vertex createVertex(Graph graph,
                                       ITypedInstance typedInstance,
                                       Id typedInstanceId) {
+        return createVertex(graph, typedInstance.getTypeName(), typedInstanceId);
+    }
+
+    public static Vertex createVertex(Graph graph,
+                                      String typeName,
+                                      Id typedInstanceId) {
         final Vertex instanceVertex = graph.addVertex(null);
         // type
-        instanceVertex.setProperty(Constants.ENTITY_TYPE_PROPERTY_KEY, typedInstance.getTypeName());
+        instanceVertex.setProperty(Constants.ENTITY_TYPE_PROPERTY_KEY, typeName);

         // id
         final String guid = UUID.randomUUID().toString();
@@ -64,13 +71,20 @@ public final class GraphUtils {
         return instanceVertex;
     }

-    public static Vertex findVertex(Graph blueprintsGraph,
-                                    String key, String value) {
-        LOG.debug("Finding vertex for key={}, value={}", key, value);
+    public static Edge addEdge(Vertex fromVertex, Vertex toVertex, String edgeLabel) {
+        LOG.debug("Adding edge for {} -> struct label {} -> v{}",
+                fromVertex, edgeLabel, toVertex);
+        return fromVertex.addEdge(edgeLabel, toVertex);
+    }
+
+    public static Vertex findVertexByGUID(Graph blueprintsGraph,
+                                          String value) {
+        LOG.debug("Finding vertex for key={}, value={}", Constants.GUID_PROPERTY_KEY, value);

-        GraphQuery query = blueprintsGraph.query().has(key, value);
+        GraphQuery query = blueprintsGraph.query()
+                .has(Constants.GUID_PROPERTY_KEY, Compare.EQUAL, value);
         Iterator<Vertex> results = query.vertices().iterator();
-        // returning one since name/type is unique
+        // returning one since guid should be unique
         return results.hasNext() ? results.next() : null;
     }
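Taken together, the two new helpers are what the reference-mapping fix leans on at the graph layer: look up the vertex for an entity that already exists, then link to it instead of re-creating it. A short usage sketch, assuming a Blueprints Graph handle; the variable and edge-label names are illustrative:

```java
// Resolve an existing entity vertex by guid and attach an edge to it.
Vertex existing = GraphHelper.findVertexByGUID(graph, guid);
if (existing != null) {
    GraphHelper.addEdge(instanceVertex, existing, "hive_table.database"); // illustrative label
}
```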
...
@@ -28,6 +28,7 @@ import java.util.Set;
 import javax.inject.Inject;
 import javax.inject.Singleton;

+import com.thinkaurelius.titan.core.PropertyKey;
 import org.apache.commons.configuration.Configuration;
 import org.apache.commons.configuration.ConfigurationException;
 import org.apache.commons.configuration.PropertiesConfiguration;
@@ -62,7 +63,7 @@ public class TitanGraphService implements GraphService {
     /**
      * Initialize this service through injection with a custom Provider.
      *
-     * @param graph
+     * @param graph graph implementation
      * @throws ConfigurationException
      */
     @Inject
@@ -123,10 +124,23 @@ public class TitanGraphService implements GraphService {
         LOG.info("Indexes do not exist, Creating indexes for titanGraph using indexer.properties.");

         TitanManagement mgmt = titanGraph.getManagementSystem();
-        mgmt.buildIndex("mainIndex", Vertex.class).buildMixedIndex("search");
-        TitanGraphIndex graphIndex = mgmt.getGraphIndex("mainIndex");
-        mgmt.addIndexKey(graphIndex, mgmt.makePropertyKey("guid").dataType(String.class).make());
+        TitanGraphIndex graphIndex = mgmt.buildIndex(Constants.INDEX_NAME, Vertex.class)
+                .buildMixedIndex(Constants.BACKING_INDEX);
+
+        PropertyKey guidKey = mgmt
+                .makePropertyKey(Constants.GUID_PROPERTY_KEY)
+                .dataType(String.class).make();
+        mgmt.buildIndex("byGUID", Vertex.class)
+                .addKey(guidKey)
+                .unique()
+                .buildCompositeIndex();
+
+        PropertyKey typeKey = mgmt
+                .makePropertyKey(Constants.ENTITY_TYPE_PROPERTY_KEY)
+                .dataType(String.class).make();
+        mgmt.buildIndex("byType", Vertex.class)
+                .addKey(typeKey)
+                .buildCompositeIndex();

         Configuration indexConfig = getConfiguration("indexer.properties", INDEXER_PREFIX);

         // Properties are formatted: prop_name:type;prop_name:type
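The index setup now separates concerns: one mixed index (Constants.INDEX_NAME, backed by Constants.BACKING_INDEX) for search-backend queries, plus two composite indexes for exact guid and type lookups, with the guid index declared unique, which is what findVertexByGUID() depends on. A condensed sketch of the pattern, assuming Titan 0.5's management API; the final commit() is an assumption (not shown in this hunk) but is required for schema changes to take effect:

```java
TitanManagement mgmt = titanGraph.getManagementSystem();
PropertyKey guidKey = mgmt.makePropertyKey("guid").dataType(String.class).make();
mgmt.buildIndex("byGUID", Vertex.class)
        .addKey(guidKey)
        .unique()               // one vertex per guid, exact-match lookups
        .buildCompositeIndex();
mgmt.commit();                  // assumed: schema mutations apply on commit
```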
@@ -134,7 +148,7 @@ public class TitanGraphService implements GraphService {
         if (!indexConfig.isEmpty()) {

             // Get a list of property names to iterate through...
-            List<String> propList = new ArrayList<String>();
+            List<String> propList = new ArrayList<>();

             Iterator<String> it = indexConfig.getKeys("property.name");
@@ -147,9 +161,9 @@ public class TitanGraphService implements GraphService {
                 // Pull the property name and index, so we can register the name
                 // and look up the type.
-                String prop = it.next().toString();
+                String prop = it.next();
                 String index = prop.substring(prop.lastIndexOf(".") + 1);
-                String type = null;
+                String type;
                 prop = indexConfig.getProperty(prop).toString();

                 // Look up the type for the specified property name.
...
package org.apache.hadoop.metadata.repository.graph;

import com.google.common.collect.ImmutableList;
import com.thinkaurelius.titan.core.TitanGraph;
import com.tinkerpop.blueprints.Edge;
import com.tinkerpop.blueprints.Vertex;
import org.apache.hadoop.metadata.ITypedReferenceableInstance;
import org.apache.hadoop.metadata.Referenceable;
import org.apache.hadoop.metadata.RepositoryModuleBaseTest;
import org.apache.hadoop.metadata.Struct;
import org.apache.hadoop.metadata.types.AttributeDefinition;
import org.apache.hadoop.metadata.types.ClassType;
import org.apache.hadoop.metadata.types.DataTypes;
import org.apache.hadoop.metadata.types.EnumTypeDefinition;
import org.apache.hadoop.metadata.types.EnumValue;
import org.apache.hadoop.metadata.types.HierarchicalTypeDefinition;
import org.apache.hadoop.metadata.types.IDataType;
import org.apache.hadoop.metadata.types.Multiplicity;
import org.apache.hadoop.metadata.types.StructTypeDefinition;
import org.apache.hadoop.metadata.types.TraitType;
import org.apache.hadoop.metadata.types.TypeSystem;
import org.testng.annotations.BeforeClass;
import org.testng.annotations.Test;

public class GraphRepoMapperTest extends RepositoryModuleBaseTest {

    private static final String DATABASE_TYPE = "hive_database";
    private static final String DATABASE_NAME = "foo";
    private static final String TABLE_TYPE = "hive_table";
    private static final String TABLE_NAME = "bar";

    private TitanGraphService titanGraphService;
    private GraphBackedMetadataRepository repositoryService;
    private TypeSystem typeSystem;

    @BeforeClass
    public void setUp() throws Exception {
        titanGraphService = super.injector.getInstance(TitanGraphService.class);
        titanGraphService.start();

        repositoryService = super.injector.getInstance(GraphBackedMetadataRepository.class);
        repositoryService.start();

        typeSystem = TypeSystem.getInstance();

        createHiveTypes();
    }

    @Test
    public void testSubmitEntity() throws Exception {
        Referenceable databaseInstance = new Referenceable(DATABASE_TYPE);
        databaseInstance.set("name", DATABASE_NAME);
        databaseInstance.set("description", "foo database");
        System.out.println("databaseInstance = " + databaseInstance);

        ClassType dbType = typeSystem.getDataType(ClassType.class, DATABASE_TYPE);
        ITypedReferenceableInstance db = dbType.convert(databaseInstance, Multiplicity.REQUIRED);
        System.out.println("db = " + db);

        String dbGUID = repositoryService.createEntity(db, DATABASE_TYPE);
        System.out.println("added db = " + dbGUID);

        Referenceable dbInstance = new Referenceable(
                dbGUID, DATABASE_TYPE, databaseInstance.getValuesMap());

        ITypedReferenceableInstance table = createHiveTableInstance(dbInstance);
        String tableGUID = repositoryService.createEntity(table, TABLE_TYPE);
        System.out.println("added table = " + tableGUID);

        dumpGraph();
    }

    private void dumpGraph() {
        TitanGraph graph = titanGraphService.getTitanGraph();
        System.out.println("*******************Graph Dump****************************");
        System.out.println("Vertices of " + graph);
        for (Vertex vertex : graph.getVertices()) {
            System.out.println(GraphHelper.vertexString(vertex));
        }

        System.out.println("Edges of " + graph);
        for (Edge edge : graph.getEdges()) {
            System.out.println(GraphHelper.edgeString(edge));
        }
        System.out.println("*******************Graph Dump****************************");
    }

    private void createHiveTypes() throws Exception {
        HierarchicalTypeDefinition<ClassType> databaseTypeDefinition =
                createClassTypeDef(DATABASE_TYPE,
                        ImmutableList.<String>of(),
                        createUniqueRequiredAttrDef("name", DataTypes.STRING_TYPE),
                        createRequiredAttrDef("description", DataTypes.STRING_TYPE));

        StructTypeDefinition structTypeDefinition =
                new StructTypeDefinition("serdeType",
                        new AttributeDefinition[] {
                                createRequiredAttrDef("name", DataTypes.STRING_TYPE),
                                createRequiredAttrDef("serde", DataTypes.STRING_TYPE)
                        });

        EnumValue values[] = {
                new EnumValue("MANAGED", 1),
                new EnumValue("EXTERNAL", 2),
        };

        EnumTypeDefinition enumTypeDefinition = new EnumTypeDefinition("tableType", values);
        typeSystem.defineEnumType(enumTypeDefinition);

        HierarchicalTypeDefinition<ClassType> tableTypeDefinition =
                createClassTypeDef(TABLE_TYPE,
                        ImmutableList.<String>of(),
                        createUniqueRequiredAttrDef("name", DataTypes.STRING_TYPE),
                        createRequiredAttrDef("description", DataTypes.STRING_TYPE),
                        createRequiredAttrDef("type", DataTypes.STRING_TYPE),
                        new AttributeDefinition("tableType", "tableType",
                                Multiplicity.REQUIRED, false, null),
                        /*
                        new AttributeDefinition("columns",
                                String.format("array<%s>", DataTypes.STRING_TYPE.getName()),
                                Multiplicity.COLLECTION, false, null),
                        */
                        new AttributeDefinition("serde1",
                                "serdeType", Multiplicity.REQUIRED, false, null),
                        new AttributeDefinition("serde2",
                                "serdeType", Multiplicity.REQUIRED, false, null),
                        new AttributeDefinition("database",
                                DATABASE_TYPE, Multiplicity.REQUIRED, true, null));

        HierarchicalTypeDefinition<TraitType> classificationTypeDefinition =
                createTraitTypeDef("classification",
                        ImmutableList.<String>of(),
                        createRequiredAttrDef("tag", DataTypes.STRING_TYPE));

        typeSystem.defineTypes(
                ImmutableList.of(structTypeDefinition),
                ImmutableList.of(classificationTypeDefinition),
                ImmutableList.of(databaseTypeDefinition, tableTypeDefinition));
    }

    private ITypedReferenceableInstance createHiveTableInstance(
            Referenceable databaseInstance) throws Exception {
        /*
        Referenceable databaseInstance = new Referenceable(DATABASE_TYPE);
        databaseInstance.set("name", DATABASE_NAME);
        databaseInstance.set("description", "foo database");
        */
        Referenceable tableInstance = new Referenceable(TABLE_TYPE, "classification");
        tableInstance.set("name", TABLE_NAME);
        tableInstance.set("description", "bar table");
        tableInstance.set("type", "managed");
        tableInstance.set("tableType", 1); // enum
        tableInstance.set("database", databaseInstance);

        Struct traitInstance = (Struct) tableInstance.getTrait("classification");
        traitInstance.set("tag", "foundation_etl");

        Struct serde1Instance = new Struct("serdeType");
        serde1Instance.set("name", "serde1");
        serde1Instance.set("serde", "serde1");
        tableInstance.set("serde1", serde1Instance);

        Struct serde2Instance = new Struct("serdeType");
        serde2Instance.set("name", "serde2");
        serde2Instance.set("serde", "serde2");
        tableInstance.set("serde2", serde2Instance);

        ClassType tableType = typeSystem.getDataType(ClassType.class, TABLE_TYPE);
        return tableType.convert(tableInstance, Multiplicity.REQUIRED);
    }

    protected AttributeDefinition createUniqueRequiredAttrDef(String name,
                                                              IDataType dataType) {
        return new AttributeDefinition(name, dataType.getName(),
                Multiplicity.REQUIRED, false, true, true, null);
    }

    protected AttributeDefinition createRequiredAttrDef(String name,
                                                        IDataType dataType) {
        return new AttributeDefinition(name, dataType.getName(),
                Multiplicity.REQUIRED, false, null);
    }

    @SuppressWarnings("unchecked")
    protected HierarchicalTypeDefinition<TraitType> createTraitTypeDef(
            String name, ImmutableList<String> superTypes, AttributeDefinition... attrDefs) {
        return new HierarchicalTypeDefinition(TraitType.class, name, superTypes, attrDefs);
    }

    @SuppressWarnings("unchecked")
    protected HierarchicalTypeDefinition<ClassType> createClassTypeDef(
            String name, ImmutableList<String> superTypes, AttributeDefinition... attrDefs) {
        return new HierarchicalTypeDefinition(ClassType.class, name, superTypes, attrDefs);
    }
}
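A note on the helpers at the bottom of this new test: the 7-argument AttributeDefinition constructor is never spelled out in this diff, so the following reading of its flags is an assumption, inferred from the 5-argument form (name, dataTypeName, multiplicity, isComposite, reverseAttributeName) used beside it:

```java
// Assumed flag meanings for createUniqueRequiredAttrDef(); a sketch, not a confirmed API doc.
new AttributeDefinition("name", DataTypes.STRING_TYPE.getName(),
        Multiplicity.REQUIRED,
        false,  // isComposite (assumed)
        true,   // isUnique (assumed) - pairs naturally with the unique guid index above
        true,   // isIndexable (assumed)
        null);  // reverseAttributeName
```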
-package org.apache.hadoop.metadata.services;
-
-import java.util.List;
-
-import javax.inject.Inject;
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.metadata.services;
+
+import com.google.common.collect.ImmutableList;
+import com.thinkaurelius.titan.core.TitanGraph;
+import com.tinkerpop.blueprints.Direction;
+import com.tinkerpop.blueprints.Edge;
+import com.tinkerpop.blueprints.Vertex;
 import org.apache.hadoop.metadata.ITypedReferenceableInstance;
 import org.apache.hadoop.metadata.MetadataException;
 import org.apache.hadoop.metadata.Referenceable;
 import org.apache.hadoop.metadata.RepositoryMetadataModule;
 import org.apache.hadoop.metadata.repository.graph.GraphBackedMetadataRepository;
-import org.apache.hadoop.metadata.repository.graph.GraphUtils;
+import org.apache.hadoop.metadata.repository.graph.GraphHelper;
 import org.apache.hadoop.metadata.repository.graph.TitanGraphService;
-import org.apache.hadoop.metadata.storage.IRepository;
-import org.apache.hadoop.metadata.storage.memory.MemRepository;
 import org.apache.hadoop.metadata.types.AttributeDefinition;
 import org.apache.hadoop.metadata.types.ClassType;
 import org.apache.hadoop.metadata.types.DataTypes;
-import org.apache.hadoop.metadata.types.HierarchicalType;
 import org.apache.hadoop.metadata.types.HierarchicalTypeDefinition;
 import org.apache.hadoop.metadata.types.IDataType;
 import org.apache.hadoop.metadata.types.Multiplicity;
@@ -24,16 +40,11 @@ import org.apache.hadoop.metadata.types.StructTypeDefinition;
 import org.apache.hadoop.metadata.types.TraitType;
 import org.apache.hadoop.metadata.types.TypeSystem;
 import org.testng.Assert;
-import org.testng.annotations.AfterClass;
 import org.testng.annotations.BeforeClass;
 import org.testng.annotations.Guice;
 import org.testng.annotations.Test;

-import com.google.common.collect.ImmutableList;
-import com.thinkaurelius.titan.core.TitanGraph;
-import com.tinkerpop.blueprints.Direction;
-import com.tinkerpop.blueprints.Edge;
-import com.tinkerpop.blueprints.Vertex;
+import java.util.List;

 /**
  * GraphBackedMetadataRepository test
@@ -41,18 +52,17 @@ import com.tinkerpop.blueprints.Vertex;
  * Guice loads the dependencies and injects the necessary objects
  *
  */
-@Test (enabled = false)
+@Test
 @Guice(modules = RepositoryMetadataModule.class)
 public class GraphBackedMetadataRepositoryTest {

-    private static final String ENTITY_TYPE = "hive-table";
+    private static final String ENTITY_TYPE = "Department";

     @Inject
     TitanGraphService titanGraphService;

     @Inject
     GraphBackedMetadataRepository repositoryService;

-    private IRepository repo;
     private TypeSystem ts;
     private String guid;
@@ -69,11 +79,7 @@ public class GraphBackedMetadataRepositoryTest {
         defineDeptEmployeeTypes(ts);
     }

-    @AfterClass
-    public void tearDown() throws Exception {
-    }
-
-    @Test (enabled = false)
+    @Test
     public void testSubmitEntity() throws Exception {
         Referenceable hrDept = createDeptEg1(ts);
         ClassType deptType = ts.getDataType(ClassType.class, "Department");
@@ -88,34 +94,33 @@ public class GraphBackedMetadataRepositoryTest {
     private void dumpGraph() {
         TitanGraph graph = titanGraphService.getTitanGraph();
         for (Vertex v : graph.getVertices()) {
-            // System.out.println("****v = " + GraphUtils.vertexString(v));
-            System.out.println("v = " + v);
+            System.out.println("****v = " + GraphHelper.vertexString(v));
             for (Edge e : v.getEdges(Direction.OUT)) {
-                System.out.println("****e = " + GraphUtils.edgeString(e));
+                System.out.println("****e = " + GraphHelper.edgeString(e));
             }
         }
     }

-    @Test(dependsOnMethods = "testSubmitEntity", enabled = false)
+    @Test(dependsOnMethods = "testSubmitEntity")
     public void testGetEntityDefinition() throws Exception {
         ITypedReferenceableInstance entity = repositoryService.getEntityDefinition(guid);
         Assert.assertNotNull(entity);
     }

-    @Test (enabled = false)
+    @Test
     public void testGetEntityDefinitionNonExistent() throws Exception {
         ITypedReferenceableInstance entity = repositoryService.getEntityDefinition("blah");
         Assert.assertNull(entity);
     }

-    @Test (enabled = false)
+    @Test
     public void testGetEntityList() throws Exception {
         List<String> entityList = repositoryService.getEntityList(ENTITY_TYPE);
         Assert.assertNotNull(entityList);
-        Assert.assertEquals(entityList.size(), 0); // as this is not implemented yet
+        Assert.assertEquals(entityList.size(), 1); // one department
     }

-    @Test (enabled = false)
+    @Test
     public void testRawSearch1() throws Exception {
         Referenceable hrDept = createDeptEg1(ts);
         ClassType deptType = ts.getDataType(ClassType.class, "Department");
@@ -123,22 +128,19 @@ public class GraphBackedMetadataRepositoryTest {
         guid = repositoryService.createEntity(hrDept2, ENTITY_TYPE);

         // Query for all Vertices in Graph
         Object r = repositoryService.searchByGremlin("g.V.toList()");
-        //System.out.println(r);
+        System.out.println("search result = " + r);

         // Query for all Vertices of a Type
         r = repositoryService.searchByGremlin("g.V.filter{it.typeName == 'Department'}.toList()");
-        //System.out.println(r);
+        System.out.println("search result = " + r);

         // Property Query: list all Person names
         r = repositoryService.searchByGremlin("g.V.filter{it.typeName == 'Person'}.'Person.name'.toList()");
-        //System.out.println(r);
+        System.out.println("search result = " + r);
     }

     /*
      * Class Hierarchy is:
      * Department(name : String, employees : Array[Person])
@@ -167,7 +169,7 @@ public class GraphBackedMetadataRepositoryTest {
         );

         HierarchicalTypeDefinition<ClassType> managerTypeDef = createClassTypeDef("Manager",
-                ImmutableList.<String>of("Person"),
+                ImmutableList.of("Person"),
                 new AttributeDefinition("subordinates",
                         String.format("array<%s>", "Person"), Multiplicity.COLLECTION, false,
                         "manager")
@@ -180,18 +182,8 @@ public class GraphBackedMetadataRepositoryTest {
         );

         ts.defineTypes(ImmutableList.<StructTypeDefinition>of(),
-                ImmutableList.<HierarchicalTypeDefinition<TraitType>>of(securityClearanceTypeDef),
-                ImmutableList.<HierarchicalTypeDefinition<ClassType>>of(deptTypeDef, personTypeDef,
-                        managerTypeDef));
-
-        ImmutableList<HierarchicalType> types = ImmutableList.of(
-                ts.getDataType(HierarchicalType.class, "SecurityClearance"),
-                ts.getDataType(ClassType.class, "Department"),
-                ts.getDataType(ClassType.class, "Person"),
-                ts.getDataType(ClassType.class, "Manager")
-        );
-
-        repo.defineTypes(types);
+                ImmutableList.of(securityClearanceTypeDef),
+                ImmutableList.of(deptTypeDef, personTypeDef, managerTypeDef));
     }

     protected Referenceable createDeptEg1(TypeSystem ts) throws MetadataException {
@@ -207,14 +199,15 @@ public class GraphBackedMetadataRepositoryTest {
         john.set("manager", jane);

-        hrDept.set("employees", ImmutableList.<Referenceable>of(john, jane));
+        hrDept.set("employees", ImmutableList.of(john, jane));

-        jane.set("subordinates", ImmutableList.<Referenceable>of(john));
+        jane.set("subordinates", ImmutableList.of(john));
         jane.getTrait("SecurityClearance").set("level", 1);

         ClassType deptType = ts.getDataType(ClassType.class, "Department");
         ITypedReferenceableInstance hrDept2 = deptType.convert(hrDept, Multiplicity.REQUIRED);
+        Assert.assertNotNull(hrDept2);

         return hrDept;
     }
...
@@ -52,6 +52,13 @@ public class Referenceable extends Struct implements IReferenceableInstance {
         traits = ImmutableMap.of();
     }

+    public Referenceable(String guid, String typeName, Map<String, Object> values) {
+        super(typeName, values);
+        id = new Id(guid, 0, typeName);
+        traitNames = ImmutableList.of();
+        traits = ImmutableMap.of();
+    }
+
     @Override
     public ImmutableList<String> getTraits() {
         return traitNames;
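This new constructor is what lets HiveImporter.createInstance() hand back an id-bearing instance. A minimal usage sketch mirroring that call site; the variable names are illustrative:

```java
// Re-wrap the original attribute values with the server-assigned guid so that
// downstream references resolve to the already-created entity.
String guid = graphRepository.createEntity(instance, typeName);
Referenceable withId = new Referenceable(guid, typeName, ref.getValuesMap());
Id id = withId.getId(); // carries the guid; HiveImporter collects these per entity kind
```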
...
@@ -19,7 +19,6 @@
 package org.apache.hadoop.metadata.storage;

 import com.google.common.collect.ImmutableList;
-import org.apache.hadoop.metadata.IReferenceableInstance;
 import org.apache.hadoop.metadata.IStruct;
 import org.apache.hadoop.metadata.ITypedReferenceableInstance;
 import org.apache.hadoop.metadata.MetadataException;
@@ -28,6 +27,7 @@ import org.apache.hadoop.metadata.types.FieldMapping;
 import java.math.BigDecimal;
 import java.math.BigInteger;
 import java.util.Date;
+import java.util.UUID;

 public class Id implements ITypedReferenceableInstance {
@@ -58,6 +58,16 @@ public class Id implements ITypedReferenceableInstance {
         }
     }

+    public boolean isAssigned() {
+        try {
+            UUID.fromString(id);
+        } catch (IllegalArgumentException e) {
+            return false;
+        }
+        return true;
+    }
+
     public String toString() {
         return String.format("(type: %s, id: %s)", className, isUnassigned() ? "<unassigned>" : "" + id);
     }
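isAssigned() decides purely by parseability: repository-assigned ids are the UUID strings minted in GraphHelper.createVertex(), while transient ids are plain integer strings, so UUID.fromString() cleanly separates the two. A quick illustration using the Id(guid, version, typeName) constructor shape visible in the Referenceable hunk above; the "-1" value for an unassigned id is an assumption:

```java
Id unassigned = new Id("-1", 0, "hive_table");                       // assumed transient id form
Id assigned = new Id(UUID.randomUUID().toString(), 0, "hive_table");

System.out.println(unassigned.isAssigned()); // false: "-1" is not a parseable UUID
System.out.println(assigned.isAssigned());   // true
```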
...
@@ -19,7 +19,4 @@
 # GraphService implementation
 metadata.graph.impl.class=org.apache.hadoop.metadata.repository.graph.TitanGraphService

-# Graph Storage
-metadata.graph.storage.backend=inmemory
-
 metadata.enableTLS=false
@@ -2,6 +2,6 @@ storage.backend=inmemory

 # Graph Search Index
 index.search.backend=elasticsearch
-index.search.directory=target/data/es
+index.search.directory=webapp/target/data/es
 index.search.elasticsearch.client-only=false
 index.search.elasticsearch.local-mode=true
\ No newline at end of file
<?xml version="1.0" encoding="UTF-8" ?>
<!--
  ~ Licensed to the Apache Software Foundation (ASF) under one
  ~ or more contributor license agreements. See the NOTICE file
  ~ distributed with this work for additional information
  ~ regarding copyright ownership. The ASF licenses this file
  ~ to you under the Apache License, Version 2.0 (the
  ~ "License"); you may not use this file except in compliance
  ~ with the License. You may obtain a copy of the License at
  ~
  ~     http://www.apache.org/licenses/LICENSE-2.0
  ~
  ~ Unless required by applicable law or agreed to in writing, software
  ~ distributed under the License is distributed on an "AS IS" BASIS,
  ~ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
  ~ See the License for the specific language governing permissions and
  ~ limitations under the License.
  -->

<!DOCTYPE log4j:configuration SYSTEM "log4j.dtd">
<log4j:configuration xmlns:log4j="http://jakarta.apache.org/log4j/">
    <appender name="console" class="org.apache.log4j.ConsoleAppender">
        <param name="Target" value="System.out"/>
        <layout class="org.apache.log4j.PatternLayout">
            <param name="ConversionPattern" value="%d %-5p - [%t:%x] ~ %m (%c{1}:%L)%n"/>
        </layout>
    </appender>

    <appender name="FILE" class="org.apache.log4j.DailyRollingFileAppender">
        <param name="File" value="${user.dir}/target/logs/application.log"/>
        <param name="Append" value="true"/>
        <param name="Threshold" value="debug"/>
        <layout class="org.apache.log4j.PatternLayout">
            <param name="ConversionPattern" value="%d %-5p - [%t:%x] ~ %m (%c{1}:%L)%n"/>
        </layout>
    </appender>

    <appender name="AUDIT" class="org.apache.log4j.DailyRollingFileAppender">
        <param name="File" value="${user.dir}/target/logs/audit.log"/>
        <param name="Append" value="true"/>
        <param name="Threshold" value="debug"/>
        <layout class="org.apache.log4j.PatternLayout">
            <param name="ConversionPattern" value="%d %x %m%n"/>
        </layout>
    </appender>

    <logger name="org.apache.hadoop.metadata" additivity="false">
        <level value="debug"/>
        <appender-ref ref="console"/>
        <appender-ref ref="FILE"/>
    </logger>

    <logger name="AUDIT">
        <level value="info"/>
        <appender-ref ref="console"/>
        <appender-ref ref="AUDIT"/>
    </logger>

    <root>
        <priority value="info"/>
        <appender-ref ref="console"/>
        <appender-ref ref="FILE"/>
    </root>
</log4j:configuration>
@@ -34,6 +34,12 @@ public abstract class BaseResourceIT {
         service = client.resource(UriBuilder.fromUri(baseUrl).build());
     }

+    protected AttributeDefinition createUniqueRequiredAttrDef(String name,
+                                                              IDataType dataType) {
+        return new AttributeDefinition(name, dataType.getName(),
+                Multiplicity.REQUIRED, false, true, true, null);
+    }
+
     protected AttributeDefinition createRequiredAttrDef(String name,
                                                         IDataType dataType) {
         return new AttributeDefinition(name, dataType.getName(),
...
@@ -21,14 +21,19 @@ package org.apache.hadoop.metadata.web.resources;

 import com.google.common.collect.ImmutableList;
 import com.sun.jersey.api.client.ClientResponse;
 import com.sun.jersey.api.client.WebResource;
+import org.apache.hadoop.metadata.ITypedInstance;
 import org.apache.hadoop.metadata.ITypedReferenceableInstance;
+import org.apache.hadoop.metadata.ITypedStruct;
 import org.apache.hadoop.metadata.Referenceable;
 import org.apache.hadoop.metadata.Struct;
 import org.apache.hadoop.metadata.json.Serialization$;
 import org.apache.hadoop.metadata.json.TypesSerialization;
 import org.apache.hadoop.metadata.types.AttributeDefinition;
+import org.apache.hadoop.metadata.types.AttributeInfo;
 import org.apache.hadoop.metadata.types.ClassType;
 import org.apache.hadoop.metadata.types.DataTypes;
+import org.apache.hadoop.metadata.types.EnumTypeDefinition;
+import org.apache.hadoop.metadata.types.EnumValue;
 import org.apache.hadoop.metadata.types.HierarchicalTypeDefinition;
 import org.apache.hadoop.metadata.types.Multiplicity;
 import org.apache.hadoop.metadata.types.StructTypeDefinition;
@@ -126,20 +131,33 @@ public class EntityJerseyResourceIT extends BaseResourceIT {
         LOG.debug("tableInstanceAfterGet = " + definition);

         // todo - this fails with type error, strange
-        // ITypedReferenceableInstance tableInstanceAfterGet = Serialization$.MODULE$.fromJson(definition);
-        // Assert.assertTrue(areEqual(tableInstance, tableInstanceAfterGet));
+        ITypedReferenceableInstance tableInstanceAfterGet = Serialization$.MODULE$.fromJson(definition);
+        Assert.assertTrue(areEqual(tableInstance, tableInstanceAfterGet));
     }

+    private boolean areEqual(ITypedInstance actual,
+                             ITypedInstance expected) throws Exception {
     /*
-    private boolean areEqual(ITypedReferenceableInstance actual,
-                             ITypedReferenceableInstance expected) throws Exception {
+        Assert.assertEquals(Serialization$.MODULE$.toJson(actual),
+                            Serialization$.MODULE$.toJson(expected));
+    */
         for (AttributeInfo attributeInfo : actual.fieldMapping().fields.values()) {
-            Assert.assertEquals(actual.get(attributeInfo.name), expected.get(attributeInfo.name));
+            final DataTypes.TypeCategory typeCategory = attributeInfo.dataType().getTypeCategory();
+            if (typeCategory == DataTypes.TypeCategory.STRUCT
+                    || typeCategory == DataTypes.TypeCategory.TRAIT
+                    || typeCategory == DataTypes.TypeCategory.CLASS) {
+                areEqual((ITypedStruct) actual.get(attributeInfo.name),
+                        (ITypedStruct) expected.get(attributeInfo.name));
+            } else if (typeCategory == DataTypes.TypeCategory.PRIMITIVE
+                    || typeCategory == DataTypes.TypeCategory.ENUM) {
+                Assert.assertEquals(actual.get(attributeInfo.name),
+                        expected.get(attributeInfo.name));
+            }
         }

         return true;
     }
-    */
     @Test
     public void testGetInvalidEntityDefinition() throws Exception {
@@ -220,6 +238,7 @@
                         createRequiredAttrDef("description", DataTypes.STRING_TYPE));
         typeSystem.defineClassType(testTypeDefinition);

+        @SuppressWarnings("unchecked")
         String typesAsJSON = TypesSerialization.toJson(typeSystem,
                 Arrays.asList(new String[]{"test"}));
         sumbitType(typesAsJSON, "test");
@@ -229,7 +248,7 @@ public class EntityJerseyResourceIT extends BaseResourceIT {
         HierarchicalTypeDefinition<ClassType> databaseTypeDefinition =
                 createClassTypeDef(DATABASE_TYPE,
                         ImmutableList.<String>of(),
-                        createRequiredAttrDef("name", DataTypes.STRING_TYPE),
+                        createUniqueRequiredAttrDef("name", DataTypes.STRING_TYPE),
                         createRequiredAttrDef("description", DataTypes.STRING_TYPE));

         StructTypeDefinition structTypeDefinition =
@@ -239,12 +258,22 @@
                         createRequiredAttrDef("serde", DataTypes.STRING_TYPE)
                 });

+        EnumValue values[] = {
+                new EnumValue("MANAGED", 1),
+                new EnumValue("EXTERNAL", 2),
+        };
+
+        EnumTypeDefinition enumTypeDefinition = new EnumTypeDefinition("tableType", values);
+        typeSystem.defineEnumType(enumTypeDefinition);
+
         HierarchicalTypeDefinition<ClassType> tableTypeDefinition =
                 createClassTypeDef(TABLE_TYPE,
                         ImmutableList.<String>of(),
-                        createRequiredAttrDef("name", DataTypes.STRING_TYPE),
+                        createUniqueRequiredAttrDef("name", DataTypes.STRING_TYPE),
                         createRequiredAttrDef("description", DataTypes.STRING_TYPE),
                         createRequiredAttrDef("type", DataTypes.STRING_TYPE),
+                        new AttributeDefinition("tableType", "tableType",
+                                Multiplicity.REQUIRED, false, null),
                         new AttributeDefinition("serde1",
                                 "serdeType", Multiplicity.REQUIRED, false, null),
                         new AttributeDefinition("serde2",
@@ -264,8 +293,14 @@ public class EntityJerseyResourceIT extends BaseResourceIT {
     }

     private void submitTypes() throws Exception {
+        @SuppressWarnings("unchecked")
         String typesAsJSON = TypesSerialization.toJson(typeSystem,
-                Arrays.asList(new String[]{DATABASE_TYPE, TABLE_TYPE, "serdeType", "classification"}));
+                Arrays.asList(new String[]{
+                        "tableType",
+                        DATABASE_TYPE,
+                        TABLE_TYPE,
+                        "serdeType",
+                        "classification"}));
         sumbitType(typesAsJSON, TABLE_TYPE);
     }
@@ -298,6 +333,7 @@ public class EntityJerseyResourceIT extends BaseResourceIT {
         tableInstance.set("name", TABLE_NAME);
         tableInstance.set("description", "bar table");
         tableInstance.set("type", "managed");
+        tableInstance.set("tableType", 1); // enum
         tableInstance.set("database", databaseInstance);

         Struct traitInstance = (Struct) tableInstance.getTrait("classification");
...