From 5ffad3285785d131a1d9b0d81ea982ce191e85b5 Mon Sep 17 00:00:00 2001
From: Venkatesh Seetharam <venkatesh@apache.org>
Date: Sat, 24 Jan 2015 13:59:17 -0800
Subject: [PATCH] Fix bug with mapping existing references, minor refactoring. Contributed by Venkatesh Seetharam

---
 hivetypes/pom.xml                                                                                       |  19 +++++++++++++++++++
 hivetypes/src/main/java/org/apache/hadoop/metadata/hivetypes/HiveImporter.java                          |  69 ++++++++++++++++++++++++++++++++++-----------------------------------
 hivetypes/src/main/java/org/apache/hadoop/metadata/hivetypes/HiveTypeSystem.java                        |  29 +++++++++++++++++++++--------
 hivetypes/src/main/resources/hive-site.xml                                                              |   2 +-
 hivetypes/src/test/java/org/apache/hadoop/metadata/hivetypes/HiveGraphRepositoryTest.java               |  34 +++++++++++++++++++++++++---------
 repository/src/main/java/org/apache/hadoop/metadata/repository/graph/Constants.java                     |   7 +++++--
 repository/src/main/java/org/apache/hadoop/metadata/repository/graph/GraphBackedMetadataRepository.java | 194 +++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++---------------------------------------------------------------------------
 repository/src/main/java/org/apache/hadoop/metadata/repository/graph/GraphHelper.java                   | 123 +++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++
 repository/src/main/java/org/apache/hadoop/metadata/repository/graph/GraphUtils.java                    | 109 -------------------------------------------------------------------------------------------------------------
 repository/src/main/java/org/apache/hadoop/metadata/repository/graph/TitanGraphService.java             |  30 ++++++++++++++++++++++--------
 repository/src/test/java/org/apache/hadoop/metadata/repository/graph/GraphRepoMapperTest.java           | 196 ++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++
 repository/src/test/java/org/apache/hadoop/metadata/services/GraphBackedMetadataRepositoryTest.java     |  95 ++++++++++++++++++++++++++++++++++++++++++++---------------------------------------------------
 typesystem/src/main/java/org/apache/hadoop/metadata/Referenceable.java                                  |   7 +++++++
 typesystem/src/main/java/org/apache/hadoop/metadata/storage/Id.java                                     |  12 +++++++++++-
 webapp/src/main/resources/application.properties                                                        |  22 ++++++++++++++++++++++
 webapp/src/main/resources/graph.properties                                                              |   7 +++++++
 webapp/src/main/resources/indexer.properties                                                            |  40 ++++++++++++++++++++++++++++++++++++++++
 webapp/src/main/resources/log4j.xml                                                                     |  66 ++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++
 webapp/src/test/java/org/apache/hadoop/metadata/web/resources/BaseResourceIT.java                       |   6 ++++++
 webapp/src/test/java/org/apache/hadoop/metadata/web/resources/EntityJerseyResourceIT.java               |  54 +++++++++++++++++++++++++++++++++++++++++++++---------
 webapp/src/test/resources/application.properties                                                        |  25 -------------------------
 webapp/src/test/resources/graph.properties                                                              |   7 -------
 webapp/src/test/resources/indexer.properties                                                            |  40 ----------------------------------------
 23 files changed, 813 insertions(+), 380 deletions(-)
 create mode 100644 repository/src/main/java/org/apache/hadoop/metadata/repository/graph/GraphHelper.java
 delete mode 100644 repository/src/main/java/org/apache/hadoop/metadata/repository/graph/GraphUtils.java
 create mode 100644 repository/src/test/java/org/apache/hadoop/metadata/repository/graph/GraphRepoMapperTest.java
 create mode 100644 webapp/src/main/resources/application.properties
 create mode 100644 webapp/src/main/resources/graph.properties
 create mode 100644 webapp/src/main/resources/indexer.properties
 create mode 100644 webapp/src/main/resources/log4j.xml
 delete mode 100644 webapp/src/test/resources/application.properties
 delete mode 100644 webapp/src/test/resources/graph.properties
 delete mode 100644 webapp/src/test/resources/indexer.properties

diff --git a/hivetypes/pom.xml b/hivetypes/pom.xml
index 0a381d1..73e6c7c 100644
--- a/hivetypes/pom.xml
+++ b/hivetypes/pom.xml
@@ -74,6 +74,25 @@
             <groupId>log4j</groupId>
             <artifactId>log4j</artifactId>
         </dependency>
+        <dependency>
+            <groupId>com.sun.jersey</groupId>
+            <artifactId>jersey-core</artifactId>
+        </dependency>
+
+        <dependency>
+            <groupId>com.sun.jersey</groupId>
+            <artifactId>jersey-server</artifactId>
+        </dependency>
+
+        <dependency>
+            <groupId>com.sun.jersey</groupId>
+            <artifactId>jersey-client</artifactId>
+        </dependency>
+
+        <dependency>
+            <groupId>com.sun.jersey</groupId>
+            <artifactId>jersey-json</artifactId>
+        </dependency>
     </dependencies>
 
 </project>
\ No newline at end of file
diff --git a/hivetypes/src/main/java/org/apache/hadoop/metadata/hivetypes/HiveImporter.java b/hivetypes/src/main/java/org/apache/hadoop/metadata/hivetypes/HiveImporter.java
index 4212572..879221a 100644
--- a/hivetypes/src/main/java/org/apache/hadoop/metadata/hivetypes/HiveImporter.java
+++ b/hivetypes/src/main/java/org/apache/hadoop/metadata/hivetypes/HiveImporter.java
@@ -19,8 +19,19 @@
 package org.apache.hadoop.metadata.hivetypes;
 
 import org.apache.hadoop.hive.metastore.HiveMetaStoreClient;
-import org.apache.hadoop.hive.metastore.api.*;
-import org.apache.hadoop.metadata.*;
+import org.apache.hadoop.hive.metastore.api.Database;
+import org.apache.hadoop.hive.metastore.api.FieldSchema;
+import org.apache.hadoop.hive.metastore.api.MetaException;
+import org.apache.hadoop.hive.metastore.api.Order;
+import org.apache.hadoop.hive.metastore.api.Partition;
+import org.apache.hadoop.hive.metastore.api.SerDeInfo;
+import org.apache.hadoop.hive.metastore.api.StorageDescriptor;
+import org.apache.hadoop.hive.metastore.api.Table;
+import org.apache.hadoop.metadata.ITypedReferenceableInstance;
+import org.apache.hadoop.metadata.ITypedStruct;
+import org.apache.hadoop.metadata.MetadataException;
+import org.apache.hadoop.metadata.Referenceable;
+import org.apache.hadoop.metadata.Struct;
 import org.apache.hadoop.metadata.repository.MetadataRepository;
 import org.apache.hadoop.metadata.storage.IRepository;
 import org.apache.hadoop.metadata.storage.Id;
@@ -28,23 +39,18 @@ import org.apache.hadoop.metadata.storage.RepositoryException;
 import org.apache.hadoop.metadata.types.IDataType;
 import org.apache.hadoop.metadata.types.Multiplicity;
 import org.apache.hadoop.metadata.types.StructType;
-import org.apache.hadoop.metadata.types.TypeSystem;
-import org.apache.thrift.TException;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
 import java.util.ArrayList;
 import java.util.List;
 
-;
-
 public class HiveImporter {
 
     private final HiveMetaStoreClient hiveMetastoreClient;
 
     private static final Logger LOG =
             LoggerFactory.getLogger(HiveImporter.class);
-    private TypeSystem typeSystem;
     private IRepository repository;
     private MetadataRepository graphRepository;
     private HiveTypeSystem hiveTypeSystem;
@@ -68,7 +74,6 @@ public class HiveImporter {
     public HiveImporter(IRepository repo, HiveTypeSystem hts, HiveMetaStoreClient hmc) throws RepositoryException {
         this(hts, hmc);
 
-
         if (repo == null) {
             LOG.error("repository is null");
             throw new RuntimeException("repository is null");
@@ -77,13 +82,11 @@ public class HiveImporter {
         repository = repo;
 
         repository.defineTypes(hts.getHierarchicalTypeDefinitions());
-
     }
 
     private HiveImporter(HiveTypeSystem hts, HiveMetaStoreClient hmc) {
         this.hiveMetastoreClient = hmc;
         this.hiveTypeSystem = hts;
-        typeSystem = TypeSystem.getInstance();
         dbInstances = new ArrayList<>();
         tableInstances = new ArrayList<>();
         partitionInstances = new ArrayList<>();
@@ -119,18 +122,21 @@ public class HiveImporter {
         }
     }
 
-    private ITypedReferenceableInstance createInstance(Referenceable ref)
+    private Referenceable createInstance(Referenceable ref)
             throws MetadataException {
         if (repository != null) {
-            return repository.create(ref);
+            return (Referenceable) repository.create(ref);
         } else {
             String typeName = ref.getTypeName();
             IDataType dataType = hiveTypeSystem.getDataType(typeName);
             LOG.debug("creating instance of type " + typeName + " dataType " + dataType);
             ITypedReferenceableInstance instance =
                     (ITypedReferenceableInstance) dataType.convert(ref, Multiplicity.OPTIONAL);
-            graphRepository.createEntity(instance, typeName);
-            return instance;
+            String guid = graphRepository.createEntity(instance, typeName);
+            System.out.println("creating instance of type " + typeName + " dataType " + dataType
+                    + ", guid: " + guid);
+
+            return new Referenceable(guid, ref.getTypeName(), ref.getValuesMap());
         }
     }
 
@@ -146,17 +152,15 @@ public class HiveImporter {
             dbRef.set("parameters", hiveDB.getParameters());
             dbRef.set("ownerName", hiveDB.getOwnerName());
             dbRef.set("ownerType", hiveDB.getOwnerType().getValue());
-            ITypedReferenceableInstance dbRefTyped = createInstance(dbRef);
+            Referenceable dbRefTyped = createInstance(dbRef);
             dbInstances.add(dbRefTyped.getId());
             importTables(db, dbRefTyped);
-        } catch (NoSuchObjectException nsoe) {
-            throw new MetadataException(nsoe);
-        } catch (TException te) {
-            throw new MetadataException(te);
+        } catch (Exception e) {
+            throw new MetadataException(e);
         }
     }
 
-    private void importTables(String db, ITypedReferenceableInstance dbRefTyped) throws MetadataException {
+    private void importTables(String db, Referenceable dbRefTyped) throws MetadataException {
         try {
             List<String> hiveTables = hiveMetastoreClient.getAllTables(db);
 
@@ -177,7 +181,7 @@ public class HiveImporter {
                 ITypedStruct sdStruct = fillStorageDescStruct(storageDesc);
                 tableRef.set("sd", sdStruct);
                 tableRef.set("columns", sdStruct.get("cols"));
-                List<ITypedReferenceableInstance> partKeys = new ArrayList<>();
+                List<Referenceable> partKeys = new ArrayList<>();
                 Referenceable colRef;
                 if (hiveTable.getPartitionKeysSize() > 0) {
                     for (FieldSchema fs : hiveTable.getPartitionKeys()) {
@@ -185,7 +189,7 @@ public class HiveImporter {
                         colRef.set("name", fs.getName());
                         colRef.set("type", fs.getType());
                         colRef.set("comment", fs.getComment());
-                        ITypedReferenceableInstance colRefTyped = createInstance(colRef);
+                        Referenceable colRefTyped = createInstance(colRef);
                         partKeys.add(colRefTyped);
                     }
                     tableRef.set("partitionKeys", partKeys);
@@ -200,7 +204,7 @@ public class HiveImporter {
                 tableRef.set("tableType", hiveTable.getTableType());
                 tableRef.set("temporary", hiveTable.isTemporary());
 
-                ITypedReferenceableInstance tableRefTyped = createInstance(tableRef);
+                Referenceable tableRefTyped = createInstance(tableRef);
                 tableInstances.add(tableRefTyped.getId());
 
 
@@ -218,25 +222,22 @@ public class HiveImporter {
                         partRef.set("sd", sdStruct);
                         partRef.set("columns", sdStruct.get("cols"));
                         partRef.set("parameters", hivePart.getParameters());
-                        ITypedReferenceableInstance partRefTyped = createInstance(partRef);
+                        Referenceable partRefTyped = createInstance(partRef);
                         partitionInstances.add(partRefTyped.getId());
                     }
                 }
             }
-
-        } catch (NoSuchObjectException nsoe) {
-            throw new MetadataException(nsoe);
-        } catch (TException te) {
+        } catch (Exception te) {
             throw new MetadataException(te);
         }
 
     }
 
-    private ITypedStruct fillStorageDescStruct(StorageDescriptor storageDesc) throws MetadataException {
+    private ITypedStruct fillStorageDescStruct(StorageDescriptor storageDesc) throws Exception {
         String storageDescName = HiveTypeSystem.DefinedTypes.HIVE_STORAGEDESC.name();
 
         SerDeInfo serdeInfo = storageDesc.getSerdeInfo();
-        SkewedInfo skewedInfo = storageDesc.getSkewedInfo();
+        // SkewedInfo skewedInfo = storageDesc.getSkewedInfo();
 
         Struct sdStruct = new Struct(storageDescName);
 
@@ -275,7 +276,7 @@ public class HiveImporter {
 
 
 
-        List<ITypedReferenceableInstance> fieldsList = new ArrayList<>();
+        List<Referenceable> fieldsList = new ArrayList<>();
         Referenceable colRef;
         for (FieldSchema fs : storageDesc.getCols()) {
             LOG.debug("Processing field " + fs);
@@ -283,7 +284,7 @@ public class HiveImporter {
             colRef.set("name", fs.getName());
             colRef.set("type", fs.getType());
             colRef.set("comment", fs.getComment());
-            ITypedReferenceableInstance colRefTyped = createInstance(colRef);
+            Referenceable colRefTyped = createInstance(colRef);
             fieldsList.add(colRefTyped);
             columnInstances.add(colRefTyped.getId());
         }
@@ -315,8 +316,6 @@ public class HiveImporter {
         sdStruct.set("parameters", storageDesc.getParameters());
         sdStruct.set("storedAsSubDirectories", storageDesc.isStoredAsSubDirectories());
         StructType storageDesctype = (StructType) hiveTypeSystem.getDataType(storageDescName);
-        ITypedStruct sdStructTyped =
-                storageDesctype.convert(sdStruct, Multiplicity.OPTIONAL);
-        return sdStructTyped;
+        return storageDesctype.convert(sdStruct, Multiplicity.OPTIONAL);
     }
 }
diff --git a/hivetypes/src/main/java/org/apache/hadoop/metadata/hivetypes/HiveTypeSystem.java b/hivetypes/src/main/java/org/apache/hadoop/metadata/hivetypes/HiveTypeSystem.java
index 6f68cbc..3954dd5 100644
--- a/hivetypes/src/main/java/org/apache/hadoop/metadata/hivetypes/HiveTypeSystem.java
+++ b/hivetypes/src/main/java/org/apache/hadoop/metadata/hivetypes/HiveTypeSystem.java
@@ -19,10 +19,19 @@
 package org.apache.hadoop.metadata.hivetypes;
 
 import com.google.common.collect.ImmutableList;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.metadata.MetadataException;
-import org.apache.hadoop.metadata.types.*;
+import org.apache.hadoop.metadata.types.AttributeDefinition;
+import org.apache.hadoop.metadata.types.ClassType;
+import org.apache.hadoop.metadata.types.DataTypes;
+import org.apache.hadoop.metadata.types.EnumTypeDefinition;
+import org.apache.hadoop.metadata.types.EnumValue;
+import org.apache.hadoop.metadata.types.HierarchicalType;
+import org.apache.hadoop.metadata.types.HierarchicalTypeDefinition;
+import org.apache.hadoop.metadata.types.IDataType;
+import org.apache.hadoop.metadata.types.Multiplicity;
+import org.apache.hadoop.metadata.types.StructTypeDefinition;
+import org.apache.hadoop.metadata.types.TraitType;
+import org.apache.hadoop.metadata.types.TypeSystem;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
@@ -40,8 +49,6 @@ public class HiveTypeSystem {
         public static final HiveTypeSystem instance = new HiveTypeSystem();
     }
 
-    private TypeSystem typeSystem;
-
     private boolean valid = false;
 
     public enum DefinedTypes {
@@ -85,7 +92,7 @@ public class HiveTypeSystem {
     private List<IDataType> enumTypes;
 
 
-    private static Multiplicity ZeroOrMore = new Multiplicity(0, Integer.MAX_VALUE, true);
+    // private static Multiplicity ZeroOrMore = new Multiplicity(0, Integer.MAX_VALUE, true);
 
     private HiveTypeSystem() {
         classTypeDefinitions = new HashMap<>();
@@ -98,7 +105,7 @@ public class HiveTypeSystem {
     private void initialize() throws MetadataException {
 
         LOG.info("Initializing the Hive Typesystem");
-        typeSystem = TypeSystem.getInstance();
+        TypeSystem typeSystem = TypeSystem.getInstance();
 
         mapStrToStrMap =
                 typeSystem.defineMapType(DataTypes.STRING_TYPE, DataTypes.STRING_TYPE);
@@ -131,10 +138,16 @@ public class HiveTypeSystem {
         }
 
         typeMap.putAll(
-                typeSystem.defineTypes(getStructTypeDefinitions(), getTraitTypeDefinitions(), getClassTypeDefinitions()));
+                typeSystem.defineTypes(getStructTypeDefinitions(), getTraitTypeDefinitions(),
+                        getClassTypeDefinitions()));
+
+
         valid = true;
     }
 
+    Map<String, IDataType> getTypeMap() {
+        return typeMap;
+    }
 
     public synchronized static HiveTypeSystem getInstance() throws MetadataException {
         HiveTypeSystem hs = Holder.instance;
diff --git a/hivetypes/src/main/resources/hive-site.xml b/hivetypes/src/main/resources/hive-site.xml
index c372eff..4b82b8e 100644
--- a/hivetypes/src/main/resources/hive-site.xml
+++ b/hivetypes/src/main/resources/hive-site.xml
@@ -108,7 +108,7 @@
     
     <property>
       <name>hive.metastore.uris</name>
-      <value>thrift://localhost:9083</value>
+      <value>thrift://localhost:9083</value>
     </property>
     
     <property>
diff --git a/hivetypes/src/test/java/org/apache/hadoop/metadata/hivetypes/HiveGraphRepositoryTest.java b/hivetypes/src/test/java/org/apache/hadoop/metadata/hivetypes/HiveGraphRepositoryTest.java
index e01fe55..e16a743 100644
--- a/hivetypes/src/test/java/org/apache/hadoop/metadata/hivetypes/HiveGraphRepositoryTest.java
+++ b/hivetypes/src/test/java/org/apache/hadoop/metadata/hivetypes/HiveGraphRepositoryTest.java
@@ -19,19 +19,20 @@
 package org.apache.hadoop.metadata.hivetypes;
 
 
+import com.tinkerpop.blueprints.Edge;
+import com.tinkerpop.blueprints.Graph;
+import com.tinkerpop.blueprints.Vertex;
 import org.apache.commons.configuration.ConfigurationException;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.hive.conf.HiveConf;
 import org.apache.hadoop.hive.metastore.HiveMetaStoreClient;
-import org.apache.hadoop.hive.metastore.api.MetaException;
 import org.apache.hadoop.metadata.ITypedReferenceableInstance;
 import org.apache.hadoop.metadata.MetadataException;
 import org.apache.hadoop.metadata.repository.graph.GraphBackedMetadataRepository;
+import org.apache.hadoop.metadata.repository.graph.GraphHelper;
 import org.apache.hadoop.metadata.repository.graph.GraphService;
 import org.apache.hadoop.metadata.repository.graph.TitanGraphProvider;
 import org.apache.hadoop.metadata.repository.graph.TitanGraphService;
-import org.apache.hadoop.metadata.types.TypeSystem;
+import org.junit.After;
 import org.junit.Before;
 import org.junit.Ignore;
 import org.junit.Test;
@@ -40,14 +41,14 @@ import org.slf4j.LoggerFactory;
 
 import java.io.BufferedWriter;
 import java.io.FileWriter;
-import java.io.IOException;
 import java.util.List;
 
 @Ignore
 public class HiveGraphRepositoryTest {
 
     protected HiveTypeSystem hts;
-    GraphBackedMetadataRepository repository;
+    private GraphBackedMetadataRepository repository;
+    private GraphService gs;
 
     private static final Logger LOG =
             LoggerFactory.getLogger(HiveGraphRepositoryTest.class);
@@ -55,14 +56,29 @@ public class HiveGraphRepositoryTest {
     @Before
     public void setup() throws ConfigurationException, MetadataException {
 
-        TypeSystem ts = TypeSystem.getInstance();
-        GraphService gs = new TitanGraphService(new TitanGraphProvider());
+        gs = new TitanGraphService(new TitanGraphProvider());
         repository = new GraphBackedMetadataRepository(gs);
         hts = HiveTypeSystem.getInstance();
     }
 
+    @After
+    public void tearDown() {
+        Graph graph = gs.getBlueprintsGraph();
+        System.out.println("*******************Graph Dump****************************");
+        System.out.println("Vertices of " + graph);
+        for (Vertex vertex : graph.getVertices()) {
+            System.out.println(GraphHelper.vertexString(vertex));
+        }
+
+        System.out.println("Edges of " + graph);
+        for (Edge edge : graph.getEdges()) {
+            System.out.println(GraphHelper.edgeString(edge));
+        }
+        System.out.println("*******************Graph Dump****************************");
+    }
+
     @Test
-    public void testHiveImport() throws MetaException, MetadataException, IOException {
+    public void testHiveImport() throws Exception {
 
         HiveImporter hImporter = new HiveImporter(repository, hts, new HiveMetaStoreClient(new HiveConf()));
         hImporter.importHiveMetadata();
diff --git a/repository/src/main/java/org/apache/hadoop/metadata/repository/graph/Constants.java b/repository/src/main/java/org/apache/hadoop/metadata/repository/graph/Constants.java
index 768318d..fe4d90b 100644
--- a/repository/src/main/java/org/apache/hadoop/metadata/repository/graph/Constants.java
+++ b/repository/src/main/java/org/apache/hadoop/metadata/repository/graph/Constants.java
@@ -23,8 +23,11 @@ final class Constants {
     private Constants() {
     }
 
-    static final String GUID_PROPERTY_KEY = "GUID";
-    static final String ENTITY_TYPE_PROPERTY_KEY = "typeName";
+    static final String GUID_PROPERTY_KEY = "guid";
+    static final String ENTITY_TYPE_PROPERTY_KEY = "type";
     static final String VERSION_PROPERTY_KEY = "version";
     static final String TIMESTAMP_PROPERTY_KEY = "timestamp";
+
+    static final String BACKING_INDEX = "search";
+    static final String INDEX_NAME = "metadata";
 }
diff --git a/repository/src/main/java/org/apache/hadoop/metadata/repository/graph/GraphBackedMetadataRepository.java b/repository/src/main/java/org/apache/hadoop/metadata/repository/graph/GraphBackedMetadataRepository.java
index 3532bf5..48d62e7 100644
--- a/repository/src/main/java/org/apache/hadoop/metadata/repository/graph/GraphBackedMetadataRepository.java
+++ b/repository/src/main/java/org/apache/hadoop/metadata/repository/graph/GraphBackedMetadataRepository.java
@@ -18,6 +18,7 @@
 
 package org.apache.hadoop.metadata.repository.graph;
 
+import com.thinkaurelius.titan.core.TitanGraph;
 import com.thinkaurelius.titan.core.TitanProperty;
 import com.thinkaurelius.titan.core.TitanVertex;
 import com.tinkerpop.blueprints.Direction;
@@ -82,10 +83,14 @@ public class GraphBackedMetadataRepository implements MetadataRepository {
     private final GraphService graphService;
     private final TypeSystem typeSystem;
 
+    private final TitanGraph titanGraph;
+
     @Inject
     public GraphBackedMetadataRepository(GraphService graphService) throws MetadataException {
         this.graphService = graphService;
         this.typeSystem = TypeSystem.getInstance();
+
+        this.titanGraph = ((TitanGraphService) graphService).getTitanGraph();
     }
 
     /**
@@ -128,14 +133,14 @@ public class GraphBackedMetadataRepository implements MetadataRepository {
 
             transactionalGraph.rollback();
 
-            return instanceToGraphMapper.mapTypedInstanceToGraph(typedInstance, transactionalGraph);
+            return instanceToGraphMapper.mapTypedInstanceToGraph(typedInstance);
 
         } catch (MetadataException e) {
             transactionalGraph.rollback();
             throw new RepositoryException(e);
         } finally {
             transactionalGraph.commit();
-            GraphUtils.dumpToLog(transactionalGraph);
+            GraphHelper.dumpToLog(transactionalGraph);
         }
     }
 
@@ -145,7 +150,7 @@ public class GraphBackedMetadataRepository implements MetadataRepository {
 
         final Graph graph = graphService.getBlueprintsGraph();
         try {
-            Vertex instanceVertex = GraphUtils.findVertex(graph, Constants.GUID_PROPERTY_KEY, guid);
+            Vertex instanceVertex = GraphHelper.findVertexByGUID(graph, guid);
             if (instanceVertex == null) {
                 LOG.debug("Could not find a vertex for guid {}", guid);
                 return null;
@@ -157,7 +162,7 @@ public class GraphBackedMetadataRepository implements MetadataRepository {
         } catch (Exception e) {
             throw new RepositoryException(e);
         } finally {
-            GraphUtils.dumpToLog(graph);
+            GraphHelper.dumpToLog(graph);
         }
     }
 
@@ -272,32 +277,39 @@ public class GraphBackedMetadataRepository implements MetadataRepository {
                         idToNewIdMap.put(id, new Id(ID_SEQ.getAndIncrement(), 0, id.className));
                     }
 
-                    if (ref != null && idToInstanceMap.containsKey(id)) { // Oops
-                        throw new RepositoryException(String.format(
-                                "Unexpected internal error: Id %s processed again", id));
-                    }
-
                     if (ref != null) {
+                        if (idToInstanceMap.containsKey(id)) { // Oops
+                            throw new RepositoryException(String.format(
+                                    "Unexpected internal error: Id %s processed again", id));
+                        }
+
                         idToInstanceMap.put(id, ref);
                     }
                 }
             }
         }
 
-        public void createVerticesForClassTypes(TransactionalGraph transactionalGraph,
-                                                List<ITypedReferenceableInstance> newInstances) {
+        public void createVerticesForClassTypes(List<ITypedReferenceableInstance> newInstances) {
             for (ITypedReferenceableInstance typedInstance : newInstances) {
-                final Vertex instanceVertex =
-                        GraphUtils.createVertex(transactionalGraph, typedInstance);
-                idToVertexMap.put(typedInstance.getId(), instanceVertex);
+                final Id id = typedInstance.getId();
+                if (!idToVertexMap.containsKey(id)) {
+                    Vertex instanceVertex;
+                    if (id.isAssigned()) {
+                        instanceVertex = GraphHelper.findVertexByGUID(titanGraph, id.id);
+
+                    } else {
+                        instanceVertex = GraphHelper.createVertex(titanGraph, typedInstance);
+                    }
+
+                    idToVertexMap.put(id, instanceVertex);
+                }
             }
         }
     }
 
     private final class TypedInstanceToGraphMapper {
 
-        private String mapTypedInstanceToGraph(IReferenceableInstance typedInstance,
-                                               TransactionalGraph transactionalGraph)
+        private String mapTypedInstanceToGraph(IReferenceableInstance typedInstance)
             throws MetadataException {
 
             EntityProcessor entityProcessor = new EntityProcessor();
@@ -309,7 +321,7 @@ public class GraphBackedMetadataRepository implements MetadataRepository {
             }
 
             List<ITypedReferenceableInstance> newTypedInstances = discoverInstances(entityProcessor);
-            entityProcessor.createVerticesForClassTypes(transactionalGraph, newTypedInstances);
+            entityProcessor.createVerticesForClassTypes(newTypedInstances);
             return addDiscoveredInstances(typedInstance, entityProcessor, newTypedInstances);
         }
 
@@ -365,7 +377,8 @@ public class GraphBackedMetadataRepository implements MetadataRepository {
                 // add the attributes for the instance
                 final Map<String, AttributeInfo> fields = typedInstance.fieldMapping().fields;
 
-                mapInstanceToVertex(typedInstance, instanceVertex, fields, entityProcessor.idToVertexMap);
+                mapInstanceToVertex(
+                        id, typedInstance, instanceVertex, fields, entityProcessor.idToVertexMap);
 
                 for (String traitName : typedInstance.getTraits()) {
                     LOG.debug("mapping trait {}", traitName);
@@ -385,7 +398,7 @@ public class GraphBackedMetadataRepository implements MetadataRepository {
             return typedInstanceGUID;
         }
 
-        private void mapInstanceToVertex(ITypedInstance typedInstance, Vertex instanceVertex,
+        private void mapInstanceToVertex(Id id, ITypedInstance typedInstance, Vertex instanceVertex,
                                          Map<String, AttributeInfo> fields,
                                          Map<Id, Vertex> idToVertexMap) throws MetadataException {
             LOG.debug("Mapping instance {} to vertex {} for fields {}",
@@ -393,75 +406,91 @@ public class GraphBackedMetadataRepository implements MetadataRepository {
             for (AttributeInfo attributeInfo : fields.values()) {
                 LOG.debug("mapping attributeInfo {}", attributeInfo);
                 final IDataType dataType = attributeInfo.dataType();
-                final String vertexPropertyName =
-                        typedInstance.getTypeName() + "." + attributeInfo.name;
+                mapAttributesToVertex(id, typedInstance, instanceVertex,
+                        idToVertexMap, attributeInfo, dataType);
+            }
+        }
 
-                switch (dataType.getTypeCategory()) {
-                    case PRIMITIVE:
-                        mapPrimitiveToVertex(typedInstance, instanceVertex, attributeInfo);
-                        break;
+        private void mapAttributesToVertex(Id id, ITypedInstance typedInstance,
+                                           Vertex instanceVertex,
+                                           Map<Id, Vertex> idToVertexMap,
+                                           AttributeInfo attributeInfo,
+                                           IDataType dataType) throws MetadataException {
+            final String propertyName = typedInstance.getTypeName() + "." + attributeInfo.name;
 
-                    case ENUM:
-                        instanceVertex.setProperty(vertexPropertyName,
-                                typedInstance.getInt(attributeInfo.name));
-                        break;
+            switch (dataType.getTypeCategory()) {
+                case PRIMITIVE:
+                    mapPrimitiveToVertex(typedInstance, instanceVertex, attributeInfo);
+                    break;
 
-                    case ARRAY:
-                        // todo - Add to/from json for collections
-                        break;
+                case ENUM:
+                    instanceVertex.setProperty(propertyName,
+                            typedInstance.getInt(attributeInfo.name));
+                    break;
 
-                    case MAP:
-                        // todo - Add to/from json for collections
-                        break;
+                case ARRAY:
+                    // todo - Add to/from json for collections
+                    break;
 
-                    case STRUCT:
-                        mapStructInstanceToVertex(typedInstance, instanceVertex,
-                                attributeInfo, idToVertexMap);
-                        break;
+                case MAP:
+                    // todo - Add to/from json for collections
+                    break;
 
-                    case TRAIT:
-                        // do NOTHING - this is taken care of earlier
-                        break;
+                case STRUCT:
+                    Vertex structInstanceVertex = mapStructInstanceToVertex(id,
+                            (ITypedStruct) typedInstance.get(attributeInfo.name),
+                            attributeInfo, idToVertexMap);
+                    // add an edge to the newly created vertex from the parent
+                    GraphHelper.addEdge(instanceVertex, structInstanceVertex, propertyName);
+                    break;
 
-                    case CLASS:
-                        Id id = (Id) typedInstance.get(attributeInfo.name);
-                        if (id != null) {
-                            Vertex referenceVertex = idToVertexMap.get(id);
-                            String relationshipLabel =
-                                    typedInstance.getTypeName() + "." + attributeInfo.name;
-                            LOG.debug("Adding edge {} -> label {} -> v{}",
-                                    instanceVertex, relationshipLabel, referenceVertex);
-                            instanceVertex.addEdge(relationshipLabel, referenceVertex);
-                        }
-                        break;
+                case TRAIT:
+                    // do NOTHING - this is taken care of earlier
+                    break;
 
-                    default:
-                        break;
-                }
+                case CLASS:
+                    Id referenceId = (Id) typedInstance.get(attributeInfo.name);
+                    mapClassReferenceAsEdge(
+                            instanceVertex, idToVertexMap, propertyName, referenceId);
+                    break;
+
+                default:
+                    break;
             }
         }
 
-        private void mapStructInstanceToVertex(ITypedInstance typedInstance,
-                                               Vertex parentInstanceVertex,
-                                               AttributeInfo attributeInfo,
-                                               Map<Id, Vertex> idToVertexMap) throws MetadataException {
-            ITypedStruct structInstance = (ITypedStruct) typedInstance.get(attributeInfo.name);
+        private void mapClassReferenceAsEdge(Vertex instanceVertex, Map<Id, Vertex> idToVertexMap,
+                                             String propertyKey, Id id) {
+            if (id != null) {
+                Vertex referenceVertex;
+                if (id.isAssigned()) {
+                    referenceVertex = GraphHelper.findVertexByGUID(titanGraph, id.id);
+                } else {
+                    referenceVertex = idToVertexMap.get(id);
+                }
+
+                if (referenceVertex != null) {
+                    // add an edge to the class vertex from the instance
+                    GraphHelper.addEdge(instanceVertex, referenceVertex, propertyKey);
+                } else { // todo - consider throwing an exception instead of logging
+                    LOG.error("No vertex found while mapping reference for id={}", id);
+                }
+            }
+        }
 
+        private Vertex mapStructInstanceToVertex(Id id, ITypedStruct structInstance,
+                                                 AttributeInfo attributeInfo,
+                                                 Map<Id, Vertex> idToVertexMap) throws MetadataException {
             // add a new vertex for the struct or trait instance
-            Vertex structInstanceVertex = GraphUtils.createVertex(
-                    graphService.getBlueprintsGraph(), structInstance,
-                    ((ITypedReferenceableInstance) typedInstance).getId());
+            Vertex structInstanceVertex = GraphHelper.createVertex(
+                    graphService.getBlueprintsGraph(), structInstance.getTypeName(), id);
             LOG.debug("created vertex {} for struct {}", structInstanceVertex, attributeInfo.name);
 
             // map all the attributes to this newly created vertex
-            mapInstanceToVertex(structInstance, structInstanceVertex,
+            mapInstanceToVertex(id, structInstance, structInstanceVertex,
                     structInstance.fieldMapping().fields, idToVertexMap);
 
-            // add an edge to the newly created vertex from the parent
-            String relationshipLabel = typedInstance.getTypeName() + "." + attributeInfo.name;
-            LOG.debug("Adding edge for {} -> struct label {} -> v{}",
-                    parentInstanceVertex, relationshipLabel, structInstanceVertex);
-            parentInstanceVertex.addEdge(relationshipLabel, structInstanceVertex);
+            return structInstanceVertex;
         }
 
         private void mapTraitInstanceToVertex(String traitName, ITypedStruct traitInstance,
@@ -469,19 +498,17 @@ public class GraphBackedMetadataRepository implements MetadataRepository {
                                               Vertex parentInstanceVertex,
                                               Map<Id, Vertex> idToVertexMap) throws MetadataException {
             // add a new vertex for the struct or trait instance
-            Vertex traitInstanceVertex = GraphUtils.createVertex(
+            Vertex traitInstanceVertex = GraphHelper.createVertex(
                     graphService.getBlueprintsGraph(), traitInstance, typedInstance.getId());
             LOG.debug("created vertex {} for trait {}", traitInstanceVertex, traitName);
 
             // map all the attributes to this newly created vertex
-            mapInstanceToVertex(traitInstance, traitInstanceVertex,
+            mapInstanceToVertex(typedInstance.getId(), traitInstance, traitInstanceVertex,
                     traitInstance.fieldMapping().fields, idToVertexMap);
 
             // add an edge to the newly created vertex from the parent
             String relationshipLabel = typedInstance.getTypeName() + "." + traitName;
-            LOG.debug("Adding edge for {} -> trait label {} -> v{}",
-                    parentInstanceVertex, relationshipLabel, traitInstanceVertex);
-            parentInstanceVertex.addEdge(relationshipLabel, traitInstanceVertex);
+            GraphHelper.addEdge(parentInstanceVertex, traitInstanceVertex, relationshipLabel);
         }
 
         private void mapPrimitiveToVertex(ITypedInstance typedInstance,
@@ -528,6 +555,23 @@ public class GraphBackedMetadataRepository implements MetadataRepository {
         }
     }
 
+/*
+    private PropertyKey getPropertyKey(DataTypes.ArrayType arrayType, String propertyName) {
+        PropertyKey propertyKey = titanGraph.getPropertyKey(propertyName);
+        if (propertyKey == null) { //First time we have seen it, let's define it
+            propertyKey = management.makePropertyKey(propertyName)
+                    .dataType(arrayType.getElemType().getClass())
+                    .cardinality(Cardinality.LIST)
+                    .make();
+            management.buildIndex("by" + propertyName, Vertex.class)
+                    .addKey(propertyKey)
+                    .buildCompositeIndex();
+        }
+
+        return propertyKey;
+    }
+*/
+
     private final class GraphToTypedInstanceMapper {
 
         private ITypedReferenceableInstance mapGraphToTypedInstance(String guid,
diff --git a/repository/src/main/java/org/apache/hadoop/metadata/repository/graph/GraphHelper.java b/repository/src/main/java/org/apache/hadoop/metadata/repository/graph/GraphHelper.java
new file mode 100644
index 0000000..2799326
--- /dev/null
+++ b/repository/src/main/java/org/apache/hadoop/metadata/repository/graph/GraphHelper.java
@@ -0,0 +1,123 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.metadata.repository.graph;
+
+import com.tinkerpop.blueprints.Compare;
+import com.tinkerpop.blueprints.Direction;
+import com.tinkerpop.blueprints.Edge;
+import com.tinkerpop.blueprints.Graph;
+import com.tinkerpop.blueprints.GraphQuery;
+import com.tinkerpop.blueprints.Vertex;
+import org.apache.hadoop.metadata.ITypedInstance;
+import org.apache.hadoop.metadata.ITypedReferenceableInstance;
+import org.apache.hadoop.metadata.storage.Id;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import java.util.Iterator;
+import java.util.UUID;
+
+/**
+ * Utility class for graph operations.
+ */
+public final class GraphHelper {
+
+    private static final Logger LOG = LoggerFactory.getLogger(GraphHelper.class);
+
+    private GraphHelper() {
+    }
+
+    public static Vertex createVertex(Graph graph,
+                                      ITypedReferenceableInstance typedInstance) {
+        return createVertex(graph, typedInstance, typedInstance.getId());
+    }
+
+    public static Vertex createVertex(Graph graph,
+                                      ITypedInstance typedInstance,
+                                      Id typedInstanceId) {
+        return createVertex(graph, typedInstance.getTypeName(), typedInstanceId);
+    }
+
+    public static Vertex createVertex(Graph graph,
+                                      String typeName,
+                                      Id typedInstanceId) {
+        final Vertex instanceVertex = graph.addVertex(null);
+        // type
+        instanceVertex.setProperty(Constants.ENTITY_TYPE_PROPERTY_KEY, typeName);
+
+        // id
+        final String guid = UUID.randomUUID().toString();
+        instanceVertex.setProperty(Constants.GUID_PROPERTY_KEY, guid);
+
+        // version
+        instanceVertex.setProperty(Constants.VERSION_PROPERTY_KEY, typedInstanceId.version);
+
+        return instanceVertex;
+    }
+
+    public static Edge addEdge(Vertex fromVertex, Vertex toVertex, String edgeLabel) {
+        LOG.debug("Adding edge for {} -> label {} -> v{}",
+                fromVertex, edgeLabel, toVertex);
+        return fromVertex.addEdge(edgeLabel, toVertex);
+    }
+
+    public static Vertex findVertexByGUID(Graph blueprintsGraph,
+                                          String value) {
+        LOG.debug("Finding vertex for key={}, value={}", Constants.GUID_PROPERTY_KEY, value);
+
+        GraphQuery query = blueprintsGraph.query()
+                .has(Constants.GUID_PROPERTY_KEY, Compare.EQUAL, value);
+        Iterator<Vertex> results = query.vertices().iterator();
+        // returning one since guid should be unique
+        return results.hasNext() ? results.next() : null;
+    }
+
+    public static String vertexString(final Vertex vertex) {
+        StringBuilder properties = new StringBuilder();
+        for (String propertyKey : vertex.getPropertyKeys()) {
+            properties.append(propertyKey)
+                    .append("=").append(vertex.getProperty(propertyKey))
+                    .append(", ");
+        }
+
+        return "v[" + vertex.getId() + "], Properties[" + properties + "]";
+    }
+
+    public static String edgeString(final Edge edge) {
+        return "e[" + edge.getLabel() + "], ["
+                + edge.getVertex(Direction.OUT)
+                + " -> " + edge.getLabel() + " -> "
+                + edge.getVertex(Direction.IN)
+                + "]";
+    }
+
+    public static void dumpToLog(final Graph graph) {
+        LOG.debug("*******************Graph Dump****************************");
+        LOG.debug("Vertices of {}", graph);
+        for (Vertex vertex : graph.getVertices()) {
+            LOG.debug(vertexString(vertex));
+        }
+
+        LOG.debug("Edges of {}", graph);
+        for (Edge edge : graph.getEdges()) {
+            LOG.debug(edgeString(edge));
+        }
+        LOG.debug("*******************Graph Dump****************************");
+    }
+}
\ No newline at end of file
diff --git a/repository/src/main/java/org/apache/hadoop/metadata/repository/graph/GraphUtils.java b/repository/src/main/java/org/apache/hadoop/metadata/repository/graph/GraphUtils.java
deleted file mode 100644
index f2ffc15..0000000
--- a/repository/src/main/java/org/apache/hadoop/metadata/repository/graph/GraphUtils.java
+++ /dev/null
@@ -1,109 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.hadoop.metadata.repository.graph;
-
-import com.tinkerpop.blueprints.Direction;
-import com.tinkerpop.blueprints.Edge;
-import com.tinkerpop.blueprints.Graph;
-import com.tinkerpop.blueprints.GraphQuery;
-import com.tinkerpop.blueprints.Vertex;
-import org.apache.hadoop.metadata.ITypedInstance;
-import org.apache.hadoop.metadata.ITypedReferenceableInstance;
-import org.apache.hadoop.metadata.storage.Id;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-
-import java.util.Iterator;
-import java.util.UUID;
-
-/**
- * Utility class for graph operations.
- */
-public final class GraphUtils {
-
-    private static final Logger LOG = LoggerFactory.getLogger(GraphUtils.class);
-
-    private GraphUtils() {
-    }
-
-    public static Vertex createVertex(Graph graph,
-                                      ITypedReferenceableInstance typedInstance) {
-        return createVertex(graph, typedInstance, typedInstance.getId());
-    }
-
-    public static Vertex createVertex(Graph graph,
-                                      ITypedInstance typedInstance,
-                                      Id typedInstanceId) {
-        final Vertex instanceVertex = graph.addVertex(null);
-        // type
-        instanceVertex.setProperty(Constants.ENTITY_TYPE_PROPERTY_KEY, typedInstance.getTypeName());
-
-        // id
-        final String guid = UUID.randomUUID().toString();
-        instanceVertex.setProperty(Constants.GUID_PROPERTY_KEY, guid);
-
-        // version
-        instanceVertex.setProperty(Constants.VERSION_PROPERTY_KEY, typedInstanceId.version);
-
-        return instanceVertex;
-    }
-
-    public static Vertex findVertex(Graph blueprintsGraph,
-                                    String key, String value) {
-        LOG.debug("Finding vertex for key={}, value={}", key, value);
-
-        GraphQuery query = blueprintsGraph.query().has(key, value);
-        Iterator<Vertex> results = query.vertices().iterator();
-        // returning one since name/type is unique
-        return results.hasNext() ? results.next() : null;
-    }
-
-    public static String vertexString(final Vertex vertex) {
-        StringBuilder properties = new StringBuilder();
-        for (String propertyKey : vertex.getPropertyKeys()) {
-            properties.append(propertyKey)
-                    .append("=").append(vertex.getProperty(propertyKey))
-                    .append(", ");
-        }
-
-        return "v[" + vertex.getId() + "], Properties[" + properties + "]";
-    }
-
-    public static String edgeString(final Edge edge) {
-        return "e[" + edge.getLabel() + "], ["
-                + edge.getVertex(Direction.OUT)
-                + " -> " + edge.getLabel() + " -> "
-                + edge.getVertex(Direction.IN)
-                + "]";
-    }
-
-    public static void dumpToLog(final Graph graph) {
-        LOG.debug("*******************Graph Dump****************************");
-        LOG.debug("Vertices of {}", graph);
-        for (Vertex vertex : graph.getVertices()) {
-            LOG.debug(vertexString(vertex));
-        }
-
-        LOG.debug("Edges of {}", graph);
-        for (Edge edge : graph.getEdges()) {
-            LOG.debug(edgeString(edge));
-        }
-        LOG.debug("*******************Graph Dump****************************");
-    }
-}
\ No newline at end of file
diff --git a/repository/src/main/java/org/apache/hadoop/metadata/repository/graph/TitanGraphService.java b/repository/src/main/java/org/apache/hadoop/metadata/repository/graph/TitanGraphService.java
index 7667c33..c116ef3 100644
--- a/repository/src/main/java/org/apache/hadoop/metadata/repository/graph/TitanGraphService.java
+++ b/repository/src/main/java/org/apache/hadoop/metadata/repository/graph/TitanGraphService.java
@@ -28,6 +28,7 @@ import java.util.Set;
 import javax.inject.Inject;
 import javax.inject.Singleton;
 
+import com.thinkaurelius.titan.core.PropertyKey;
 import org.apache.commons.configuration.Configuration;
 import org.apache.commons.configuration.ConfigurationException;
 import org.apache.commons.configuration.PropertiesConfiguration;
@@ -62,7 +63,7 @@ public class TitanGraphService implements GraphService {
 	/**
 	 * Initialize this service through injection with a custom Provider.
 	 * 
-	 * @param graph
+	 * @param graph graph implementation
 	 * @throws ConfigurationException
 	 */
 	@Inject
@@ -123,10 +124,23 @@ public class TitanGraphService implements GraphService {
 		LOG.info("Indexes do not exist, Creating indexes for titanGraph using indexer.properties.");
 
 		TitanManagement mgmt = titanGraph.getManagementSystem();
-		mgmt.buildIndex("mainIndex", Vertex.class).buildMixedIndex("search");
-		TitanGraphIndex graphIndex = mgmt.getGraphIndex("mainIndex");
-
-        mgmt.addIndexKey(graphIndex, mgmt.makePropertyKey("guid").dataType(String.class).make());
+		TitanGraphIndex graphIndex =  mgmt.buildIndex(Constants.INDEX_NAME, Vertex.class)
+				.buildMixedIndex(Constants.BACKING_INDEX);
+
+		PropertyKey guidKey = mgmt
+				.makePropertyKey(Constants.GUID_PROPERTY_KEY)
+				.dataType(String.class).make();
+		mgmt.buildIndex("byGUID", Vertex.class)
+				.addKey(guidKey)
+				.unique()
+				.buildCompositeIndex();
+
+		PropertyKey typeKey = mgmt
+				.makePropertyKey(Constants.ENTITY_TYPE_PROPERTY_KEY)
+				.dataType(String.class).make();
+		mgmt.buildIndex("byType", Vertex.class)
+				.addKey(typeKey)
+				.buildCompositeIndex();
 
         Configuration indexConfig = getConfiguration("indexer.properties", INDEXER_PREFIX);
         // Properties are formatted: prop_name:type;prop_name:type
@@ -134,7 +148,7 @@ public class TitanGraphService implements GraphService {
 		if (!indexConfig.isEmpty()) {
 
 			// Get a list of property names to iterate through...
-			List<String> propList = new ArrayList<String>();
+			List<String> propList = new ArrayList<>();
 
 			Iterator<String> it = indexConfig.getKeys("property.name");
 
@@ -147,9 +161,9 @@ public class TitanGraphService implements GraphService {
 
 				// Pull the property name and index, so we can register the name
 				// and look up the type.
-				String prop = it.next().toString();
+				String prop = it.next();
 				String index = prop.substring(prop.lastIndexOf(".") + 1);
-				String type = null;
+				String type;
 				prop = indexConfig.getProperty(prop).toString();
 
 				// Look up the type for the specified property name.
diff --git a/repository/src/test/java/org/apache/hadoop/metadata/repository/graph/GraphRepoMapperTest.java b/repository/src/test/java/org/apache/hadoop/metadata/repository/graph/GraphRepoMapperTest.java
new file mode 100644
index 0000000..646d001
--- /dev/null
+++ b/repository/src/test/java/org/apache/hadoop/metadata/repository/graph/GraphRepoMapperTest.java
@@ -0,0 +1,196 @@
+package org.apache.hadoop.metadata.repository.graph;
+
+import com.google.common.collect.ImmutableList;
+import com.thinkaurelius.titan.core.TitanGraph;
+import com.tinkerpop.blueprints.Edge;
+import com.tinkerpop.blueprints.Vertex;
+import org.apache.hadoop.metadata.ITypedReferenceableInstance;
+import org.apache.hadoop.metadata.Referenceable;
+import org.apache.hadoop.metadata.RepositoryModuleBaseTest;
+import org.apache.hadoop.metadata.Struct;
+import org.apache.hadoop.metadata.types.AttributeDefinition;
+import org.apache.hadoop.metadata.types.ClassType;
+import org.apache.hadoop.metadata.types.DataTypes;
+import org.apache.hadoop.metadata.types.EnumTypeDefinition;
+import org.apache.hadoop.metadata.types.EnumValue;
+import org.apache.hadoop.metadata.types.HierarchicalTypeDefinition;
+import org.apache.hadoop.metadata.types.IDataType;
+import org.apache.hadoop.metadata.types.Multiplicity;
+import org.apache.hadoop.metadata.types.StructTypeDefinition;
+import org.apache.hadoop.metadata.types.TraitType;
+import org.apache.hadoop.metadata.types.TypeSystem;
+import org.testng.annotations.BeforeClass;
+import org.testng.annotations.Test;
+
+public class GraphRepoMapperTest extends RepositoryModuleBaseTest {
+
+    private static final String DATABASE_TYPE = "hive_database";
+    private static final String DATABASE_NAME = "foo";
+    private static final String TABLE_TYPE = "hive_table";
+    private static final String TABLE_NAME = "bar";
+
+    private TitanGraphService titanGraphService;
+    private GraphBackedMetadataRepository repositoryService;
+    private TypeSystem typeSystem;
+
+    @BeforeClass
+    public void setUp() throws Exception {
+        titanGraphService = super.injector.getInstance(TitanGraphService.class);
+        titanGraphService.start();
+
+        repositoryService = super.injector.getInstance(GraphBackedMetadataRepository.class);
+        repositoryService.start();
+
+        typeSystem = TypeSystem.getInstance();
+
+        createHiveTypes();
+    }
+
+    @Test
+    public void testSubmitEntity() throws Exception {
+        Referenceable databaseInstance = new Referenceable(DATABASE_TYPE);
+        databaseInstance.set("name", DATABASE_NAME);
+        databaseInstance.set("description", "foo database");
+        System.out.println("databaseInstance = " + databaseInstance);
+
+        ClassType dbType = typeSystem.getDataType(ClassType.class, DATABASE_TYPE);
+        ITypedReferenceableInstance db = dbType.convert(databaseInstance, Multiplicity.REQUIRED);
+        System.out.println("db = " + db);
+
+        String dbGUID = repositoryService.createEntity(db, DATABASE_TYPE);
+        System.out.println("added db = " + dbGUID);
+
+        Referenceable dbInstance = new Referenceable(
+                dbGUID, DATABASE_TYPE, databaseInstance.getValuesMap());
+
+        ITypedReferenceableInstance table = createHiveTableInstance(dbInstance);
+        String tableGUID = repositoryService.createEntity(table, TABLE_TYPE);
+        System.out.println("added table = " + tableGUID);
+
+        dumpGraph();
+    }
+
+    private void dumpGraph() {
+        TitanGraph graph = titanGraphService.getTitanGraph();
+        System.out.println("*******************Graph Dump****************************");
+        System.out.println("Vertices of " + graph);
+        for (Vertex vertex : graph.getVertices()) {
+            System.out.println(GraphHelper.vertexString(vertex));
+        }
+
+        System.out.println("Edges of " + graph);
+        for (Edge edge : graph.getEdges()) {
+            System.out.println(GraphHelper.edgeString(edge));
+        }
+        System.out.println("*******************Graph Dump****************************");
+    }
+
+    private void createHiveTypes() throws Exception {
+        HierarchicalTypeDefinition<ClassType> databaseTypeDefinition =
+                createClassTypeDef(DATABASE_TYPE,
+                        ImmutableList.<String>of(),
+                        createUniqueRequiredAttrDef("name", DataTypes.STRING_TYPE),
+                        createRequiredAttrDef("description", DataTypes.STRING_TYPE));
+
+        StructTypeDefinition structTypeDefinition =
+                new StructTypeDefinition("serdeType",
+                        new AttributeDefinition[] {
+                                createRequiredAttrDef("name", DataTypes.STRING_TYPE),
+                                createRequiredAttrDef("serde", DataTypes.STRING_TYPE)
+                        });
+
+        EnumValue[] values = {
+                new EnumValue("MANAGED", 1),
+                new EnumValue("EXTERNAL", 2),
+        };
+
+        EnumTypeDefinition enumTypeDefinition = new EnumTypeDefinition("tableType", values);
+        typeSystem.defineEnumType(enumTypeDefinition);
+
+        HierarchicalTypeDefinition<ClassType> tableTypeDefinition =
+                createClassTypeDef(TABLE_TYPE,
+                        ImmutableList.<String>of(),
+                        createUniqueRequiredAttrDef("name", DataTypes.STRING_TYPE),
+                        createRequiredAttrDef("description", DataTypes.STRING_TYPE),
+                        createRequiredAttrDef("type", DataTypes.STRING_TYPE),
+                        new AttributeDefinition("tableType", "tableType",
+                                Multiplicity.REQUIRED, false, null),
+/*
+                        new AttributeDefinition("columns",
+                                String.format("array<%s>", DataTypes.STRING_TYPE.getName()),
+                                Multiplicity.COLLECTION, false, null),
+*/
+                        new AttributeDefinition("serde1",
+                                "serdeType", Multiplicity.REQUIRED, false, null),
+                        new AttributeDefinition("serde2",
+                                "serdeType", Multiplicity.REQUIRED, false, null),
+                        new AttributeDefinition("database",
+                                DATABASE_TYPE, Multiplicity.REQUIRED, true, null));
+
+        HierarchicalTypeDefinition<TraitType> classificationTypeDefinition =
+                createTraitTypeDef("classification",
+                        ImmutableList.<String>of(),
+                        createRequiredAttrDef("tag", DataTypes.STRING_TYPE));
+
+        typeSystem.defineTypes(
+                ImmutableList.of(structTypeDefinition),
+                ImmutableList.of(classificationTypeDefinition),
+                ImmutableList.of(databaseTypeDefinition, tableTypeDefinition));
+    }
+
+    private ITypedReferenceableInstance createHiveTableInstance(
+            Referenceable databaseInstance) throws Exception {
+/*
+        Referenceable databaseInstance = new Referenceable(DATABASE_TYPE);
+        databaseInstance.set("name", DATABASE_NAME);
+        databaseInstance.set("description", "foo database");
+*/
+
+        Referenceable tableInstance = new Referenceable(TABLE_TYPE, "classification");
+        tableInstance.set("name", TABLE_NAME);
+        tableInstance.set("description", "bar table");
+        tableInstance.set("type", "managed");
+        tableInstance.set("tableType", 1); // enum
+        tableInstance.set("database", databaseInstance);
+
+        Struct traitInstance = (Struct) tableInstance.getTrait("classification");
+        traitInstance.set("tag", "foundation_etl");
+
+        Struct serde1Instance = new Struct("serdeType");
+        serde1Instance.set("name", "serde1");
+        serde1Instance.set("serde", "serde1");
+        tableInstance.set("serde1", serde1Instance);
+
+        Struct serde2Instance = new Struct("serdeType");
+        serde2Instance.set("name", "serde2");
+        serde2Instance.set("serde", "serde2");
+        tableInstance.set("serde2", serde2Instance);
+
+        ClassType tableType = typeSystem.getDataType(ClassType.class, TABLE_TYPE);
+        return tableType.convert(tableInstance, Multiplicity.REQUIRED);
+    }
+
+    protected AttributeDefinition createUniqueRequiredAttrDef(String name,
+                                                              IDataType dataType) {
+        return new AttributeDefinition(name, dataType.getName(),
+                Multiplicity.REQUIRED, false, true, true, null);
+    }
+
+    protected AttributeDefinition createRequiredAttrDef(String name,
+                                                        IDataType dataType) {
+        return new AttributeDefinition(name, dataType.getName(),
+                Multiplicity.REQUIRED, false, null);
+    }
+
+    @SuppressWarnings("unchecked")
+    protected HierarchicalTypeDefinition<TraitType> createTraitTypeDef(
+            String name, ImmutableList<String> superTypes, AttributeDefinition... attrDefs) {
+        return new HierarchicalTypeDefinition(TraitType.class, name, superTypes, attrDefs);
+    }
+
+    @SuppressWarnings("unchecked")
+    protected HierarchicalTypeDefinition<ClassType> createClassTypeDef(
+            String name, ImmutableList<String> superTypes, AttributeDefinition... attrDefs) {
+        return new HierarchicalTypeDefinition(ClassType.class, name, superTypes, attrDefs);
+    }
+}
diff --git a/repository/src/test/java/org/apache/hadoop/metadata/services/GraphBackedMetadataRepositoryTest.java b/repository/src/test/java/org/apache/hadoop/metadata/services/GraphBackedMetadataRepositoryTest.java
index 6258e43..7d329f4 100644
--- a/repository/src/test/java/org/apache/hadoop/metadata/services/GraphBackedMetadataRepositoryTest.java
+++ b/repository/src/test/java/org/apache/hadoop/metadata/services/GraphBackedMetadataRepositoryTest.java
@@ -1,22 +1,38 @@
-package org.apache.hadoop.metadata.services;
-
-import java.util.List;
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
 
-import javax.inject.Inject;
+package org.apache.hadoop.metadata.services;
 
+import com.google.common.collect.ImmutableList;
+import com.thinkaurelius.titan.core.TitanGraph;
+import com.tinkerpop.blueprints.Direction;
+import com.tinkerpop.blueprints.Edge;
+import com.tinkerpop.blueprints.Vertex;
 import org.apache.hadoop.metadata.ITypedReferenceableInstance;
 import org.apache.hadoop.metadata.MetadataException;
 import org.apache.hadoop.metadata.Referenceable;
 import org.apache.hadoop.metadata.RepositoryMetadataModule;
 import org.apache.hadoop.metadata.repository.graph.GraphBackedMetadataRepository;
-import org.apache.hadoop.metadata.repository.graph.GraphUtils;
+import org.apache.hadoop.metadata.repository.graph.GraphHelper;
 import org.apache.hadoop.metadata.repository.graph.TitanGraphService;
-import org.apache.hadoop.metadata.storage.IRepository;
-import org.apache.hadoop.metadata.storage.memory.MemRepository;
 import org.apache.hadoop.metadata.types.AttributeDefinition;
 import org.apache.hadoop.metadata.types.ClassType;
 import org.apache.hadoop.metadata.types.DataTypes;
-import org.apache.hadoop.metadata.types.HierarchicalType;
 import org.apache.hadoop.metadata.types.HierarchicalTypeDefinition;
 import org.apache.hadoop.metadata.types.IDataType;
 import org.apache.hadoop.metadata.types.Multiplicity;
@@ -24,16 +40,11 @@ import org.apache.hadoop.metadata.types.StructTypeDefinition;
 import org.apache.hadoop.metadata.types.TraitType;
 import org.apache.hadoop.metadata.types.TypeSystem;
 import org.testng.Assert;
-import org.testng.annotations.AfterClass;
 import org.testng.annotations.BeforeClass;
 import org.testng.annotations.Guice;
 import org.testng.annotations.Test;
 
-import com.google.common.collect.ImmutableList;
-import com.thinkaurelius.titan.core.TitanGraph;
-import com.tinkerpop.blueprints.Direction;
-import com.tinkerpop.blueprints.Edge;
-import com.tinkerpop.blueprints.Vertex;
+import java.util.List;
 
 /**
  * GraphBackedMetadataRepository test
@@ -41,18 +52,17 @@ import com.tinkerpop.blueprints.Vertex;
  * Guice loads the dependencies and injects the necessary objects
  *
  */
-@Test (enabled = false)
+@Test
 @Guice(modules = RepositoryMetadataModule.class)
 public class GraphBackedMetadataRepositoryTest {
 
-    private static final String ENTITY_TYPE = "hive-table";
+    private static final String ENTITY_TYPE = "Department";
 
     @Inject
     TitanGraphService titanGraphService;
     @Inject
     GraphBackedMetadataRepository repositoryService;
     
-    private IRepository repo;
     private TypeSystem ts;
     private String guid;
 
@@ -69,11 +79,7 @@ public class GraphBackedMetadataRepositoryTest {
         defineDeptEmployeeTypes(ts);
     }
 
-    @AfterClass
-    public void tearDown() throws Exception {
-    }
-
-    @Test (enabled = false)
+    @Test
     public void testSubmitEntity() throws Exception {
         Referenceable hrDept = createDeptEg1(ts);
         ClassType deptType = ts.getDataType(ClassType.class, "Department");
@@ -88,34 +94,33 @@ public class GraphBackedMetadataRepositoryTest {
     private void dumpGraph() {
         TitanGraph graph = titanGraphService.getTitanGraph();
         for (Vertex v : graph.getVertices()) {
-            // System.out.println("****v = " + GraphUtils.vertexString(v));
-            System.out.println("v = " + v);
+            System.out.println("****v = " + GraphHelper.vertexString(v));
             for (Edge e : v.getEdges(Direction.OUT)) {
-                System.out.println("****e = " + GraphUtils.edgeString(e));
+                System.out.println("****e = " + GraphHelper.edgeString(e));
             }
         }
     }
 
-    @Test(dependsOnMethods = "testSubmitEntity", enabled = false)
+    @Test(dependsOnMethods = "testSubmitEntity")
     public void testGetEntityDefinition() throws Exception {
         ITypedReferenceableInstance entity = repositoryService.getEntityDefinition(guid);
         Assert.assertNotNull(entity);
     }
 
-    @Test (enabled = false)
+    @Test
     public void testGetEntityDefinitionNonExistent() throws Exception {
         ITypedReferenceableInstance entity = repositoryService.getEntityDefinition("blah");
         Assert.assertNull(entity);
     }
 
-    @Test (enabled = false)
+    @Test
     public void testGetEntityList() throws Exception {
         List<String> entityList = repositoryService.getEntityList(ENTITY_TYPE);
         Assert.assertNotNull(entityList);
-        Assert.assertEquals(entityList.size(), 0); // as this is not implemented yet
+        Assert.assertEquals(entityList.size(), 1); // one department
     }
 
-    @Test (enabled = false)
+    @Test
     public void testRawSearch1() throws Exception {
         Referenceable hrDept = createDeptEg1(ts);
         ClassType deptType = ts.getDataType(ClassType.class, "Department");
@@ -123,22 +128,19 @@ public class GraphBackedMetadataRepositoryTest {
 
         guid = repositoryService.createEntity(hrDept2, ENTITY_TYPE);
 
-
         // Query for all Vertices in Graph
         Object r = repositoryService.searchByGremlin("g.V.toList()");
-        //System.out.println(r);
+        System.out.println("search result = " + r);
 
         // Query for all Vertices of a Type
         r = repositoryService.searchByGremlin("g.V.filter{it.typeName == 'Department'}.toList()");
-        //System.out.println(r);
+        System.out.println("search result = " + r);
 
         // Property Query: list all Person names
         r = repositoryService.searchByGremlin("g.V.filter{it.typeName == 'Person'}.'Person.name'.toList()");
-        //System.out.println(r);
+        System.out.println("search result = " + r);
     }
 
-
-
     /*
      * Class Hierarchy is:
      *   Department(name : String, employees : Array[Person])
@@ -167,7 +169,7 @@ public class GraphBackedMetadataRepositoryTest {
         );
 
         HierarchicalTypeDefinition<ClassType> managerTypeDef = createClassTypeDef("Manager",
-                ImmutableList.<String>of("Person"),
+                ImmutableList.of("Person"),
                 new AttributeDefinition("subordinates",
                         String.format("array<%s>", "Person"), Multiplicity.COLLECTION, false,
                         "manager")
@@ -180,18 +182,8 @@ public class GraphBackedMetadataRepositoryTest {
         );
 
         ts.defineTypes(ImmutableList.<StructTypeDefinition>of(),
-                ImmutableList.<HierarchicalTypeDefinition<TraitType>>of(securityClearanceTypeDef),
-                ImmutableList.<HierarchicalTypeDefinition<ClassType>>of(deptTypeDef, personTypeDef,
-                        managerTypeDef));
-
-        ImmutableList<HierarchicalType> types = ImmutableList.of(
-                ts.getDataType(HierarchicalType.class, "SecurityClearance"),
-                ts.getDataType(ClassType.class, "Department"),
-                ts.getDataType(ClassType.class, "Person"),
-                ts.getDataType(ClassType.class, "Manager")
-        );
-
-        repo.defineTypes(types);
+                ImmutableList.of(securityClearanceTypeDef),
+                ImmutableList.of(deptTypeDef, personTypeDef, managerTypeDef));
     }
 
     protected Referenceable createDeptEg1(TypeSystem ts) throws MetadataException {
@@ -207,14 +199,15 @@ public class GraphBackedMetadataRepositoryTest {
 
         john.set("manager", jane);
 
-        hrDept.set("employees", ImmutableList.<Referenceable>of(john, jane));
+        hrDept.set("employees", ImmutableList.of(john, jane));
 
-        jane.set("subordinates", ImmutableList.<Referenceable>of(john));
+        jane.set("subordinates", ImmutableList.of(john));
 
         jane.getTrait("SecurityClearance").set("level", 1);
 
         ClassType deptType = ts.getDataType(ClassType.class, "Department");
         ITypedReferenceableInstance hrDept2 = deptType.convert(hrDept, Multiplicity.REQUIRED);
+        Assert.assertNotNull(hrDept2);
 
         return hrDept;
     }
diff --git a/typesystem/src/main/java/org/apache/hadoop/metadata/Referenceable.java b/typesystem/src/main/java/org/apache/hadoop/metadata/Referenceable.java
index 21bc13d..e519fbc 100644
--- a/typesystem/src/main/java/org/apache/hadoop/metadata/Referenceable.java
+++ b/typesystem/src/main/java/org/apache/hadoop/metadata/Referenceable.java
@@ -52,6 +52,13 @@ public class Referenceable extends Struct implements IReferenceableInstance {
         traits = ImmutableMap.of();
     }
 
+    public Referenceable(String guid, String typeName, Map<String, Object> values) {
+        super(typeName, values);
+        id = new Id(guid, 0, typeName);
+        traitNames = ImmutableList.of();
+        traits = ImmutableMap.of();
+    }
+
     @Override
     public ImmutableList<String> getTraits() {
         return traitNames;
diff --git a/typesystem/src/main/java/org/apache/hadoop/metadata/storage/Id.java b/typesystem/src/main/java/org/apache/hadoop/metadata/storage/Id.java
index b6e761e..bea9501 100644
--- a/typesystem/src/main/java/org/apache/hadoop/metadata/storage/Id.java
+++ b/typesystem/src/main/java/org/apache/hadoop/metadata/storage/Id.java
@@ -19,7 +19,6 @@
 package org.apache.hadoop.metadata.storage;
 
 import com.google.common.collect.ImmutableList;
-import org.apache.hadoop.metadata.IReferenceableInstance;
 import org.apache.hadoop.metadata.IStruct;
 import org.apache.hadoop.metadata.ITypedReferenceableInstance;
 import org.apache.hadoop.metadata.MetadataException;
@@ -28,6 +27,7 @@ import org.apache.hadoop.metadata.types.FieldMapping;
 import java.math.BigDecimal;
 import java.math.BigInteger;
 import java.util.Date;
+import java.util.UUID;
 
 public class Id implements ITypedReferenceableInstance {
 
@@ -58,6 +58,16 @@ public class Id implements ITypedReferenceableInstance {
         }
     }
 
+    public boolean isAssigned() {
+        try {
+            UUID.fromString(id);
+        } catch (IllegalArgumentException e) {
+            return false;
+        }
+
+        return true;
+    }
+
     public String toString() {
         return String.format("(type: %s, id: %s)", className, isUnassigned() ? "<unassigned>" : "" + id);
     }
diff --git a/webapp/src/main/resources/application.properties b/webapp/src/main/resources/application.properties
new file mode 100644
index 0000000..bbbe31a
--- /dev/null
+++ b/webapp/src/main/resources/application.properties
@@ -0,0 +1,22 @@
+#
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements.  See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership.  The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License.  You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+# GraphService implementation
+metadata.graph.impl.class=org.apache.hadoop.metadata.repository.graph.TitanGraphService
+
+metadata.enableTLS=false
diff --git a/webapp/src/main/resources/graph.properties b/webapp/src/main/resources/graph.properties
new file mode 100644
index 0000000..b5759aa
--- /dev/null
+++ b/webapp/src/main/resources/graph.properties
@@ -0,0 +1,7 @@
+storage.backend=inmemory
+
+# Graph Search Index
+index.search.backend=elasticsearch
+index.search.directory=webapp/target/data/es
+index.search.elasticsearch.client-only=false
+index.search.elasticsearch.local-mode=true
\ No newline at end of file
diff --git a/webapp/src/main/resources/indexer.properties b/webapp/src/main/resources/indexer.properties
new file mode 100644
index 0000000..27ca0bd
--- /dev/null
+++ b/webapp/src/main/resources/indexer.properties
@@ -0,0 +1,40 @@
+#
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements.  See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership.  The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License.  You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+# This is formatted as follows:
+# metadata.indexer.vertex.property.name.<index>=<Property Name>
+# metadata.indexer.vertex.property.type.<index>=<Data Type>
+
+metadata.indexer.vertex.property.name.0=DESC
+metadata.indexer.vertex.property.type.0=String
+metadata.indexer.vertex.property.name.1=DB_LOCATION_URI
+metadata.indexer.vertex.property.type.1=String
+metadata.indexer.vertex.property.name.2=NAME
+metadata.indexer.vertex.property.type.2=String
+metadata.indexer.vertex.property.name.3=OWNER_NAME
+metadata.indexer.vertex.property.type.3=String
+metadata.indexer.vertex.property.name.4=TBL_NAME
+metadata.indexer.vertex.property.type.4=String
+metadata.indexer.vertex.property.name.5=COMMENT
+metadata.indexer.vertex.property.type.5=String
+metadata.indexer.vertex.property.name.6=COLUMN_NAME
+metadata.indexer.vertex.property.type.6=String
+metadata.indexer.vertex.property.name.7=TYPE_NAME
+metadata.indexer.vertex.property.type.7=String
+
+
diff --git a/webapp/src/main/resources/log4j.xml b/webapp/src/main/resources/log4j.xml
new file mode 100644
index 0000000..19fd864
--- /dev/null
+++ b/webapp/src/main/resources/log4j.xml
@@ -0,0 +1,66 @@
+<?xml version="1.0" encoding="UTF-8" ?>
+<!--
+  ~ Licensed to the Apache Software Foundation (ASF) under one
+  ~ or more contributor license agreements.  See the NOTICE file
+  ~ distributed with this work for additional information
+  ~ regarding copyright ownership.  The ASF licenses this file
+  ~ to you under the Apache License, Version 2.0 (the
+  ~ "License"); you may not use this file except in compliance
+  ~ with the License.  You may obtain a copy of the License at
+  ~
+  ~     http://www.apache.org/licenses/LICENSE-2.0
+  ~
+  ~ Unless required by applicable law or agreed to in writing, software
+  ~ distributed under the License is distributed on an "AS IS" BASIS,
+  ~ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+  ~ See the License for the specific language governing permissions and
+  ~ limitations under the License.
+  -->
+
+<!DOCTYPE log4j:configuration SYSTEM "log4j.dtd">
+
+<log4j:configuration xmlns:log4j="http://jakarta.apache.org/log4j/">
+    <appender name="console" class="org.apache.log4j.ConsoleAppender">
+        <param name="Target" value="System.out"/>
+        <layout class="org.apache.log4j.PatternLayout">
+            <param name="ConversionPattern" value="%d %-5p - [%t:%x] ~ %m (%c{1}:%L)%n"/>
+        </layout>
+    </appender>
+
+    <appender name="FILE" class="org.apache.log4j.DailyRollingFileAppender">
+        <param name="File" value="${user.dir}/target/logs/application.log"/>
+        <param name="Append" value="true"/>
+        <param name="Threshold" value="debug"/>
+        <layout class="org.apache.log4j.PatternLayout">
+            <param name="ConversionPattern" value="%d %-5p - [%t:%x] ~ %m (%c{1}:%L)%n"/>
+        </layout>
+    </appender>
+
+    <appender name="AUDIT" class="org.apache.log4j.DailyRollingFileAppender">
+        <param name="File" value="${user.dir}/target/logs/audit.log"/>
+        <param name="Append" value="true"/>
+        <param name="Threshold" value="debug"/>
+        <layout class="org.apache.log4j.PatternLayout">
+            <param name="ConversionPattern" value="%d %x %m%n"/>
+        </layout>
+    </appender>
+
+    <logger name="org.apache.hadoop.metadata" additivity="false">
+        <level value="debug"/>
+        <appender-ref ref="console"/>
+        <appender-ref ref="FILE"/>
+    </logger>
+
+    <logger name="AUDIT">
+        <level value="info"/>
+        <appender-ref ref="console"/>
+        <appender-ref ref="AUDIT"/>
+    </logger>
+
+    <root>
+        <priority value="info"/>
+        <appender-ref ref="console"/>
+        <appender-ref ref="FILE"/>
+    </root>
+
+</log4j:configuration>
diff --git a/webapp/src/test/java/org/apache/hadoop/metadata/web/resources/BaseResourceIT.java b/webapp/src/test/java/org/apache/hadoop/metadata/web/resources/BaseResourceIT.java
index e72ca17..a527e5d 100644
--- a/webapp/src/test/java/org/apache/hadoop/metadata/web/resources/BaseResourceIT.java
+++ b/webapp/src/test/java/org/apache/hadoop/metadata/web/resources/BaseResourceIT.java
@@ -34,6 +34,12 @@ public abstract class BaseResourceIT {
         service = client.resource(UriBuilder.fromUri(baseUrl).build());
     }
 
+    protected AttributeDefinition createUniqueRequiredAttrDef(String name,
+                                                              IDataType dataType) {
+        return new AttributeDefinition(name, dataType.getName(),
+                Multiplicity.REQUIRED, false, true, true, null);
+    }
+
     protected AttributeDefinition createRequiredAttrDef(String name,
                                                         IDataType dataType) {
         return new AttributeDefinition(name, dataType.getName(),
diff --git a/webapp/src/test/java/org/apache/hadoop/metadata/web/resources/EntityJerseyResourceIT.java b/webapp/src/test/java/org/apache/hadoop/metadata/web/resources/EntityJerseyResourceIT.java
index 7fd56f2..0f5cca6 100644
--- a/webapp/src/test/java/org/apache/hadoop/metadata/web/resources/EntityJerseyResourceIT.java
+++ b/webapp/src/test/java/org/apache/hadoop/metadata/web/resources/EntityJerseyResourceIT.java
@@ -21,14 +21,19 @@ package org.apache.hadoop.metadata.web.resources;
 import com.google.common.collect.ImmutableList;
 import com.sun.jersey.api.client.ClientResponse;
 import com.sun.jersey.api.client.WebResource;
+import org.apache.hadoop.metadata.ITypedInstance;
 import org.apache.hadoop.metadata.ITypedReferenceableInstance;
+import org.apache.hadoop.metadata.ITypedStruct;
 import org.apache.hadoop.metadata.Referenceable;
 import org.apache.hadoop.metadata.Struct;
 import org.apache.hadoop.metadata.json.Serialization$;
 import org.apache.hadoop.metadata.json.TypesSerialization;
 import org.apache.hadoop.metadata.types.AttributeDefinition;
+import org.apache.hadoop.metadata.types.AttributeInfo;
 import org.apache.hadoop.metadata.types.ClassType;
 import org.apache.hadoop.metadata.types.DataTypes;
+import org.apache.hadoop.metadata.types.EnumTypeDefinition;
+import org.apache.hadoop.metadata.types.EnumValue;
 import org.apache.hadoop.metadata.types.HierarchicalTypeDefinition;
 import org.apache.hadoop.metadata.types.Multiplicity;
 import org.apache.hadoop.metadata.types.StructTypeDefinition;
@@ -126,20 +131,33 @@ public class EntityJerseyResourceIT extends BaseResourceIT {
         LOG.debug("tableInstanceAfterGet = " + definition);
 
         // todo - this fails with type error, strange
-        // ITypedReferenceableInstance tableInstanceAfterGet = Serialization$.MODULE$.fromJson(definition);
-        // Assert.assertTrue(areEqual(tableInstance, tableInstanceAfterGet));
+        ITypedReferenceableInstance tableInstanceAfterGet = Serialization$.MODULE$.fromJson(definition);
+        Assert.assertTrue(areEqual(tableInstance, tableInstanceAfterGet));
     }
 
+    private boolean areEqual(ITypedInstance actual,
+                             ITypedInstance expected) throws Exception {
 /*
-    private boolean areEqual(ITypedReferenceableInstance actual,
-                             ITypedReferenceableInstance expected) throws Exception {
+        Assert.assertEquals(Serialization$.MODULE$.toJson(actual),
+                Serialization$.MODULE$.toJson(expected));
+*/
+
         for (AttributeInfo attributeInfo : actual.fieldMapping().fields.values()) {
-            Assert.assertEquals(actual.get(attributeInfo.name), expected.get(attributeInfo.name));
+            final DataTypes.TypeCategory typeCategory = attributeInfo.dataType().getTypeCategory();
+            if (typeCategory == DataTypes.TypeCategory.STRUCT
+                    || typeCategory == DataTypes.TypeCategory.TRAIT
+                    || typeCategory == DataTypes.TypeCategory.CLASS) {
+                areEqual((ITypedStruct) actual.get(attributeInfo.name),
+                        (ITypedStruct) expected.get(attributeInfo.name));
+            } else if (typeCategory == DataTypes.TypeCategory.PRIMITIVE
+                    || typeCategory == DataTypes.TypeCategory.ENUM) {
+                Assert.assertEquals(actual.get(attributeInfo.name),
+                        expected.get(attributeInfo.name));
+            }
         }
 
         return true;
     }
-*/
 
     @Test
     public void testGetInvalidEntityDefinition() throws Exception {
@@ -220,6 +238,7 @@ public class EntityJerseyResourceIT extends BaseResourceIT {
                         createRequiredAttrDef("description", DataTypes.STRING_TYPE));
         typeSystem.defineClassType(testTypeDefinition);
 
+        @SuppressWarnings("unchecked")
         String typesAsJSON = TypesSerialization.toJson(typeSystem,
                 Arrays.asList(new String[]{"test"}));
         sumbitType(typesAsJSON, "test");
@@ -229,7 +248,7 @@ public class EntityJerseyResourceIT extends BaseResourceIT {
         HierarchicalTypeDefinition<ClassType> databaseTypeDefinition =
                 createClassTypeDef(DATABASE_TYPE,
                         ImmutableList.<String>of(),
-                        createRequiredAttrDef("name", DataTypes.STRING_TYPE),
+                        createUniqueRequiredAttrDef("name", DataTypes.STRING_TYPE),
                         createRequiredAttrDef("description", DataTypes.STRING_TYPE));
 
         StructTypeDefinition structTypeDefinition =
@@ -239,12 +258,22 @@ public class EntityJerseyResourceIT extends BaseResourceIT {
                         createRequiredAttrDef("serde", DataTypes.STRING_TYPE)
                         });
 
+        EnumValue values[] = {
+                new EnumValue("MANAGED", 1),
+                new EnumValue("EXTERNAL", 2),
+        };
+
+        EnumTypeDefinition enumTypeDefinition =  new EnumTypeDefinition("tableType", values);
+        typeSystem.defineEnumType(enumTypeDefinition);
+
         HierarchicalTypeDefinition<ClassType> tableTypeDefinition =
                 createClassTypeDef(TABLE_TYPE,
                         ImmutableList.<String>of(),
-                        createRequiredAttrDef("name", DataTypes.STRING_TYPE),
+                        createUniqueRequiredAttrDef("name", DataTypes.STRING_TYPE),
                         createRequiredAttrDef("description", DataTypes.STRING_TYPE),
                         createRequiredAttrDef("type", DataTypes.STRING_TYPE),
+                        new AttributeDefinition("tableType", "tableType",
+                                Multiplicity.REQUIRED, false, null),
                         new AttributeDefinition("serde1",
                                 "serdeType", Multiplicity.REQUIRED, false, null),
                         new AttributeDefinition("serde2",
@@ -264,8 +293,14 @@ public class EntityJerseyResourceIT extends BaseResourceIT {
     }
 
     private void submitTypes() throws Exception {
+        @SuppressWarnings("unchecked")
         String typesAsJSON = TypesSerialization.toJson(typeSystem,
-                Arrays.asList(new String[]{DATABASE_TYPE, TABLE_TYPE, "serdeType", "classification"}));
+                Arrays.asList(new String[]{
+                        "tableType",
+                        DATABASE_TYPE,
+                        TABLE_TYPE,
+                        "serdeType",
+                        "classification"}));
         sumbitType(typesAsJSON, TABLE_TYPE);
     }
 
@@ -298,6 +333,7 @@ public class EntityJerseyResourceIT extends BaseResourceIT {
         tableInstance.set("name", TABLE_NAME);
         tableInstance.set("description", "bar table");
         tableInstance.set("type", "managed");
+        tableInstance.set("tableType", 1); // enum
         tableInstance.set("database", databaseInstance);
 
         Struct traitInstance = (Struct) tableInstance.getTrait("classification");
diff --git a/webapp/src/test/resources/application.properties b/webapp/src/test/resources/application.properties
deleted file mode 100644
index dfa5bba..0000000
--- a/webapp/src/test/resources/application.properties
+++ /dev/null
@@ -1,25 +0,0 @@
-#
-# Licensed to the Apache Software Foundation (ASF) under one
-# or more contributor license agreements.  See the NOTICE file
-# distributed with this work for additional information
-# regarding copyright ownership.  The ASF licenses this file
-# to you under the Apache License, Version 2.0 (the
-# "License"); you may not use this file except in compliance
-# with the License.  You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-
-# GraphService implementation
-metadata.graph.impl.class=org.apache.hadoop.metadata.repository.graph.TitanGraphService
-
-# Graph Storage
-metadata.graph.storage.backend=inmemory
-
-metadata.enableTLS=false
diff --git a/webapp/src/test/resources/graph.properties b/webapp/src/test/resources/graph.properties
deleted file mode 100644
index 07a98d3..0000000
--- a/webapp/src/test/resources/graph.properties
+++ /dev/null
@@ -1,7 +0,0 @@
-storage.backend=inmemory
-
-# Graph Search Index
-index.search.backend=elasticsearch
-index.search.directory=target/data/es
-index.search.elasticsearch.client-only=false
-index.search.elasticsearch.local-mode=true
\ No newline at end of file
diff --git a/webapp/src/test/resources/indexer.properties b/webapp/src/test/resources/indexer.properties
deleted file mode 100644
index 27ca0bd..0000000
--- a/webapp/src/test/resources/indexer.properties
+++ /dev/null
@@ -1,40 +0,0 @@
-#
-# Licensed to the Apache Software Foundation (ASF) under one
-# or more contributor license agreements.  See the NOTICE file
-# distributed with this work for additional information
-# regarding copyright ownership.  The ASF licenses this file
-# to you under the Apache License, Version 2.0 (the
-# "License"); you may not use this file except in compliance
-# with the License.  You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-
-# This is formatted as follows:
-# metadata.indexer.vertex.property.name.<index>=<Property Name>
-# metadata.indexer.vertex.property.type.<index>=<Data Type>
-
-metadata.indexer.vertex.property.name.0=DESC
-metadata.indexer.vertex.property.type.0=String
-metadata.indexer.vertex.property.name.1=DB_LOCATION_URI
-metadata.indexer.vertex.property.type.1=String
-metadata.indexer.vertex.property.name.2=NAME
-metadata.indexer.vertex.property.type.2=String
-metadata.indexer.vertex.property.name.3=OWNER_NAME
-metadata.indexer.vertex.property.type.3=String
-metadata.indexer.vertex.property.name.4=TBL_NAME
-metadata.indexer.vertex.property.type.4=String
-metadata.indexer.vertex.property.name.5=COMMENT
-metadata.indexer.vertex.property.type.5=String
-metadata.indexer.vertex.property.name.6=COLUMN_NAME
-metadata.indexer.vertex.property.type.6=String
-metadata.indexer.vertex.property.name.7=TYPE_NAME
-metadata.indexer.vertex.property.type.7=String
-
-
--
libgit2 0.27.1