Commit ee143338 by Shwetha GS

Graph-based type store

parent 82aeb21d
......@@ -31,10 +31,6 @@ target
.externalToolBuilders
maven-eclipse.xml
#ActiveMQ
activemq-data
build
#log files
logs
*.log
......@@ -174,7 +174,6 @@
<configuration>
<!--debug>true</debug -->
<xmlOutput>true</xmlOutput>
<excludeFilterFile>${basedir}/../src/build/findbugs-exclude.xml</excludeFilterFile>
<failOnError>false</failOnError>
</configuration>
<executions>
......@@ -268,23 +267,6 @@
<dependencyManagement>
<dependencies>
<!-- hadoop -->
<dependency>
<groupId>org.apache.hadoop</groupId>
<artifactId>hadoop-client</artifactId>
<version>${hadoop.version}</version>
<exclusions>
<exclusion>
<groupId>org.glassfish</groupId>
<artifactId>javax.servlet</artifactId>
</exclusion>
<exclusion>
<groupId>org.apache.hadoop</groupId>
<artifactId>hadoop-annotations</artifactId>
</exclusion>
</exclusions>
</dependency>
<!-- hive -->
<dependency>
<groupId>org.apache.hive</groupId>
......@@ -336,6 +318,12 @@
<version>${falcon.version}</version>
</dependency>
<dependency>
<groupId>org.mortbay.jetty</groupId>
<artifactId>jsp-2.1</artifactId>
<version>6.0.0</version>
</dependency>
<!-- Logging -->
<dependency>
<groupId>org.slf4j</groupId>
......@@ -943,11 +931,10 @@
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-surefire-plugin</artifactId>
<version>2.7.2</version>
<!--
<configuration>
<skipTests>true</skipTests>
<forkMode>always</forkMode>
</configuration>
-->
<dependencies>
<dependency>
<groupId>org.apache.maven.surefire</groupId>
......@@ -968,10 +955,13 @@
<version>2.16</version>
<configuration>
<redirectTestOutputToFile>true</redirectTestOutputToFile>
<forkMode>always</forkMode>
<argLine>-Djava.security.krb5.realm= -Djava.security.krb5.kdc=
-Dhadoop.tmp.dir=${project.build.directory}/tmp-hadoop-${user.name}
</argLine>
<parallel>none</parallel>
<reuseForks>false</reuseForks>
<forkCount>1</forkCount>
<threadCount>1</threadCount>
</configuration>
<executions>
<execution>
......
......@@ -40,11 +40,6 @@
</dependency>
<dependency>
<groupId>org.apache.hadoop</groupId>
<artifactId>hadoop-client</artifactId>
</dependency>
<dependency>
<groupId>org.slf4j</groupId>
<artifactId>slf4j-log4j12</artifactId>
</dependency>
......
......@@ -30,9 +30,11 @@ import org.apache.hadoop.metadata.repository.graph.GraphBackedSearchIndexer;
import org.apache.hadoop.metadata.repository.graph.GraphProvider;
import org.apache.hadoop.metadata.repository.graph.GraphService;
import org.apache.hadoop.metadata.repository.graph.GraphServiceConfigurator;
import org.apache.hadoop.metadata.repository.typestore.GraphTypeStore;
import org.apache.hadoop.metadata.repository.graph.TitanGraphProvider;
import org.apache.hadoop.metadata.services.DefaultMetadataService;
import org.apache.hadoop.metadata.services.MetadataService;
import org.apache.hadoop.metadata.repository.typestore.ITypeStore;
/**
* Guice module for Repository module.
......@@ -44,6 +46,7 @@ public class RepositoryMetadataModule extends com.google.inject.AbstractModule {
// MetadataRepositoryService implementation class
private Class<? extends MetadataRepository> metadataRepoClass;
private Class<? extends ITypeStore> typeStore;
private Class<? extends MetadataService> metadataService;
private Class<? extends DiscoveryService> discoveryService;
private Class<? extends SearchIndexer> searchIndexer;
......@@ -54,6 +57,7 @@ public class RepositoryMetadataModule extends com.google.inject.AbstractModule {
// get the impl classes for the repo and the graph service
this.graphServiceClass = gsp.getImplClass();
this.metadataRepoClass = GraphBackedMetadataRepository.class;
this.typeStore = GraphTypeStore.class;
this.metadataService = DefaultMetadataService.class;
this.discoveryService = GraphBackedDiscoveryService.class;
this.searchIndexer = GraphBackedSearchIndexer.class;
......@@ -71,6 +75,9 @@ public class RepositoryMetadataModule extends com.google.inject.AbstractModule {
// bind the MetadataRepositoryService interface to an implementation
bind(MetadataRepository.class).to(metadataRepoClass);
// bind the ITypeStore interface to an implementation
bind(ITypeStore.class).to(typeStore);
// bind the GraphService interface to an implementation
bind(GraphService.class).to(graphServiceClass);
......
......@@ -156,4 +156,12 @@ public interface MetadataRepository {
*/
void deleteTrait(String guid,
String traitNameToBeDeleted) throws RepositoryException;
/**
* Adds a property to the entity that corresponds to the GUID
* @param guid entity id
* @param property property name
* @param value property value
*/
void addProperty(String guid, String property, String value) throws RepositoryException;
}
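For orientation, a hedged usage sketch of this contract; the GUIDs and attribute names below are placeholders, not values from this commit. Primitive attributes take the raw value, while CLASS-typed attributes take the referenced entity's GUID, mirroring the GraphBackedMetadataRepository implementation further down:

// Minimal sketch, assuming 'repository' is the Guice-injected MetadataRepository
// and that the GUIDs/attribute names exist; both are hypothetical here.
void addPropertyExamples(MetadataRepository repository) throws RepositoryException {
    // PRIMITIVE attribute: the value is stored as-is on the instance vertex
    repository.addProperty("table-instance-guid", "description", "nightly ETL table");
    // CLASS attribute: the value is treated as the GUID of the referenced entity
    repository.addProperty("table-instance-guid", "database", "database-instance-guid");
}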
......@@ -33,6 +33,13 @@ public final class Constants {
public static final String ENTITY_TYPE_INDEX = "type_index";
/**
* Properties for type store graph
*/
public static final String TYPE_CATEGORY_PROPERTY_KEY = "type.category";
public static final String VERTEX_TYPE_PROPERTY_KEY = "type";
public static final String TYPENAME_PROPERTY_KEY = "type.name";
/**
* Trait names property key and index name.
*/
public static final String TRAIT_NAMES_PROPERTY_KEY = "traitNames";
......
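These keys are what let type vertices share the graph with entity vertices; a hedged sketch of isolating a type vertex by name, mirroring the query GraphTypeStore.findVertex issues below ('titanGraph' is assumed in scope and "OrgLevel" is a hypothetical type name):

Iterator results = titanGraph.query()
        .has(Constants.VERTEX_TYPE_PROPERTY_KEY, "type.")   // marks type-system vertices
        .has(Constants.TYPENAME_PROPERTY_KEY, "OrgLevel")
        .vertices().iterator();
Vertex typeVertex = results.hasNext() ? (Vertex) results.next() : null;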
......@@ -297,6 +297,46 @@ public class GraphBackedMetadataRepository implements MetadataRepository {
}
}
@Override
public void addProperty(String guid, String property, String value) throws RepositoryException {
LOG.info("Adding property {} for entity guid {}", property, guid);
try {
titanGraph.rollback(); // clean up before starting a query
Vertex instanceVertex = GraphHelper.findVertexByGUID(titanGraph, guid);
if (instanceVertex == null) {
throw new RepositoryException("Could not find a vertex for guid " + guid);
}
LOG.debug("Found a vertex {} for guid {}", instanceVertex, guid);
String typeName = instanceVertex.getProperty(Constants.ENTITY_TYPE_PROPERTY_KEY);
ClassType type = typeSystem.getDataType(ClassType.class, typeName);
AttributeInfo attributeInfo = type.fieldMapping.fields.get(property);
if (attributeInfo == null) {
throw new MetadataException("Invalid property " + property + " for entity " + typeName);
}
DataTypes.TypeCategory attrTypeCategory = attributeInfo.dataType().getTypeCategory();
ITypedReferenceableInstance instance = type.createInstance();
if (attrTypeCategory == DataTypes.TypeCategory.PRIMITIVE) {
instance.set(property, value);
} else if (attrTypeCategory == DataTypes.TypeCategory.CLASS) {
Id id = new Id(value, 0, attributeInfo.dataType().getName());
instance.set(property, id);
} else {
throw new RepositoryException("Update of " + attrTypeCategory + " is not supported");
}
instanceToGraphMapper.mapAttributesToVertex(getIdFromVertex(typeName, instanceVertex),
instance, instanceVertex, new HashMap<Id, Vertex>(), attributeInfo, attributeInfo.dataType());
titanGraph.commit();
} catch (Exception e) {
throw new RepositoryException(e);
} finally {
titanGraph.rollback();
}
}
public Id getIdFromVertex(String dataTypeName, Vertex vertex) {
return new Id(
vertex.<String>getProperty(Constants.GUID_PROPERTY_KEY),
......@@ -383,8 +423,7 @@ public class GraphBackedMetadataRepository implements MetadataRepository {
throw new RepositoryException("TypeSystem error when walking the ObjectGraph", me);
}
List<ITypedReferenceableInstance> newTypedInstances = discoverInstances(
entityProcessor);
List<ITypedReferenceableInstance> newTypedInstances = discoverInstances(entityProcessor);
entityProcessor.createVerticesForClassTypes(newTypedInstances);
return addDiscoveredInstances(typedInstance, entityProcessor, newTypedInstances);
}
......
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.metadata.repository.typestore;
import com.google.common.collect.ImmutableList;
import com.google.inject.Inject;
import com.thinkaurelius.titan.core.TitanGraph;
import com.tinkerpop.blueprints.Direction;
import com.tinkerpop.blueprints.Edge;
import com.tinkerpop.blueprints.Vertex;
import org.apache.commons.lang.StringUtils;
import org.apache.hadoop.metadata.MetadataException;
import org.apache.hadoop.metadata.repository.graph.Constants;
import org.apache.hadoop.metadata.repository.graph.GraphService;
import org.apache.hadoop.metadata.repository.graph.TitanGraphService;
import org.apache.hadoop.metadata.typesystem.TypesDef;
import org.apache.hadoop.metadata.typesystem.types.AttributeDefinition;
import org.apache.hadoop.metadata.typesystem.types.AttributeInfo;
import org.apache.hadoop.metadata.typesystem.types.ClassType;
import org.apache.hadoop.metadata.typesystem.types.DataTypes;
import org.apache.hadoop.metadata.typesystem.types.EnumType;
import org.apache.hadoop.metadata.typesystem.types.EnumTypeDefinition;
import org.apache.hadoop.metadata.typesystem.types.EnumValue;
import org.apache.hadoop.metadata.typesystem.types.HierarchicalType;
import org.apache.hadoop.metadata.typesystem.types.HierarchicalTypeDefinition;
import org.apache.hadoop.metadata.typesystem.types.IDataType;
import org.apache.hadoop.metadata.typesystem.types.StructType;
import org.apache.hadoop.metadata.typesystem.types.StructTypeDefinition;
import org.apache.hadoop.metadata.typesystem.types.TraitType;
import org.apache.hadoop.metadata.typesystem.types.TypeSystem;
import org.apache.hadoop.metadata.typesystem.types.TypeUtils;
import org.codehaus.jettison.json.JSONException;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.util.ArrayList;
import java.util.Iterator;
import java.util.List;
import java.util.Set;
public class GraphTypeStore implements ITypeStore {
private static final String PROPERTY_PREFIX = "type.";
public static final String SUPERTYPE_EDGE_LABEL = PROPERTY_PREFIX + "supertype";
public static final String SUBTYPE_EDGE_LABEL = PROPERTY_PREFIX + "subtype";
private static final ImmutableList<String> META_PROPERTIES = ImmutableList.of(Constants.VERTEX_TYPE_PROPERTY_KEY,
Constants.TYPE_CATEGORY_PROPERTY_KEY, Constants.TYPENAME_PROPERTY_KEY);
private static final Logger LOG = LoggerFactory.getLogger(GraphTypeStore.class);
private final TitanGraph titanGraph;
@Inject
public GraphTypeStore(GraphService graphService) {
titanGraph = ((TitanGraphService)graphService).getTitanGraph();
}
@Override
public void store(TypeSystem typeSystem) throws MetadataException {
store(typeSystem, ImmutableList.copyOf(typeSystem.getTypeNames()));
}
@Override
public void store(TypeSystem typeSystem, ImmutableList<String> typeNames) throws MetadataException {
try {
ImmutableList<String> coreTypes = typeSystem.getCoreTypes();
titanGraph.rollback(); //Cleanup previous state
for (String typeName : typeNames) {
if (!coreTypes.contains(typeName)) {
IDataType dataType = typeSystem.getDataType(IDataType.class, typeName);
LOG.debug("Adding {}.{} to type store", dataType.getTypeCategory(), dataType.getName());
switch (dataType.getTypeCategory()) {
case ENUM:
storeInGraph((EnumType)dataType);
break;
case STRUCT:
StructType structType = (StructType) dataType;
storeInGraph(typeSystem, dataType.getTypeCategory(), dataType.getName(),
ImmutableList.copyOf(structType.infoToNameMap.keySet()), ImmutableList.<String>of());
break;
case TRAIT:
case CLASS:
HierarchicalType type = (HierarchicalType) dataType;
storeInGraph(typeSystem, dataType.getTypeCategory(), dataType.getName(),
type.immediateAttrs, type.superTypes);
break;
default: //Ignore primitive/collection types as they are covered under references
break;
}
}
}
titanGraph.commit();
} finally {
titanGraph.rollback();
}
}
private void storeInGraph(EnumType dataType) {
Vertex vertex = createVertex(dataType.getTypeCategory(), dataType.getName());
for (EnumValue value : dataType.values()) {
String key = getPropertyKey(dataType.getName(), value.value);
vertex.setProperty(key, value.ordinal);
}
}
private String getPropertyKey(String parent, String child) {
return PROPERTY_PREFIX + parent + "." + child;
}
private String getEdgeLabel(String parent, String child) {
return PROPERTY_PREFIX + "edge." + parent + "." + child;
}
private void storeInGraph(TypeSystem typeSystem, DataTypes.TypeCategory category, String typeName,
ImmutableList<AttributeInfo> attributes, ImmutableList<String> superTypes) throws MetadataException {
Vertex vertex = createVertex(category, typeName);
if (attributes != null) {
for (AttributeInfo attribute : attributes) {
String propertyKey = getPropertyKey(typeName, attribute.name);
try {
vertex.setProperty(propertyKey, attribute.toJson());
} catch (JSONException e) {
throw new StorageException(typeName, e);
}
addReferencesForAttribute(typeSystem, vertex, attribute);
}
}
//Add edges for hierarchy
if (superTypes != null) {
for (String superTypeName : superTypes) {
HierarchicalType superType = typeSystem.getDataType(HierarchicalType.class, superTypeName);
Vertex superVertex = createVertex(superType.getTypeCategory(), superTypeName);
addEdge(vertex, superVertex, SUPERTYPE_EDGE_LABEL);
addEdge(superVertex, vertex, SUBTYPE_EDGE_LABEL);
}
}
}
//Add edges for complex attributes
private void addReferencesForAttribute(TypeSystem typeSystem, Vertex vertex, AttributeInfo attribute) throws MetadataException {
ImmutableList<String> coreTypes = typeSystem.getCoreTypes();
List<IDataType> attrDataTypes = new ArrayList<>();
IDataType attrDataType = attribute.dataType();
String vertexTypeName = vertex.getProperty(Constants.TYPENAME_PROPERTY_KEY);
switch (attrDataType.getTypeCategory()) {
case ARRAY:
String attrType = TypeUtils.parseAsArrayType(attrDataType.getName());
IDataType elementType = typeSystem.getDataType(IDataType.class, attrType);
attrDataTypes.add(elementType);
break;
case MAP:
String[] attrTypes = TypeUtils.parseAsMapType(attrDataType.getName());
IDataType keyType = typeSystem.getDataType(IDataType.class, attrTypes[0]);
IDataType valueType = typeSystem.getDataType(IDataType.class, attrTypes[1]);
attrDataTypes.add(keyType);
attrDataTypes.add(valueType);
break;
case ENUM:
case STRUCT:
case CLASS:
attrDataTypes.add(attrDataType);
break;
case PRIMITIVE: //no vertex for primitive type, hence no edge required
break;
default:
throw new IllegalArgumentException("Unhandled type category " + attrDataType.getTypeCategory());
}
for (IDataType attrType : attrDataTypes) {
if (!coreTypes.contains(attrType.getName())) {
Vertex attrVertex = createVertex(attrType.getTypeCategory(), attrType.getName());
String label = getEdgeLabel(vertexTypeName, attribute.name);
addEdge(vertex, attrVertex, label);
}
}
}
private void addEdge(Vertex fromVertex, Vertex toVertex, String label) {
LOG.debug("Adding edge from {} to {} with label {}" + toString(fromVertex), toString(toVertex), label);
titanGraph.addEdge(null, fromVertex, toVertex, label);
}
@Override
public TypesDef restore() throws MetadataException {
//Get all vertices for type system
Iterator vertices = titanGraph.query().has(Constants.VERTEX_TYPE_PROPERTY_KEY, PROPERTY_PREFIX).vertices().iterator();
ImmutableList.Builder<EnumTypeDefinition> enums = ImmutableList.builder();
ImmutableList.Builder<StructTypeDefinition> structs = ImmutableList.builder();
ImmutableList.Builder<HierarchicalTypeDefinition<ClassType>> classTypes = ImmutableList.builder();
ImmutableList.Builder<HierarchicalTypeDefinition<TraitType>> traits = ImmutableList.builder();
while (vertices.hasNext()) {
Vertex vertex = (Vertex) vertices.next();
DataTypes.TypeCategory typeCategory = vertex.getProperty(Constants.TYPE_CATEGORY_PROPERTY_KEY);
String typeName = vertex.getProperty(Constants.TYPENAME_PROPERTY_KEY);
switch(typeCategory) {
case ENUM:
enums.add(getEnumType(vertex));
break;
case STRUCT:
AttributeDefinition[] attributes = getAttributes(vertex);
structs.add(new StructTypeDefinition(typeName, attributes));
break;
case CLASS:
ImmutableList<String> superTypes = getSuperTypes(vertex);
attributes = getAttributes(vertex);
classTypes.add(new HierarchicalTypeDefinition(ClassType.class, typeName, superTypes, attributes));
break;
case TRAIT:
superTypes = getSuperTypes(vertex);
attributes = getAttributes(vertex);
traits.add(new HierarchicalTypeDefinition(TraitType.class, typeName, superTypes, attributes));
break;
default:
throw new IllegalArgumentException("Unhandled type category " + typeCategory);
}
}
return TypeUtils.getTypesDef(enums.build(), structs.build(), traits.build(), classTypes.build());
}
private EnumTypeDefinition getEnumType(Vertex vertex) {
String typeName = vertex.getProperty(Constants.TYPENAME_PROPERTY_KEY);
List<EnumValue> enumValues = new ArrayList<>();
for (String property : vertex.getPropertyKeys()) {
if (!META_PROPERTIES.contains(property)) {
String enumValue = StringUtils.removeStart(property, PROPERTY_PREFIX + typeName + ".");
enumValues.add(new EnumValue(enumValue, vertex.<Integer>getProperty(property)));
}
}
return new EnumTypeDefinition(typeName, enumValues.toArray(new EnumValue[enumValues.size()]));
}
private ImmutableList<String> getSuperTypes(Vertex vertex) {
List<String> superTypes = new ArrayList<>();
Iterator<Edge> edges = vertex.getEdges(Direction.OUT, SUPERTYPE_EDGE_LABEL).iterator();
while (edges.hasNext()) {
Edge edge = edges.next();
superTypes.add((String) edge.getVertex(Direction.IN).getProperty(Constants.TYPENAME_PROPERTY_KEY));
}
return ImmutableList.copyOf(superTypes);
}
private AttributeDefinition[] getAttributes(Vertex vertex) throws MetadataException {
List<AttributeDefinition> attributes = new ArrayList<>();
for (String property : vertex.getPropertyKeys()) {
if (!META_PROPERTIES.contains(property)) {
try {
attributes.add(AttributeInfo.fromJson((String) vertex.getProperty(property)));
} catch (JSONException e) {
throw new MetadataException(e);
}
}
}
return attributes.toArray(new AttributeDefinition[attributes.size()]);
}
private String toString(Vertex vertex) {
return PROPERTY_PREFIX + vertex.getProperty(Constants.TYPENAME_PROPERTY_KEY);
}
/**
* Find the vertex for the given type category and name
* @param category type category
* @param typeName type name
* @return matching vertex, or null if none exists
*/
private Vertex findVertex(DataTypes.TypeCategory category, String typeName) {
LOG.debug("Finding vertex for ({} - {}), ({} - {})", Constants.TYPE_CATEGORY_PROPERTY_KEY, category,
Constants.TYPENAME_PROPERTY_KEY, typeName);
Iterator results = titanGraph.query().has(Constants.VERTEX_TYPE_PROPERTY_KEY, PROPERTY_PREFIX)
.has(Constants.TYPENAME_PROPERTY_KEY, typeName).vertices().iterator();
Vertex vertex = null;
if (results != null && results.hasNext()) {
//There should be just one vertex with the given typeName
vertex = (Vertex) results.next();
}
return vertex;
}
private Vertex createVertex(DataTypes.TypeCategory category, String typeName) {
Vertex vertex = findVertex(category, typeName);
if (vertex == null) {
LOG.debug("Adding vertex {}{}", PROPERTY_PREFIX, typeName);
vertex = titanGraph.addVertex(null);
vertex.setProperty(Constants.VERTEX_TYPE_PROPERTY_KEY, PROPERTY_PREFIX); //Mark as type vertex
vertex.setProperty(Constants.TYPE_CATEGORY_PROPERTY_KEY, category);
vertex.setProperty(Constants.TYPENAME_PROPERTY_KEY, typeName);
}
return vertex;
}
}
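To make the layout concrete, a hedged sketch of what storeInGraph(EnumType) leaves on the vertex for the OrgLevel enum defined in TestUtils later in this commit: the meta properties come from Constants and each EnumValue becomes one ordinal-valued property (this is exactly what restore() reads back).

// Assumed vertex layout after typeStore.store(ts) for enum OrgLevel:
//   type             -> "type."    (VERTEX_TYPE_PROPERTY_KEY, marks a type vertex)
//   type.category    -> ENUM       (TYPE_CATEGORY_PROPERTY_KEY)
//   type.name        -> "OrgLevel" (TYPENAME_PROPERTY_KEY)
//   type.OrgLevel.L1 -> 1, type.OrgLevel.L2 -> 2
for (String key : typeVertex.getPropertyKeys()) {   // 'typeVertex' assumed found as in findVertex
    System.out.println(key + " = " + typeVertex.getProperty(key));
}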
......@@ -16,71 +16,33 @@
* limitations under the License.
*/
package org.apache.hadoop.metadata.typesystem.types.store;
package org.apache.hadoop.metadata.repository.typestore;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableMap;
import org.apache.hadoop.metadata.MetadataException;
import org.apache.hadoop.metadata.typesystem.TypesDef;
import org.apache.hadoop.metadata.typesystem.json.TypesSerialization$;
import org.apache.hadoop.metadata.typesystem.types.TypeSystem;
import java.util.HashMap;
import java.util.Map;
public abstract class TypeSystemStore {
public interface ITypeStore {
/**
* Persist the type system under namespace - insert or update
* Persist the entire type system - insert or update
* @param typeSystem type system to persist
* @param namespace
* @throws StorageException
*/
public synchronized void store(TypeSystem typeSystem, String namespace)
throws StorageException {
String json = TypesSerialization$.MODULE$.toJson(typeSystem, typeSystem.getTypeNames());
publish(namespace, json);
}
/**
* Restore all type definitions
* @return List of persisted type definitions
* @throws MetadataException
*/
public synchronized ImmutableMap<String, TypesDef> restore() throws MetadataException {
ImmutableList<String> nameSpaces = listNamespaces();
Map<String, TypesDef> typesDefs = new HashMap<>();
for (String namespace : nameSpaces) {
String json = fetch(namespace);
typesDefs.put(namespace, TypesSerialization$.MODULE$.fromJson(json));
}
return ImmutableMap.copyOf(typesDefs);
}
public void store(TypeSystem typeSystem) throws MetadataException;
/**
* Restore specified namespace as type definition
* @param namespace
* @return type definition
* @throws MetadataException
* Persist the given types in the type system - insert or update
* @param typeSystem type system
* @param types types to persist
* @throws StorageException
*/
public synchronized TypesDef restore(String namespace) throws MetadataException {
String json = fetch(namespace);
return TypesSerialization$.MODULE$.fromJson(json);
}
public void store(TypeSystem typeSystem, ImmutableList<String> types) throws MetadataException;
/**
* Delete the specified namespace
* @param namespace
* @throws StorageException
* Restore all type definitions
* @return List of persisted type definitions
* @throws org.apache.hadoop.metadata.MetadataException
*/
public abstract void delete(String namespace) throws StorageException;
//Interfaces for concrete implementations
protected abstract void publish(String namespace, String json) throws StorageException;
protected abstract String fetch(String namespace) throws StorageException;
protected abstract ImmutableList<String> listNamespaces() throws MetadataException;
public TypesDef restore() throws MetadataException;
}
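A hedged round-trip sketch against this interface, assuming the GraphTypeStore binding from RepositoryMetadataModule above supplies the implementation:

@Inject
private ITypeStore typeStore;   // bound to GraphTypeStore via RepositoryMetadataModule

void roundTrip(TypeSystem typeSystem) throws MetadataException {
    typeStore.store(typeSystem);        // persists every non-core type
    TypesDef restored = typeStore.restore();
    typeSystem.reset();                 // same re-registration GraphTypeStoreTest performs
    typeSystem.defineTypes(restored);
}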
......@@ -16,17 +16,17 @@
* limitations under the License.
*/
package org.apache.hadoop.metadata.typesystem.types.store;
package org.apache.hadoop.metadata.repository.typestore;
import org.apache.hadoop.metadata.MetadataException;
public class StorageException extends MetadataException {
public StorageException(String nameSpace) {
super("Failure in typesystem storage - " + nameSpace);
public StorageException(String type) {
super("Failure in typesystem storage for type " + type);
}
public StorageException(String nameSpace, Throwable cause) {
super("Failure in typesystem storage - " + nameSpace, cause);
public StorageException(String type, Throwable cause) {
super("Failure in typesystem storage for type " + type, cause);
}
public StorageException(Throwable cause) {
......
......@@ -19,11 +19,13 @@
package org.apache.hadoop.metadata.services;
import com.google.common.base.Preconditions;
import com.google.common.collect.ImmutableList;
import org.apache.hadoop.metadata.MetadataException;
import org.apache.hadoop.metadata.discovery.SearchIndexer;
import org.apache.hadoop.metadata.listener.EntityChangeListener;
import org.apache.hadoop.metadata.listener.TypesChangeListener;
import org.apache.hadoop.metadata.repository.MetadataRepository;
import org.apache.hadoop.metadata.repository.typestore.ITypeStore;
import org.apache.hadoop.metadata.typesystem.ITypedReferenceableInstance;
import org.apache.hadoop.metadata.typesystem.ITypedStruct;
import org.apache.hadoop.metadata.typesystem.TypesDef;
......@@ -57,10 +59,12 @@ public class DefaultMetadataService implements MetadataService {
private final TypeSystem typeSystem;
private final MetadataRepository repository;
private final ITypeStore typeStore;
@Inject
DefaultMetadataService(MetadataRepository repository,
SearchIndexer searchIndexer) throws MetadataException {
SearchIndexer searchIndexer, ITypeStore typeStore) throws MetadataException {
this.typeStore = typeStore;
this.typeSystem = TypeSystem.getInstance();
this.repository = repository;
......@@ -83,6 +87,8 @@ public class DefaultMetadataService implements MetadataService {
TypesDef typesDef = TypesSerialization.fromJson(typeDefinition);
Map<String, IDataType> typesAdded = typeSystem.defineTypes(typesDef);
//TODO how do we handle transaction - store failure??
typeStore.store(typeSystem, ImmutableList.copyOf(typesAdded.keySet()));
onTypesAddedToRepo(typesAdded);
......@@ -160,7 +166,6 @@ public class DefaultMetadataService implements MetadataService {
final String guid = repository.createEntity(entityInstance, entityType);
onEntityAddedToRepo(entityType, entityInstance);
return guid;
}
......@@ -191,6 +196,15 @@ public class DefaultMetadataService implements MetadataService {
return repository.getEntityList(entityType);
}
@Override
public void addProperty(String guid, String property, String value) throws MetadataException {
Preconditions.checkNotNull(guid, "guid cannot be null");
Preconditions.checkNotNull(property, "property cannot be null");
Preconditions.checkNotNull(value, "property value cannot be null");
repository.addProperty(guid, property, value);
}
private void validateTypeExists(String entityType) throws MetadataException {
Preconditions.checkNotNull(entityType, "entity type cannot be null");
......
......@@ -86,6 +86,14 @@ public interface MetadataService {
*/
List<String> getEntityList(String entityType) throws MetadataException;
/**
* Adds a property to the entity corresponding to the given id (guid)
* @param guid entity id
* @param property property name
* @param value property value
*/
void addProperty(String guid, String property, String value) throws MetadataException;
// Trait management functions
/**
* Gets the list of trait names for a given entity represented by a guid.
......
......@@ -24,6 +24,9 @@ import org.apache.hadoop.metadata.typesystem.Referenceable;
import org.apache.hadoop.metadata.typesystem.types.AttributeDefinition;
import org.apache.hadoop.metadata.typesystem.types.ClassType;
import org.apache.hadoop.metadata.typesystem.types.DataTypes;
import org.apache.hadoop.metadata.typesystem.types.EnumType;
import org.apache.hadoop.metadata.typesystem.types.EnumTypeDefinition;
import org.apache.hadoop.metadata.typesystem.types.EnumValue;
import org.apache.hadoop.metadata.typesystem.types.HierarchicalTypeDefinition;
import org.apache.hadoop.metadata.typesystem.types.Multiplicity;
import org.apache.hadoop.metadata.typesystem.types.StructTypeDefinition;
......@@ -32,6 +35,7 @@ import org.apache.hadoop.metadata.typesystem.types.TypeSystem;
import org.testng.Assert;
import static org.apache.hadoop.metadata.typesystem.types.utils.TypesUtil.createClassTypeDef;
import static org.apache.hadoop.metadata.typesystem.types.utils.TypesUtil.createOptionalAttrDef;
import static org.apache.hadoop.metadata.typesystem.types.utils.TypesUtil.createRequiredAttrDef;
import static org.apache.hadoop.metadata.typesystem.types.utils.TypesUtil.createTraitTypeDef;
......@@ -53,9 +57,14 @@ public final class TestUtils {
*/
public static void defineDeptEmployeeTypes(TypeSystem ts) throws MetadataException {
EnumTypeDefinition orgLevelEnum =
new EnumTypeDefinition("OrgLevel", new EnumValue("L1", 1), new EnumValue("L2", 2));
ts.defineEnumType(orgLevelEnum);
HierarchicalTypeDefinition<ClassType> deptTypeDef =
createClassTypeDef("Department", ImmutableList.<String>of(),
createRequiredAttrDef("name", DataTypes.STRING_TYPE),
createOptionalAttrDef("orgLevel", ts.getDataType(EnumType.class, "OrgLevel")),
new AttributeDefinition("employees",
String.format("array<%s>", "Person"), Multiplicity.COLLECTION, true,
"department")
......
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.metadata.repository.typestore;
import com.thinkaurelius.titan.core.TitanGraph;
import com.tinkerpop.blueprints.Direction;
import com.tinkerpop.blueprints.Edge;
import com.tinkerpop.blueprints.Vertex;
import junit.framework.Assert;
import org.apache.hadoop.metadata.MetadataException;
import org.apache.hadoop.metadata.RepositoryMetadataModule;
import org.apache.hadoop.metadata.TestUtils;
import org.apache.hadoop.metadata.repository.graph.GraphHelper;
import org.apache.hadoop.metadata.repository.graph.TitanGraphService;
import org.apache.hadoop.metadata.typesystem.TypesDef;
import org.apache.hadoop.metadata.typesystem.types.AttributeDefinition;
import org.apache.hadoop.metadata.typesystem.types.ClassType;
import org.apache.hadoop.metadata.typesystem.types.DataTypes;
import org.apache.hadoop.metadata.typesystem.types.EnumTypeDefinition;
import org.apache.hadoop.metadata.typesystem.types.HierarchicalTypeDefinition;
import org.apache.hadoop.metadata.typesystem.types.TraitType;
import org.apache.hadoop.metadata.typesystem.types.TypeSystem;
import org.testng.annotations.BeforeClass;
import org.testng.annotations.Guice;
import org.testng.annotations.Test;
import javax.inject.Inject;
import java.util.List;
@Guice(modules = RepositoryMetadataModule.class)
public class GraphTypeStoreTest {
@Inject
private TitanGraphService titanGraphService;
@Inject
private ITypeStore typeStore;
private TypeSystem ts;
@BeforeClass
public void setUp() throws Exception {
// start the injected graph service
titanGraphService.initialize();
ts = TypeSystem.getInstance();
ts.reset();
TestUtils.defineDeptEmployeeTypes(ts);
}
@Test
public void testStore() throws MetadataException {
typeStore.store(ts);
dumpGraph();
}
private void dumpGraph() {
TitanGraph graph = titanGraphService.getTitanGraph();
for (Vertex v : graph.getVertices()) {
System.out.println("****v = " + GraphHelper.vertexString(v));
for (Edge e : v.getEdges(Direction.OUT)) {
System.out.println("****e = " + GraphHelper.edgeString(e));
}
}
}
@Test (dependsOnMethods = "testStore")
public void testRestore() throws Exception {
TypesDef types = typeStore.restore();
//validate enum
List<EnumTypeDefinition> enumTypes = types.enumTypesAsJavaList();
Assert.assertEquals(1, enumTypes.size());
//validate class
Assert.assertTrue(types.structTypesAsJavaList().isEmpty());
List<HierarchicalTypeDefinition<ClassType>> classTypes = types.classTypesAsJavaList();
Assert.assertEquals(3, classTypes.size());
for (HierarchicalTypeDefinition<ClassType> classType : classTypes) {
ClassType expectedType = ts.getDataType(ClassType.class, classType.typeName);
Assert.assertEquals(expectedType.immediateAttrs.size(), classType.attributeDefinitions.length);
}
//validate trait
List<HierarchicalTypeDefinition<TraitType>> traitTypes = types.traitTypesAsJavaList();
Assert.assertEquals(1, traitTypes.size());
HierarchicalTypeDefinition<TraitType> trait = traitTypes.get(0);
Assert.assertEquals("SecurityClearance", trait.typeName);
Assert.assertEquals(1, trait.attributeDefinitions.length);
AttributeDefinition attribute = trait.attributeDefinitions[0];
Assert.assertEquals("level", attribute.name);
Assert.assertEquals(DataTypes.INT_TYPE.getName(), attribute.dataTypeName);
//validate the new types
ts.reset();
ts.defineTypes(types);
}
}
......@@ -15,11 +15,3 @@
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.metadata.typesystem.types.store;
public class StorageFactory {
public static TypeSystemStore getTypeSystemStore() {
return HdfsStore.getInstance();
}
}
<?xml version="1.0"?>
<!--
Licensed to the Apache Software Foundation (ASF) under one
or more contributor license agreements. See the NOTICE file
distributed with this work for additional information
regarding copyright ownership. The ASF licenses this file
to you under the Apache License, Version 2.0 (the
"License"); you may not use this file except in compliance
with the License. You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
-->
<!DOCTYPE module PUBLIC
"-//Puppy Crawl//DTD Check Configuration 1.2//EN"
"http://www.puppycrawl.com/dtds/configuration_1_2.dtd">
<!--
Checkstyle configuration for Falcon that is based on the sun_checks.xml file
that is bundled with Checkstyle and includes checks for:
- the Java Language Specification at
http://java.sun.com/docs/books/jls/second_edition/html/index.html
- the Sun Code Conventions at http://java.sun.com/docs/codeconv/
- the Javadoc guidelines at
http://java.sun.com/j2se/javadoc/writingdoccomments/index.html
- the JDK Api documentation http://java.sun.com/j2se/docs/api/index.html
- some best practices
Checkstyle is very configurable. Be sure to read the documentation at
http://checkstyle.sf.net (or in your downloaded distribution).
Most Checks are configurable, be sure to consult the documentation.
To completely disable a check, just comment it out or delete it from the file.
Finally, it is worth reading the documentation.
-->
<module name="Checker">
<!-- Checks that a package.html file exists for each package. -->
<!-- See http://checkstyle.sf.net/config_javadoc.html#PackageHtml -->
<!-- module name="PackageHtml"/ -->
<!-- Checks whether files end with a new line. -->
<!-- See http://checkstyle.sf.net/config_misc.html#NewlineAtEndOfFile -->
<module name="NewlineAtEndOfFile"/>
<!-- Checks for Headers -->
<!-- See http://checkstyle.sf.net/config_header.html -->
<module name="Header">
<property name="headerFile" value="${checkstyle.header.file}"/>
<property name="fileExtensions" value="java"/>
</module>
<module name="FileLength"/>
<module name="FileTabCharacter"/>
<module name="TreeWalker">
<!-- Checks for Javadoc comments. -->
<!-- See http://checkstyle.sf.net/config_javadoc.html -->
<module name="JavadocType">
<property name="scope" value="public"/>
<property name="allowMissingParamTags" value="true"/>
</module>
<module name="JavadocStyle"/>
<module name="SuperClone"/>
<module name="SuperFinalize"/>
<!-- Checks for Naming Conventions. -->
<!-- See http://checkstyle.sf.net/config_naming.html -->
<module name="ConstantName"/>
<module name="ClassTypeParameterName">
<property name="format" value="^[A-Z]+$"/>
</module>
<module name="LocalFinalVariableName">
<!--<property name="format" value="^[A-Z][_A-Z0-9]*$"/>-->
</module>
<module name="LocalVariableName"/>
<module name="MemberName"/>
<module name="MethodName"/>
<module name="MethodTypeParameterName">
<property name="format" value="^[A-Z]+$"/>
</module>
<module name="PackageName"/>
<module name="ParameterName"/>
<module name="StaticVariableName"/>
<module name="TypeName"/>
<!-- Checks for imports -->
<!-- See http://checkstyle.sf.net/config_import.html -->
<module name="IllegalImport"/>
<!-- defaults to sun.* packages -->
<module name="RedundantImport"/>
<module name="UnusedImports"/>
<!-- Checks for Size Violations. -->
<!-- See http://checkstyle.sf.net/config_sizes.html -->
<module name="LineLength">
<property name="max" value="120"/>
</module>
<module name="MethodLength"/>
<module name="ParameterNumber"/>
<module name="OuterTypeNumber"/>
<!-- Checks for whitespace -->
<!-- See http://checkstyle.sf.net/config_whitespace.html -->
<module name="GenericWhitespace"/>
<module name="EmptyForIteratorPad"/>
<module name="MethodParamPad"/>
<module name="WhitespaceAround">
<property name="tokens" value="LITERAL_IF"/>
</module>
<module name="NoWhitespaceAfter">
<property name="tokens"
value="BNOT, DEC, DOT, INC, LNOT, UNARY_MINUS, UNARY_PLUS"/>
</module>
<module name="NoWhitespaceBefore"/>
<module name="OperatorWrap"/>
<module name="ParenPad"/>
<module name="TypecastParenPad"/>
<module name="WhitespaceAfter">
<property name="tokens" value="COMMA, SEMI"/>
</module>
<module name="Regexp">
<property name="format" value="[ \t]+$"/>
<property name="illegalPattern" value="true"/>
<property name="message" value="Trailing whitespace"/>
</module>
<!-- Modifier Checks -->
<!-- See http://checkstyle.sf.net/config_modifiers.html -->
<module name="ModifierOrder"/>
<module name="RedundantModifier"/>
<!-- Checks for blocks. You know, those {}'s -->
<!-- See http://checkstyle.sf.net/config_blocks.html -->
<module name="AvoidNestedBlocks"/>
<module name="EmptyBlock">
<!-- catch blocks need a statement or a comment. -->
<property name="option" value="text"/>
<property name="tokens" value="LITERAL_CATCH"/>
</module>
<module name="EmptyBlock">
<!-- all other blocks need a real statement. -->
<property name="option" value="stmt"/>
<property name="tokens" value="LITERAL_DO, LITERAL_ELSE, LITERAL_FINALLY,
LITERAL_IF, LITERAL_FOR, LITERAL_TRY, LITERAL_WHILE, INSTANCE_INIT,
STATIC_INIT"/>
</module>
<module name="LeftCurly"/>
<module name="NeedBraces"/>
<module name="RightCurly"/>
<!-- Checks for common coding problems -->
<!-- See http://checkstyle.sf.net/config_coding.html -->
<!-- module name="AvoidInlineConditionals"/-->
<!-- DoubleCheckedLocking check is no longer required. See http://checkstyle.sourceforge.net/releasenotes.html -->
<!-- module name="DoubleCheckedLocking"/-->
<module name="EmptyStatement"/>
<module name="EqualsHashCode"/>
<module name="StringLiteralEquality"/>
<module name="HiddenField">
<property name="ignoreConstructorParameter" value="true"/>
<property name="ignoreAbstractMethods" value="true"/>
<property name="ignoreSetter" value="true"/>
</module>
<module name="IllegalInstantiation"/>
<module name="InnerAssignment"/>
<module name="MissingSwitchDefault"/>
<module name="RedundantThrows"/>
<module name="SimplifyBooleanExpression"/>
<module name="SimplifyBooleanReturn"/>
<module name="DefaultComesLast"/>
<!-- Checks for class design -->
<!-- See http://checkstyle.sf.net/config_design.html -->
<module name="FinalClass"/>
<module name="HideUtilityClassConstructor"/>
<module name="InterfaceIsType"/>
<module name="VisibilityModifier">
<property name="protectedAllowed" value="true"/>
</module>
<module name="MissingOverride"/>
<!-- Miscellaneous other checks. -->
<!-- See http://checkstyle.sf.net/config_misc.html -->
<module name="ArrayTypeStyle"/>
<module name="ArrayTrailingComma"/>
<!--
This generates too many false-positives on wrapped 'throws' clauses
to be really useful. Disabled for now.
Falcon style is:
* Spaces, not tabs.
* Indent by four spaces.
* Indent by four spaces when wrapping a line.
-->
<module name="Indentation">
<property name="basicOffset" value="4"/>
<property name="caseIndent" value="0"/>
</module>
<module name="TodoComment"/>
<module name="UpperEll"/>
<module name="FileContentsHolder"/>
</module>
<!-- allow warnings to be suppressed -->
<module name="SuppressionCommentFilter">
<property name="offCommentFormat" value="SUSPEND CHECKSTYLE CHECK ParameterNumberCheck|VisibilityModifierCheck|HiddenFieldCheck|MethodName"/>
<property name="onCommentFormat" value="RESUME CHECKSTYLE CHECK ParameterNumberCheck|VisibilityModifierCheck|HiddenFieldCheck|MethodName"/>
<property name="checkFormat" value="ParameterNumberCheck|VisibilityModifierCheck|HiddenFieldCheck|MethodName"/>
</module>
</module>
......@@ -45,11 +45,6 @@
</dependency>
<dependency>
<groupId>org.apache.hadoop</groupId>
<artifactId>hadoop-client</artifactId>
</dependency>
<dependency>
<groupId>org.scala-lang</groupId>
<artifactId>scala-compiler</artifactId>
</dependency>
......
......@@ -35,8 +35,8 @@
<dependencies>
<dependency>
<groupId>org.apache.hadoop</groupId>
<artifactId>hadoop-client</artifactId>
<groupId>org.codehaus.jettison</groupId>
<artifactId>jettison</artifactId>
</dependency>
<dependency>
......
......@@ -19,6 +19,8 @@
package org.apache.hadoop.metadata.typesystem.types;
import org.apache.hadoop.metadata.MetadataException;
import org.codehaus.jettison.json.JSONException;
import org.codehaus.jettison.json.JSONObject;
import java.util.Map;
......@@ -67,4 +69,27 @@ public class AttributeInfo {
", reverseAttributeName='" + reverseAttributeName + '\'' +
'}';
}
public String toJson() throws JSONException {
JSONObject json = new JSONObject();
json.put("name", name);
json.put("multiplicity", multiplicity.toJson());
json.put("isComposite", isComposite);
json.put("isUnique", isUnique);
json.put("isIndexable", isIndexable);
json.put("dataType", dataType.getName());
json.put("reverseAttributeName", reverseAttributeName);
return json.toString();
}
public static AttributeDefinition fromJson(String jsonStr) throws JSONException {
JSONObject json = new JSONObject(jsonStr);
String reverseAttr = null;
if (json.has("reverseAttributeName")) {
reverseAttr = json.getString("reverseAttributeName");
}
return new AttributeDefinition(json.getString("name"), json.getString("dataType"),
Multiplicity.fromJson(json.getString("multiplicity")), json.getBoolean("isComposite"),
json.getBoolean("isUnique"), json.getBoolean("isIndexable"), reverseAttr);
}
}
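A hedged sample of what toJson() emits for a hypothetical required, indexed string attribute; the field names match the puts above, the multiplicity is embedded as a JSON string, and a null reverseAttributeName simply drops out of the object (which is why fromJson guards with json.has):

// Illustrative only; actual key order from Jettison may differ.
// {"name":"name",
//  "multiplicity":"{\"lower\":1,\"upper\":1,\"isUnique\":false}",
//  "isComposite":false,"isUnique":false,"isIndexable":true,
//  "dataType":"string"}
AttributeDefinition attr = AttributeInfo.fromJson(jsonFromVertex);   // 'jsonFromVertex' assumed read off a type vertex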
......@@ -179,6 +179,7 @@ public class ClassType extends HierarchicalType<ClassType, IReferenceableInstanc
ImmutableMap.Builder<String, ITypedStruct> b
= new ImmutableBiMap.Builder<String, ITypedStruct>();
if (traitNames != null) {
for (String t : traitNames) {
TraitType tType = typeSystem.getDataType(TraitType.class, t);
IStruct iTraitObject = r == null ? null : r.getTrait(t);
......@@ -186,6 +187,7 @@ public class ClassType extends HierarchicalType<ClassType, IReferenceableInstanc
tType.convert(iTraitObject, Multiplicity.REQUIRED);
b.put(t, trait);
}
}
return new ReferenceableInstance(id == null ? new Id(getName()) : id,
getName(),
......
......@@ -33,12 +33,12 @@ import java.util.Collection;
import java.util.Date;
import java.util.Iterator;
import java.util.Map;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
public class DataTypes {
public static BooleanType BOOLEAN_TYPE = new BooleanType();
;
public static ByteType BYTE_TYPE = new ByteType();
public static ShortType SHORT_TYPE = new ShortType();
public static IntType INT_TYPE = new IntType();
......@@ -78,7 +78,7 @@ public class DataTypes {
MAP,
STRUCT,
TRAIT,
CLASS;
CLASS
}
public static abstract class PrimitiveType<T> extends AbstractDataType<T> {
......@@ -469,7 +469,6 @@ public class DataTypes {
}
public static class ArrayType extends AbstractDataType<ImmutableCollection<?>> {
private final String nm;
private IDataType elemType;
......
......@@ -18,6 +18,8 @@
package org.apache.hadoop.metadata.typesystem.types;
import com.google.common.collect.ImmutableCollection;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableMap;
import org.apache.hadoop.metadata.MetadataException;
......@@ -52,11 +54,11 @@ public class EnumType extends AbstractDataType<EnumValue> {
if (val != null) {
EnumValue e = null;
if (val instanceof EnumValue) {
e = (EnumValue) val;
e = valueMap.get(((EnumValue)val).value);
} else if ( val instanceof Integer) {
e = ordinalMap.get((Integer)val);
e = ordinalMap.get(val);
} else if ( val instanceof String) {
e = valueMap.get((String)val);
e = valueMap.get(val);
} else if ( val instanceof Number ) {
e = ordinalMap.get(((Number)val).intValue());
}
......@@ -81,4 +83,8 @@ public class EnumType extends AbstractDataType<EnumValue> {
public EnumValue fromValue(String val) {
return valueMap.get(val.trim());
}
public ImmutableCollection<EnumValue> values() {
return valueMap.values();
}
}
......@@ -18,12 +18,18 @@
package org.apache.hadoop.metadata.typesystem.types;
import org.codehaus.jettison.json.JSONException;
import org.codehaus.jettison.json.JSONObject;
import java.util.Map;
public final class Multiplicity {
public static final Multiplicity OPTIONAL = new Multiplicity(0, 1, false);
public static final Multiplicity REQUIRED = new Multiplicity(1, 1, false);
public static final Multiplicity COLLECTION = new Multiplicity(1, Integer.MAX_VALUE, false);
public static final Multiplicity SET = new Multiplicity(1, Integer.MAX_VALUE, true);
public final int lower;
public final int upper;
public final boolean isUnique;
......@@ -41,6 +47,28 @@ public final class Multiplicity {
}
@Override
public boolean equals(Object o) {
if (this == o) return true;
if (o == null || getClass() != o.getClass()) return false;
Multiplicity that = (Multiplicity) o;
if (isUnique != that.isUnique) return false;
if (lower != that.lower) return false;
if (upper != that.upper) return false;
return true;
}
@Override
public int hashCode() {
int result = lower;
result = 31 * result + upper;
result = 31 * result + (isUnique ? 1 : 0);
return result;
}
@Override
public String toString() {
return "Multiplicity{" +
"lower=" + lower +
......@@ -48,4 +76,17 @@ public final class Multiplicity {
", isUnique=" + isUnique +
'}';
}
public String toJson() throws JSONException {
JSONObject json = new JSONObject();
json.put("lower", lower);
json.put("upper", upper);
json.put("isUnique", isUnique);
return json.toString();
}
public static Multiplicity fromJson(String jsonStr) throws JSONException {
JSONObject json = new JSONObject(jsonStr);
return new Multiplicity(json.getInt("lower"), json.getInt("upper"), json.getBoolean("isUnique"));
}
}
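A worked round-trip using the constants declared above (the values follow directly from the declarations, so nothing here is assumed beyond JSON key order):

// Multiplicity.REQUIRED.toJson()   -> {"lower":1,"upper":1,"isUnique":false}
// Multiplicity.COLLECTION.toJson() -> {"lower":1,"upper":2147483647,"isUnique":false}
Multiplicity m = Multiplicity.fromJson(Multiplicity.SET.toJson());
assert m.equals(Multiplicity.SET);   // equals() above makes the round-trip verifiable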
......@@ -27,6 +27,7 @@ import java.lang.reflect.Constructor;
import java.text.DateFormat;
import java.text.SimpleDateFormat;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
import java.util.HashMap;
import java.util.HashSet;
......@@ -38,7 +39,6 @@ import java.util.concurrent.ConcurrentHashMap;
@Singleton
public class TypeSystem {
private static final TypeSystem INSTANCE = new TypeSystem();
public static SimpleDateFormat dateFormat = new SimpleDateFormat("yyyy-MM-dd");
private Map<String, IDataType> types;
......@@ -49,8 +49,9 @@ public class TypeSystem {
*/
private List<String> traitTypes;
private ImmutableList<String> coreTypes;
private TypeSystem() {
public TypeSystem() {
initialize();
}
......@@ -72,6 +73,11 @@ public class TypeSystem {
registerPrimitiveTypes();
registerCoreTypes();
coreTypes = ImmutableList.copyOf(types.keySet());
}
public ImmutableList<String> getCoreTypes() {
return coreTypes;
}
public ImmutableList<String> getTypeNames() {
......@@ -233,7 +239,9 @@ public class TypeSystem {
TransientTypeSystem transientTypes = new TransientTypeSystem(structDefs,
traitDefs,
classDefs);
return transientTypes.defineTypes();
Map<String, IDataType> definedTypes = transientTypes.defineTypes();
// LOG.debug("Defined new types " + Arrays.toString(definedTypes.keySet().toArray(new String[definedTypes.size()])));
return definedTypes;
}
public DataTypes.ArrayType defineArrayType(IDataType elemType) throws MetadataException {
......
......@@ -18,8 +18,11 @@
package org.apache.hadoop.metadata.typesystem.types;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableMap;
import org.apache.hadoop.metadata.MetadataException;
import org.apache.hadoop.metadata.typesystem.TypesDef;
import scala.collection.JavaConversions;
import java.io.IOException;
import java.util.ArrayList;
......@@ -77,4 +80,10 @@ public class TypeUtils {
return ImmutableMap.copyOf(b);
}
public static TypesDef getTypesDef(ImmutableList<EnumTypeDefinition> enums, ImmutableList<StructTypeDefinition> structs,
ImmutableList<HierarchicalTypeDefinition<TraitType>> traits,
ImmutableList<HierarchicalTypeDefinition<ClassType>> classes) {
return new TypesDef(JavaConversions.asScalaBuffer(enums), JavaConversions.asScalaBuffer(structs),
JavaConversions.asScalaBuffer(traits), JavaConversions.asScalaBuffer(classes));
}
}
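getTypesDef is the bridge GraphTypeStore.restore() uses to hand its Java builders to the Scala TypesDef case class; a minimal sketch:

// Sketch: JavaConversions wraps each ImmutableList as a Scala Seq for the TypesDef constructor.
TypesDef empty = TypeUtils.getTypesDef(
        ImmutableList.<EnumTypeDefinition>of(),
        ImmutableList.<StructTypeDefinition>of(),
        ImmutableList.<HierarchicalTypeDefinition<TraitType>>of(),
        ImmutableList.<HierarchicalTypeDefinition<ClassType>>of());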
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.metadata.typesystem.types.store;
import com.google.common.collect.ImmutableList;
import com.google.inject.Singleton;
import org.apache.commons.io.IOUtils;
import org.apache.commons.lang.StringUtils;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FSDataInputStream;
import org.apache.hadoop.fs.FSDataOutputStream;
import org.apache.hadoop.fs.FileStatus;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.fs.PathFilter;
import java.io.IOException;
import java.io.StringReader;
import java.util.ArrayList;
import java.util.List;
@Singleton
public class HdfsStore extends TypeSystemStore {
public static final String LOCATION_PROPERTY = "metadata.typesystem.store.hdfs.location";
private static final Path LOCATION = new Path(System.getProperty(LOCATION_PROPERTY));
private static final Path ARCHIVE_LOCATION = new Path(LOCATION, "ARCHIVE");
private static final PathFilter PATH_FILTER = new PathFilter() {
@Override
public boolean accept(Path path) {
String name = path.getName();
return !name.startsWith(".") && !name.startsWith("_") &&
!name.equals(ARCHIVE_LOCATION.getName());
}
};
private static final String UTF8_ENCODING = "UTF8";
private static final HdfsStore INSTANCE = new HdfsStore();
public static HdfsStore getInstance() {
return INSTANCE;
}
@Override
protected void publish(String namespace, String json) throws StorageException {
FSDataOutputStream stream = null;
FileSystem fs = null;
try {
fs = LOCATION.getFileSystem(new Configuration());
Path jsonFile = new Path(LOCATION, namespace + ".json");
if (fs.exists(jsonFile)) {
//TODO check if the new json is same and skip update?
archive(namespace);
}
mkdir(fs, jsonFile.getParent());
stream = fs.create(jsonFile);
IOUtils.copy(new StringReader(json), stream, UTF8_ENCODING);
} catch (IOException e) {
throw new StorageException(namespace, e);
} finally {
IOUtils.closeQuietly(stream);
closeQuietly(fs);
}
}
@Override
public synchronized void delete(String namespace) throws StorageException {
archive(namespace);
}
private void archive(String namespace) throws StorageException {
FileSystem fs = null;
try {
fs = LOCATION.getFileSystem(new Configuration());
Path jsonFile = new Path(LOCATION, namespace + ".json");
Path archivePath = new Path(ARCHIVE_LOCATION,
jsonFile.getName() + System.currentTimeMillis());
mkdir(fs, archivePath.getParent());
if (!fs.rename(jsonFile, archivePath)) {
throw new StorageException(namespace);
}
} catch (IOException e) {
throw new StorageException(namespace, e);
} finally {
closeQuietly(fs);
}
}
private void mkdir(FileSystem fs, Path path) throws StorageException {
try {
if (!fs.exists(path) && !fs.mkdirs(path)) {
throw new StorageException("Failed to create " + path);
}
} catch (IOException e) {
throw new StorageException(e);
}
}
@Override
protected String fetch(String namespace) throws StorageException {
FileSystem fs = null;
FSDataInputStream stream = null;
try {
fs = LOCATION.getFileSystem(new Configuration());
Path file = new Path(LOCATION, namespace + ".json");
stream = fs.open(file);
return IOUtils.toString(stream, UTF8_ENCODING);
} catch (IOException e) {
throw new StorageException(namespace, e);
} finally {
IOUtils.closeQuietly(stream);
closeQuietly(fs);
}
}
private void closeQuietly(FileSystem fs) throws StorageException {
if (fs != null) {
try {
fs.close();
} catch (IOException e) {
throw new StorageException(e);
}
}
}
@Override
protected ImmutableList<String> listNamespaces() throws StorageException {
FileSystem fs = null;
try {
fs = LOCATION.getFileSystem(new Configuration());
FileStatus[] files = fs.listStatus(LOCATION, PATH_FILTER);
List<String> nameSpaces = new ArrayList<>();
for (FileStatus file : files) {
nameSpaces.add(StringUtils.removeEnd(file.getPath().getName(), ".json"));
}
return ImmutableList.copyOf(nameSpaces);
} catch (IOException e) {
throw new StorageException("list", e);
} finally {
closeQuietly(fs);
}
}
}
......@@ -56,12 +56,12 @@
<logger name="AUDIT" additivity="false">
<level value="debug"/>
<appender-ref ref="console"/>
<appender-ref ref="AUDIT"/>
</logger>
<logger name="METRIC" additivity="false">
<level value="debug"/>
<appender-ref ref="console"/>
<appender-ref ref="METRIC"/>
</logger>
<logger name="org.apache.hadoop.metadata" additivity="false">
......@@ -86,7 +86,7 @@
<root>
<priority value="debug"/>
<appender-ref ref="console"/>
<appender-ref ref="FILE"/>
</root>
</log4j:configuration>
......@@ -18,13 +18,16 @@
package org.apache.hadoop.metadata.typesystem
import com.google.common.collect.ImmutableList
import org.apache.hadoop.metadata.typesystem.types._
import scala.collection.JavaConversions
case class TypesDef(enumTypes: Seq[EnumTypeDefinition],
structTypes: Seq[StructTypeDefinition],
traitTypes: Seq[HierarchicalTypeDefinition[TraitType]],
classTypes: Seq[HierarchicalTypeDefinition[ClassType]]) {
def this() = this(Seq(), Seq(), Seq(), Seq())
def this(enumType : EnumTypeDefinition) = this(Seq(enumType), Seq(), Seq(), Seq())
def this(structType: StructTypeDefinition) = this(Seq(), Seq(structType), Seq(), Seq())
def this(typ: HierarchicalTypeDefinition[_], isTrait : Boolean) = this(
......
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.metadata.typesystem.types.store;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableMap;
import junit.framework.Assert;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileStatus;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.metadata.typesystem.types.BaseTest;
import org.apache.hadoop.metadata.typesystem.TypesDef;
import org.apache.hadoop.metadata.typesystem.types.ClassType;
import org.apache.hadoop.metadata.typesystem.types.DataTypes;
import org.apache.hadoop.metadata.typesystem.types.HierarchicalTypeDefinition;
import org.apache.hadoop.metadata.typesystem.types.StructTypeDefinition;
import org.apache.hadoop.metadata.typesystem.types.TraitType;
import org.apache.hadoop.metadata.typesystem.types.TypeSystem;
import org.apache.hadoop.metadata.typesystem.types.utils.TypesUtil;
import org.junit.Before;
import org.junit.Test;
import static org.apache.hadoop.metadata.typesystem.types.utils.TypesUtil.createClassTypeDef;
import static org.apache.hadoop.metadata.typesystem.types.utils.TypesUtil.createRequiredAttrDef;
public class HdfsStoreTest extends BaseTest {
private static final String LOCATION = "target/type-store";
@Before
public void setup() throws Exception {
super.setup();
System.setProperty(HdfsStore.LOCATION_PROPERTY, LOCATION);
FileSystem fs = FileSystem.get(new Configuration());
fs.delete(new Path(LOCATION), true);
//define type system
HierarchicalTypeDefinition<TraitType> tagTypeDefinition =
TypesUtil.createTraitTypeDef("tag",
ImmutableList.<String>of(),
createRequiredAttrDef("level", DataTypes.INT_TYPE));
HierarchicalTypeDefinition<ClassType> databaseTypeDefinition =
createClassTypeDef("database",
ImmutableList.<String>of(),
createRequiredAttrDef("name", DataTypes.STRING_TYPE),
createRequiredAttrDef("description", DataTypes.STRING_TYPE),
createRequiredAttrDef("tag", DataTypes.STRING_TYPE));
TypeSystem.getInstance().defineTypes(
ImmutableList.<StructTypeDefinition>of(),
ImmutableList.of(tagTypeDefinition),
ImmutableList.of(databaseTypeDefinition));
}
@Test
public void testStore() throws Exception {
TypeSystemStore store = new HdfsStore();
TypeSystem typeSystem = TypeSystem.getInstance();
store.store(typeSystem, "hive");
FileSystem fs = FileSystem.get(new Configuration());
Assert.assertTrue(fs.exists(new Path(LOCATION, "hive.json")));
TypesDef typeDef = store.restore("hive");
Assert.assertNotNull(typeDef);
Assert.assertEquals(1, typeDef.classTypesAsJavaList().size());
Assert.assertEquals("database", typeDef.classTypesAsJavaList().get(0).typeName);
}
@Test
public void testRestore() throws Exception {
TypeSystemStore store = new HdfsStore();
TypeSystem typeSystem = TypeSystem.getInstance();
store.store(typeSystem, "hive");
store.store(typeSystem, "falcon");
ImmutableMap<String, TypesDef> typeDef = store.restore();
Assert.assertEquals(2, typeDef.size());
Assert.assertEquals(1, typeDef.get("falcon").classTypesAsJavaList().size());
Assert.assertEquals("database",
typeDef.get("falcon").classTypesAsJavaList().get(0).typeName);
}
@Test
public void testArchive() throws Exception {
TypeSystemStore store = new HdfsStore();
TypeSystem typeSystem = TypeSystem.getInstance();
store.store(typeSystem, "hive"); //insert
store.store(typeSystem, "hive"); //update
FileSystem fs = FileSystem.get(new Configuration());
Assert.assertTrue(fs.exists(new Path(LOCATION, "hive.json")));
FileStatus[] files = fs.listStatus(new Path(LOCATION, "ARCHIVE"));
Assert.assertEquals(1, files.length);
Assert.assertTrue(files[0].getPath().getName().startsWith("hive.json"));
}
@Test
public void testDelete() throws Exception {
TypeSystemStore store = new HdfsStore();
TypeSystem typeSystem = TypeSystem.getInstance();
store.store(typeSystem, "hive");
store.delete("hive");
FileSystem fs = FileSystem.get(new Configuration());
Assert.assertFalse(fs.exists(new Path(LOCATION, "hive.json")));
FileStatus[] files = fs.listStatus(new Path(LOCATION, "ARCHIVE"));
Assert.assertEquals(1, files.length);
Assert.assertTrue(files[0].getPath().getName().startsWith("hive.json"));
}
}
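The HdfsStore class itself is not part of this hunk, but the tests above pin down its contract: store() writes <name>.json under the configured location, a repeated store() moves the previous copy into an ARCHIVE directory, and delete() removes the live file while keeping the archive. A minimal sketch of a store() consistent with those assertions — the serializer entry point and the archive file naming are assumptions, not the committed code:

    // Hedged sketch of HdfsStore.store(), inferred from HdfsStoreTest; not the committed code.
    public void store(TypeSystem typeSystem, String namespace) throws MetadataException {
        try {
            Path location = new Path(System.getProperty(LOCATION_PROPERTY));
            Path typesFile = new Path(location, namespace + ".json");
            FileSystem fs = FileSystem.get(new Configuration());

            // testArchive expects the previous copy under ARCHIVE/ after an update.
            if (fs.exists(typesFile)) {
                Path archiveDir = new Path(location, "ARCHIVE");
                fs.mkdirs(archiveDir);
                fs.rename(typesFile, new Path(archiveDir,
                        namespace + ".json." + System.currentTimeMillis()));
            }

            // Serialize the current types as JSON; the exact serializer call is assumed.
            String typesAsJson = TypesSerialization.toJson(typeSystem, typeSystem.getTypeNames());
            try (FSDataOutputStream out = fs.create(typesFile)) {
                out.writeBytes(typesAsJson);
            }
        } catch (IOException e) {
            throw new MetadataException(e);
        }
    }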
......@@ -33,21 +33,9 @@
<name>Apache Metadata Web Application</name>
<packaging>war</packaging>
<profiles>
<profile>
<id>hadoop-2</id>
<activation>
<activeByDefault>true</activeByDefault>
</activation>
<dependencies>
<dependency>
<groupId>org.apache.hadoop</groupId>
<artifactId>hadoop-client</artifactId>
<scope>test</scope>
</dependency>
</dependencies>
</profile>
</profiles>
<properties>
<debug.jetty.daemon>true</debug.jetty.daemon>
</properties>
<dependencies>
<dependency>
......@@ -152,6 +140,11 @@
<artifactId>jersey-guice</artifactId>
<version>1.18.3</version>
</dependency>
<dependency>
<groupId>org.mortbay.jetty</groupId>
<artifactId>jsp-2.1</artifactId>
</dependency>
</dependencies>
<build>
......@@ -275,7 +268,7 @@
<artifactId>maven-jetty-plugin</artifactId>
<version>${jetty.version}</version>
<configuration>
<skip>${skipITs}</skip>
<skip>${skipTests}</skip>
<!-- only skip integration tests -->
<connectors>
<!--
......@@ -299,22 +292,6 @@
<useTestClasspath>true</useTestClasspath>
<systemProperties>
<systemProperty>
<name>hadoop.conf.dir</name>
<value>${project.build.directory}/webapps/hadoop/conf</value>
</systemProperty>
<systemProperty>
<name>hadoop.tmp.dir</name>
<value>${project.build.directory}/tmp-hadoop-${user.name}</value>
</systemProperty>
<systemProperty>
<name>hadoop.log.dir</name>
<value>${project.build.directory}/logs</value>
</systemProperty>
<systemProperty>
<name>system.lib.location</name>
<value>${project.build.directory}/dependency</value>
</systemProperty>
<systemProperty>
<name>keystore.file</name>
<value>
${project.build.directory}/../../webapp/target/metadata.keystore
......@@ -329,6 +306,7 @@
</systemProperties>
<stopKey>metadata-stop</stopKey>
<stopPort>41001</stopPort>
<daemon>${debug.jetty.daemon}</daemon>
</configuration>
<executions>
<execution>
......
......@@ -18,6 +18,7 @@
package org.apache.hadoop.metadata.web.filters;
import com.google.inject.Singleton;
import org.apache.hadoop.metadata.web.util.DateTimeHelper;
import org.apache.hadoop.metadata.web.util.Servlets;
import org.slf4j.Logger;
......@@ -39,6 +40,7 @@ import java.util.UUID;
* This filter records audit information after processing each request,
* and introduces a UUID into the request and response so individual requests can be traced in the logs.
*/
@Singleton
public class AuditFilter implements Filter {
private static final Logger AUDIT_LOG = LoggerFactory.getLogger("AUDIT");
......@@ -61,10 +63,9 @@ public class AuditFilter implements Filter {
try {
currentThread.setName(formatName(oldName, requestId));
recordAudit(httpRequest, requestTimeISO9601);
filterChain.doFilter(request, response);
} finally {
recordAudit(httpRequest, requestTimeISO9601);
// put the request id into the response so users can trace logs for this request
((HttpServletResponse) response).setHeader(Servlets.REQUEST_ID, requestId);
currentThread.setName(oldName);
......
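Read together, the hunks above give the filter this shape; the sketch below is a hedged reconstruction, with the timestamp helper and thread-name format assumed rather than taken from the committed code:

    // Hedged reconstruction of AuditFilter.doFilter(); helper details are assumptions.
    public void doFilter(ServletRequest request, ServletResponse response, FilterChain filterChain)
            throws IOException, ServletException {
        HttpServletRequest httpRequest = (HttpServletRequest) request;
        String requestId = UUID.randomUUID().toString();
        Thread currentThread = Thread.currentThread();
        String oldName = currentThread.getName();
        String requestTimeISO9601 = DateTimeHelper.formatDateUTC(new Date()); // assumed helper
        try {
            // Tag the worker thread with the request id so every log line carries it.
            currentThread.setName(oldName + " - " + requestId);
            recordAudit(httpRequest, requestTimeISO9601);
            filterChain.doFilter(request, response);
        } finally {
            // Put the request id into the response so users can trace logs for this request.
            ((HttpServletResponse) response).setHeader(Servlets.REQUEST_ID, requestId);
            currentThread.setName(oldName);
        }
    }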
......@@ -24,10 +24,16 @@ import com.google.inject.servlet.GuiceServletContextListener;
import com.sun.jersey.api.core.PackagesResourceConfig;
import com.sun.jersey.guice.JerseyServletModule;
import com.sun.jersey.guice.spi.container.servlet.GuiceContainer;
import org.apache.hadoop.metadata.MetadataException;
import org.apache.hadoop.metadata.RepositoryMetadataModule;
import org.apache.hadoop.metadata.repository.typestore.ITypeStore;
import org.apache.hadoop.metadata.typesystem.TypesDef;
import org.apache.hadoop.metadata.typesystem.types.TypeSystem;
import org.apache.hadoop.metadata.web.filters.AuditFilter;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import javax.servlet.ServletContextEvent;
import java.util.HashMap;
import java.util.Map;
......@@ -51,6 +57,8 @@ public class GuiceServletConfig extends GuiceServletContextListener {
new JerseyServletModule() {
@Override
protected void configureServlets() {
filter("/*").through(AuditFilter.class);
String packages = getServletContext().getInitParameter(GUICE_CTX_PARAM);
LOG.info("Jersey loading from packages: " + packages);
......@@ -65,4 +73,30 @@ public class GuiceServletConfig extends GuiceServletContextListener {
return injector;
}
@Override
public void contextInitialized(ServletContextEvent servletContextEvent) {
super.contextInitialized(servletContextEvent);
restoreTypeSystem();
}
private void restoreTypeSystem() {
LOG.info("Restoring type system from the store");
Injector injector = getInjector();
ITypeStore typeStore = injector.getInstance(ITypeStore.class);
try {
TypesDef typesDef = typeStore.restore();
TypeSystem typeSystem = injector.getInstance(TypeSystem.class);
typeSystem.defineTypes(typesDef);
} catch (MetadataException e) {
throw new RuntimeException(e);
}
LOG.info("Restored type system from the store");
}
@Override
public void contextDestroyed(ServletContextEvent servletContextEvent) {
super.contextDestroyed(servletContextEvent);
}
}
\ No newline at end of file
......@@ -187,6 +187,35 @@ public class EntityResource {
}
}
/**
 * Adds a property to the entity with the given id.
 * @param guid entity id
 * @param property name of the property to add
 * @param value the property's value
 * @return JSON response carrying the request id
 */
@PUT
@Path("addProperty/{guid}")
@Produces(MediaType.APPLICATION_JSON)
public Response addProperty(@PathParam("guid") String guid, @QueryParam("property") String property,
@QueryParam("value") String value) {
try {
metadataService.addProperty(guid, property, value);
JSONObject response = new JSONObject();
response.put("requestId", Thread.currentThread().getName());
return Response.ok(response).build();
} catch (MetadataException e) {
LOG.error("Unable to add property {} to entity id {}", property, guid, e);
throw new WebApplicationException(
Servlets.getErrorResponse(e, Response.Status.BAD_REQUEST));
} catch (JSONException e) {
LOG.error("Unable to add property {} to entity id {}", property, guid, e);
throw new WebApplicationException(
Servlets.getErrorResponse(e, Response.Status.INTERNAL_SERVER_ERROR));
}
}
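The metadataService.addProperty() call delegates to the graph-backed repository, which is not part of this hunk. A plausible sketch of the repository side using the Blueprints API available to the Titan-backed store — the "guid" vertex key and the per-call commit are assumptions:

    // Hypothetical repository-side sketch; not the committed implementation.
    public void addProperty(String guid, String property, String value) throws MetadataException {
        Iterator<Vertex> vertices = titanGraph.getVertices("guid", guid).iterator();
        if (!vertices.hasNext()) {
            throw new MetadataException("No entity vertex found for guid " + guid);
        }
        Vertex instanceVertex = vertices.next();
        instanceVertex.setProperty(property, value); // Blueprints vertex property write
        titanGraph.commit();                         // flush the mutation to the store
    }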
// Trait management functions
/**
* Gets the list of trait names for a given entity represented by a guid.
......
......@@ -38,7 +38,6 @@ public abstract class BaseResourceIT {
protected TypeSystem typeSystem;
protected WebResource service;
@BeforeClass
public void setUp() throws Exception {
typeSystem = TypeSystem.getInstance();
typeSystem.reset();
......
......@@ -26,6 +26,7 @@ import org.apache.hadoop.metadata.typesystem.ITypedReferenceableInstance;
import org.apache.hadoop.metadata.typesystem.ITypedStruct;
import org.apache.hadoop.metadata.typesystem.Referenceable;
import org.apache.hadoop.metadata.typesystem.Struct;
import org.apache.hadoop.metadata.typesystem.json.Serialization;
import org.apache.hadoop.metadata.typesystem.json.Serialization$;
import org.apache.hadoop.metadata.typesystem.json.TypesSerialization;
import org.apache.hadoop.metadata.typesystem.json.TypesSerialization$;
......@@ -42,6 +43,7 @@ import org.apache.hadoop.metadata.typesystem.types.TraitType;
import org.apache.hadoop.metadata.typesystem.types.utils.TypesUtil;
import org.apache.hadoop.metadata.web.util.Servlets;
import org.codehaus.jettison.json.JSONArray;
import org.codehaus.jettison.json.JSONException;
import org.codehaus.jettison.json.JSONObject;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
......@@ -79,48 +81,88 @@ public class EntityJerseyResourceIT extends BaseResourceIT {
submitTypes();
}
@Test
public void testSubmitEntity() throws Exception {
tableInstance = createHiveTableInstance();
String tableInstanceAsJSON = Serialization$.MODULE$.toJson(tableInstance);
LOG.debug("tableInstance = " + tableInstanceAsJSON);
private ClientResponse submit(ITypedReferenceableInstance instance) {
String instanceAsJSON = Serialization$.MODULE$.toJson(instance);
LOG.debug("instanceAsJSON = " + instanceAsJSON);
WebResource resource = service
.path("api/metadata/entities/submit")
.path(TABLE_TYPE);
ClientResponse clientResponse = resource
.accept(MediaType.APPLICATION_JSON)
.path(instance.getTypeName());
return resource.accept(MediaType.APPLICATION_JSON)
.type(MediaType.APPLICATION_JSON)
.method(HttpMethod.POST, ClientResponse.class, tableInstanceAsJSON);
.method(HttpMethod.POST, ClientResponse.class, instanceAsJSON);
}
@Test
public void testSubmitEntity() throws Exception {
tableInstance = createHiveTableInstance();
ClientResponse clientResponse = submit(tableInstance);
Assert.assertEquals(clientResponse.getStatus(), Response.Status.OK.getStatusCode());
guid = getGuid(clientResponse);
try {
Assert.assertNotNull(UUID.fromString(guid));
} catch (IllegalArgumentException e) {
Assert.fail("Response is not a guid, " + clientResponse.getEntity(String.class));
}
}
private String getGuid(ClientResponse clientResponse) throws JSONException {
String responseAsString = clientResponse.getEntity(String.class);
Assert.assertNotNull(responseAsString);
JSONObject response = new JSONObject(responseAsString);
Assert.assertNotNull(response.get(Servlets.REQUEST_ID));
guid = response.get(Servlets.RESULTS).toString();
String guid = response.get(Servlets.RESULTS).toString();
Assert.assertNotNull(guid);
return guid;
}
try {
Assert.assertNotNull(UUID.fromString(guid));
} catch (IllegalArgumentException e) {
Assert.fail("Response is not a guid, " + response);
@Test (dependsOnMethods = "testSubmitEntity")
public void testAddProperty() throws Exception {
//add property
String description = "bar table - new desc";
ClientResponse clientResponse = addProperty(guid, "description", description);
Assert.assertEquals(clientResponse.getStatus(), Response.Status.OK.getStatusCode());
ITypedReferenceableInstance entityRef = getEntityDefinition(getEntityDefinition(guid));
Assert.assertEquals(entityRef.get("description"), description);
tableInstance.set("description", description);
//invalid property for the type
clientResponse = addProperty(guid, "invalid_property", "bar table");
Assert.assertEquals(clientResponse.getStatus(), Response.Status.BAD_REQUEST.getStatusCode());
//non-string property, update
clientResponse = addProperty(guid, "level", "4");
Assert.assertEquals(clientResponse.getStatus(), Response.Status.OK.getStatusCode());
entityRef = getEntityDefinition(getEntityDefinition(guid));
Assert.assertEquals(entityRef.get("level"), 4);
tableInstance.set("level", 4);
}
@Test (dependsOnMethods = "testSubmitEntity")
public void testAddReferenceProperty() throws Exception {
//Create new db instance
Referenceable databaseInstance = new Referenceable(DATABASE_TYPE);
databaseInstance.set("name", "newdb");
databaseInstance.set("description", "new database");
ClassType classType = typeSystem.getDataType(ClassType.class, DATABASE_TYPE);
ITypedReferenceableInstance dbInstance = classType.convert(databaseInstance, Multiplicity.REQUIRED);
ClientResponse clientResponse = submit(dbInstance);
Assert.assertEquals(clientResponse.getStatus(), Response.Status.OK.getStatusCode());
String dbId = getGuid(clientResponse);
//Add reference property
clientResponse = addProperty(guid, "database", dbId);
Assert.assertEquals(clientResponse.getStatus(), Response.Status.OK.getStatusCode());
}
@Test(dependsOnMethods = "testSubmitEntity")
public void testGetEntityDefinition() throws Exception {
WebResource resource = service
.path("api/metadata/entities/definition")
.path(guid);
ClientResponse clientResponse = resource
.accept(MediaType.APPLICATION_JSON)
.type(MediaType.APPLICATION_JSON)
.method(HttpMethod.GET, ClientResponse.class);
ClientResponse clientResponse = getEntityDefinition(guid);
Assert.assertEquals(clientResponse.getStatus(), Response.Status.OK.getStatusCode());
String responseAsString = clientResponse.getEntity(String.class);
......@@ -139,6 +181,34 @@ public class EntityJerseyResourceIT extends BaseResourceIT {
Assert.assertTrue(areEqual(tableInstance, tableInstanceAfterGet));
}
private ClientResponse addProperty(String guid, String property, String value) {
WebResource resource = service
.path("api/metadata/entities/addProperty")
.path(guid);
return resource.queryParam("property", property).queryParam("value", value)
.accept(MediaType.APPLICATION_JSON)
.type(MediaType.APPLICATION_JSON)
.method(HttpMethod.PUT, ClientResponse.class);
}
private ClientResponse getEntityDefinition(String guid) {
WebResource resource = service
.path("api/metadata/entities/definition")
.path(guid);
return resource.accept(MediaType.APPLICATION_JSON)
.type(MediaType.APPLICATION_JSON)
.method(HttpMethod.GET, ClientResponse.class);
}
private ITypedReferenceableInstance getEntityDefinition(ClientResponse clientResponse) throws Exception {
Assert.assertEquals(clientResponse.getStatus(), Response.Status.OK.getStatusCode());
JSONObject response = new JSONObject(clientResponse.getEntity(String.class));
final String definition = response.getString(Servlets.RESULTS);
Assert.assertNotNull(definition);
return Serialization.fromJson(definition);
}
private boolean areEqual(ITypedInstance actual,
ITypedInstance expected) throws Exception {
/*
......@@ -317,7 +387,7 @@ public class EntityJerseyResourceIT extends BaseResourceIT {
ClientResponse clientResponse = service
.path("api/metadata/entities/traits/add")
.path(guid)
.path("random")
.accept(MediaType.APPLICATION_JSON)
.type(MediaType.APPLICATION_JSON)
.method(HttpMethod.POST, ClientResponse.class, traitInstanceAsJSON);
......@@ -353,7 +423,7 @@ public class EntityJerseyResourceIT extends BaseResourceIT {
ClientResponse clientResponse = service
.path("api/metadata/entities/traits/delete")
.path(guid)
.path("random")
.path(traitName)
.accept(MediaType.APPLICATION_JSON)
.type(MediaType.APPLICATION_JSON)
......@@ -388,8 +458,9 @@ public class EntityJerseyResourceIT extends BaseResourceIT {
TypesUtil.createClassTypeDef(TABLE_TYPE,
ImmutableList.<String>of(),
TypesUtil.createUniqueRequiredAttrDef("name", DataTypes.STRING_TYPE),
TypesUtil.createRequiredAttrDef("description", DataTypes.STRING_TYPE),
TypesUtil.createOptionalAttrDef("description", DataTypes.STRING_TYPE),
TypesUtil.createRequiredAttrDef("type", DataTypes.STRING_TYPE),
TypesUtil.createRequiredAttrDef("level", DataTypes.INT_TYPE),
new AttributeDefinition("tableType", "tableType",
Multiplicity.REQUIRED, false, null),
new AttributeDefinition("serde1",
......@@ -452,6 +523,7 @@ public class EntityJerseyResourceIT extends BaseResourceIT {
tableInstance.set("name", TABLE_NAME);
tableInstance.set("description", "bar table");
tableInstance.set("type", "managed");
tableInstance.set("level", 2);
tableInstance.set("tableType", 1); // enum
tableInstance.set("database", databaseInstance);
......
......@@ -21,6 +21,7 @@ package org.apache.hadoop.metadata.web.resources;
import com.sun.jersey.api.client.ClientResponse;
import com.sun.jersey.api.client.WebResource;
import org.testng.Assert;
import org.testng.annotations.BeforeClass;
import org.testng.annotations.Test;
import javax.ws.rs.HttpMethod;
......@@ -33,6 +34,12 @@ import javax.ws.rs.core.Response;
@Test
public class RexsterGraphJerseyResourceIT extends BaseResourceIT {
@BeforeClass
@Override
public void setUp() throws Exception {
super.setUp();
}
@Test(enabled = false)
public void testGetVertex() throws Exception {
// todo: add a vertex before fetching it
......