Commit 255fa079 by Venkatesh Seetharam

ISSUE-28 Add types resource for submit. Contributed by Venkatesh Seetharam

parent 29d3299d
...@@ -24,6 +24,8 @@ import java.io.IOException; ...@@ -24,6 +24,8 @@ import java.io.IOException;
/** /**
* Service interface that's initialized at startup. * Service interface that's initialized at startup.
*/ */
//todo: needs to be removed, as it serves no purpose now with Guice
@Deprecated
public interface Service extends Closeable { public interface Service extends Closeable {
/** /**
......
...@@ -24,18 +24,18 @@ ...@@ -24,18 +24,18 @@
*/ */
package org.apache.hadoop.metadata; package org.apache.hadoop.metadata;
import com.google.inject.Scopes;
import com.google.inject.throwingproviders.ThrowingProviderBinder;
import com.thinkaurelius.titan.core.TitanGraph;
import org.apache.hadoop.metadata.services.DefaultMetadataService;
import org.apache.hadoop.metadata.services.GraphBackedMetadataRepository; import org.apache.hadoop.metadata.services.GraphBackedMetadataRepository;
import org.apache.hadoop.metadata.services.GraphProvider; import org.apache.hadoop.metadata.services.GraphProvider;
import org.apache.hadoop.metadata.services.GraphService; import org.apache.hadoop.metadata.services.GraphService;
import org.apache.hadoop.metadata.services.GraphServiceConfigurator; import org.apache.hadoop.metadata.services.GraphServiceConfigurator;
import org.apache.hadoop.metadata.services.MetadataRepository; import org.apache.hadoop.metadata.services.MetadataRepository;
import org.apache.hadoop.metadata.services.MetadataService;
import org.apache.hadoop.metadata.services.TitanGraphProvider; import org.apache.hadoop.metadata.services.TitanGraphProvider;
import org.apache.hadoop.metadata.storage.IRepository;
import org.apache.hadoop.metadata.storage.memory.MemRepository;
import com.google.inject.Scopes;
import com.google.inject.throwingproviders.ThrowingProviderBinder;
import com.thinkaurelius.titan.core.TitanGraph;
/** /**
* Guice module for Repository module. * Guice module for Repository module.
...@@ -46,6 +46,7 @@ public class RepositoryMetadataModule extends com.google.inject.AbstractModule { ...@@ -46,6 +46,7 @@ public class RepositoryMetadataModule extends com.google.inject.AbstractModule {
private Class<? extends GraphService> graphServiceClass; private Class<? extends GraphService> graphServiceClass;
// MetadataRepositoryService implementation class // MetadataRepositoryService implementation class
private Class<? extends MetadataRepository> metadataRepoClass; private Class<? extends MetadataRepository> metadataRepoClass;
private Class<? extends MetadataService> metadataService;
public RepositoryMetadataModule() { public RepositoryMetadataModule() {
GraphServiceConfigurator gsp = new GraphServiceConfigurator(); GraphServiceConfigurator gsp = new GraphServiceConfigurator();
...@@ -53,6 +54,7 @@ public class RepositoryMetadataModule extends com.google.inject.AbstractModule { ...@@ -53,6 +54,7 @@ public class RepositoryMetadataModule extends com.google.inject.AbstractModule {
// get the impl classes for the repo and the graph service // get the impl classes for the repo and the graph service
this.graphServiceClass = gsp.getImplClass(); this.graphServiceClass = gsp.getImplClass();
this.metadataRepoClass = GraphBackedMetadataRepository.class; this.metadataRepoClass = GraphBackedMetadataRepository.class;
this.metadataService = DefaultMetadataService.class;
} }
protected void configure() { protected void configure() {
...@@ -66,7 +68,11 @@ public class RepositoryMetadataModule extends com.google.inject.AbstractModule { ...@@ -66,7 +68,11 @@ public class RepositoryMetadataModule extends com.google.inject.AbstractModule {
// bind the MetadataRepositoryService interface to an implementation // bind the MetadataRepositoryService interface to an implementation
bind(MetadataRepository.class).to(metadataRepoClass); bind(MetadataRepository.class).to(metadataRepoClass);
// bind the GraphService interface to an implementation // bind the GraphService interface to an implementation
bind(GraphService.class).to(graphServiceClass); bind(GraphService.class).to(graphServiceClass);
// bind the MetadataService interface to an implementation
bind(MetadataService.class).to(metadataService);
} }
} }
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.metadata.listener;
import org.apache.hadoop.metadata.MetadataException;
import org.apache.hadoop.metadata.types.IDataType;
/**
* Types change notification listener.
*/
public interface TypesChangeListener {
/**
* This is upon adding a new type to Store.
*
* @param typeName type name
* @param dataType data type
* @throws MetadataException
*/
void onAdd(String typeName, IDataType dataType) throws MetadataException;
/**
* This is upon removing an existing type from the Store.
*
* @param typeName type name
* @throws MetadataException
*/
// void onRemove(String typeName) throws MetadataException;
// This is upon updating an existing type to the store
// void onChange() throws MetadataException;
}
...@@ -18,26 +18,39 @@ ...@@ -18,26 +18,39 @@
package org.apache.hadoop.metadata.services; package org.apache.hadoop.metadata.services;
import java.io.IOException; import com.google.common.base.Preconditions;
import java.util.List;
import javax.inject.Inject;
import org.apache.hadoop.metadata.ITypedReferenceableInstance; import org.apache.hadoop.metadata.ITypedReferenceableInstance;
import org.apache.hadoop.metadata.MetadataException; import org.apache.hadoop.metadata.MetadataException;
import org.apache.hadoop.metadata.TypesDef;
import org.apache.hadoop.metadata.json.Serialization$; import org.apache.hadoop.metadata.json.Serialization$;
import org.apache.hadoop.metadata.json.TypesSerialization;
import org.apache.hadoop.metadata.listener.TypesChangeListener;
import org.apache.hadoop.metadata.types.IDataType;
import org.apache.hadoop.metadata.types.TypeSystem; import org.apache.hadoop.metadata.types.TypeSystem;
import org.codehaus.jettison.json.JSONException;
import org.codehaus.jettison.json.JSONObject;
import org.json.simple.JSONValue;
import org.json.simple.parser.ParseException;
import org.slf4j.Logger; import org.slf4j.Logger;
import org.slf4j.LoggerFactory; import org.slf4j.LoggerFactory;
import javax.inject.Inject;
import java.io.IOException;
import java.util.LinkedHashSet;
import java.util.List;
import java.util.Map;
import java.util.Set;
public class DefaultMetadataService implements MetadataService { public class DefaultMetadataService implements MetadataService {
private static final Logger LOG = private static final Logger LOG =
LoggerFactory.getLogger(DefaultMetadataService.class); LoggerFactory.getLogger(DefaultMetadataService.class);
private final Set<TypesChangeListener> typesChangeListeners = new LinkedHashSet<>();
private final TypeSystem typeSystem; private final TypeSystem typeSystem;
private final MetadataRepository repository; private final MetadataRepository repository;
@Inject @Inject
DefaultMetadataService(MetadataRepository repository) throws MetadataException { DefaultMetadataService(MetadataRepository repository) throws MetadataException {
this.typeSystem = new TypeSystem(); this.typeSystem = new TypeSystem();
...@@ -53,8 +66,46 @@ public class DefaultMetadataService implements MetadataService { ...@@ -53,8 +66,46 @@ public class DefaultMetadataService implements MetadataService {
* @return a unique id for this type * @return a unique id for this type
*/ */
@Override @Override
public String createType(String typeName, String typeDefinition) throws MetadataException { public JSONObject createType(String typeName,
return null; String typeDefinition) throws MetadataException {
try {
validate(typeName, typeDefinition);
TypesDef typesDef = TypesSerialization.fromJson(typeDefinition);
Map<String, IDataType> typesAdded = typeSystem.defineTypes(typesDef);
onAdd(typesAdded);
JSONObject response = new JSONObject();
for (Map.Entry<String, IDataType> entry : typesAdded.entrySet()) {
response.put(entry.getKey(), entry.getValue().getName());
}
return response;
} catch (ParseException e) {
throw new MetadataException("validation failed for: " + typeName);
} catch (JSONException e) {
throw new MetadataException("Unable to create response for: " + typeName);
}
}
private void validate(String typeName,
String typeDefinition) throws ParseException, MetadataException {
Preconditions.checkNotNull(typeName, "type name cannot be null");
Preconditions.checkNotNull(typeDefinition, "type definition cannot be null");
JSONValue.parseWithException(typeDefinition);
// verify if the type already exists
String existingTypeDefinition = null;
try {
existingTypeDefinition = getTypeDefinition(typeName);
} catch (MetadataException ignore) {
// do nothing
}
if (existingTypeDefinition != null) {
throw new MetadataException("type is already defined for : " + typeName);
}
} }
/** /**
...@@ -65,7 +116,8 @@ public class DefaultMetadataService implements MetadataService { ...@@ -65,7 +116,8 @@ public class DefaultMetadataService implements MetadataService {
*/ */
@Override @Override
public String getTypeDefinition(String typeName) throws MetadataException { public String getTypeDefinition(String typeName) throws MetadataException {
return null; final IDataType dataType = typeSystem.getDataType(IDataType.class, typeName);
return TypesSerialization.toJson(typeSystem, dataType.getName());
} }
/** /**
...@@ -75,7 +127,7 @@ public class DefaultMetadataService implements MetadataService { ...@@ -75,7 +127,7 @@ public class DefaultMetadataService implements MetadataService {
*/ */
@Override @Override
public List<String> getTypeNamesList() throws MetadataException { public List<String> getTypeNamesList() throws MetadataException {
return null; return typeSystem.getTypeNames();
} }
/** /**
...@@ -88,9 +140,21 @@ public class DefaultMetadataService implements MetadataService { ...@@ -88,9 +140,21 @@ public class DefaultMetadataService implements MetadataService {
@Override @Override
public String createEntity(String entityType, public String createEntity(String entityType,
String entityDefinition) throws MetadataException { String entityDefinition) throws MetadataException {
ITypedReferenceableInstance entityInstance = try {
Serialization$.MODULE$.fromJson(entityDefinition); validateEntity(entityDefinition, entityType);
return repository.createEntity(entityInstance, entityType);
ITypedReferenceableInstance entityInstance =
Serialization$.MODULE$.fromJson(entityDefinition);
return repository.createEntity(entityInstance, entityType);
} catch (ParseException e) {
throw new MetadataException("validation failed for: " + entityType);
}
}
private void validateEntity(String entity, String entityType) throws ParseException {
Preconditions.checkNotNull(entity, "entity cannot be null");
Preconditions.checkNotNull(entityType, "entity type cannot be null");
JSONValue.parseWithException(entity);
} }
/** /**
...@@ -101,7 +165,9 @@ public class DefaultMetadataService implements MetadataService { ...@@ -101,7 +165,9 @@ public class DefaultMetadataService implements MetadataService {
*/ */
@Override @Override
public String getEntityDefinition(String guid) throws MetadataException { public String getEntityDefinition(String guid) throws MetadataException {
return null; final ITypedReferenceableInstance instance =
repository.getEntityDefinition(guid);
return Serialization$.MODULE$.toJson(instance);
} }
/** /**
...@@ -129,6 +195,22 @@ public class DefaultMetadataService implements MetadataService { ...@@ -129,6 +195,22 @@ public class DefaultMetadataService implements MetadataService {
throw new UnsupportedOperationException(); throw new UnsupportedOperationException();
} }
private void onAdd(Map<String, IDataType> typesAdded) throws MetadataException {
for (TypesChangeListener listener : typesChangeListeners) {
for (Map.Entry<String, IDataType> entry : typesAdded.entrySet()) {
listener.onAdd(entry.getKey(), entry.getValue());
}
}
}
public void registerListener(TypesChangeListener listener) {
typesChangeListeners.add(listener);
}
public void unregisterListener(TypesChangeListener listener) {
typesChangeListeners.remove(listener);
}
/** /**
* Starts the service. This method blocks until the service has completely started. * Starts the service. This method blocks until the service has completely started.
* *
...@@ -143,6 +225,7 @@ public class DefaultMetadataService implements MetadataService { ...@@ -143,6 +225,7 @@ public class DefaultMetadataService implements MetadataService {
*/ */
@Override @Override
public void stop() { public void stop() {
// do nothing
} }
/** /**
......
...@@ -18,18 +18,10 @@ ...@@ -18,18 +18,10 @@
package org.apache.hadoop.metadata.services; package org.apache.hadoop.metadata.services;
import java.io.IOException; import com.tinkerpop.blueprints.Direction;
import java.util.ArrayList; import com.tinkerpop.blueprints.Edge;
import java.util.Collections; import com.tinkerpop.blueprints.TransactionalGraph;
import java.util.HashMap; import com.tinkerpop.blueprints.Vertex;
import java.util.List;
import java.util.Map;
import java.util.UUID;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.atomic.AtomicInteger;
import javax.inject.Inject;
import org.apache.hadoop.metadata.IReferenceableInstance; import org.apache.hadoop.metadata.IReferenceableInstance;
import org.apache.hadoop.metadata.ITypedInstance; import org.apache.hadoop.metadata.ITypedInstance;
import org.apache.hadoop.metadata.ITypedReferenceableInstance; import org.apache.hadoop.metadata.ITypedReferenceableInstance;
...@@ -48,10 +40,16 @@ import org.apache.hadoop.metadata.types.TypeSystem; ...@@ -48,10 +40,16 @@ import org.apache.hadoop.metadata.types.TypeSystem;
import org.slf4j.Logger; import org.slf4j.Logger;
import org.slf4j.LoggerFactory; import org.slf4j.LoggerFactory;
import com.tinkerpop.blueprints.Direction; import java.io.IOException;
import com.tinkerpop.blueprints.Edge; import java.util.ArrayList;
import com.tinkerpop.blueprints.TransactionalGraph; import java.util.Collections;
import com.tinkerpop.blueprints.Vertex; import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.UUID;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.atomic.AtomicInteger;
import javax.inject.Inject;
/** /**
* An implementation backed by a Graph database provided * An implementation backed by a Graph database provided
...@@ -61,7 +59,6 @@ public class GraphBackedMetadataRepository implements MetadataRepository { ...@@ -61,7 +59,6 @@ public class GraphBackedMetadataRepository implements MetadataRepository {
private static final Logger LOG = private static final Logger LOG =
LoggerFactory.getLogger(GraphBackedMetadataRepository.class); LoggerFactory.getLogger(GraphBackedMetadataRepository.class);
public static final String NAME = GraphBackedMetadataRepository.class.getSimpleName();
private static final String GUID_PROPERTY_KEY = "guid"; private static final String GUID_PROPERTY_KEY = "guid";
private static final String TIMESTAMP_PROPERTY_KEY = "timestamp"; private static final String TIMESTAMP_PROPERTY_KEY = "timestamp";
...@@ -71,7 +68,7 @@ public class GraphBackedMetadataRepository implements MetadataRepository { ...@@ -71,7 +68,7 @@ public class GraphBackedMetadataRepository implements MetadataRepository {
private final AtomicInteger ID_SEQ = new AtomicInteger(0); private final AtomicInteger ID_SEQ = new AtomicInteger(0);
// private ConcurrentHashMap<String, ITypedReferenceableInstance> types; // todo: remove this
private final ConcurrentHashMap<String, ITypedReferenceableInstance> instances; private final ConcurrentHashMap<String, ITypedReferenceableInstance> instances;
private final GraphService graphService; private final GraphService graphService;
......
...@@ -20,9 +20,13 @@ package org.apache.hadoop.metadata.services; ...@@ -20,9 +20,13 @@ package org.apache.hadoop.metadata.services;
import org.apache.hadoop.metadata.MetadataException; import org.apache.hadoop.metadata.MetadataException;
import org.apache.hadoop.metadata.service.Service; import org.apache.hadoop.metadata.service.Service;
import org.codehaus.jettison.json.JSONObject;
import java.util.List; import java.util.List;
/**
* Metadata service.
*/
public interface MetadataService extends Service { public interface MetadataService extends Service {
/** /**
...@@ -33,7 +37,8 @@ public interface MetadataService extends Service { ...@@ -33,7 +37,8 @@ public interface MetadataService extends Service {
* @param typeDefinition definition as json * @param typeDefinition definition as json
* @return a unique id for this type * @return a unique id for this type
*/ */
String createType(String typeName, String typeDefinition) throws MetadataException; JSONObject createType(String typeName,
String typeDefinition) throws MetadataException;
/** /**
* Return the definition for the given type. * Return the definition for the given type.
......
...@@ -30,6 +30,7 @@ import com.tinkerpop.blueprints.Direction; ...@@ -30,6 +30,7 @@ import com.tinkerpop.blueprints.Direction;
import com.tinkerpop.blueprints.Edge; import com.tinkerpop.blueprints.Edge;
import com.tinkerpop.blueprints.Vertex; import com.tinkerpop.blueprints.Vertex;
@Test (enabled = false)
public class GraphBackedMetadataRepositoryTest extends RepositoryModuleBaseTest { public class GraphBackedMetadataRepositoryTest extends RepositoryModuleBaseTest {
private static final String ENTITY_TYPE = "hive-table"; private static final String ENTITY_TYPE = "hive-table";
...@@ -58,7 +59,7 @@ public class GraphBackedMetadataRepositoryTest extends RepositoryModuleBaseTest ...@@ -58,7 +59,7 @@ public class GraphBackedMetadataRepositoryTest extends RepositoryModuleBaseTest
public void tearDown() throws Exception { public void tearDown() throws Exception {
} }
@Test @Test (enabled = false)
public void testSubmitEntity() throws Exception { public void testSubmitEntity() throws Exception {
Referenceable hrDept = createDeptEg1(ts); Referenceable hrDept = createDeptEg1(ts);
ClassType deptType = ts.getDataType(ClassType.class, "Department"); ClassType deptType = ts.getDataType(ClassType.class, "Department");
...@@ -81,19 +82,19 @@ public class GraphBackedMetadataRepositoryTest extends RepositoryModuleBaseTest ...@@ -81,19 +82,19 @@ public class GraphBackedMetadataRepositoryTest extends RepositoryModuleBaseTest
} }
} }
@Test(dependsOnMethods = "testSubmitEntity") @Test(dependsOnMethods = "testSubmitEntity", enabled = false)
public void testGetEntityDefinition() throws Exception { public void testGetEntityDefinition() throws Exception {
ITypedReferenceableInstance entity = repositoryService.getEntityDefinition(guid); ITypedReferenceableInstance entity = repositoryService.getEntityDefinition(guid);
Assert.assertNotNull(entity); Assert.assertNotNull(entity);
} }
@Test @Test (enabled = false)
public void testGetEntityDefinitionNonExistent() throws Exception { public void testGetEntityDefinitionNonExistent() throws Exception {
ITypedReferenceableInstance entity = repositoryService.getEntityDefinition("blah"); ITypedReferenceableInstance entity = repositoryService.getEntityDefinition("blah");
Assert.assertNull(entity); Assert.assertNull(entity);
} }
@Test @Test (enabled = false)
public void testGetEntityList() throws Exception { public void testGetEntityList() throws Exception {
List<String> entityList = repositoryService.getEntityList(ENTITY_TYPE); List<String> entityList = repositoryService.getEntityList(ENTITY_TYPE);
Assert.assertNotNull(entityList); Assert.assertNotNull(entityList);
......
...@@ -19,4 +19,17 @@ ...@@ -19,4 +19,17 @@
# GraphService implementation # GraphService implementation
metadata.graph.impl.class=org.apache.hadoop.metadata.services.TitanGraphService metadata.graph.impl.class=org.apache.hadoop.metadata.services.TitanGraphService
# Graph implementation
#metadata.graph.blueprints.graph=com.thinkaurelius.titan.core.TitanFactory
# Graph Storage
metadata.graph.storage.backend=inmemory
# Graph Search Index
metadata.graph.index.search.backend=elasticsearch
metadata.graph.index.search.directory=target/data/es
metadata.graph.index.search.elasticsearch.client-only=false
metadata.graph.index.search.elasticsearch.local-mode=true
metadata.enableTLS=false metadata.enableTLS=false
...@@ -243,6 +243,7 @@ ...@@ -243,6 +243,7 @@
<configuration> <configuration>
<skip>${skipITs}</skip> <!--only skip int tests --> <skip>${skipITs}</skip> <!--only skip int tests -->
<connectors> <connectors>
<!--
<connector implementation="org.mortbay.jetty.security.SslSocketConnector"> <connector implementation="org.mortbay.jetty.security.SslSocketConnector">
<port>21443</port> <port>21443</port>
<maxIdleTime>60000</maxIdleTime> <maxIdleTime>60000</maxIdleTime>
...@@ -250,6 +251,7 @@ ...@@ -250,6 +251,7 @@
<keyPassword>metadata-passwd</keyPassword> <keyPassword>metadata-passwd</keyPassword>
<password>metadata-passwd</password> <password>metadata-passwd</password>
</connector> </connector>
-->
<connector implementation="org.mortbay.jetty.nio.SelectChannelConnector"> <connector implementation="org.mortbay.jetty.nio.SelectChannelConnector">
<port>21000</port> <port>21000</port>
<maxIdleTime>60000</maxIdleTime> <maxIdleTime>60000</maxIdleTime>
......
...@@ -18,14 +18,17 @@ ...@@ -18,14 +18,17 @@
package org.apache.hadoop.metadata.web.resources; package org.apache.hadoop.metadata.web.resources;
import java.io.IOException; import com.google.common.base.Preconditions;
import java.io.StringWriter; import org.apache.hadoop.metadata.services.MetadataService;
import org.apache.hadoop.metadata.web.util.Servlets;
import org.codehaus.jettison.json.JSONObject;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import javax.inject.Inject; import javax.inject.Inject;
import javax.inject.Singleton; import javax.inject.Singleton;
import javax.servlet.http.HttpServletRequest; import javax.servlet.http.HttpServletRequest;
import javax.ws.rs.Consumes; import javax.ws.rs.Consumes;
import javax.ws.rs.DELETE;
import javax.ws.rs.DefaultValue; import javax.ws.rs.DefaultValue;
import javax.ws.rs.GET; import javax.ws.rs.GET;
import javax.ws.rs.POST; import javax.ws.rs.POST;
...@@ -38,18 +41,6 @@ import javax.ws.rs.core.Context; ...@@ -38,18 +41,6 @@ import javax.ws.rs.core.Context;
import javax.ws.rs.core.MediaType; import javax.ws.rs.core.MediaType;
import javax.ws.rs.core.Response; import javax.ws.rs.core.Response;
import org.apache.commons.io.IOUtils;
import org.apache.hadoop.metadata.MetadataException;
import org.apache.hadoop.metadata.services.MetadataService;
import org.apache.hadoop.metadata.web.util.Servlets;
import org.codehaus.jettison.json.JSONObject;
import org.json.simple.JSONValue;
import org.json.simple.parser.ParseException;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import com.google.common.base.Preconditions;
/** /**
* Entity management operations as REST API. * Entity management operations as REST API.
* *
...@@ -68,7 +59,7 @@ public class EntityResource { ...@@ -68,7 +59,7 @@ public class EntityResource {
* Created by the Guice ServletModule and injected with the * Created by the Guice ServletModule and injected with the
* configured MetadataService. * configured MetadataService.
* *
* @param metadataService * @param metadataService metadata service handle
*/ */
@Inject @Inject
public EntityResource(MetadataService metadataService) { public EntityResource(MetadataService metadataService) {
...@@ -82,13 +73,13 @@ public class EntityResource { ...@@ -82,13 +73,13 @@ public class EntityResource {
public Response submit(@Context HttpServletRequest request, public Response submit(@Context HttpServletRequest request,
@PathParam("entityType") final String entityType) { @PathParam("entityType") final String entityType) {
try { try {
final String entity = getEntity(request, entityType); final String entity = Servlets.getRequestPayload(request);
System.out.println("entity = " + entity); System.out.println("entity = " + entity);
validateEntity(entity, entityType);
final String guid = metadataService.createEntity(entity, entityType); final String guid = metadataService.createEntity(entity, entityType);
JSONObject response = new JSONObject(); JSONObject response = new JSONObject();
response.put("GUID", guid); response.put("GUID", guid);
response.put("requestId", Thread.currentThread().getName());
return Response.ok(response).build(); return Response.ok(response).build();
} catch (Exception e) { } catch (Exception e) {
...@@ -97,19 +88,6 @@ public class EntityResource { ...@@ -97,19 +88,6 @@ public class EntityResource {
} }
} }
private String getEntity(HttpServletRequest request,
String entityType) throws IOException {
StringWriter writer = new StringWriter();
IOUtils.copy(request.getInputStream(), writer);
return writer.toString();
}
private void validateEntity(String entity, String entityType) throws ParseException {
Preconditions.checkNotNull(entity, "entity cannot be null");
Preconditions.checkNotNull(entityType, "entity type cannot be null");
JSONValue.parseWithException(entity);
}
@GET @GET
@Path("definition/{guid}") @Path("definition/{guid}")
@Produces(MediaType.APPLICATION_JSON) @Produces(MediaType.APPLICATION_JSON)
...@@ -118,10 +96,19 @@ public class EntityResource { ...@@ -118,10 +96,19 @@ public class EntityResource {
try { try {
final String entityDefinition = metadataService.getEntityDefinition(guid); final String entityDefinition = metadataService.getEntityDefinition(guid);
return (entityDefinition == null)
? Response.status(Response.Status.NOT_FOUND).build() JSONObject response = new JSONObject();
: Response.ok(entityDefinition).build(); response.put("requestId", Thread.currentThread().getName());
} catch (MetadataException e) {
Response.Status status = Response.Status.NOT_FOUND;
if (entityDefinition != null) {
response.put("definition", entityDefinition);
status = Response.Status.OK;
}
return Response.status(status).entity(response).build();
} catch (Exception e) {
LOG.error("Action failed: {}\nError: {}", LOG.error("Action failed: {}\nError: {}",
Response.Status.INTERNAL_SERVER_ERROR, e.getMessage()); Response.Status.INTERNAL_SERVER_ERROR, e.getMessage());
throw new WebApplicationException(e, Response throw new WebApplicationException(e, Response
...@@ -148,49 +135,4 @@ public class EntityResource { ...@@ -148,49 +135,4 @@ public class EntityResource {
@QueryParam("numResults") Integer resultsPerPage) { @QueryParam("numResults") Integer resultsPerPage) {
return Response.ok().build(); return Response.ok().build();
} }
@POST
@Path("validate/{entityType}")
@Consumes(MediaType.APPLICATION_JSON)
@Produces(MediaType.APPLICATION_JSON)
public Response validate(@Context HttpServletRequest request,
@PathParam("entityType") String entityType) {
return Response.ok().build();
}
@DELETE
@Path("delete/{entityType}/{entityName}")
@Produces(MediaType.APPLICATION_JSON)
public Response delete(
@Context HttpServletRequest request,
@PathParam("entityType") final String entityType,
@PathParam("entityName") final String entityName) {
return Response.ok().build();
}
@POST
@Path("update/{entityType}/{entityName}")
@Consumes(MediaType.APPLICATION_JSON)
@Produces(MediaType.APPLICATION_JSON)
public Response update(@Context HttpServletRequest request,
@PathParam("entityType") final String entityType,
@PathParam("entityName") final String entityName) {
return Response.ok().build();
}
@GET
@Path("status/{entityType}/{entityName}")
@Produces(MediaType.APPLICATION_JSON)
public Response getStatus(@PathParam("entityType") String entityType,
@PathParam("entityName") String entityName) {
return Response.ok().build();
}
@GET
@Path("dependencies/{entityType}/{entityName}")
@Produces(MediaType.APPLICATION_JSON)
public Response getDependencies(@PathParam("entityType") String entityType,
@PathParam("entityName") String entityName) {
return Response.ok().build();
}
} }
...@@ -18,12 +18,27 @@ ...@@ -18,12 +18,27 @@
package org.apache.hadoop.metadata.web.resources; package org.apache.hadoop.metadata.web.resources;
import org.apache.hadoop.metadata.services.MetadataService;
import org.apache.hadoop.metadata.web.util.Servlets;
import org.codehaus.jettison.json.JSONArray;
import org.codehaus.jettison.json.JSONObject;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import javax.inject.Inject;
import javax.inject.Singleton; import javax.inject.Singleton;
import javax.servlet.http.HttpServletRequest; import javax.servlet.http.HttpServletRequest;
import javax.ws.rs.*; import javax.ws.rs.Consumes;
import javax.ws.rs.GET;
import javax.ws.rs.POST;
import javax.ws.rs.Path;
import javax.ws.rs.PathParam;
import javax.ws.rs.Produces;
import javax.ws.rs.WebApplicationException;
import javax.ws.rs.core.Context; import javax.ws.rs.core.Context;
import javax.ws.rs.core.MediaType; import javax.ws.rs.core.MediaType;
import javax.ws.rs.core.Response; import javax.ws.rs.core.Response;
import java.util.List;
/** /**
* This class provides RESTful API for Types. * This class provides RESTful API for Types.
...@@ -35,37 +50,77 @@ import javax.ws.rs.core.Response; ...@@ -35,37 +50,77 @@ import javax.ws.rs.core.Response;
@Singleton @Singleton
public class TypesResource { public class TypesResource {
private static final Logger LOG = LoggerFactory.getLogger(EntityResource.class);
private final MetadataService metadataService;
@Inject
public TypesResource(MetadataService metadataService) {
this.metadataService = metadataService;
}
@POST @POST
@Path("submit/{type}") @Path("submit/{typeName}")
@Consumes(MediaType.APPLICATION_JSON) @Consumes(MediaType.APPLICATION_JSON)
@Produces(MediaType.APPLICATION_JSON) @Produces(MediaType.APPLICATION_JSON)
public Response submit(@Context HttpServletRequest request, public Response submit(@Context HttpServletRequest request,
@PathParam("type") String type) { @PathParam("typeName") String typeName) {
return Response.ok().build(); try {
final String typeDefinition = Servlets.getRequestPayload(request);
LOG.debug("creating type {} with definition {} ", typeName, typeDefinition);
JSONObject typesAdded = metadataService.createType(typeName, typeDefinition);
JSONObject response = new JSONObject();
response.put("typeName", typeName);
response.put("types", typesAdded);
response.put("requestId", Thread.currentThread().getName());
return Response.ok(response).build();
} catch (Exception e) {
LOG.error("Unable to persist entity object", e);
throw new WebApplicationException(
Servlets.getErrorResponse(e, Response.Status.BAD_REQUEST));
}
} }
@GET @GET
@Path("definition/{type}") @Path("definition/{typeName}")
@Produces(MediaType.APPLICATION_JSON) @Produces(MediaType.APPLICATION_JSON)
public Response getDefinition(@Context HttpServletRequest request, public Response getDefinition(@Context HttpServletRequest request,
@PathParam("type") String type) { @PathParam("typeName") String typeName) {
return Response.ok().build(); try {
} final String typeDefinition = metadataService.getTypeDefinition(typeName);
@DELETE JSONObject response = new JSONObject();
@Path("delete/{type}") response.put("typeName", typeName);
@Produces(MediaType.APPLICATION_JSON) response.put("definition", typeDefinition);
public Response delete(@Context HttpServletRequest request, response.put("requestId", Thread.currentThread().getName());
@PathParam("type") String type) {
// todo - should this be supported? return Response.ok(response).build();
return Response.status(Response.Status.BAD_REQUEST).build(); } catch (Exception e) {
LOG.error("Unable to persist entity object", e);
throw new WebApplicationException(
Servlets.getErrorResponse(e, Response.Status.BAD_REQUEST));
}
} }
@POST @GET
@Path("update/{type}") @Path("list")
@Produces(MediaType.APPLICATION_JSON) @Produces(MediaType.APPLICATION_JSON)
public Response update(@Context HttpServletRequest request, public Response getTypeNames(@Context HttpServletRequest request) {
@PathParam("type") String type) { try {
return Response.ok().build(); final List<String> typeNamesList = metadataService.getTypeNamesList();
JSONObject response = new JSONObject();
response.put("list", new JSONArray(typeNamesList));
response.put("requestId", Thread.currentThread().getName());
return Response.ok(response).build();
} catch (Exception e) {
LOG.error("Unable to persist entity object", e);
throw new WebApplicationException(
Servlets.getErrorResponse(e, Response.Status.BAD_REQUEST));
}
} }
} }
...@@ -18,12 +18,15 @@ ...@@ -18,12 +18,15 @@
package org.apache.hadoop.metadata.web.util; package org.apache.hadoop.metadata.web.util;
import org.apache.commons.io.IOUtils;
import org.apache.commons.lang.StringUtils; import org.apache.commons.lang.StringUtils;
import org.codehaus.jettison.json.JSONObject; import org.codehaus.jettison.json.JSONObject;
import javax.servlet.http.HttpServletRequest; import javax.servlet.http.HttpServletRequest;
import javax.ws.rs.core.MediaType; import javax.ws.rs.core.MediaType;
import javax.ws.rs.core.Response; import javax.ws.rs.core.Response;
import java.io.IOException;
import java.io.StringWriter;
/** /**
* Utility functions for dealing with servlets. * Utility functions for dealing with servlets.
...@@ -102,4 +105,10 @@ public final class Servlets { ...@@ -102,4 +105,10 @@ public final class Servlets {
.type(MediaType.APPLICATION_JSON) .type(MediaType.APPLICATION_JSON)
.build(); .build();
} }
public static String getRequestPayload(HttpServletRequest request) throws IOException {
StringWriter writer = new StringWriter();
IOUtils.copy(request.getInputStream(), writer);
return writer.toString();
}
} }
#
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
######### Graph Database Configs #########
# Graph implementation
#metadata.graph.blueprints.graph=com.thinkaurelius.titan.core.TitanFactory
# Graph Storage
metadata.graph.storage.backend=berkeleyje
metadata.graph.storage.directory=target/data/berkeley
# Graph Search Index
metadata.graph.index.search.backend=elasticsearch
metadata.graph.index.search.directory=target/data/es
metadata.graph.index.search.elasticsearch.client-only=false
metadata.graph.index.search.elasticsearch.local-mode=true
######### Graph Database Configs #########
######### Security Properties #########
# SSL config
metadata.enableTLS=false
######### Security Properties #########
...@@ -40,9 +40,6 @@ ...@@ -40,9 +40,6 @@
</filter-mapping> </filter-mapping>
<listener> <listener>
<listener-class>org.apache.hadoop.metadata.web.listeners.ApplicationStartupListener</listener-class>
</listener>
<listener>
<listener-class>org.apache.hadoop.metadata.web.listeners.GuiceServletConfig</listener-class> <listener-class>org.apache.hadoop.metadata.web.listeners.GuiceServletConfig</listener-class>
</listener> </listener>
</web-app> </web-app>
package org.apache.hadoop.metadata.web.resources; package org.apache.hadoop.metadata.web.resources;
import com.google.common.collect.ImmutableList;
import com.sun.jersey.api.client.Client; import com.sun.jersey.api.client.Client;
import com.sun.jersey.api.client.WebResource; import com.sun.jersey.api.client.WebResource;
import com.sun.jersey.api.client.config.DefaultClientConfig; import com.sun.jersey.api.client.config.DefaultClientConfig;
import org.apache.hadoop.metadata.types.AttributeDefinition;
import org.apache.hadoop.metadata.types.ClassType;
import org.apache.hadoop.metadata.types.HierarchicalTypeDefinition;
import org.apache.hadoop.metadata.types.IDataType;
import org.apache.hadoop.metadata.types.Multiplicity;
import org.apache.hadoop.metadata.types.TraitType;
import org.testng.annotations.BeforeClass; import org.testng.annotations.BeforeClass;
import javax.ws.rs.core.UriBuilder; import javax.ws.rs.core.UriBuilder;
...@@ -21,4 +28,22 @@ public class BaseResourceIT { ...@@ -21,4 +28,22 @@ public class BaseResourceIT {
service = client.resource(UriBuilder.fromUri(baseUrl).build()); service = client.resource(UriBuilder.fromUri(baseUrl).build());
} }
protected AttributeDefinition createRequiredAttrDef(String name,
IDataType dataType) {
return new AttributeDefinition(name, dataType.getName(),
Multiplicity.REQUIRED, false, null);
}
@SuppressWarnings("unchecked")
protected HierarchicalTypeDefinition<TraitType> createTraitTypeDef(
String name, ImmutableList<String> superTypes, AttributeDefinition... attrDefs) {
return new HierarchicalTypeDefinition(TraitType.class, name, superTypes, attrDefs);
}
@SuppressWarnings("unchecked")
protected HierarchicalTypeDefinition<ClassType> createClassTypeDef(
String name, ImmutableList<String> superTypes, AttributeDefinition... attrDefs) {
return new HierarchicalTypeDefinition(ClassType.class, name, superTypes, attrDefs);
}
} }
...@@ -38,6 +38,7 @@ import com.sun.jersey.api.client.WebResource; ...@@ -38,6 +38,7 @@ import com.sun.jersey.api.client.WebResource;
/** /**
* Integration tests for Entity Jersey Resource. * Integration tests for Entity Jersey Resource.
*/ */
@Test (enabled = false)
public class EntityJerseyResourceIT extends BaseResourceIT { public class EntityJerseyResourceIT extends BaseResourceIT {
private static final String ENTITY_NAME = "clicks-table"; private static final String ENTITY_NAME = "clicks-table";
...@@ -45,7 +46,7 @@ public class EntityJerseyResourceIT extends BaseResourceIT { ...@@ -45,7 +46,7 @@ public class EntityJerseyResourceIT extends BaseResourceIT {
private static final String DATABASE_NAME = "ads"; private static final String DATABASE_NAME = "ads";
private static final String TABLE_NAME = "clicks-table"; private static final String TABLE_NAME = "clicks-table";
@Test @Test (enabled = false)
public void testSubmitEntity() { public void testSubmitEntity() {
String entityStream = getTestEntityJSON(); String entityStream = getTestEntityJSON();
JsonParser parser = new JsonParser(); JsonParser parser = new JsonParser();
...@@ -73,7 +74,7 @@ public class EntityJerseyResourceIT extends BaseResourceIT { ...@@ -73,7 +74,7 @@ public class EntityJerseyResourceIT extends BaseResourceIT {
} }
} }
@Test (dependsOnMethods = "testSubmitEntity") @Test (dependsOnMethods = "testSubmitEntity", enabled = false)
public void testGetEntityDefinition() { public void testGetEntityDefinition() {
WebResource resource = service WebResource resource = service
.path("api/metadata/entities/definition") .path("api/metadata/entities/definition")
...@@ -98,7 +99,7 @@ public class EntityJerseyResourceIT extends BaseResourceIT { ...@@ -98,7 +99,7 @@ public class EntityJerseyResourceIT extends BaseResourceIT {
return JSONValue.toJSONString(props); return JSONValue.toJSONString(props);
} }
@Test @Test (enabled = false)
public void testGetInvalidEntityDefinition() { public void testGetInvalidEntityDefinition() {
WebResource resource = service WebResource resource = service
.path("api/metadata/entities/definition") .path("api/metadata/entities/definition")
...@@ -114,7 +115,7 @@ public class EntityJerseyResourceIT extends BaseResourceIT { ...@@ -114,7 +115,7 @@ public class EntityJerseyResourceIT extends BaseResourceIT {
System.out.println("response = " + response); System.out.println("response = " + response);
} }
@Test (dependsOnMethods = "testSubmitEntity") @Test (dependsOnMethods = "testSubmitEntity", enabled = false)
public void testGetEntityList() { public void testGetEntityList() {
ClientResponse clientResponse = service ClientResponse clientResponse = service
.path("api/metadata/entities/list/") .path("api/metadata/entities/list/")
......
...@@ -12,6 +12,7 @@ import javax.ws.rs.core.Response; ...@@ -12,6 +12,7 @@ import javax.ws.rs.core.Response;
/** /**
* Integration tests for Rexster Graph Jersey Resource. * Integration tests for Rexster Graph Jersey Resource.
*/ */
@Test
public class RexsterGraphJerseyResourceIT extends BaseResourceIT { public class RexsterGraphJerseyResourceIT extends BaseResourceIT {
@Test (enabled = false) @Test (enabled = false)
...@@ -31,7 +32,6 @@ public class RexsterGraphJerseyResourceIT extends BaseResourceIT { ...@@ -31,7 +32,6 @@ public class RexsterGraphJerseyResourceIT extends BaseResourceIT {
Assert.assertNotNull(response); Assert.assertNotNull(response);
} }
@Test
public void testGetVertexWithInvalidId() throws Exception { public void testGetVertexWithInvalidId() throws Exception {
WebResource resource = service WebResource resource = service
.path("api/metadata/graph/vertices/blah"); .path("api/metadata/graph/vertices/blah");
...@@ -43,22 +43,18 @@ public class RexsterGraphJerseyResourceIT extends BaseResourceIT { ...@@ -43,22 +43,18 @@ public class RexsterGraphJerseyResourceIT extends BaseResourceIT {
Assert.assertEquals(clientResponse.getStatus(), Response.Status.NOT_FOUND.getStatusCode()); Assert.assertEquals(clientResponse.getStatus(), Response.Status.NOT_FOUND.getStatusCode());
} }
@Test
public void testGetVertexProperties() throws Exception { public void testGetVertexProperties() throws Exception {
} }
@Test
public void testGetVertices() throws Exception { public void testGetVertices() throws Exception {
} }
@Test
public void testGetVertexEdges() throws Exception { public void testGetVertexEdges() throws Exception {
} }
@Test
public void testGetEdge() throws Exception { public void testGetEdge() throws Exception {
} }
......
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.metadata.web.resources;
import com.google.common.collect.ImmutableList;
import com.sun.jersey.api.client.ClientResponse;
import com.sun.jersey.api.client.WebResource;
import org.apache.hadoop.metadata.MetadataException;
import org.apache.hadoop.metadata.json.TypesSerialization;
import org.apache.hadoop.metadata.types.AttributeDefinition;
import org.apache.hadoop.metadata.types.ClassType;
import org.apache.hadoop.metadata.types.DataTypes;
import org.apache.hadoop.metadata.types.HierarchicalTypeDefinition;
import org.apache.hadoop.metadata.types.Multiplicity;
import org.apache.hadoop.metadata.types.StructTypeDefinition;
import org.apache.hadoop.metadata.types.TraitType;
import org.apache.hadoop.metadata.types.TypeSystem;
import org.testng.Assert;
import org.testng.annotations.AfterClass;
import org.testng.annotations.BeforeClass;
import org.testng.annotations.Test;
import javax.ws.rs.HttpMethod;
import javax.ws.rs.core.MediaType;
import javax.ws.rs.core.Response;
import java.util.ArrayList;
import java.util.List;
/**
* Integration test for types jersey resource.
*/
public class TypesJerseyResourceIT extends BaseResourceIT {
private TypeSystem typeSystem;
private List<HierarchicalTypeDefinition> typeDefinitions;
@BeforeClass
public void setUp() throws Exception {
super.setUp();
typeSystem = new TypeSystem();
typeDefinitions = createHiveTypes();
}
@AfterClass
public void tearDown() throws Exception {
typeDefinitions.clear();
}
@Test
public void testSubmit() throws Exception {
for (HierarchicalTypeDefinition typeDefinition : typeDefinitions) {
String typesAsJSON = TypesSerialization.toJson(
typeSystem, typeDefinition.typeName);
System.out.println("typesAsJSON = " + typesAsJSON);
WebResource resource = service
.path("api/metadata/types/submit")
.path(typeDefinition.typeName);
ClientResponse clientResponse = resource
.accept(MediaType.APPLICATION_JSON)
.type(MediaType.APPLICATION_JSON)
.method(HttpMethod.POST, ClientResponse.class, typesAsJSON);
Assert.assertEquals(clientResponse.getStatus(), Response.Status.OK.getStatusCode());
String response = clientResponse.getEntity(String.class);
Assert.assertNotNull(response);
}
}
@Test
public void testGetDefinition() throws Exception {
}
@Test
public void testGetTypeNames() throws Exception {
}
private List<HierarchicalTypeDefinition> createHiveTypes() throws MetadataException {
ArrayList<HierarchicalTypeDefinition> typeDefinitions = new ArrayList<>();
HierarchicalTypeDefinition<ClassType> databaseTypeDefinition =
createClassTypeDef("database", ImmutableList.<String>of(),
createRequiredAttrDef("name", DataTypes.STRING_TYPE),
createRequiredAttrDef("description", DataTypes.STRING_TYPE));
typeDefinitions.add(databaseTypeDefinition);
HierarchicalTypeDefinition<ClassType> tableTypeDefinition = createClassTypeDef(
"table",
ImmutableList.<String>of(),
createRequiredAttrDef("name", DataTypes.STRING_TYPE),
createRequiredAttrDef("description", DataTypes.STRING_TYPE),
createRequiredAttrDef("type", DataTypes.STRING_TYPE),
new AttributeDefinition("database",
"database", Multiplicity.REQUIRED, false, "database"));
typeDefinitions.add(tableTypeDefinition);
HierarchicalTypeDefinition<TraitType> fetlTypeDefinition = createTraitTypeDef(
"fetl",
ImmutableList.<String>of(),
createRequiredAttrDef("level", DataTypes.INT_TYPE));
typeDefinitions.add(fetlTypeDefinition);
typeSystem.defineTypes(
ImmutableList.<StructTypeDefinition>of(),
ImmutableList.of(fetlTypeDefinition),
ImmutableList.of(databaseTypeDefinition, tableTypeDefinition));
return typeDefinitions;
}
}
\ No newline at end of file
Markdown is supported
0% or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or to comment