Commit 34cdef44 by Dan Markwat

Merge remote-tracking branch 'origin/guice-enabled'

Conflicts:
    repository/pom.xml
    repository/src/main/java/org/apache/hadoop/metadata/services/GraphBackedMetadataRepository.java
    repository/src/main/java/org/apache/hadoop/metadata/services/MetadataRepository.java
    repository/src/main/java/org/apache/hadoop/metadata/services/TitanGraphService.java
    repository/src/test/java/org/apache/hadoop/metadata/services/GraphBackedMetadataRepositoryTest.java
    webapp/src/main/java/org/apache/hadoop/metadata/web/resources/EntityResource.java
    webapp/src/main/resources/application.properties
    webapp/src/test/java/org/apache/hadoop/metadata/GraphRepositoryServiceIT.java
parents faf0b48b 96350bda
@@ -27,13 +27,6 @@ import java.io.IOException;
public interface Service extends Closeable {

    /**
-     * Name of the service.
-     *
-     * @return name of the service
-     */
-    String getName();
-
-    /**
     * Starts the service. This method blocks until the service has completely started.
     *
     * @throws Exception
......
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.metadata.service;
import org.apache.commons.configuration.ConfigurationException;
import org.apache.commons.configuration.PropertiesConfiguration;
import org.apache.hadoop.metadata.MetadataException;
import org.apache.hadoop.metadata.util.ReflectionUtils;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
* Initializer used at startup to bring up all the Metadata startup
* services.
*/
public class ServiceInitializer {
private static final Logger LOG = LoggerFactory
.getLogger(ServiceInitializer.class);
private final Services services = Services.get();
// default property file name/path
private static final String DEFAULT_CONFIG_PATH = "application.properties";
// system property referenced by this class to extract user-overridden
// properties file
public static final String PROPERTIES_SYS_PROP = "metadata.properties";
// Path to the properties file (must be on the classpath for
// PropertiesConfiguration to work)
private final String propertyPath;
/**
* Default constructor. Use the metadata.properties System property to
* determine the property file name.
*/
public ServiceInitializer() {
propertyPath = System.getProperty(PROPERTIES_SYS_PROP,
DEFAULT_CONFIG_PATH);
}
/**
* Create a ServiceInitializer, specifying the properties file filename
* explicitly
*
* @param propPath
* the filename of the properties file with the service
* initializer information
*/
public ServiceInitializer(String propPath) {
propertyPath = propPath;
}
/**
* Get the configuration properties for the ServiceInitializer
*
* @return the properties configuration loaded from the configured property path
* @throws ConfigurationException
*/
public PropertiesConfiguration getConfiguration()
throws ConfigurationException {
return new PropertiesConfiguration(propertyPath);
}
/**
* Initialize the services specified by the application.services property
*
* @throws MetadataException
*/
public void initialize() throws MetadataException {
/*
* TODO - determine whether this service model is the right model;
* Inter-service dependencies can wreak havoc using the current model
*/
String[] serviceClassNames;
LOG.info("Loading services using properties file: {}", propertyPath);
try {
PropertiesConfiguration configuration = getConfiguration();
serviceClassNames = configuration
.getStringArray("application.services");
} catch (ConfigurationException e) {
throw new RuntimeException("unable to get server properties");
}
for (String serviceClassName : serviceClassNames) {
serviceClassName = serviceClassName.trim();
if (serviceClassName.isEmpty()) {
continue;
}
Service service = ReflectionUtils
.getInstanceByClassName(serviceClassName);
services.register(service);
LOG.info("Initializing service: {}", serviceClassName);
try {
service.start();
} catch (Throwable t) {
LOG.error("Failed to initialize service {}", serviceClassName,
t);
throw new MetadataException(t);
}
LOG.info("Service initialized: {}", serviceClassName);
}
}
public void destroy() throws MetadataException {
for (Service service : services) {
LOG.info("Destroying service: {}", service.getClass().getName());
try {
service.stop();
} catch (Throwable t) {
LOG.error("Failed to destroy service {}", service.getClass()
.getName(), t);
throw new MetadataException(t);
}
LOG.info("Service destroyed: {}", service.getClass().getName());
}
}
}
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.metadata.service;
import org.apache.commons.configuration.ConfigurationException;
import org.apache.commons.configuration.PropertiesConfiguration;
import org.apache.hadoop.metadata.MetadataException;
import org.apache.hadoop.metadata.util.ReflectionUtils;
import java.util.Iterator;
import java.util.LinkedHashMap;
import java.util.Map;
import java.util.NoSuchElementException;
/**
* Repository of services initialized at startup.
*/
public final class Services implements Iterable<Service> {
private static final Services INSTANCE = new Services();
private Services() {
}
public static Services get() {
return INSTANCE;
}
private final Map<String, Service> services =
new LinkedHashMap<String, Service>();
public synchronized void register(Service service) throws MetadataException {
if (services.containsKey(service.getName())) {
throw new MetadataException("Service " + service.getName() + " already registered");
} else {
services.put(service.getName(), service);
}
}
@SuppressWarnings("unchecked")
public <T extends Service> T getService(String serviceName) {
if (services.containsKey(serviceName)) {
return (T) services.get(serviceName);
} else {
throw new NoSuchElementException(
"Service " + serviceName + " not registered with registry");
}
}
public boolean isRegistered(String serviceName) {
return services.containsKey(serviceName);
}
@Override
public Iterator<Service> iterator() {
return services.values().iterator();
}
public Service init(String serviceName) throws MetadataException {
if (isRegistered(serviceName)) {
throw new MetadataException("Service is already initialized " + serviceName);
}
String serviceClassName;
try {
PropertiesConfiguration configuration =
new PropertiesConfiguration("application.properties");
serviceClassName = configuration.getString(serviceName + ".impl");
} catch (ConfigurationException e) {
throw new MetadataException("unable to get server properties");
}
Service service = ReflectionUtils.getInstanceByClassName(serviceClassName);
register(service);
return service;
}
public void reset() {
services.clear();
}
}
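For orientation, a minimal usage sketch of the Services registry above (illustrative only, not part of this commit); the class name ServicesRegistrySketch is hypothetical, while Services, Service, and the TestService defined further below come from this change.
package org.apache.hadoop.metadata.service;
import org.apache.hadoop.metadata.MetadataException;
public class ServicesRegistrySketch {
    public static void main(String[] args) throws MetadataException {
        Services services = Services.get();
        // register() keys each service by getName(); a duplicate name raises MetadataException
        services.register(new TestService());
        // lookups are by name, with an unchecked cast to the requested type
        TestService registered = services.getService(TestService.NAME);
        System.out.println("registered: " + registered.getName());
        // iteration follows registration order (LinkedHashMap backing)
        for (Service service : services) {
            System.out.println("running: " + service.getClass().getName());
        }
        // reset() clears the registry; used by tests between runs
        services.reset();
    }
}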
package org.apache.hadoop.metadata.service;
import org.testng.Assert;
import org.testng.annotations.AfterClass;
import org.testng.annotations.BeforeClass;
import org.testng.annotations.Test;
/**
* Unit test for the Service Initializer.
*
* Test functionality to allow loading of different property files.
*/
public class ServiceInitializerTest {
private final String propertiesFileName = "test.application.properties";
private ServiceInitializer sinit;
@BeforeClass
public void setUp() throws Exception {
// setup for the test properties file
System.setProperty(ServiceInitializer.PROPERTIES_SYS_PROP,
propertiesFileName);
sinit = new ServiceInitializer();
}
@AfterClass
public void tearDown() throws Exception {
// test destruction of the Services - absence of exceptions is treated as success
sinit.destroy();
}
@Test
public void testPropsAreSet() throws Exception {
Assert.assertEquals(
sinit.getConfiguration().getString(
"application.services"),
TestService.NAME);
}
@Test
public void testInitialize() throws Exception {
// test the initialization of the initializer
// absence of exceptions is treated as success
sinit.initialize();
}
}
package org.apache.hadoop.metadata.service;
import java.io.IOException;
public class TestService implements Service {
public static final String NAME = TestService.class.getName();
@Override
public String getName() {
return NAME;
}
@Override
public void start() throws Exception {
}
@Override
public void stop() {
}
@Override
public void close() throws IOException {
}
}
application.services=org.apache.hadoop.metadata.service.TestService
\ No newline at end of file
<?xml version="1.0" encoding="UTF-8"?>
-<!--
-  ~ Licensed to the Apache Software Foundation (ASF) under one
-  ~ or more contributor license agreements. See the NOTICE file
-  ~ distributed with this work for additional information
-  ~ regarding copyright ownership. The ASF licenses this file
-  ~ to you under the Apache License, Version 2.0 (the
-  ~ "License"); you may not use this file except in compliance
-  ~ with the License. You may obtain a copy of the License at
-  ~
-  ~    http://www.apache.org/licenses/LICENSE-2.0
-  ~
-  ~ Unless required by applicable law or agreed to in writing, software
-  ~ distributed under the License is distributed on an "AS IS" BASIS,
-  ~ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-  ~ See the License for the specific language governing permissions and
-  ~ limitations under the License.
-  -->
+<!-- ~ Licensed to the Apache Software Foundation (ASF) under one ~ or more
+    contributor license agreements. See the NOTICE file ~ distributed with this
+    work for additional information ~ regarding copyright ownership. The ASF
+    licenses this file ~ to you under the Apache License, Version 2.0 (the ~
+    "License"); you may not use this file except in compliance ~ with the License.
+    You may obtain a copy of the License at ~ ~ http://www.apache.org/licenses/LICENSE-2.0
+    ~ ~ Unless required by applicable law or agreed to in writing, software ~
+    distributed under the License is distributed on an "AS IS" BASIS, ~ WITHOUT
+    WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. ~ See the
+    License for the specific language governing permissions and ~ limitations
+    ~ under the License. -->

-<project xmlns="http://maven.apache.org/POM/4.0.0"
-         xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
+<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
    xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/maven-v4_0_0.xsd">
    <modelVersion>4.0.0</modelVersion>
@@ -93,6 +86,12 @@
        </dependency>

        <dependency>
+            <groupId>com.google.inject.extensions</groupId>
+            <artifactId>guice-throwingproviders</artifactId>
+            <version>3.0</version>
+        </dependency>
+
+        <dependency>
            <groupId>org.codehaus.jettison</groupId>
            <artifactId>jettison</artifactId>
        </dependency>
......
@@ -24,12 +24,49 @@
 */
package org.apache.hadoop.metadata;

+import org.apache.hadoop.metadata.services.GraphBackedMetadataRepository;
+import org.apache.hadoop.metadata.services.GraphProvider;
+import org.apache.hadoop.metadata.services.GraphService;
+import org.apache.hadoop.metadata.services.GraphServiceConfigurator;
+import org.apache.hadoop.metadata.services.MetadataRepository;
+import org.apache.hadoop.metadata.services.TitanGraphProvider;
+import org.apache.hadoop.metadata.storage.IRepository;
+import org.apache.hadoop.metadata.storage.memory.MemRepository;
+
+import com.google.inject.Scopes;
+import com.google.inject.throwingproviders.ThrowingProviderBinder;
+import com.thinkaurelius.titan.core.TitanGraph;
+
/**
 * Guice module for Repository module.
 */
public class RepositoryMetadataModule extends com.google.inject.AbstractModule {

+    // Graph Service implementation class
+    private Class<? extends GraphService> graphServiceClass;
+
+    // MetadataRepositoryService implementation class
+    private Class<? extends MetadataRepository> metadataRepoClass;
+
+    public RepositoryMetadataModule() {
+        GraphServiceConfigurator gsp = new GraphServiceConfigurator();
+
+        // get the impl classes for the repo and the graph service
+        this.graphServiceClass = gsp.getImplClass();
+        this.metadataRepoClass = GraphBackedMetadataRepository.class;
+    }
+
    protected void configure() {
-        // add configuration logic here
+        // special wiring for Titan Graph
+        ThrowingProviderBinder.create(binder())
+                .bind(GraphProvider.class, TitanGraph.class)
+                .to(TitanGraphProvider.class)
+                .in(Scopes.SINGLETON);
+
+        // allow for dynamic binding of the metadata repo & graph service
+
+        // bind the MetadataRepositoryService interface to an implementation
+        bind(MetadataRepository.class).to(metadataRepoClass);
+
+        // bind the GraphService interface to an implementation
+        bind(GraphService.class).to(graphServiceClass);
    }
}
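For orientation, a minimal bootstrap sketch (illustrative only, not part of this commit) showing how the module above is typically consumed; the class name RepositoryBootstrapSketch is hypothetical, and the explicit start() call mirrors what GuiceServletConfig does further below.
package org.apache.hadoop.metadata;
import org.apache.hadoop.metadata.services.GraphService;
import org.apache.hadoop.metadata.services.MetadataRepository;
import com.google.inject.Guice;
import com.google.inject.Injector;
public class RepositoryBootstrapSketch {
    public static void main(String[] args) throws Exception {
        // Create the injector once; the module binds MetadataRepository to
        // GraphBackedMetadataRepository and GraphService to the configured implementation.
        Injector injector = Guice.createInjector(new RepositoryMetadataModule());
        // The graph service still has a start() lifecycle call after injection.
        GraphService graphService = injector.getInstance(GraphService.class);
        graphService.start();
        MetadataRepository repository = injector.getInstance(MetadataRepository.class);
        System.out.println("Bound repository: " + repository.getClass().getName());
    }
}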
@@ -18,25 +18,31 @@
package org.apache.hadoop.metadata.services;

+import java.io.IOException;
+import java.util.List;
+
+import javax.inject.Inject;
+
import org.apache.hadoop.metadata.ITypedReferenceableInstance;
import org.apache.hadoop.metadata.MetadataException;
import org.apache.hadoop.metadata.json.Serialization$;
-import org.apache.hadoop.metadata.service.Services;
import org.apache.hadoop.metadata.types.TypeSystem;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

-import java.io.IOException;
-import java.util.List;
-
public class DefaultMetadataService implements MetadataService {

    private static final Logger LOG =
            LoggerFactory.getLogger(DefaultMetadataService.class);

-    public static final String NAME = DefaultMetadataService.class.getSimpleName();
-
-    private TypeSystem typeSystem;
-    private MetadataRepository repositoryService;
+    private final TypeSystem typeSystem;
+    private final MetadataRepository repository;
+
+    @Inject
+    DefaultMetadataService(MetadataRepository repository) throws MetadataException {
+        this.typeSystem = new TypeSystem();
+        this.repository = repository;
+    }

    /**
     * Creates a new type based on the type system to enable adding
@@ -84,7 +90,7 @@ public class DefaultMetadataService implements MetadataService {
                               String entityDefinition) throws MetadataException {
        ITypedReferenceableInstance entityInstance =
                Serialization$.MODULE$.fromJson(entityDefinition);
-        return repositoryService.createEntity(entityInstance, entityType);
+        return repository.createEntity(entityInstance, entityType);
    }

    /**
@@ -124,35 +130,12 @@ public class DefaultMetadataService implements MetadataService {
    }

    /**
-     * Name of the service.
-     *
-     * @return name of the service
-     */
-    @Override
-    public String getName() {
-        return NAME;
-    }
-
-    /**
     * Starts the service. This method blocks until the service has completely started.
     *
     * @throws Exception
     */
    @Override
    public void start() throws Exception {
-        LOG.info("Initializing the Metadata service");
-        if (Services.get().isRegistered(TitanGraphService.NAME)) {
-            DefaultTypesService typesService = Services.get().getService(DefaultTypesService.NAME);
-            typeSystem = typesService.getTypeSystem();
-        } else {
-            throw new RuntimeException("Types service is not initialized");
-        }
-
-        if (Services.get().isRegistered(TitanGraphService.NAME)) {
-            repositoryService = Services.get().getService(GraphBackedMetadataRepository.NAME);
-        } else {
-            throw new RuntimeException("repository service is not initialized");
-        }
    }

    /**
@@ -160,8 +143,6 @@ public class DefaultMetadataService implements MetadataService {
     */
    @Override
    public void stop() {
-        // do nothing
-        repositoryService = null;
    }

    /**
......
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.metadata.services;
import org.apache.hadoop.metadata.types.TypeSystem;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.io.IOException;
public class DefaultTypesService implements TypesService {
private static final Logger LOG =
LoggerFactory.getLogger(DefaultTypesService.class);
public static final String NAME = DefaultTypesService.class.getSimpleName();
private TypeSystem typeSystem;
@Override
public TypeSystem getTypeSystem() {
assert typeSystem != null;
return typeSystem;
}
/**
* Name of the service.
*
* @return name of the service
*/
@Override
public String getName() {
return NAME;
}
/**
* Starts the service. This method blocks until the service has completely started.
*
* @throws Exception
*/
@Override
public void start() throws Exception {
LOG.info("Initializing the type system");
typeSystem = new TypeSystem();
}
/**
* Stops the service. This method blocks until the service has completely shut down.
*/
@Override
public void stop() {
}
/**
* A version of stop() that is designed to be usable in Java7 closure
* clauses.
* Implementation classes MUST relay this directly to {@link #stop()}
*
* @throws java.io.IOException never
* @throws RuntimeException on any failure during the stop operation
*/
@Override
public void close() throws IOException {
}
}
@@ -18,16 +18,23 @@
package org.apache.hadoop.metadata.services;

-import com.tinkerpop.blueprints.Direction;
-import com.tinkerpop.blueprints.Edge;
-import com.tinkerpop.blueprints.TransactionalGraph;
-import com.tinkerpop.blueprints.Vertex;
+import java.io.IOException;
+import java.util.ArrayList;
+import java.util.Collections;
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
+import java.util.UUID;
+import java.util.concurrent.ConcurrentHashMap;
+import java.util.concurrent.atomic.AtomicInteger;
+
+import javax.inject.Inject;
+
import org.apache.hadoop.metadata.IReferenceableInstance;
import org.apache.hadoop.metadata.ITypedInstance;
import org.apache.hadoop.metadata.ITypedReferenceableInstance;
import org.apache.hadoop.metadata.ITypedStruct;
import org.apache.hadoop.metadata.MetadataException;
-import org.apache.hadoop.metadata.service.Services;
import org.apache.hadoop.metadata.storage.Id;
import org.apache.hadoop.metadata.storage.MapIds;
import org.apache.hadoop.metadata.storage.RepositoryException;
@@ -41,18 +48,14 @@ import org.apache.hadoop.metadata.types.TypeSystem;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

-import java.io.IOException;
-import java.util.ArrayList;
-import java.util.Collections;
-import java.util.HashMap;
-import java.util.List;
-import java.util.Map;
-import java.util.UUID;
-import java.util.concurrent.ConcurrentHashMap;
-import java.util.concurrent.atomic.AtomicInteger;
+import com.tinkerpop.blueprints.Direction;
+import com.tinkerpop.blueprints.Edge;
+import com.tinkerpop.blueprints.TransactionalGraph;
+import com.tinkerpop.blueprints.Vertex;

/**
- * An implementation backed by Titan Graph DB.
+ * An implementation backed by a Graph database provided
+ * as a Graph Service.
 */
public class GraphBackedMetadataRepository implements MetadataRepository {

@@ -69,19 +72,16 @@ public class GraphBackedMetadataRepository implements MetadataRepository {

    private final AtomicInteger ID_SEQ = new AtomicInteger(0);

    // private ConcurrentHashMap<String, ITypedReferenceableInstance> types;
-    private ConcurrentHashMap<String, ITypedReferenceableInstance> instances;
+    private final ConcurrentHashMap<String, ITypedReferenceableInstance> instances;

-    private GraphService graphService;
-    private TypeSystem typeSystem;
+    private final GraphService graphService;
+    private final TypeSystem typeSystem;

-    /**
-     * Name of the service.
-     *
-     * @return name of the service
-     */
-    @Override
-    public String getName() {
-        return NAME;
+    @Inject
+    GraphBackedMetadataRepository(GraphService graphService) throws MetadataException {
+        this.instances = new ConcurrentHashMap<>();
+        this.graphService = graphService;
+        this.typeSystem = new TypeSystem();
    }

    /**
@@ -91,20 +91,6 @@ public class GraphBackedMetadataRepository implements MetadataRepository {
     */
    @Override
    public void start() throws Exception {
-        if (Services.get().isRegistered(TitanGraphService.NAME)) {
-            graphService = Services.get().getService(TitanGraphService.NAME);
-        } else {
-            throw new RuntimeException("graph service is not initialized");
-        }
-
-        if (Services.get().isRegistered(DefaultTypesService.NAME)) {
-            DefaultTypesService typesService = Services.get().getService(DefaultTypesService.NAME);
-            typeSystem = typesService.getTypeSystem();
-        } else {
-            throw new RuntimeException("Types service is not initialized");
-        }
-
-        instances = new ConcurrentHashMap<>();
    }

    /**
@@ -112,8 +98,6 @@ public class GraphBackedMetadataRepository implements MetadataRepository {
     */
    @Override
    public void stop() {
-        // do nothing
-        graphService = null;
    }

    /**
@@ -129,16 +113,12 @@ public class GraphBackedMetadataRepository implements MetadataRepository {
        stop();
    }

-    private TransactionalGraph getTransactionalGraph() {
-        return graphService.getTransactionalGraph();
-    }
-
    @Override
    public String createEntity(IReferenceableInstance entity,
                               String entityType) throws RepositoryException {
        LOG.info("adding entity={} type={}", entity, entityType);

-        final TransactionalGraph transactionalGraph = getTransactionalGraph();
+        final TransactionalGraph transactionalGraph = graphService.getTransactionalGraph();
        try {
            // todo check if this is a duplicate
......
package org.apache.hadoop.metadata.services;
import org.apache.commons.configuration.ConfigurationException;
import com.google.inject.throwingproviders.CheckedProvider;
import com.tinkerpop.blueprints.Graph;
public interface GraphProvider<T extends Graph> extends CheckedProvider<T> {
@Override
T get() throws ConfigurationException;
}
@@ -18,12 +18,13 @@
package org.apache.hadoop.metadata.services;

+import java.util.Set;
+
+import org.apache.hadoop.metadata.service.Service;
+
import com.tinkerpop.blueprints.Graph;
import com.tinkerpop.blueprints.KeyIndexableGraph;
import com.tinkerpop.blueprints.TransactionalGraph;
-import org.apache.hadoop.metadata.service.Service;
-
-import java.util.Set;

/**
 * A blueprints based graph service.
......
package org.apache.hadoop.metadata.services;
import com.thinkaurelius.titan.core.TitanGraph;
public class GraphServiceConfigurator extends PropertyBasedConfigurator<GraphService> {
private static final String PROPERTY_NAME = "metadata.graph.impl.class";
private static final String DEFAULT_IMPL_CLASS = TitanGraph.class.getName();
private static final String CONFIG_PATH = "application.properties";
public GraphServiceConfigurator() {
super("metadata.graph.propertyName", "metadata.graph.defaultImplClass",
"metadata.graph.configurationPath", PROPERTY_NAME,
DEFAULT_IMPL_CLASS, CONFIG_PATH);
}
}
package org.apache.hadoop.metadata.services;
import java.util.Properties;
import org.apache.commons.configuration.Configuration;
import org.apache.commons.configuration.ConfigurationException;
import org.apache.commons.configuration.PropertiesConfiguration;
public abstract class PropertyBasedConfigurator<T> {
private final String propertyName;
private final String defaultImplClass;
private final String configurationPath;
PropertyBasedConfigurator(String propertyNameProp, String defaultImplClassProp,
String configurationPathProp, String propertyNameDefaultProp,
String defaultImplClassDefaultProp, String configPathDefaultProp) {
Properties props = System.getProperties();
this.propertyName = props.getProperty(propertyNameProp,
propertyNameDefaultProp);
this.defaultImplClass = props.getProperty(defaultImplClassProp,
defaultImplClassDefaultProp);
this.configurationPath = props.getProperty(configurationPathProp,
configPathDefaultProp);
}
PropertyBasedConfigurator(String propertyNameProp, String defaultImplClassProp,
String configurationPathProp) {
Properties props = System.getProperties();
this.propertyName = props.getProperty(propertyNameProp);
this.defaultImplClass = props.getProperty(defaultImplClassProp);
this.configurationPath = props.getProperty(configurationPathProp);
}
public String getPropertyName() {
return propertyName;
}
public String getDefaultImplClass() {
return defaultImplClass;
}
public String getConfigurationPath() {
return configurationPath;
}
public Configuration getConfiguration() {
String path = getConfigurationPath();
Configuration config = null;
try {
config = new PropertiesConfiguration(path);
} catch (ConfigurationException e) {
config = new PropertiesConfiguration();
}
return config;
}
public String getClassName() {
Configuration config = getConfiguration();
String propName = getPropertyName();
String defaultClass = getDefaultImplClass();
return config.getString(propName, defaultClass);
}
@SuppressWarnings("unchecked")
public Class<? extends T> getImplClass() {
String className = getClassName();
Class<? extends T> ret = null;
try {
ret = (Class<? extends T>) PropertyBasedConfigurator.class
.getClassLoader().loadClass(className);
} catch (ClassNotFoundException e) {
throw new RuntimeException(e);
}
return ret;
}
}
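For orientation, a small sketch (illustrative only, not part of this commit) of how a PropertyBasedConfigurator subclass resolves an implementation class; the main class and the properties file it points at are hypothetical, while GraphServiceConfigurator and the system-property key come from the code above.
package org.apache.hadoop.metadata.services;
public class ConfiguratorUsageSketch {
    public static void main(String[] args) {
        // Optional override: read the lookup keys from a different properties file on the classpath.
        System.setProperty("metadata.graph.configurationPath", "test.application.properties");
        GraphServiceConfigurator configurator = new GraphServiceConfigurator();
        // Reads metadata.graph.impl.class from the configured properties file,
        // falling back to the built-in default, and loads it via the class loader.
        Class<? extends GraphService> implClass = configurator.getImplClass();
        System.out.println("Resolved GraphService implementation: " + implClass.getName());
    }
}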
package org.apache.hadoop.metadata.services;
import javax.inject.Singleton;
import org.apache.commons.configuration.Configuration;
import org.apache.commons.configuration.ConfigurationException;
import org.apache.commons.configuration.PropertiesConfiguration;
import com.thinkaurelius.titan.core.TitanFactory;
import com.thinkaurelius.titan.core.TitanGraph;
public class TitanGraphProvider implements GraphProvider<TitanGraph> {
private static final String SYSTEM_PROP = "";
private static final String DEFAULT_PATH = "graph.properties";
private final String configPath;
public TitanGraphProvider() {
configPath = System.getProperties().getProperty(SYSTEM_PROP,
DEFAULT_PATH);
}
public Configuration getConfiguration() throws ConfigurationException {
return new PropertiesConfiguration(configPath);
}
@Override
@Singleton
public TitanGraph get() throws ConfigurationException {
TitanGraph graph = null;
Configuration config;
try {
config = getConfiguration();
} catch (ConfigurationException e) {
throw new RuntimeException(e);
}
graph = TitanFactory.open(config);
return graph;
}
}
@@ -18,7 +18,22 @@
package org.apache.hadoop.metadata.services;

-import com.thinkaurelius.titan.core.TitanFactory;
+import java.io.IOException;
+import java.util.ArrayList;
+import java.util.Arrays;
+import java.util.Iterator;
+import java.util.List;
+import java.util.Set;
+
+import javax.inject.Inject;
+import javax.inject.Singleton;
+
+import org.apache.commons.configuration.Configuration;
+import org.apache.commons.configuration.ConfigurationException;
+import org.apache.commons.configuration.PropertiesConfiguration;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
import com.thinkaurelius.titan.core.TitanGraph;
import com.thinkaurelius.titan.core.schema.TitanGraphIndex;
import com.thinkaurelius.titan.core.schema.TitanManagement;
@@ -28,90 +43,56 @@ import com.tinkerpop.blueprints.KeyIndexableGraph;
import com.tinkerpop.blueprints.TransactionalGraph;
import com.tinkerpop.blueprints.Vertex;

-import org.apache.commons.configuration.Configuration;
-import org.apache.commons.configuration.ConfigurationException;
-import org.apache.commons.configuration.PropertiesConfiguration;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-
-import java.io.IOException;
-import java.util.ArrayList;
-import java.util.Arrays;
-import java.util.Iterator;
-import java.util.List;
-import java.util.Set;
-
/**
 * Default implementation for Graph service backed by Titan.
 */
+@Singleton
public class TitanGraphService implements GraphService {

    private static final Logger LOG = LoggerFactory.getLogger(TitanGraphService.class);

-    public static final String NAME = TitanGraphService.class.getSimpleName();
-
    /**
     * Constant for the configuration property that indicates the prefix.
     */
-    private static final String METADATA_PREFIX = "metadata.graph.";
    private static final String INDEXER_PREFIX = "metadata.indexer.vertex.";

-    private static final List<String> acceptedTypes = Arrays.asList("String","Int","Long");
+    private static final List<String> acceptedTypes = Arrays.asList("String", "Int", "Long");

-    private TitanGraph titanGraph;
-    private Set<String> vertexIndexedKeys;
-    private Set<String> edgeIndexedKeys;
+    private final TitanGraph titanGraph;

    /**
-     * Name of the service.
+     * Initialize this service through injection with a custom Provider.
     *
-     * @return name of the service
+     * @param graph
+     * @throws ConfigurationException
     */
-    @Override
-    public String getName() {
-        return NAME;
+    @Inject
+    TitanGraphService(GraphProvider<TitanGraph> graph) throws ConfigurationException {
+        // TODO reimplement to save the Provider and initialize the graph inside the start() method
+        this.titanGraph = graph.get();
+        //start();
    }

    /**
-     * Starts the service. This method blocks until the service has completely started.
-     *
-     * @throws Exception
+     * Initializes this Service. The starting of Titan is handled by the Provider
+     * @throws ConfigurationException
     */
    @Override
-    public void start() throws Exception {
-        Configuration graphConfig = getConfiguration();
-        titanGraph = initializeGraphDB(graphConfig);
-
+    public void start() throws ConfigurationException {
        createIndicesForVertexKeys();
        // todo - create Edge Cardinality Constraints
        LOG.info("Initialized titanGraph db: {}", titanGraph);

-        vertexIndexedKeys = getIndexableGraph().getIndexedKeys(Vertex.class);
+        Set<String> vertexIndexedKeys = getVertexIndexedKeys();
        LOG.info("Init vertex property keys: {}", vertexIndexedKeys);

-        edgeIndexedKeys = getIndexableGraph().getIndexedKeys(Edge.class);
+        Set<String> edgeIndexedKeys = getEdgeIndexedKeys();
        LOG.info("Init edge property keys: {}", edgeIndexedKeys);
    }

-    private static Configuration getConfiguration() throws ConfigurationException {
-        PropertiesConfiguration configProperties =
-                new PropertiesConfiguration("application.properties");
-
-        Configuration graphConfig = new PropertiesConfiguration();
-        final Iterator<String> iterator = configProperties.getKeys();
-        while (iterator.hasNext()) {
-            String key = iterator.next();
-            if (key.startsWith(METADATA_PREFIX)) {
-                String value = (String) configProperties.getProperty(key);
-                key = key.substring(METADATA_PREFIX.length());
-                graphConfig.setProperty(key, value);
-            }
-        }
-
-        return graphConfig;
-    }
-
-    private static Configuration getConfiguration(String filename, String prefix) throws ConfigurationException {
-        PropertiesConfiguration configProperties =
-                new PropertiesConfiguration(filename);
+    private static Configuration getConfiguration(String filename, String prefix)
+            throws ConfigurationException {
+        PropertiesConfiguration configProperties = new PropertiesConfiguration(
+                filename);

        Configuration graphConfig = new PropertiesConfiguration();
@@ -128,13 +109,12 @@ public class TitanGraphService implements GraphService {
        return graphConfig;
    }

-    protected TitanGraph initializeGraphDB(Configuration graphConfig) {
-        LOG.info("Initializing titanGraph db");
-        return TitanFactory.open(graphConfig);
-    }
-
+    /**
+     * Initializes the indices for the graph.
+     * @throws ConfigurationException
+     */
+    // TODO move this functionality to the MetadataRepository?
    protected void createIndicesForVertexKeys() throws ConfigurationException {
        if (!titanGraph.getIndexedKeys(Vertex.class).isEmpty()) {
            LOG.info("Indexes already exist for titanGraph");
            return;
@@ -165,7 +145,8 @@ public class TitanGraphService implements GraphService {
        it = propList.iterator();
        while (it.hasNext()) {
-            // Pull the property name and index, so we can register the name and look up the type.
+            // Pull the property name and index, so we can register the name
+            // and look up the type.
            String prop = it.next().toString();
            String index = prop.substring(prop.lastIndexOf(".") + 1);
            String type = null;
@@ -173,19 +154,28 @@ public class TitanGraphService implements GraphService {

            // Look up the type for the specified property name.
            if (indexConfig.containsKey("property.type." + index)) {
-                type = indexConfig.getProperty("property.type." + index).toString();
+                type = indexConfig.getProperty("property.type." + index)
+                        .toString();
            } else {
-                throw new ConfigurationException("No type specified for property " + index + " in indexer.properties.");
+                throw new ConfigurationException(
+                        "No type specified for property " + index
+                                + " in indexer.properties.");
            }

            // Is the type submitted one of the approved ones?
            if (!acceptedTypes.contains(type)) {
-                throw new ConfigurationException("The type provided in indexer.properties for property " + prop + " is not supported. Supported types are: " + acceptedTypes.toString());
+                throw new ConfigurationException(
+                        "The type provided in indexer.properties for property "
+                                + prop
+                                + " is not supported. Supported types are: "
+                                + acceptedTypes.toString());
            }

            // Add the key.
-            LOG.info("Adding property: " + prop + " to index as type: " + type);
-            mgmt.addIndexKey(graphIndex,mgmt.makePropertyKey(prop).dataType(type.getClass()).make());
+            LOG.info("Adding property: " + prop + " to index as type: "
+                    + type);
+            mgmt.addIndexKey(graphIndex, mgmt.makePropertyKey(prop)
+                    .dataType(type.getClass()).make());
        }

@@ -195,7 +185,8 @@ public class TitanGraphService implements GraphService {
    }

    /**
-     * Stops the service. This method blocks until the service has completely shut down.
+     * Stops the service. This method blocks until the service has completely
+     * shut down.
     */
    @Override
    public void stop() {
@@ -206,11 +197,13 @@ public class TitanGraphService implements GraphService {

    /**
     * A version of stop() that is designed to be usable in Java7 closure
-     * clauses.
-     * Implementation classes MUST relay this directly to {@link #stop()}
+     * clauses. Implementation classes MUST relay this directly to
+     * {@link #stop()}
     *
-     * @throws java.io.IOException never
-     * @throws RuntimeException on any failure during the stop operation
+     * @throws java.io.IOException
+     *             never
+     * @throws RuntimeException
+     *             on any failure during the stop operation
     */
    @Override
    public void close() throws IOException {
@@ -238,11 +231,13 @@ public class TitanGraphService implements GraphService {

    @Override
    public Set<String> getVertexIndexedKeys() {
-        return vertexIndexedKeys;
+        // this must use the graph API instead of setting this value as a class member - it can change after creation
+        return getIndexableGraph().getIndexedKeys(Vertex.class);
    }

    @Override
    public Set<String> getEdgeIndexedKeys() {
-        return edgeIndexedKeys;
+        // this must use the graph API instead of setting this value as a class member - it can change after creation
+        return getIndexableGraph().getIndexedKeys(Edge.class);
    }
}
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.metadata.services;
import org.apache.hadoop.metadata.service.Service;
import org.apache.hadoop.metadata.types.TypeSystem;
public interface TypesService extends Service {
TypeSystem getTypeSystem();
}
package org.apache.hadoop.metadata;
import com.google.inject.Guice;
import com.google.inject.Injector;
import com.google.inject.Module;
public abstract class GuiceEnabledTestBase {
/*
* Guice.createInjector() takes your Modules, and returns a new Injector
* instance. Most applications will call this method exactly once, in their
* main() method.
*/
public final Injector injector;
GuiceEnabledTestBase() {
injector = Guice.createInjector();
}
GuiceEnabledTestBase(Module... modules) {
injector = Guice.createInjector(modules);
}
}
package org.apache.hadoop.metadata;
public abstract class RepositoryModuleBaseTest extends GuiceEnabledTestBase {
public RepositoryModuleBaseTest() {
super(new RepositoryMetadataModule());
}
}
package org.apache.hadoop.metadata;
import junit.framework.Assert;
import org.apache.hadoop.metadata.services.GraphService;
import org.testng.annotations.AfterClass;
import org.testng.annotations.BeforeClass;
import org.testng.annotations.Test;
/**
* Unit test for Guice injector service loading
*/
public class RepositoryServiceLoadingTest extends GuiceEnabledTestBase {
public RepositoryServiceLoadingTest() {
super(new RepositoryMetadataModule());
}
@BeforeClass
public void setUp() throws Exception {
}
@AfterClass
public void tearDown() throws Exception {
}
@Test
public void testGetGraphService() throws Exception {
/*
* Now that we've got the injector, we can build objects.
*/
GraphService gs = injector.getInstance(GraphService.class);
Assert.assertNotNull(gs);
}
}
package org.apache.hadoop.metadata.services;

-import com.google.common.collect.ImmutableList;
-import com.thinkaurelius.titan.core.TitanGraph;
-import com.tinkerpop.blueprints.Direction;
-import com.tinkerpop.blueprints.Edge;
-import com.tinkerpop.blueprints.Vertex;
+import java.util.List;
+
import org.apache.hadoop.metadata.ITypedReferenceableInstance;
import org.apache.hadoop.metadata.MetadataException;
-import org.apache.hadoop.metadata.MetadataService;
import org.apache.hadoop.metadata.Referenceable;
-import org.apache.hadoop.metadata.service.Services;
+import org.apache.hadoop.metadata.RepositoryModuleBaseTest;
+import org.apache.hadoop.metadata.storage.IRepository;
import org.apache.hadoop.metadata.storage.memory.MemRepository;
import org.apache.hadoop.metadata.types.AttributeDefinition;
import org.apache.hadoop.metadata.types.ClassType;
@@ -27,59 +24,44 @@ import org.testng.annotations.AfterClass;
import org.testng.annotations.BeforeClass;
import org.testng.annotations.Test;

-import java.util.List;
+import com.google.common.collect.ImmutableList;
+import com.thinkaurelius.titan.core.TitanGraph;
+import com.tinkerpop.blueprints.Direction;
+import com.tinkerpop.blueprints.Edge;
+import com.tinkerpop.blueprints.Vertex;

-public class GraphBackedMetadataRepositoryTest {
+public class GraphBackedMetadataRepositoryTest extends RepositoryModuleBaseTest {

    private static final String ENTITY_TYPE = "hive-table";
    private TitanGraphService titanGraphService;
    private GraphBackedMetadataRepository repositoryService;
-    protected org.apache.hadoop.metadata.MetadataService ms;
+    private IRepository repo;
+    private TypeSystem ts;
    private String guid;

    @BeforeClass
    public void setUp() throws Exception {
-        titanGraphService = new TitanGraphService();
+        titanGraphService = super.injector.getInstance(TitanGraphService.class);
        titanGraphService.start();
-        Services.get().register(titanGraphService);
-
-        DefaultTypesService typesService = new DefaultTypesService();
-        typesService.start();
-        Services.get().register(typesService);
-        TypeSystem ts = typesService.getTypeSystem();

-        repositoryService = new GraphBackedMetadataRepository();
+        repositoryService = super.injector.getInstance(GraphBackedMetadataRepository.class);
        repositoryService.start();
-        Services.get().register(repositoryService);

-        // todo - only used for types
-        MemRepository mr = new MemRepository(ts);
-        ms = new org.apache.hadoop.metadata.MetadataService(mr, ts);
-        MetadataService.setCurrentService(ms);
+        ts = new TypeSystem();
+        repo = new MemRepository(ts);

        defineDeptEmployeeTypes(ts);
    }

    @AfterClass
    public void tearDown() throws Exception {
-        Services.get().getService(GraphBackedMetadataRepository.NAME).close();
-        Services.get().getService(TitanGraphService.NAME).close();
-        Services.get().reset();
-    }
-
-    @Test
-    public void testGetName() throws Exception {
-        Assert.assertEquals(GraphBackedMetadataRepository.NAME,
-                GraphBackedMetadataRepository.class.getSimpleName());
-        Assert.assertEquals(repositoryService.getName(), GraphBackedMetadataRepository.NAME);
    }

    @Test
    public void testSubmitEntity() throws Exception {
-        TypeSystem typeSystem = MetadataService.getCurrentService().getTypeSystem();
-        Referenceable hrDept = createDeptEg1(typeSystem);
-        ClassType deptType = typeSystem.getDataType(ClassType.class, "Department");
+        Referenceable hrDept = createDeptEg1(ts);
+        ClassType deptType = ts.getDataType(ClassType.class, "Department");
        ITypedReferenceableInstance hrDept2 = deptType.convert(hrDept, Multiplicity.REQUIRED);

        guid = repositoryService.createEntity(hrDept2, ENTITY_TYPE);
@@ -118,20 +100,6 @@ public class GraphBackedMetadataRepositoryTest {
        Assert.assertEquals(entityList.size(), 0); // as this is not implemented yet
    }

-    @Test(expectedExceptions = RuntimeException.class)
-    public void testStartWithOutGraphServiceRegistration() throws Exception {
-        try {
-            Services.get().reset();
-            GraphBackedMetadataRepository repositoryService = new
-                    GraphBackedMetadataRepository();
-            repositoryService.start();
-
-            Assert.fail("This should have thrown an exception");
-        } finally {
-            Services.get().register(titanGraphService);
-            Services.get().register(repositoryService);
-        }
-    }
-
    /*
     * Class Hierarchy is:
     *   Department(name : String, employees : Array[Person])
@@ -184,7 +152,7 @@ public class GraphBackedMetadataRepositoryTest {
                ts.getDataType(ClassType.class, "Manager")
        );

-        ms.getRepository().defineTypes(types);
+        repo.defineTypes(types);
    }

    protected Referenceable createDeptEg1(TypeSystem ts) throws MetadataException {
......
package org.apache.hadoop.metadata.services;

+import org.apache.hadoop.metadata.RepositoryModuleBaseTest;
import org.testng.Assert;
import org.testng.annotations.AfterClass;
import org.testng.annotations.BeforeClass;
@@ -8,13 +9,14 @@ import org.testng.annotations.Test;

/**
 * Unit test for TitanGraphService.
 */
-public class TitanGraphServiceTest {
+public class TitanGraphServiceTest extends RepositoryModuleBaseTest {

    private TitanGraphService titanGraphService;

    @BeforeClass
    public void setUp() throws Exception {
-        titanGraphService = new TitanGraphService();
+        titanGraphService = super.injector.getInstance(TitanGraphService.class);
+        //titanGraphService = new TitanGraphService();
        titanGraphService.start();
    }

@@ -24,12 +26,6 @@ public class TitanGraphServiceTest {
    }

    @Test
-    public void testGetName() throws Exception {
-        Assert.assertEquals(TitanGraphService.NAME, TitanGraphService.class.getSimpleName());
-        Assert.assertEquals(titanGraphService.getName(), TitanGraphService.NAME);
-    }
-
-    @Test
    public void testStart() throws Exception {
        Assert.assertNotNull(titanGraphService.getBlueprintsGraph());
    }
......
@@ -16,21 +16,7 @@
# limitations under the License.
#

-application.services=org.apache.hadoop.metadata.services.TitanGraphService,\
-                org.apache.hadoop.metadata.services.GraphBackedMetadataRepositoryService
-
-# Graph implementation
-#metadata.graph.blueprints.graph=com.thinkaurelius.titan.core.TitanFactory
-
-# Graph Storage
-metadata.graph.storage.backend=berkeleyje
-metadata.graph.storage.directory=target/data/berkeley
-
-# Graph Search Index
-metadata.graph.index.search.backend=elasticsearch
-metadata.graph.index.search.directory=target/data/es
-metadata.graph.index.search.elasticsearch.client-only=false
-metadata.graph.index.search.elasticsearch.local-mode=true
+# GraphService implementation
+metadata.graph.impl.class=org.apache.hadoop.metadata.services.TitanGraphService

metadata.enableTLS=false
-
-storage.backend=inmemory
-
-# Graph Search Index
-index.search.backend=elasticsearch
-index.search.directory=target/data/es
-index.search.elasticsearch.client-only=false
-index.search.elasticsearch.local-mode=true
\ No newline at end of file
@@ -22,6 +22,7 @@ package org.apache.hadoop.metadata;
import org.apache.hadoop.metadata.storage.IRepository;
import org.apache.hadoop.metadata.types.TypeSystem;

+// TODO get rid of this class in favor of Dependency injection
public class MetadataService {

    final IRepository repo;
......
@@ -127,6 +127,21 @@
            <groupId>org.testng</groupId>
            <artifactId>testng</artifactId>
        </dependency>
+        <dependency>
+            <groupId>com.google.inject.extensions</groupId>
+            <artifactId>guice-servlet</artifactId>
+            <version>3.0</version>
+        </dependency>
+        <dependency>
+            <groupId>com.google.code.gson</groupId>
+            <artifactId>gson</artifactId>
+            <version>2.3.1</version>
+        </dependency>
+        <dependency>
+            <groupId>com.sun.jersey.contribs</groupId>
+            <artifactId>jersey-guice</artifactId>
+            <version>1.18.3</version>
+        </dependency>
    </dependencies>

    <build>
@@ -196,15 +211,15 @@
                <artifactId>keytool-maven-plugin</artifactId>
                <executions>
                    <execution>
-                        <id>clean</id>
                        <phase>generate-resources</phase>
+                        <id>clean-server</id>
                        <goals>
                            <goal>clean</goal>
                        </goals>
                    </execution>
                    <execution>
-                        <id>genkey</id>
                        <phase>generate-resources</phase>
+                        <id>server</id>
                        <goals>
                            <goal>generateKeyPair</goal>
                        </goals>
@@ -241,6 +256,7 @@
                    </connector>
                </connectors>
                <webApp>${project.build.directory}/metadata-webapp-${project.version}</webApp>
+                <contextPath>/</contextPath>
                <useTestClasspath>true</useTestClasspath>
                <systemProperties>
                    <systemProperty>
......
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.metadata.web.listeners;
import org.apache.commons.configuration.ConfigurationException;
import org.apache.commons.configuration.PropertiesConfiguration;
import org.apache.hadoop.metadata.MetadataException;
import org.apache.hadoop.metadata.service.ServiceInitializer;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import javax.servlet.ServletContextEvent;
import javax.servlet.ServletContextListener;
/**
* Listener for bootstrapping Services and configuration properties.
*/
public class ApplicationStartupListener implements ServletContextListener {
private static final Logger LOG = LoggerFactory.getLogger(ApplicationStartupListener.class);
private final ServiceInitializer startupServices = new ServiceInitializer();
@Override
public void contextInitialized(ServletContextEvent sce) {
try {
startupServices.initialize();
showStartupInfo();
} catch (MetadataException e) {
throw new RuntimeException("Error starting services", e);
}
}
private void showStartupInfo() {
StringBuilder buffer = new StringBuilder();
buffer.append("\n############################################");
buffer.append("\n Metadata Server (STARTED) ");
buffer.append("\n############################################");
try {
PropertiesConfiguration configuration = new PropertiesConfiguration("application.properties");
buffer.append(configuration.toString());
} catch (ConfigurationException e) {
buffer.append("*** Unable to get build info ***").append(e.getMessage());
}
LOG.info(buffer.toString());
}
@Override
public void contextDestroyed(ServletContextEvent sce) {
try {
startupServices.destroy();
} catch (MetadataException e) {
LOG.warn("Error destroying services", e);
}
StringBuilder buffer = new StringBuilder();
buffer.append("\n############################################");
buffer.append("\n Metadata Server (SHUTDOWN) ");
buffer.append("\n############################################");
LOG.info(buffer.toString());
}
}
\ No newline at end of file
package org.apache.hadoop.metadata.web.listeners;
import java.util.HashMap;
import java.util.Map;
import org.apache.hadoop.metadata.RepositoryMetadataModule;
import org.apache.hadoop.metadata.services.GraphService;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import com.google.inject.Guice;
import com.google.inject.Injector;
import com.google.inject.servlet.GuiceServletContextListener;
import com.sun.jersey.api.core.PackagesResourceConfig;
import com.sun.jersey.guice.JerseyServletModule;
import com.sun.jersey.guice.spi.container.servlet.GuiceContainer;
public class GuiceServletConfig extends GuiceServletContextListener {
private static final Logger LOG = LoggerFactory
.getLogger(GuiceServletConfig.class);
private static final String GUICE_CTX_PARAM = "guice.packages";
@Override
protected Injector getInjector() {
LOG.info("Loading Guice modules");
/*
* More information on this can be found here:
* https://jersey.java.net/nonav/apidocs/1.11/contribs/jersey-guice/com/sun/jersey/guice/spi/container/servlet/package-summary.html
*/
Injector injector = Guice.createInjector(
new RepositoryMetadataModule(),
new JerseyServletModule() {
@Override
protected void configureServlets() {
String packages = getServletContext().getInitParameter(GUICE_CTX_PARAM);
LOG.info("Jersey loading from packages: " + packages);
Map<String, String> params = new HashMap<String, String>();
params.put(PackagesResourceConfig.PROPERTY_PACKAGES, packages);
serve("/api/metadata/*").with(GuiceContainer.class, params);
}
});
LOG.info("Guice modules loaded");
LOG.info("Bootstrapping services");
// get the Graph Service
GraphService graphService = injector.getInstance(GraphService.class);
try {
// start/init the service
graphService.start();
} catch (Exception e) {
throw new RuntimeException(e);
}
LOG.info(String.format("Loaded Service: %s", graphService.getClass().getName()));
LOG.info("Services bootstrapped successfully");
return injector;
}
}
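For reference, the injector wiring above can be exercised outside the servlet container. A minimal sketch follows, assuming RepositoryMetadataModule binds GraphService exactly as GuiceServletConfig relies on; the smoke-test class itself is hypothetical and is not part of this commit.

import com.google.inject.Guice;
import com.google.inject.Injector;

import org.apache.hadoop.metadata.RepositoryMetadataModule;
import org.apache.hadoop.metadata.services.GraphService;

// Hypothetical smoke test: performs the same binding lookup and bootstrap
// call as GuiceServletConfig#getInjector(), but without Jersey or a servlet context.
public class InjectorSmokeTest {

    public static void main(String[] args) throws Exception {
        Injector injector = Guice.createInjector(new RepositoryMetadataModule());

        // Same lookup the listener does before starting the service.
        GraphService graphService = injector.getInstance(GraphService.class);
        graphService.start();

        System.out.println("Bound GraphService: " + graphService.getClass().getName());
    }
}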
...@@ -23,6 +23,7 @@ import org.apache.hadoop.metadata.web.util.Servlets; ...@@ -23,6 +23,7 @@ import org.apache.hadoop.metadata.web.util.Servlets;
import org.codehaus.jettison.json.JSONException; import org.codehaus.jettison.json.JSONException;
import org.codehaus.jettison.json.JSONObject; import org.codehaus.jettison.json.JSONObject;
import javax.inject.Singleton;
import javax.ws.rs.GET; import javax.ws.rs.GET;
import javax.ws.rs.Path; import javax.ws.rs.Path;
import javax.ws.rs.Produces; import javax.ws.rs.Produces;
...@@ -34,6 +35,7 @@ import javax.ws.rs.core.Response; ...@@ -34,6 +35,7 @@ import javax.ws.rs.core.Response;
* Jersey Resource for admin operations. * Jersey Resource for admin operations.
*/ */
@Path("admin") @Path("admin")
@Singleton
public class AdminResource { public class AdminResource {
@GET @GET
......
...@@ -18,19 +18,11 @@ ...@@ -18,19 +18,11 @@
package org.apache.hadoop.metadata.web.resources; package org.apache.hadoop.metadata.web.resources;
import com.google.common.base.Preconditions; import java.io.IOException;
import org.apache.commons.io.IOUtils; import java.io.StringWriter;
import org.apache.hadoop.metadata.MetadataException;
import org.apache.hadoop.metadata.service.Services;
import org.apache.hadoop.metadata.services.DefaultMetadataService;
import org.apache.hadoop.metadata.services.MetadataService;
import org.apache.hadoop.metadata.web.util.Servlets;
import org.codehaus.jettison.json.JSONObject;
import org.json.simple.JSONValue;
import org.json.simple.parser.ParseException;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import javax.inject.Inject;
import javax.inject.Singleton;
import javax.servlet.http.HttpServletRequest; import javax.servlet.http.HttpServletRequest;
import javax.ws.rs.Consumes; import javax.ws.rs.Consumes;
import javax.ws.rs.DELETE; import javax.ws.rs.DELETE;
...@@ -45,8 +37,18 @@ import javax.ws.rs.WebApplicationException; ...@@ -45,8 +37,18 @@ import javax.ws.rs.WebApplicationException;
import javax.ws.rs.core.Context; import javax.ws.rs.core.Context;
import javax.ws.rs.core.MediaType; import javax.ws.rs.core.MediaType;
import javax.ws.rs.core.Response; import javax.ws.rs.core.Response;
import java.io.IOException;
import java.io.StringWriter; import org.apache.commons.io.IOUtils;
import org.apache.hadoop.metadata.MetadataException;
import org.apache.hadoop.metadata.services.MetadataService;
import org.apache.hadoop.metadata.web.util.Servlets;
import org.codehaus.jettison.json.JSONObject;
import org.json.simple.JSONValue;
import org.json.simple.parser.ParseException;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import com.google.common.base.Preconditions;
/** /**
* Entity management operations as REST API. * Entity management operations as REST API.
...@@ -55,17 +57,22 @@ import java.io.StringWriter; ...@@ -55,17 +57,22 @@ import java.io.StringWriter;
* of the Type they correspond with. * of the Type they correspond with.
*/ */
@Path("entities") @Path("entities")
@Singleton
public class EntityResource { public class EntityResource {
private static final Logger LOG = LoggerFactory.getLogger(EntityResource.class); private static final Logger LOG = LoggerFactory.getLogger(EntityResource.class);
private MetadataService metadataService; private final MetadataService metadataService;
public EntityResource() { /**
metadataService = Services.get().getService(DefaultMetadataService.NAME); * Created by the Guice ServletModule and injected with the
if (metadataService == null) { * configured MetadataService.
throw new RuntimeException("graph service is not initialized"); *
} * @param metadataService
*/
@Inject
public EntityResource(MetadataService metadataService) {
this.metadataService = metadataService;
} }
@POST @POST
......
...@@ -18,6 +18,7 @@ ...@@ -18,6 +18,7 @@
package org.apache.hadoop.metadata.web.resources; package org.apache.hadoop.metadata.web.resources;
import javax.inject.Singleton;
import javax.ws.rs.Path; import javax.ws.rs.Path;
/** /**
...@@ -30,5 +31,6 @@ import javax.ws.rs.Path; ...@@ -30,5 +31,6 @@ import javax.ws.rs.Path;
* 'search': find entities generated by Hive processes or that were generated by Sqoop, etc. * 'search': find entities generated by Hive processes or that were generated by Sqoop, etc.
*/ */
@Path("discovery") @Path("discovery")
@Singleton
public class MetadataDiscoveryResource { public class MetadataDiscoveryResource {
} }
...@@ -18,25 +18,12 @@ ...@@ -18,25 +18,12 @@
package org.apache.hadoop.metadata.web.resources; package org.apache.hadoop.metadata.web.resources;
import com.tinkerpop.blueprints.Direction; import java.util.HashMap;
import com.tinkerpop.blueprints.Edge; import java.util.Map;
import com.tinkerpop.blueprints.Element; import java.util.Set;
import com.tinkerpop.blueprints.Graph;
import com.tinkerpop.blueprints.Vertex;
import com.tinkerpop.blueprints.VertexQuery;
import com.tinkerpop.blueprints.util.io.graphson.GraphSONMode;
import com.tinkerpop.blueprints.util.io.graphson.GraphSONUtility;
import org.apache.commons.lang.StringUtils;
import org.apache.hadoop.metadata.service.Services;
import org.apache.hadoop.metadata.services.GraphService;
import org.apache.hadoop.metadata.services.TitanGraphService;
import org.apache.hadoop.metadata.web.util.Servlets;
import org.codehaus.jettison.json.JSONArray;
import org.codehaus.jettison.json.JSONException;
import org.codehaus.jettison.json.JSONObject;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import javax.inject.Inject;
import javax.inject.Singleton;
import javax.ws.rs.DefaultValue; import javax.ws.rs.DefaultValue;
import javax.ws.rs.GET; import javax.ws.rs.GET;
import javax.ws.rs.Path; import javax.ws.rs.Path;
...@@ -46,9 +33,24 @@ import javax.ws.rs.QueryParam; ...@@ -46,9 +33,24 @@ import javax.ws.rs.QueryParam;
import javax.ws.rs.WebApplicationException; import javax.ws.rs.WebApplicationException;
import javax.ws.rs.core.MediaType; import javax.ws.rs.core.MediaType;
import javax.ws.rs.core.Response; import javax.ws.rs.core.Response;
import java.util.HashMap;
import java.util.Map; import org.apache.commons.lang.StringUtils;
import java.util.Set; import org.apache.hadoop.metadata.services.GraphService;
import org.apache.hadoop.metadata.web.util.Servlets;
import org.codehaus.jettison.json.JSONArray;
import org.codehaus.jettison.json.JSONException;
import org.codehaus.jettison.json.JSONObject;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import com.tinkerpop.blueprints.Direction;
import com.tinkerpop.blueprints.Edge;
import com.tinkerpop.blueprints.Element;
import com.tinkerpop.blueprints.Graph;
import com.tinkerpop.blueprints.Vertex;
import com.tinkerpop.blueprints.VertexQuery;
import com.tinkerpop.blueprints.util.io.graphson.GraphSONMode;
import com.tinkerpop.blueprints.util.io.graphson.GraphSONUtility;
/** /**
* Jersey Resource for lineage metadata operations. * Jersey Resource for lineage metadata operations.
...@@ -59,22 +61,25 @@ import java.util.Set; ...@@ -59,22 +61,25 @@ import java.util.Set;
* for accessing the backend graph. * for accessing the backend graph.
*/ */
@Path("graph") @Path("graph")
@Singleton
public class RexsterGraphResource { public class RexsterGraphResource {
private static final Logger LOG = LoggerFactory.getLogger(RexsterGraphResource.class); private static final Logger LOG = LoggerFactory.getLogger(RexsterGraphResource.class);
public static final String RESULTS = "results"; public static final String RESULTS = "results";
public static final String TOTAL_SIZE = "totalSize"; public static final String TOTAL_SIZE = "totalSize";
private GraphService graphService; private final GraphService graphService;
public RexsterGraphResource() { @Inject
graphService = Services.get().getService(TitanGraphService.NAME); public RexsterGraphResource(GraphService graphService) {
this.graphService = graphService;
/*graphService = Services.get().getService(TitanGraphService.NAME);
if (graphService == null) { if (graphService == null) {
throw new WebApplicationException(Response throw new WebApplicationException(Response
.status(Response.Status.INTERNAL_SERVER_ERROR) .status(Response.Status.INTERNAL_SERVER_ERROR)
.tag("graph service is not initialized") .tag("graph service is not initialized")
.build()); .build());
} }*/
} }
protected Graph getGraph() { protected Graph getGraph() {
......
...@@ -18,6 +18,7 @@ ...@@ -18,6 +18,7 @@
package org.apache.hadoop.metadata.web.resources; package org.apache.hadoop.metadata.web.resources;
import javax.inject.Singleton;
import javax.servlet.http.HttpServletRequest; import javax.servlet.http.HttpServletRequest;
import javax.ws.rs.*; import javax.ws.rs.*;
import javax.ws.rs.core.Context; import javax.ws.rs.core.Context;
...@@ -31,6 +32,7 @@ import javax.ws.rs.core.Response; ...@@ -31,6 +32,7 @@ import javax.ws.rs.core.Response;
* e.g. a Hive table * e.g. a Hive table
*/ */
@Path("types") @Path("types")
@Singleton
public class TypesResource { public class TypesResource {
@POST @POST
......
<?xml version="1.0" encoding="UTF-8"?> <?xml version="1.0" encoding="UTF-8"?>
<!-- <!-- ~ Licensed to the Apache Software Foundation (ASF) under one ~ or more
~ Licensed to the Apache Software Foundation (ASF) under one contributor license agreements. See the NOTICE file ~ distributed with this
~ or more contributor license agreements. See the NOTICE file work for additional information ~ regarding copyright ownership. The ASF
~ distributed with this work for additional information licenses this file ~ to you under the Apache License, Version 2.0 (the ~
~ regarding copyright ownership. The ASF licenses this file "License"); you may not use this file except in compliance ~ with the License.
~ to you under the Apache License, Version 2.0 (the You may obtain a copy of the License at ~ ~ http://www.apache.org/licenses/LICENSE-2.0
~ "License"); you may not use this file except in compliance ~ ~ Unless required by applicable law or agreed to in writing, software ~
~ with the License. You may obtain a copy of the License at distributed under the License is distributed on an "AS IS" BASIS, ~ WITHOUT
~ WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. ~ See the
~ http://www.apache.org/licenses/LICENSE-2.0 License for the specific language governing permissions and ~ limitations
~ under the License. -->
~ Unless required by applicable law or agreed to in writing, software
~ distributed under the License is distributed on an "AS IS" BASIS,
~ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
~ See the License for the specific language governing permissions and
~ limitations under the License.
-->
<!DOCTYPE web-app PUBLIC "-//Sun Microsystems, Inc.//DTD Web Application 2.3//EN" <!DOCTYPE web-app PUBLIC "-//Sun Microsystems, Inc.//DTD Web Application 2.3//EN"
"http://java.sun.com/dtd/web-app_2_3.dtd"> "http://java.sun.com/dtd/web-app_2_3.dtd">
<web-app> <web-app>
<display-name>Apache Metadata Placeholder</display-name>
<description>Apache Metadata Placeholder</description>
<display-name>Apache Falcon Placeholder</display-name> <context-param>
<description>Apache Falcon Placeholder</description> <param-name>guice.packages</param-name>
<param-value>org.apache.hadoop.metadata.web.resources,org.apache.hadoop.metadata.web.params</param-value>
</context-param>
<filter> <!--
<filter-name>audit</filter-name> More information can be found here:
<filter-class>org.apache.hadoop.metadata.web.filters.AuditFilter</filter-class>
</filter> https://jersey.java.net/nonav/apidocs/1.11/contribs/jersey-guice/com/sun/jersey/guice/spi/container/servlet/package-summary.html
-->
<filter> <filter>
<filter-name>authentication</filter-name> <filter-name>guiceFilter</filter-name>
<filter-class>org.apache.hadoop.metadata.web.filters.AuthenticationFilter</filter-class> <filter-class>com.google.inject.servlet.GuiceFilter</filter-class>
</filter> </filter>
<filter-mapping> <filter-mapping>
<filter-name>audit</filter-name> <filter-name>guiceFilter</filter-name>
<servlet-name>MetadataRESTApi</servlet-name> <url-pattern>/*</url-pattern>
</filter-mapping>
<filter-mapping>
<filter-name>authentication</filter-name>
<servlet-name>MetadataRESTApi</servlet-name>
</filter-mapping> </filter-mapping>
<listener> <listener>
<listener-class>org.apache.hadoop.metadata.web.listeners.ApplicationStartupListener</listener-class> <listener-class>org.apache.hadoop.metadata.web.listeners.ApplicationStartupListener</listener-class>
</listener> </listener>
<listener>
<servlet> <listener-class>org.apache.hadoop.metadata.web.listeners.GuiceServletConfig</listener-class>
<servlet-name>MetadataRESTApi</servlet-name> </listener>
<servlet-class>com.sun.jersey.spi.container.servlet.ServletContainer</servlet-class>
<init-param>
<param-name>com.sun.jersey.config.property.resourceConfigClass</param-name>
<param-value>com.sun.jersey.api.core.PackagesResourceConfig</param-value>
</init-param>
<init-param>
<param-name>com.sun.jersey.config.property.packages</param-name>
<param-value>
org.apache.hadoop.metadata.web.resources,org.apache.hadoop.metadata.web.params
</param-value>
</init-param>
<load-on-startup>1</load-on-startup>
</servlet>
<servlet-mapping>
<servlet-name>MetadataRESTApi</servlet-name>
<url-pattern>/api/metadata/*</url-pattern>
</servlet-mapping>
</web-app> </web-app>
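With GuiceFilter mapped to /* and the GuiceServletConfig listener serving /api/metadata/* through the GuiceContainer, clients reach the resources at the same URLs as before. A minimal sketch with the Jersey 1.x client is below; the base URL and JSON payload are placeholders, and the entities/submit path is the one exercised by EntityJerseyResourceIT later in this diff.

import javax.ws.rs.HttpMethod;
import javax.ws.rs.core.MediaType;

import com.sun.jersey.api.client.Client;
import com.sun.jersey.api.client.ClientResponse;
import com.sun.jersey.api.client.WebResource;

// Hypothetical client call against the Guice-served API; host and port are placeholders.
public class SubmitEntityExample {

    public static void main(String[] args) {
        Client client = Client.create();
        WebResource service = client.resource("http://localhost:8080/");

        String entityStream = "{\"entityName\": \"clicks-table\", \"entityType\": \"hive-table\"}";

        ClientResponse clientResponse = service
                .path("api/metadata/entities/submit")
                .accept(MediaType.APPLICATION_JSON)
                .type(MediaType.APPLICATION_JSON)
                .method(HttpMethod.POST, ClientResponse.class, entityStream);

        System.out.println("HTTP status: " + clientResponse.getStatus());
        System.out.println("Body: " + clientResponse.getEntity(String.class));
    }
}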
package org.apache.hadoop.metadata;
import org.apache.hadoop.metadata.service.Services;
import org.apache.hadoop.metadata.services.GraphBackedMetadataRepository;
import org.apache.hadoop.metadata.services.TitanGraphService;
import org.json.simple.JSONValue;
import org.testng.Assert;
import org.testng.annotations.AfterClass;
import org.testng.annotations.BeforeClass;
import org.testng.annotations.Test;
import java.util.HashMap;
import java.util.Map;
public class GraphRepositoryServiceIT {
private static final String ENTITY_NAME = "clicks-table";
private static final String ENTITY_TYPE = "hive-table";
private static final String DATABASE_NAME = "ads";
private static final String TABLE_NAME = "clicks-table";
@BeforeClass
public void setUp() throws Exception {
TitanGraphService titanGraphService = new TitanGraphService();
titanGraphService.start();
Services.get().register(titanGraphService);
GraphBackedMetadataRepository repositoryService
= new GraphBackedMetadataRepository();
repositoryService.start();
Services.get().register(repositoryService);
}
@AfterClass
public void tearDown() throws Exception {
Services.get().getService(GraphBackedMetadataRepository.NAME).close();
Services.get().getService(TitanGraphService.NAME).close();
Services.get().reset();
}
/*
@Test
public void testRepository() throws Exception {
GraphBackedMetadataRepositoryService repositoryService =
Services.get().getService(GraphBackedMetadataRepositoryService.NAME);
String entityStream = getTestEntityJSON();
String guid = repositoryService.createEntity(entityStream, ENTITY_TYPE);
Assert.assertNotNull(guid);
String entity = repositoryService.getEntityDefinition(ENTITY_NAME, ENTITY_TYPE);
@SuppressWarnings("unchecked")
Map<String, String> entityProperties =
(Map<String, String>) JSONValue.parseWithException(entity);
Assert.assertEquals(entityProperties.get("guid"), guid);
Assert.assertEquals(entityProperties.get("entityName"), ENTITY_NAME);
Assert.assertEquals(entityProperties.get("entityType"), ENTITY_TYPE);
Assert.assertEquals(entityProperties.get("database"), DATABASE_NAME);
Assert.assertEquals(entityProperties.get("table"), TABLE_NAME);
}
private String getTestEntityJSON() {
Map<String, String> props = new HashMap<>();
props.put("entityName", ENTITY_NAME);
props.put("entityType", ENTITY_TYPE);
props.put("database", DATABASE_NAME);
props.put("table", TABLE_NAME);
return JSONValue.toJSONString(props);
}
*/
}
package org.apache.hadoop.metadata;
import com.tinkerpop.blueprints.GraphQuery;
import com.tinkerpop.blueprints.TransactionalGraph;
import com.tinkerpop.blueprints.Vertex;
import org.apache.hadoop.metadata.services.TitanGraphService;
import org.apache.hadoop.metadata.util.GraphUtils;
import org.testng.annotations.Test;
import java.util.Iterator;
import java.util.UUID;
/**
* End to end graph put/get test.
*/
public class TitanGraphServiceIT {
@Test
public void testTitanGraph() throws Exception {
TitanGraphService titanGraphService = new TitanGraphService();
titanGraphService.start();
try {
String guid = UUID.randomUUID().toString();
final TransactionalGraph graph = titanGraphService.getTransactionalGraph();
System.out.println("graph = " + graph);
System.out.println("graph.getVertices() = " + graph.getVertices());
Vertex entityVertex = null;
try {
graph.rollback();
entityVertex = graph.addVertex(null);
entityVertex.setProperty("guid", guid);
entityVertex.setProperty("entityName", "entityName");
entityVertex.setProperty("entityType", "entityType");
} catch (Exception e) {
graph.rollback();
e.printStackTrace();
} finally {
graph.commit();
}
System.out.println("vertex = " + GraphUtils.vertexString(entityVertex));
GraphQuery query = graph.query()
.has("entityName", "entityName")
.has("entityType", "entityType");
Iterator<Vertex> results = query.vertices().iterator();
if (results.hasNext()) {
Vertex vertexFromQuery = results.next();
System.out.println("vertex = " + GraphUtils.vertexString(vertexFromQuery));
}
} finally {
Thread.sleep(1000);
titanGraphService.stop();
}
}
}
...@@ -18,18 +18,22 @@ ...@@ -18,18 +18,22 @@
package org.apache.hadoop.metadata.web.resources; package org.apache.hadoop.metadata.web.resources;
import com.sun.jersey.api.client.ClientResponse; import java.util.HashMap;
import com.sun.jersey.api.client.WebResource; import java.util.Map;
import org.json.simple.JSONValue; import java.util.UUID;
import org.testng.Assert;
import org.testng.annotations.Test;
import javax.ws.rs.HttpMethod; import javax.ws.rs.HttpMethod;
import javax.ws.rs.core.MediaType; import javax.ws.rs.core.MediaType;
import javax.ws.rs.core.Response; import javax.ws.rs.core.Response;
import java.util.HashMap;
import java.util.Map; import org.json.simple.JSONValue;
import java.util.UUID; import org.testng.Assert;
import org.testng.annotations.Test;
import com.google.gson.JsonElement;
import com.google.gson.JsonParser;
import com.sun.jersey.api.client.ClientResponse;
import com.sun.jersey.api.client.WebResource;
/** /**
* Integration tests for Entity Jersey Resource. * Integration tests for Entity Jersey Resource.
...@@ -44,6 +48,7 @@ public class EntityJerseyResourceIT extends BaseResourceIT { ...@@ -44,6 +48,7 @@ public class EntityJerseyResourceIT extends BaseResourceIT {
@Test @Test
public void testSubmitEntity() { public void testSubmitEntity() {
String entityStream = getTestEntityJSON(); String entityStream = getTestEntityJSON();
JsonParser parser = new JsonParser();
WebResource resource = service WebResource resource = service
.path("api/metadata/entities/submit") .path("api/metadata/entities/submit")
...@@ -54,11 +59,15 @@ public class EntityJerseyResourceIT extends BaseResourceIT { ...@@ -54,11 +59,15 @@ public class EntityJerseyResourceIT extends BaseResourceIT {
.type(MediaType.APPLICATION_JSON) .type(MediaType.APPLICATION_JSON)
.method(HttpMethod.POST, ClientResponse.class, entityStream); .method(HttpMethod.POST, ClientResponse.class, entityStream);
Assert.assertEquals(clientResponse.getStatus(), Response.Status.OK.getStatusCode()); Assert.assertEquals(clientResponse.getStatus(), Response.Status.OK.getStatusCode());
String response = clientResponse.getEntity(String.class); String response = clientResponse.getEntity(String.class);
Assert.assertNotNull(response); Assert.assertNotNull(response);
JsonElement elem = parser.parse(response);
String guid = elem.getAsJsonObject().get("GUID").getAsString();
try { try {
Assert.assertNotNull(UUID.fromString(response)); Assert.assertNotNull(UUID.fromString(guid));
} catch (IllegalArgumentException e) { } catch (IllegalArgumentException e) {
Assert.fail("Response is not a guid, " + response); Assert.fail("Response is not a guid, " + response);
} }
......
...@@ -16,40 +16,7 @@ ...@@ -16,40 +16,7 @@
# limitations under the License. # limitations under the License.
# #
application.services=org.apache.hadoop.metadata.services.TitanGraphService,\ # GraphService implementation
org.apache.hadoop.metadata.services.GraphBackedMetadataRepositoryService,\ metadata.graph.impl.class=org.apache.hadoop.metadata.services.TitanGraphService
org.apache.hadoop.metadata.services.DefaultMetadataService
######### Implementation classes #########
## DO NOT MODIFY UNLESS SURE ABOUT CHANGE ##
metadata.GraphService.impl=org.apache.hadoop.metadata.services.TitanGraphService
metadata.MetadataRepositoryService.impl=org.apache.hadoop.metadata.services.GraphBackedMetadataRepositoryService
######### Implementation classes #########
######### Graph Database Configs #########
# Graph implementation
#metadata.graph.blueprints.graph=com.thinkaurelius.titan.core.TitanFactory
# Graph Storage
metadata.graph.storage.backend=berkeleyje
metadata.graph.storage.directory=target/data/berkeley
# Graph Search Index
metadata.graph.index.search.backend=elasticsearch
metadata.graph.index.search.directory=target/data/es
metadata.graph.index.search.elasticsearch.client-only=false
metadata.graph.index.search.elasticsearch.local-mode=true
######### Graph Database Configs #########
######### Security Properties #########
# SSL config
metadata.enableTLS=false metadata.enableTLS=false
######### Security Properties #########
storage.backend=inmemory
# Graph Search Index
index.search.backend=elasticsearch
index.search.directory=target/data/es
index.search.elasticsearch.client-only=false
index.search.elasticsearch.local-mode=true
\ No newline at end of file
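The trimmed application.properties above leaves a single implementation key, metadata.graph.impl.class. A minimal sketch of resolving such a key with commons-configuration and reflection is shown below; the resolver class is hypothetical, since in this commit the actual binding is done by RepositoryMetadataModule and Guice rather than by any snippet like this.

import org.apache.commons.configuration.ConfigurationException;
import org.apache.commons.configuration.PropertiesConfiguration;

import org.apache.hadoop.metadata.services.GraphService;

// Hypothetical illustration of how metadata.graph.impl.class could be resolved;
// the real wiring is performed by the Guice module, not by this code.
public class GraphServiceClassResolver {

    public static Class<? extends GraphService> resolve()
            throws ConfigurationException, ClassNotFoundException {
        PropertiesConfiguration configuration = new PropertiesConfiguration("application.properties");
        String className = configuration.getString("metadata.graph.impl.class",
                "org.apache.hadoop.metadata.services.TitanGraphService");
        return Class.forName(className).asSubclass(GraphService.class);
    }

    public static void main(String[] args) throws Exception {
        System.out.println("GraphService implementation: " + resolve().getName());
    }
}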
#
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# This is formatted as follows:
# metadata.indexer.vertex.property.name.<index>=<Property Name>
# metadata.indexer.vertex.property.type.<index>=<Data Type>
metadata.indexer.vertex.property.name.0=DESC
metadata.indexer.vertex.property.type.0=String
metadata.indexer.vertex.property.name.1=DB_LOCATION_URI
metadata.indexer.vertex.property.type.1=String
metadata.indexer.vertex.property.name.2=NAME
metadata.indexer.vertex.property.type.2=String
metadata.indexer.vertex.property.name.3=OWNER_NAME
metadata.indexer.vertex.property.type.3=String
metadata.indexer.vertex.property.name.4=TBL_NAME
metadata.indexer.vertex.property.type.4=String
metadata.indexer.vertex.property.name.5=COMMENT
metadata.indexer.vertex.property.type.5=String
metadata.indexer.vertex.property.name.6=COLUMN_NAME
metadata.indexer.vertex.property.type.6=String
metadata.indexer.vertex.property.name.7=TYPE_NAME
metadata.indexer.vertex.property.type.7=String
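The indexer properties follow the name/type pairing described in the file's own comment (metadata.indexer.vertex.property.name.<index> and .type.<index>). A minimal sketch of reading those pairs back is shown below; the reader class and the properties filename are hypothetical and only illustrate the key layout.

import org.apache.commons.configuration.ConfigurationException;
import org.apache.commons.configuration.PropertiesConfiguration;

// Hypothetical reader for the metadata.indexer.vertex.property.* layout shown above.
public class IndexedPropertiesReader {

    public static void main(String[] args) throws ConfigurationException {
        // Filename is an assumption; use whatever properties file carries these keys.
        PropertiesConfiguration configuration = new PropertiesConfiguration("indexer.properties");

        for (int index = 0; ; index++) {
            String name = configuration.getString("metadata.indexer.vertex.property.name." + index);
            String type = configuration.getString("metadata.indexer.vertex.property.type." + index);
            if (name == null || type == null) {
                break; // indices are contiguous, so the first gap ends the list
            }
            System.out.println(name + " -> " + type);
        }
    }
}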