Commit c1af1456 by Dan Markwat

Guice-enabled project. Everything compiles and the unit tests pass. There is trouble

with the integration tests (specifically the metadata.keystore file & keystore plugin), but the integration tests are otherwise loading Guice dependency-injected instances correctly.
parent e0423116
<?xml version="1.0" encoding="UTF-8"?>
<!--
~ Licensed to the Apache Software Foundation (ASF) under one
~ or more contributor license agreements. See the NOTICE file
~ distributed with this work for additional information
~ regarding copyright ownership. The ASF licenses this file
~ to you under the Apache License, Version 2.0 (the
~ "License"); you may not use this file except in compliance
~ with the License. You may obtain a copy of the License at
~
~ http://www.apache.org/licenses/LICENSE-2.0
~
~ Unless required by applicable law or agreed to in writing, software
~ distributed under the License is distributed on an "AS IS" BASIS,
~ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
~ See the License for the specific language governing permissions and
~ limitations under the License.
-->
<project xmlns="http://maven.apache.org/POM/4.0.0"
xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/maven-v4_0_0.xsd">
<modelVersion>4.0.0</modelVersion>
<parent>
<groupId>org.apache.hadoop.metadata</groupId>
<artifactId>metadata-governance</artifactId>
<version>0.1-incubating-SNAPSHOT</version>
</parent>
<artifactId>metadata-repository</artifactId>
<description>Apache Metadata Repository Module</description>
<name>Apache Metadata Repository</name>
<packaging>jar</packaging>
<profiles>
<profile>
<id>hadoop-2</id>
<activation>
<activeByDefault>true</activeByDefault>
</activation>
<dependencies>
<dependency>
<groupId>org.apache.hadoop</groupId>
<artifactId>hadoop-client</artifactId>
</dependency>
<dependency>
<groupId>org.apache.hadoop</groupId>
<artifactId>hadoop-hdfs</artifactId>
</dependency>
<dependency>
<groupId>org.apache.hadoop</groupId>
<artifactId>hadoop-hdfs</artifactId>
<classifier>tests</classifier>
</dependency>
<dependency>
<groupId>org.apache.hadoop</groupId>
<artifactId>hadoop-common</artifactId>
<classifier>tests</classifier>
</dependency>
<dependency>
<groupId>org.apache.hadoop</groupId>
<artifactId>hadoop-common</artifactId>
</dependency>
</dependencies>
</profile>
</profiles>
<dependencies>
<dependency>
<groupId>org.apache.hadoop.metadata</groupId>
<artifactId>metadata-common</artifactId>
</dependency>
<dependency>
<groupId>org.slf4j</groupId>
<artifactId>slf4j-log4j12</artifactId>
</dependency>
<dependency>
<groupId>org.slf4j</groupId>
<artifactId>slf4j-api</artifactId>
</dependency>
<dependency>
<groupId>com.google.inject</groupId>
<artifactId>guice</artifactId>
</dependency>
<dependency>
<groupId>org.codehaus.jettison</groupId>
<artifactId>jettison</artifactId>
</dependency>
<dependency>
<groupId>com.googlecode.json-simple</groupId>
<artifactId>json-simple</artifactId>
</dependency>
<dependency>
<groupId>com.tinkerpop.blueprints</groupId>
<artifactId>blueprints-core</artifactId>
</dependency>
<dependency>
<groupId>com.thinkaurelius.titan</groupId>
<artifactId>titan-core</artifactId>
</dependency>
<dependency>
<groupId>com.thinkaurelius.titan</groupId>
<artifactId>titan-berkeleyje</artifactId>
</dependency>
<dependency>
<groupId>com.thinkaurelius.titan</groupId>
<artifactId>titan-es</artifactId>
</dependency>
<dependency>
<groupId>org.testng</groupId>
<artifactId>testng</artifactId>
</dependency>
<dependency>
<groupId>org.mockito</groupId>
<artifactId>mockito-all</artifactId>
</dependency>
</dependencies>
<build>
<plugins>
<plugin>
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-compiler-plugin</artifactId>
<configuration>
<source>1.7</source>
<target>1.7</target>
</configuration>
</plugin>
<plugin>
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-jar-plugin</artifactId>
<version>2.4</version>
<configuration>
<excludes>
<exclude>**/log4j.xml</exclude>
</excludes>
</configuration>
</plugin>
</plugins>
</build>
<!-- ~ Licensed to the Apache Software Foundation (ASF) under one ~ or more
contributor license agreements. See the NOTICE file ~ distributed with this
work for additional information ~ regarding copyright ownership. The ASF
licenses this file ~ to you under the Apache License, Version 2.0 (the ~
"License"); you may not use this file except in compliance ~ with the License.
You may obtain a copy of the License at ~ ~ http://www.apache.org/licenses/LICENSE-2.0
~ ~ Unless required by applicable law or agreed to in writing, software ~
distributed under the License is distributed on an "AS IS" BASIS, ~ WITHOUT
WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. ~ See the
License for the specific language governing permissions and ~ limitations
under the License. -->
<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
         xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/maven-v4_0_0.xsd">
    <modelVersion>4.0.0</modelVersion>

    <!-- Versions for unversioned dependencies below are inherited from this parent. -->
    <parent>
        <groupId>org.apache.hadoop.metadata</groupId>
        <artifactId>metadata-governance</artifactId>
        <version>0.1-incubating-SNAPSHOT</version>
    </parent>
    <artifactId>metadata-repository</artifactId>
    <description>Apache Metadata Repository Module</description>
    <name>Apache Metadata Repository</name>
    <packaging>jar</packaging>

    <profiles>
        <!-- Hadoop 2 dependency set; active unless another profile is selected. -->
        <profile>
            <id>hadoop-2</id>
            <activation>
                <activeByDefault>true</activeByDefault>
            </activation>
            <dependencies>
                <dependency>
                    <groupId>org.apache.hadoop</groupId>
                    <artifactId>hadoop-client</artifactId>
                </dependency>
                <dependency>
                    <groupId>org.apache.hadoop</groupId>
                    <artifactId>hadoop-hdfs</artifactId>
                </dependency>
                <!-- test-classified artifacts provide mini-cluster test utilities -->
                <dependency>
                    <groupId>org.apache.hadoop</groupId>
                    <artifactId>hadoop-hdfs</artifactId>
                    <classifier>tests</classifier>
                </dependency>
                <dependency>
                    <groupId>org.apache.hadoop</groupId>
                    <artifactId>hadoop-common</artifactId>
                    <classifier>tests</classifier>
                </dependency>
                <dependency>
                    <groupId>org.apache.hadoop</groupId>
                    <artifactId>hadoop-common</artifactId>
                </dependency>
            </dependencies>
        </profile>
    </profiles>

    <dependencies>
        <dependency>
            <groupId>org.apache.hadoop.metadata</groupId>
            <artifactId>metadata-common</artifactId>
        </dependency>
        <dependency>
            <groupId>org.slf4j</groupId>
            <artifactId>slf4j-log4j12</artifactId>
        </dependency>
        <dependency>
            <groupId>org.slf4j</groupId>
            <artifactId>slf4j-api</artifactId>
        </dependency>
        <!-- Dependency injection -->
        <dependency>
            <groupId>com.google.inject</groupId>
            <artifactId>guice</artifactId>
        </dependency>
        <!-- NOTE: version pinned here rather than in the parent's dependencyManagement -->
        <dependency>
            <groupId>com.google.inject.extensions</groupId>
            <artifactId>guice-throwingproviders</artifactId>
            <version>3.0</version>
        </dependency>
        <dependency>
            <groupId>org.codehaus.jettison</groupId>
            <artifactId>jettison</artifactId>
        </dependency>
        <dependency>
            <groupId>com.googlecode.json-simple</groupId>
            <artifactId>json-simple</artifactId>
        </dependency>
        <!-- Graph database stack: Blueprints API + Titan with BerkeleyJE/Elasticsearch backends -->
        <dependency>
            <groupId>com.tinkerpop.blueprints</groupId>
            <artifactId>blueprints-core</artifactId>
        </dependency>
        <dependency>
            <groupId>com.thinkaurelius.titan</groupId>
            <artifactId>titan-core</artifactId>
        </dependency>
        <dependency>
            <groupId>com.thinkaurelius.titan</groupId>
            <artifactId>titan-berkeleyje</artifactId>
        </dependency>
        <dependency>
            <groupId>com.thinkaurelius.titan</groupId>
            <artifactId>titan-es</artifactId>
        </dependency>
        <dependency>
            <groupId>org.testng</groupId>
            <artifactId>testng</artifactId>
        </dependency>
        <dependency>
            <groupId>org.mockito</groupId>
            <artifactId>mockito-all</artifactId>
        </dependency>
    </dependencies>

    <build>
        <plugins>
            <plugin>
                <groupId>org.apache.maven.plugins</groupId>
                <artifactId>maven-compiler-plugin</artifactId>
                <configuration>
                    <source>1.7</source>
                    <target>1.7</target>
                </configuration>
            </plugin>
            <plugin>
                <groupId>org.apache.maven.plugins</groupId>
                <artifactId>maven-jar-plugin</artifactId>
                <version>2.4</version>
                <configuration>
                    <excludes>
                        <!-- keep the test/dev log4j config out of the shipped jar -->
                        <exclude>**/log4j.xml</exclude>
                    </excludes>
                </configuration>
            </plugin>
        </plugins>
    </build>
</project>
......@@ -24,12 +24,45 @@
*/
package org.apache.hadoop.metadata;
import org.apache.hadoop.metadata.services.GraphBackedMetadataRepositoryService;
import org.apache.hadoop.metadata.services.GraphProvider;
import org.apache.hadoop.metadata.services.GraphService;
import org.apache.hadoop.metadata.services.GraphServiceConfigurator;
import org.apache.hadoop.metadata.services.MetadataRepositoryService;
import org.apache.hadoop.metadata.services.TitanGraphProvider;
import com.google.inject.throwingproviders.ThrowingProviderBinder;
import com.thinkaurelius.titan.core.TitanGraph;
/**
* Guice module for Repository module.
*/
public class RepositoryMetadataModule extends com.google.inject.AbstractModule {
protected void configure() {
// add configuration logic here
}
// Graph Service implementation class
private Class<? extends GraphService> graphServiceClass;
// MetadataRepositoryService implementation class
private Class<? extends MetadataRepositoryService> metadataRepoClass;
public RepositoryMetadataModule() {
GraphServiceConfigurator gsp = new GraphServiceConfigurator();
this.graphServiceClass = gsp.getImplClass();
this.metadataRepoClass = GraphBackedMetadataRepositoryService.class;
}
protected void configure() {
// special wiring for Titan Graph
ThrowingProviderBinder.create(binder())
.bind(GraphProvider.class, TitanGraph.class)
.to(TitanGraphProvider.class);
// allow for dynamic binding of the metadata repo service & graph
// service
// bind the MetadataRepositoryService interface to an implementation
bind(MetadataRepositoryService.class).to(metadataRepoClass);
// bind the GraphService interface to an implementation
bind(GraphService.class).to(graphServiceClass);
}
}
......@@ -22,6 +22,7 @@ import com.google.common.base.Preconditions;
import com.tinkerpop.blueprints.Graph;
import com.tinkerpop.blueprints.TransactionalGraph;
import com.tinkerpop.blueprints.Vertex;
import org.apache.hadoop.metadata.service.Services;
import org.apache.hadoop.metadata.util.GraphUtils;
import org.json.simple.JSONValue;
......@@ -34,6 +35,8 @@ import java.util.List;
import java.util.Map;
import java.util.UUID;
import javax.inject.Inject;
/**
* An implementation backed by Titan Graph DB.
*/
......@@ -44,6 +47,11 @@ public class GraphBackedMetadataRepositoryService implements MetadataRepositoryS
public static final String NAME = GraphBackedMetadataRepositoryService.class.getSimpleName();
private GraphService graphService;
@Inject
GraphBackedMetadataRepositoryService(GraphService service) {
this.graphService = service;
}
/**
* Name of the service.
......@@ -62,11 +70,6 @@ public class GraphBackedMetadataRepositoryService implements MetadataRepositoryS
*/
@Override
public void start() throws Exception {
if (Services.get().isRegistered(TitanGraphService.NAME)) {
graphService = Services.get().getService(TitanGraphService.NAME);
} else {
throw new RuntimeException("graph service is not initialized");
}
}
/**
......
package org.apache.hadoop.metadata.services;
import org.apache.commons.configuration.ConfigurationException;
import com.google.inject.throwingproviders.CheckedProvider;
import com.tinkerpop.blueprints.Graph;
/**
 * A Guice {@link CheckedProvider} for {@link Graph} instances whose creation
 * may fail with a checked {@link ConfigurationException} (e.g. when the
 * backing configuration file cannot be loaded).
 *
 * @param <T> concrete Graph type supplied by this provider
 */
public interface GraphProvider<T extends Graph> extends CheckedProvider<T> {

    /**
     * Supplies the graph instance.
     *
     * @return the graph
     * @throws ConfigurationException if the graph configuration cannot be loaded
     */
    @Override
    T get() throws ConfigurationException;
}
package org.apache.hadoop.metadata.services;
/**
 * Resolves the {@link GraphService} implementation class to use, based on
 * system properties and the "metadata.graph.properties" configuration file.
 */
public class GraphServiceConfigurator extends PropertyBasedConfigurator<GraphService> {

    // Configuration key naming the GraphService implementation class.
    private static final String PROPERTY_NAME = "metadata.graph.impl.class";
    // Fallback class name; NOTE(review): not a real class, so class loading
    // will fail fast if no implementation is configured — confirm intended.
    private static final String DEFAULT_IMPL_CLASS = "no.default.graph.class";
    // Path of the properties file consulted for PROPERTY_NAME.
    private static final String CONFIG_PATH = "metadata.graph.properties";

    public GraphServiceConfigurator() {
        // First three arguments are system-property names that can override the
        // property key, default class, and config path; last three are defaults.
        super("metadata.graph.propertyName", "metadata.graph.defaultImplClass",
                "metadata.graph.configurationPath", PROPERTY_NAME,
                DEFAULT_IMPL_CLASS, CONFIG_PATH);
    }
}
package org.apache.hadoop.metadata.services;
import java.util.Properties;
import org.apache.commons.configuration.Configuration;
import org.apache.commons.configuration.ConfigurationException;
import org.apache.commons.configuration.PropertiesConfiguration;
/**
 * Resolves an implementation class for a service interface.
 *
 * Three settings drive the lookup — the name of the configuration property to
 * read, the default implementation class, and the path of the properties file.
 * Each setting is itself overridable through a system property supplied to the
 * constructor.
 *
 * @param <T> service interface the resolved class must implement
 */
public abstract class PropertyBasedConfigurator<T> {

    private final String propertyName;
    private final String defaultImplClass;
    private final String configurationPath;

    /**
     * Resolves the three settings from system properties, falling back to the
     * supplied defaults when a system property is unset.
     */
    PropertyBasedConfigurator(String propertyNameProp, String defaultImplClassProp,
            String configurationPathProp, String propertyNameDefaultProp,
            String defaultImplClassDefaultProp, String configPathDefaultProp) {
        Properties systemProps = System.getProperties();
        this.propertyName = systemProps.getProperty(propertyNameProp, propertyNameDefaultProp);
        this.defaultImplClass = systemProps.getProperty(defaultImplClassProp, defaultImplClassDefaultProp);
        this.configurationPath = systemProps.getProperty(configurationPathProp, configPathDefaultProp);
    }

    /**
     * Resolves the three settings from system properties only; any unset
     * system property leaves the corresponding setting null.
     */
    PropertyBasedConfigurator(String propertyNameProp, String defaultImplClassProp,
            String configurationPathProp) {
        Properties systemProps = System.getProperties();
        this.propertyName = systemProps.getProperty(propertyNameProp);
        this.defaultImplClass = systemProps.getProperty(defaultImplClassProp);
        this.configurationPath = systemProps.getProperty(configurationPathProp);
    }

    public String getPropertyName() {
        return propertyName;
    }

    public String getDefaultImplClass() {
        return defaultImplClass;
    }

    public String getConfigurationPath() {
        return configurationPath;
    }

    /**
     * Loads the properties file at {@link #getConfigurationPath()}; if it
     * cannot be read, returns an empty configuration so that lookups fall
     * through to the defaults.
     */
    public Configuration getConfiguration() {
        try {
            return new PropertiesConfiguration(getConfigurationPath());
        } catch (ConfigurationException e) {
            // best-effort: an unreadable file degrades to defaults
            return new PropertiesConfiguration();
        }
    }

    /**
     * Looks up the implementation class name from configuration, falling back
     * to {@link #getDefaultImplClass()}.
     */
    public String getClassName() {
        return getConfiguration().getString(getPropertyName(), getDefaultImplClass());
    }

    /**
     * Loads the configured implementation class.
     *
     * @throws RuntimeException wrapping ClassNotFoundException if the class
     *         cannot be loaded
     */
    @SuppressWarnings("unchecked")
    public Class<? extends T> getImplClass() {
        try {
            return (Class<? extends T>) PropertyBasedConfigurator.class
                    .getClassLoader().loadClass(getClassName());
        } catch (ClassNotFoundException e) {
            throw new RuntimeException(e);
        }
    }
}
package org.apache.hadoop.metadata.services;
import javax.inject.Singleton;
import org.apache.commons.configuration.Configuration;
import org.apache.commons.configuration.ConfigurationException;
import org.apache.commons.configuration.PropertiesConfiguration;
import com.thinkaurelius.titan.core.TitanFactory;
import com.thinkaurelius.titan.core.TitanGraph;
/**
 * {@link GraphProvider} that opens a {@link TitanGraph} from a properties
 * file resolved at construction time.
 */
public class TitanGraphProvider implements GraphProvider<TitanGraph> {

    // NOTE(review): the system-property key is empty, so the lookup below
    // always falls back to DEFAULT_PATH — confirm the intended property name.
    private static final String SYSTEM_PROP = "";
    private static final String DEFAULT_PATH = "graph.properties";

    // Path of the Titan configuration file.
    private final String configPath;

    public TitanGraphProvider() {
        configPath = System.getProperties().getProperty(SYSTEM_PROP,
                DEFAULT_PATH);
    }

    /**
     * Loads the Titan configuration from {@link #configPath}.
     *
     * @throws ConfigurationException if the file cannot be read or parsed
     */
    public Configuration getConfiguration() throws ConfigurationException {
        return new PropertiesConfiguration(configPath);
    }

    /**
     * Opens the Titan graph.
     *
     * Fix: the original caught ConfigurationException and rethrew it wrapped
     * in RuntimeException even though this method (and GraphProvider.get())
     * declares the checked exception — making the checked contract dead code.
     * The exception is now propagated as declared.
     *
     * @throws ConfigurationException if the graph configuration cannot be loaded
     */
    @Override
    @Singleton
    public TitanGraph get() throws ConfigurationException {
        return TitanFactory.open(getConfiguration());
    }
}
package org.apache.hadoop.metadata;
import com.google.inject.Guice;
import com.google.inject.Injector;
import com.google.inject.Module;
/**
 * Base class for tests that need a Guice injector built from an arbitrary
 * set of modules.
 */
public abstract class GuiceEnabledTestBase {

    /*
     * Guice.createInjector() takes your Modules, and returns a new Injector
     * instance. Most applications will call this method exactly once, in their
     * main() method.
     */
    public final Injector injector;

    GuiceEnabledTestBase() {
        // Equivalent to Guice.createInjector() with no modules.
        this(new Module[0]);
    }

    GuiceEnabledTestBase(Module... modules) {
        this.injector = Guice.createInjector(modules);
    }
}
package org.apache.hadoop.metadata;
/**
 * Test base whose injector is pre-configured with RepositoryMetadataModule,
 * so subclasses can obtain repository-layer instances directly.
 */
public abstract class RepositoryModuleBaseTest extends GuiceEnabledTestBase {

    public RepositoryModuleBaseTest() {
        super(new RepositoryMetadataModule());
    }
}
package org.apache.hadoop.metadata;
import junit.framework.Assert;
import org.apache.hadoop.metadata.services.GraphService;
import org.testng.annotations.AfterClass;
import org.testng.annotations.BeforeClass;
import org.testng.annotations.Test;
/**
 * Unit test for Guice injector service loading.
 *
 * Consistency fix: extends RepositoryModuleBaseTest instead of duplicating
 * the super(new RepositoryMetadataModule()) wiring that base class already
 * provides.
 */
public class RepositoryServiceLoadingTest extends RepositoryModuleBaseTest {

    @BeforeClass
    public void setUp() throws Exception {
    }

    @AfterClass
    public void tearDown() throws Exception {
    }

    @Test
    public void testGetGraphService() throws Exception {
        /*
         * Now that we've got the injector, we can build objects.
         */
        GraphService gs = injector.getInstance(GraphService.class);
        Assert.assertNotNull(gs);
    }
}
package org.apache.hadoop.metadata.services;
import org.apache.hadoop.metadata.service.Services;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import org.apache.hadoop.metadata.RepositoryModuleBaseTest;
import org.json.simple.JSONValue;
import org.testng.Assert;
import org.testng.annotations.AfterClass;
import org.testng.annotations.BeforeClass;
import org.testng.annotations.Test;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
public class GraphBackedMetadataRepositoryServiceTest {
public class GraphBackedMetadataRepositoryServiceTest extends RepositoryModuleBaseTest {
private static final String ENTITY_NAME = "clicks-table";
private static final String ENTITY_TYPE = "hive-table";
private static final String DATABASE_NAME = "ads";
private static final String TABLE_NAME = "clicks-table";
private static final String ENTITY_NAME = "clicks-table";
private static final String ENTITY_TYPE = "hive-table";
private static final String DATABASE_NAME = "ads";
private static final String TABLE_NAME = "clicks-table";
private TitanGraphService titanGraphService;
private GraphBackedMetadataRepositoryService repositoryService;
private TitanGraphService titanGraphService;
private GraphBackedMetadataRepositoryService repositoryService;
@BeforeClass
public void setUp() throws Exception {
titanGraphService = new TitanGraphService();
titanGraphService.start();
Services.get().register(titanGraphService);
@BeforeClass
public void setUp() throws Exception {
titanGraphService = super.injector.getInstance(TitanGraphService.class);
titanGraphService.start();
//Services.get().register(titanGraphService);
repositoryService = new GraphBackedMetadataRepositoryService();
repositoryService.start();
Services.get().register(repositoryService);
}
repositoryService = super.injector.getInstance(GraphBackedMetadataRepositoryService.class);
repositoryService.start();
//Services.get().register(repositoryService);
}
@AfterClass
public void tearDown() throws Exception {
Services.get().getService(GraphBackedMetadataRepositoryService.NAME).close();
Services.get().getService(TitanGraphService.NAME).close();
Services.get().reset();
}
@AfterClass
public void tearDown() throws Exception {
/*Services.get().getService(GraphBackedMetadataRepositoryService.NAME)
.close();
Services.get().getService(TitanGraphService.NAME).close();
Services.get().reset();*/
}
@Test
public void testGetName() throws Exception {
Assert.assertEquals(GraphBackedMetadataRepositoryService.NAME,
GraphBackedMetadataRepositoryService.class.getSimpleName());
Assert.assertEquals(repositoryService.getName(), GraphBackedMetadataRepositoryService.NAME);
}
@Test
public void testGetName() throws Exception {
Assert.assertEquals(GraphBackedMetadataRepositoryService.NAME,
GraphBackedMetadataRepositoryService.class.getSimpleName());
Assert.assertEquals(repositoryService.getName(),
GraphBackedMetadataRepositoryService.NAME);
}
@Test
public void testSubmitEntity() throws Exception {
String entityStream = getTestEntityJSON();
String guid = repositoryService.submitEntity(entityStream, ENTITY_TYPE);
Assert.assertNotNull(guid);
}
@Test
public void testSubmitEntity() throws Exception {
String entityStream = getTestEntityJSON();
String guid = repositoryService.submitEntity(entityStream, ENTITY_TYPE);
Assert.assertNotNull(guid);
}
private String getTestEntityJSON() {
Map<String, String> props = new HashMap<>();
props.put("entityName", ENTITY_NAME);
props.put("entityType", ENTITY_TYPE);
props.put("database", DATABASE_NAME);
props.put("table", TABLE_NAME);
return JSONValue.toJSONString(props);
}
private String getTestEntityJSON() {
Map<String, String> props = new HashMap<>();
props.put("entityName", ENTITY_NAME);
props.put("entityType", ENTITY_TYPE);
props.put("database", DATABASE_NAME);
props.put("table", TABLE_NAME);
return JSONValue.toJSONString(props);
}
@Test (dependsOnMethods = "testSubmitEntity")
public void testGetEntityDefinition() throws Exception {
String entity = repositoryService.getEntityDefinition(ENTITY_NAME, ENTITY_TYPE);
Map<String, String> entityProperties =
(Map<String, String>) JSONValue.parseWithException(entity);
Assert.assertNotNull(entityProperties.get("guid"));
Assert.assertEquals(entityProperties.get("entityName"), ENTITY_NAME);
Assert.assertEquals(entityProperties.get("entityType"), ENTITY_TYPE);
Assert.assertEquals(entityProperties.get("database"), DATABASE_NAME);
Assert.assertEquals(entityProperties.get("table"), TABLE_NAME);
}
@Test(dependsOnMethods = "testSubmitEntity")
public void testGetEntityDefinition() throws Exception {
String entity = repositoryService.getEntityDefinition(ENTITY_NAME,
ENTITY_TYPE);
Map<String, String> entityProperties = (Map<String, String>) JSONValue
.parseWithException(entity);
Assert.assertNotNull(entityProperties.get("guid"));
Assert.assertEquals(entityProperties.get("entityName"), ENTITY_NAME);
Assert.assertEquals(entityProperties.get("entityType"), ENTITY_TYPE);
Assert.assertEquals(entityProperties.get("database"), DATABASE_NAME);
Assert.assertEquals(entityProperties.get("table"), TABLE_NAME);
}
@Test
public void testGetEntityDefinitionNonExistent() throws Exception {
String entity = repositoryService.getEntityDefinition("blah", "blah");
Assert.assertNull(entity);
}
@Test
public void testGetEntityDefinitionNonExistent() throws Exception {
String entity = repositoryService.getEntityDefinition("blah", "blah");
Assert.assertNull(entity);
}
@Test
public void testGetEntityList() throws Exception {
List<String> entityList = repositoryService.getEntityList(ENTITY_TYPE);
Assert.assertNotNull(entityList);
Assert.assertEquals(entityList.size(), 0); // as this is not implemented yet
}
@Test
public void testGetEntityList() throws Exception {
List<String> entityList = repositoryService.getEntityList(ENTITY_TYPE);
Assert.assertNotNull(entityList);
Assert.assertEquals(entityList.size(), 0); // as this is not implemented
// yet
}
@Test (expectedExceptions = RuntimeException.class)
public void testStartWithOutGraphServiceRegistration() throws Exception {
try {
Services.get().reset();
GraphBackedMetadataRepositoryService repositoryService = new
GraphBackedMetadataRepositoryService();
repositoryService.start();
Assert.fail("This should have thrown an exception");
} finally {
Services.get().register(titanGraphService);
Services.get().register(repositoryService);
}
}
/*@Test(expectedExceptions = RuntimeException.class)
public void testStartWithOutGraphServiceRegistration() throws Exception {
try {
//Services.get().reset();
GraphBackedMetadataRepositoryService repositoryService = new GraphBackedMetadataRepositoryService();
repositoryService.start();
Assert.fail("This should have thrown an exception");
} finally {
//Services.get().register(titanGraphService);
//Services.get().register(repositoryService);
}
}*/
}
package org.apache.hadoop.metadata.services;
import org.apache.hadoop.metadata.RepositoryModuleBaseTest;
import org.testng.Assert;
import org.testng.annotations.AfterClass;
import org.testng.annotations.BeforeClass;
......@@ -8,13 +9,14 @@ import org.testng.annotations.Test;
/**
* Unit test for TitanGraphService.
*/
public class TitanGraphServiceTest {
public class TitanGraphServiceTest extends RepositoryModuleBaseTest {
private TitanGraphService titanGraphService;
@BeforeClass
public void setUp() throws Exception {
titanGraphService = new TitanGraphService();
titanGraphService = super.injector.getInstance(TitanGraphService.class);
//titanGraphService = new TitanGraphService();
titanGraphService.start();
}
......
storage.backend=inmemory
# Graph Search Index
index.search.backend=elasticsearch
index.search.directory=target/data/es
index.search.elasticsearch.client-only=false
index.search.elasticsearch.local-mode=true
\ No newline at end of file
metadata.graph.impl.class=org.apache.hadoop.metadata.services.TitanGraphService
\ No newline at end of file
......@@ -127,6 +127,16 @@
<groupId>org.testng</groupId>
<artifactId>testng</artifactId>
</dependency>
<dependency>
<groupId>com.google.inject.extensions</groupId>
<artifactId>guice-servlet</artifactId>
<version>3.0</version>
</dependency>
<dependency>
<groupId>com.sun.jersey.contribs</groupId>
<artifactId>jersey-guice</artifactId>
<version>1.18.3</version>
</dependency>
</dependencies>
<build>
......
package org.apache.hadoop.metadata.web.listeners;
import java.util.HashMap;
import java.util.Map;
import org.apache.hadoop.metadata.RepositoryMetadataModule;
import com.google.inject.Guice;
import com.google.inject.Injector;
import com.google.inject.servlet.GuiceServletContextListener;
import com.sun.jersey.api.core.PackagesResourceConfig;
import com.sun.jersey.guice.JerseyServletModule;
import com.sun.jersey.guice.spi.container.servlet.GuiceContainer;
/**
 * Servlet context listener that bootstraps the Guice injector for the web
 * application, combining the repository bindings with a Jersey servlet module.
 */
public class GuiceServletConfig extends GuiceServletContextListener {

    // Context-param naming the packages Jersey scans for resources.
    private static final String GUICE_CTX_PARAM = "guice.packages";

    @Override
    protected Injector getInjector() {
        /*
         * More information on this can be found here:
         * https://jersey.java.net/nonav/apidocs/1.11/contribs/jersey-guice/com/sun/jersey/guice/spi/container/servlet/package-summary.html
         */
        JerseyServletModule jerseyModule = new JerseyServletModule() {
            @Override
            protected void configureServlets() {
                // Resource packages come from the servlet context configuration.
                String packages = getServletContext().getInitParameter(GUICE_CTX_PARAM);

                Map<String, String> params = new HashMap<String, String>();
                params.put(PackagesResourceConfig.PROPERTY_PACKAGES, packages);

                // Route all requests through the Guice-aware Jersey container.
                serve("/*").with(GuiceContainer.class, params);
            }
        };

        return Guice.createInjector(new RepositoryMetadataModule(), jerseyModule);
    }
}
......@@ -23,6 +23,7 @@ import org.apache.hadoop.metadata.web.util.Servlets;
import org.codehaus.jettison.json.JSONException;
import org.codehaus.jettison.json.JSONObject;
import javax.inject.Singleton;
import javax.ws.rs.GET;
import javax.ws.rs.Path;
import javax.ws.rs.Produces;
......@@ -34,6 +35,7 @@ import javax.ws.rs.core.Response;
* Jersey Resource for admin operations.
*/
@Path("admin")
@Singleton
public class AdminResource {
@GET
......
......@@ -18,16 +18,11 @@
package org.apache.hadoop.metadata.web.resources;
import com.google.common.base.Preconditions;
import org.apache.commons.io.IOUtils;
import org.apache.hadoop.metadata.service.Services;
import org.apache.hadoop.metadata.services.GraphBackedMetadataRepositoryService;
import org.apache.hadoop.metadata.services.MetadataRepositoryService;
import org.apache.hadoop.metadata.web.util.Servlets;
import org.codehaus.jettison.json.JSONObject;
import org.json.simple.JSONValue;
import org.json.simple.parser.ParseException;
import java.io.IOException;
import java.io.StringWriter;
import javax.inject.Inject;
import javax.inject.Singleton;
import javax.servlet.http.HttpServletRequest;
import javax.ws.rs.Consumes;
import javax.ws.rs.DELETE;
......@@ -42,8 +37,15 @@ import javax.ws.rs.WebApplicationException;
import javax.ws.rs.core.Context;
import javax.ws.rs.core.MediaType;
import javax.ws.rs.core.Response;
import java.io.IOException;
import java.io.StringWriter;
import org.apache.commons.io.IOUtils;
import org.apache.hadoop.metadata.services.MetadataRepositoryService;
import org.apache.hadoop.metadata.web.util.Servlets;
import org.codehaus.jettison.json.JSONObject;
import org.json.simple.JSONValue;
import org.json.simple.parser.ParseException;
import com.google.common.base.Preconditions;
/**
* Entity management operations as REST API.
......@@ -52,15 +54,14 @@ import java.io.StringWriter;
* of the Type they correspond with.
*/
@Path("entities")
@Singleton
public class EntityResource {
private MetadataRepositoryService repositoryService;
private final MetadataRepositoryService repositoryService;
public EntityResource() {
repositoryService = Services.get().getService(GraphBackedMetadataRepositoryService.NAME);
if (repositoryService == null) {
throw new RuntimeException("graph service is not initialized");
}
@Inject
public EntityResource(MetadataRepositoryService repositoryService) {
this.repositoryService = repositoryService;
}
@POST
......
......@@ -18,6 +18,7 @@
package org.apache.hadoop.metadata.web.resources;
import javax.inject.Singleton;
import javax.ws.rs.Path;
/**
......@@ -30,5 +31,6 @@ import javax.ws.rs.Path;
* 'search': find entities generated by Hive processes or that were generated by Sqoop, etc.
*/
@Path("discovery")
@Singleton
public class MetadataDiscoveryResource {
}
......@@ -18,25 +18,12 @@
package org.apache.hadoop.metadata.web.resources;
import com.tinkerpop.blueprints.Direction;
import com.tinkerpop.blueprints.Edge;
import com.tinkerpop.blueprints.Element;
import com.tinkerpop.blueprints.Graph;
import com.tinkerpop.blueprints.Vertex;
import com.tinkerpop.blueprints.VertexQuery;
import com.tinkerpop.blueprints.util.io.graphson.GraphSONMode;
import com.tinkerpop.blueprints.util.io.graphson.GraphSONUtility;
import org.apache.commons.lang.StringUtils;
import org.apache.hadoop.metadata.service.Services;
import org.apache.hadoop.metadata.services.GraphService;
import org.apache.hadoop.metadata.services.TitanGraphService;
import org.apache.hadoop.metadata.web.util.Servlets;
import org.codehaus.jettison.json.JSONArray;
import org.codehaus.jettison.json.JSONException;
import org.codehaus.jettison.json.JSONObject;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.util.HashMap;
import java.util.Map;
import java.util.Set;
import javax.inject.Inject;
import javax.inject.Singleton;
import javax.ws.rs.DefaultValue;
import javax.ws.rs.GET;
import javax.ws.rs.Path;
......@@ -46,9 +33,24 @@ import javax.ws.rs.QueryParam;
import javax.ws.rs.WebApplicationException;
import javax.ws.rs.core.MediaType;
import javax.ws.rs.core.Response;
import java.util.HashMap;
import java.util.Map;
import java.util.Set;
import org.apache.commons.lang.StringUtils;
import org.apache.hadoop.metadata.services.GraphService;
import org.apache.hadoop.metadata.web.util.Servlets;
import org.codehaus.jettison.json.JSONArray;
import org.codehaus.jettison.json.JSONException;
import org.codehaus.jettison.json.JSONObject;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import com.tinkerpop.blueprints.Direction;
import com.tinkerpop.blueprints.Edge;
import com.tinkerpop.blueprints.Element;
import com.tinkerpop.blueprints.Graph;
import com.tinkerpop.blueprints.Vertex;
import com.tinkerpop.blueprints.VertexQuery;
import com.tinkerpop.blueprints.util.io.graphson.GraphSONMode;
import com.tinkerpop.blueprints.util.io.graphson.GraphSONUtility;
/**
* Jersey Resource for lineage metadata operations.
......@@ -59,22 +61,25 @@ import java.util.Set;
* for accessing the backend graph.
*/
@Path("graph")
@Singleton
public class RexsterGraphResource {
private static final Logger LOG = LoggerFactory.getLogger(RexsterGraphResource.class);
public static final String RESULTS = "results";
public static final String TOTAL_SIZE = "totalSize";
private GraphService graphService;
private final GraphService graphService;
public RexsterGraphResource() {
graphService = Services.get().getService(TitanGraphService.NAME);
@Inject
public RexsterGraphResource(GraphService graphService) {
this.graphService = graphService;
/*graphService = Services.get().getService(TitanGraphService.NAME);
if (graphService == null) {
throw new WebApplicationException(Response
.status(Response.Status.INTERNAL_SERVER_ERROR)
.tag("graph service is not initialized")
.build());
}
}*/
}
protected Graph getGraph() {
......
......@@ -18,6 +18,7 @@
package org.apache.hadoop.metadata.web.resources;
import javax.inject.Singleton;
import javax.servlet.http.HttpServletRequest;
import javax.ws.rs.*;
import javax.ws.rs.core.Context;
......@@ -31,6 +32,7 @@ import javax.ws.rs.core.Response;
* e.g. a Hive table
*/
@Path("types")
@Singleton
public class TypesResource {
@POST
......
<?xml version="1.0" encoding="UTF-8"?>
<!--
  ~ Licensed to the Apache Software Foundation (ASF) under one
  ~ or more contributor license agreements.  See the NOTICE file
  ~ distributed with this work for additional information
  ~ regarding copyright ownership.  The ASF licenses this file
  ~ to you under the Apache License, Version 2.0 (the
  ~ "License"); you may not use this file except in compliance
  ~ with the License.  You may obtain a copy of the License at
  ~
  ~     http://www.apache.org/licenses/LICENSE-2.0
  ~
  ~ Unless required by applicable law or agreed to in writing, software
  ~ distributed under the License is distributed on an "AS IS" BASIS,
  ~ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
  ~ See the License for the specific language governing permissions and
  ~ limitations under the License.
-->
<!DOCTYPE web-app PUBLIC "-//Sun Microsystems, Inc.//DTD Web Application 2.3//EN"
        "http://java.sun.com/dtd/web-app_2_3.dtd">

<!--
  The web-app 2.3 DTD mandates a strict element order:
    display-name?, description?, ..., context-param*,
    filter*, filter-mapping*, listener*, servlet*, servlet-mapping*
  Elements below are arranged accordingly; display-name/description may
  each appear at most once.
-->
<web-app>

    <display-name>Apache Metadata Placeholder</display-name>
    <description>Apache Metadata Placeholder</description>

    <!-- Packages scanned by the Jersey/Guice container for REST resources
         and parameter providers. -->
    <context-param>
        <param-name>guice.packages</param-name>
        <param-value>org.apache.hadoop.metadata.web.resources,org.apache.hadoop.metadata.web.params</param-value>
    </context-param>

    <!--
        More information can be found here:
        https://jersey.java.net/nonav/apidocs/1.11/contribs/jersey-guice/com/sun/jersey/guice/spi/container/servlet/package-summary.html
    -->
    <filter>
        <filter-name>guiceFilter</filter-name>
        <filter-class>com.google.inject.servlet.GuiceFilter</filter-class>
    </filter>

    <!-- NOTE(review): the audit and authentication filters below are declared
         but have no <filter-mapping>, so the container never invokes them.
         Mappings existed against the now-commented-out MetadataRESTApi
         servlet (see below); restore mappings once request dispatch through
         Guice is settled. -->
    <filter>
        <filter-name>audit</filter-name>
        <filter-class>org.apache.hadoop.metadata.web.filters.AuditFilter</filter-class>
    </filter>

    <filter>
        <filter-name>authentication</filter-name>
        <filter-class>org.apache.hadoop.metadata.web.filters.AuthenticationFilter</filter-class>
    </filter>

    <!-- All requests are routed through Guice; GuiceServletConfig (listener
         below) supplies the injector and the Jersey servlet bindings. -->
    <filter-mapping>
        <filter-name>guiceFilter</filter-name>
        <url-pattern>/*</url-pattern>
    </filter-mapping>

    <listener>
        <listener-class>org.apache.hadoop.metadata.web.listeners.GuiceServletConfig</listener-class>
    </listener>

    <!-- Legacy (pre-Guice) configuration, retained for reference only.
    <filter>
        <filter-name>audit</filter-name>
        <filter-class>org.apache.hadoop.metadata.web.filters.AuditFilter</filter-class>
    </filter>
    <filter>
        <filter-name>authentication</filter-name>
        <filter-class>org.apache.hadoop.metadata.web.filters.AuthenticationFilter</filter-class>
    </filter>
    <filter-mapping>
        <filter-name>audit</filter-name>
        <servlet-name>MetadataRESTApi</servlet-name>
    </filter-mapping>
    <filter-mapping>
        <filter-name>authentication</filter-name>
        <servlet-name>MetadataRESTApi</servlet-name>
    </filter-mapping>
    <listener>
        <listener-class>org.apache.hadoop.metadata.web.listeners.ApplicationStartupListener</listener-class>
    </listener>
    <servlet>
        <servlet-name>MetadataRESTApi</servlet-name>
        <servlet-class>com.sun.jersey.spi.container.servlet.ServletContainer</servlet-class>
        <init-param>
            <param-name>com.sun.jersey.config.property.resourceConfigClass</param-name>
            <param-value>com.sun.jersey.api.core.PackagesResourceConfig</param-value>
        </init-param>
        <init-param>
            <param-name>com.sun.jersey.config.property.packages</param-name>
            <param-value>
                org.apache.hadoop.metadata.web.resources,org.apache.hadoop.metadata.web.params
            </param-value>
        </init-param>
        <load-on-startup>1</load-on-startup>
    </servlet>
    <servlet-mapping>
        <servlet-name>MetadataRESTApi</servlet-name>
        <url-pattern>/api/metadata/*</url-pattern>
    </servlet-mapping>
    -->
</web-app>
#
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
application.services=org.apache.hadoop.metadata.services.TitanGraphService,\
org.apache.hadoop.metadata.services.GraphBackedMetadataRepositoryService
# Graph implementation
#metadata.graph.blueprints.graph=com.thinkaurelius.titan.core.TitanFactory
# Graph Storage
metadata.graph.storage.backend=berkeleyje
metadata.graph.storage.directory=target/data/berkeley
# Graph Search Index
metadata.graph.index.search.backend=elasticsearch
metadata.graph.index.search.directory=target/data/es
metadata.graph.index.search.elasticsearch.client-only=false
metadata.graph.index.search.elasticsearch.local-mode=true
metadata.enableTLS=false
storage.backend=inmemory
# Graph Search Index
index.search.backend=elasticsearch
index.search.directory=target/data/es
index.search.elasticsearch.client-only=false
index.search.elasticsearch.local-mode=true
#
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# This is formatted as follows:
# metadata.indexer.vertex.property.name.<index>=<Property Name>
# metadata.indexer.vertex.property.type.<index>=<Data Type>
metadata.indexer.vertex.property.name.0=DESC
metadata.indexer.vertex.property.type.0=String
metadata.indexer.vertex.property.name.1=DB_LOCATION_URI
metadata.indexer.vertex.property.type.1=String
metadata.indexer.vertex.property.name.2=NAME
metadata.indexer.vertex.property.type.2=String
metadata.indexer.vertex.property.name.3=OWNER_NAME
metadata.indexer.vertex.property.type.3=String
metadata.indexer.vertex.property.name.4=TBL_NAME
metadata.indexer.vertex.property.type.4=String
metadata.indexer.vertex.property.name.5=COMMENT
metadata.indexer.vertex.property.type.5=String
metadata.indexer.vertex.property.name.6=COLUMN_NAME
metadata.indexer.vertex.property.type.6=String
metadata.indexer.vertex.property.name.7=TYPE_NAME
metadata.indexer.vertex.property.type.7=String
metadata.graph.impl.class=org.apache.hadoop.metadata.services.TitanGraphService