Commit c1af1456 by Dan Markwat

Guice-enabled project. Everything compiles and the unit tests pass. Trouble
with the integration tests (specifically the metadata.keystore file & keystore plugin), but the integration tests are otherwise loading Guice dependency-injected instances correctly.
parent e0423116
<?xml version="1.0" encoding="UTF-8"?> <?xml version="1.0" encoding="UTF-8"?>
<!-- <!-- ~ Licensed to the Apache Software Foundation (ASF) under one ~ or more
~ Licensed to the Apache Software Foundation (ASF) under one contributor license agreements. See the NOTICE file ~ distributed with this
~ or more contributor license agreements. See the NOTICE file work for additional information ~ regarding copyright ownership. The ASF
~ distributed with this work for additional information licenses this file ~ to you under the Apache License, Version 2.0 (the ~
~ regarding copyright ownership. The ASF licenses this file "License"); you may not use this file except in compliance ~ with the License.
~ to you under the Apache License, Version 2.0 (the You may obtain a copy of the License at ~ ~ http://www.apache.org/licenses/LICENSE-2.0
~ "License"); you may not use this file except in compliance ~ ~ Unless required by applicable law or agreed to in writing, software ~
~ with the License. You may obtain a copy of the License at distributed under the License is distributed on an "AS IS" BASIS, ~ WITHOUT
~ WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. ~ See the
~ http://www.apache.org/licenses/LICENSE-2.0 License for the specific language governing permissions and ~ limitations
~ under the License. -->
~ Unless required by applicable law or agreed to in writing, software
~ distributed under the License is distributed on an "AS IS" BASIS, <project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
~ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/maven-v4_0_0.xsd">
~ See the License for the specific language governing permissions and
~ limitations under the License. <modelVersion>4.0.0</modelVersion>
--> <parent>
<groupId>org.apache.hadoop.metadata</groupId>
<project xmlns="http://maven.apache.org/POM/4.0.0" <artifactId>metadata-governance</artifactId>
xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" <version>0.1-incubating-SNAPSHOT</version>
xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/maven-v4_0_0.xsd"> </parent>
<artifactId>metadata-repository</artifactId>
<modelVersion>4.0.0</modelVersion> <description>Apache Metadata Repository Module</description>
<parent> <name>Apache Metadata Repository</name>
<groupId>org.apache.hadoop.metadata</groupId> <packaging>jar</packaging>
<artifactId>metadata-governance</artifactId>
<version>0.1-incubating-SNAPSHOT</version> <profiles>
</parent> <profile>
<artifactId>metadata-repository</artifactId> <id>hadoop-2</id>
<description>Apache Metadata Repository Module</description> <activation>
<name>Apache Metadata Repository</name> <activeByDefault>true</activeByDefault>
<packaging>jar</packaging> </activation>
<dependencies>
<profiles> <dependency>
<profile> <groupId>org.apache.hadoop</groupId>
<id>hadoop-2</id> <artifactId>hadoop-client</artifactId>
<activation> </dependency>
<activeByDefault>true</activeByDefault> <dependency>
</activation> <groupId>org.apache.hadoop</groupId>
<dependencies> <artifactId>hadoop-hdfs</artifactId>
<dependency> </dependency>
<groupId>org.apache.hadoop</groupId> <dependency>
<artifactId>hadoop-client</artifactId> <groupId>org.apache.hadoop</groupId>
</dependency> <artifactId>hadoop-hdfs</artifactId>
<dependency> <classifier>tests</classifier>
<groupId>org.apache.hadoop</groupId> </dependency>
<artifactId>hadoop-hdfs</artifactId> <dependency>
</dependency> <groupId>org.apache.hadoop</groupId>
<dependency> <artifactId>hadoop-common</artifactId>
<groupId>org.apache.hadoop</groupId> <classifier>tests</classifier>
<artifactId>hadoop-hdfs</artifactId> </dependency>
<classifier>tests</classifier> <dependency>
</dependency> <groupId>org.apache.hadoop</groupId>
<dependency> <artifactId>hadoop-common</artifactId>
<groupId>org.apache.hadoop</groupId> </dependency>
<artifactId>hadoop-common</artifactId> </dependencies>
<classifier>tests</classifier> </profile>
</dependency> </profiles>
<dependency>
<groupId>org.apache.hadoop</groupId> <dependencies>
<artifactId>hadoop-common</artifactId> <dependency>
</dependency> <groupId>org.apache.hadoop.metadata</groupId>
</dependencies> <artifactId>metadata-common</artifactId>
</profile> </dependency>
</profiles>
<dependency>
<dependencies> <groupId>org.slf4j</groupId>
<dependency> <artifactId>slf4j-log4j12</artifactId>
<groupId>org.apache.hadoop.metadata</groupId> </dependency>
<artifactId>metadata-common</artifactId>
</dependency> <dependency>
<groupId>org.slf4j</groupId>
<dependency> <artifactId>slf4j-api</artifactId>
<groupId>org.slf4j</groupId> </dependency>
<artifactId>slf4j-log4j12</artifactId>
</dependency> <dependency>
<groupId>com.google.inject</groupId>
<dependency> <artifactId>guice</artifactId>
<groupId>org.slf4j</groupId> </dependency>
<artifactId>slf4j-api</artifactId>
</dependency> <dependency>
<groupId>com.google.inject.extensions</groupId>
<dependency> <artifactId>guice-throwingproviders</artifactId>
<groupId>com.google.inject</groupId> <version>3.0</version>
<artifactId>guice</artifactId> </dependency>
</dependency>
<dependency>
<dependency> <groupId>org.codehaus.jettison</groupId>
<groupId>org.codehaus.jettison</groupId> <artifactId>jettison</artifactId>
<artifactId>jettison</artifactId> </dependency>
</dependency>
<dependency>
<dependency> <groupId>com.googlecode.json-simple</groupId>
<groupId>com.googlecode.json-simple</groupId> <artifactId>json-simple</artifactId>
<artifactId>json-simple</artifactId> </dependency>
</dependency>
<dependency>
<dependency> <groupId>com.tinkerpop.blueprints</groupId>
<groupId>com.tinkerpop.blueprints</groupId> <artifactId>blueprints-core</artifactId>
<artifactId>blueprints-core</artifactId> </dependency>
</dependency>
<dependency>
<dependency> <groupId>com.thinkaurelius.titan</groupId>
<groupId>com.thinkaurelius.titan</groupId> <artifactId>titan-core</artifactId>
<artifactId>titan-core</artifactId> </dependency>
</dependency>
<dependency>
<dependency> <groupId>com.thinkaurelius.titan</groupId>
<groupId>com.thinkaurelius.titan</groupId> <artifactId>titan-berkeleyje</artifactId>
<artifactId>titan-berkeleyje</artifactId> </dependency>
</dependency>
<dependency>
<dependency> <groupId>com.thinkaurelius.titan</groupId>
<groupId>com.thinkaurelius.titan</groupId> <artifactId>titan-es</artifactId>
<artifactId>titan-es</artifactId> </dependency>
</dependency>
<dependency>
<dependency> <groupId>org.testng</groupId>
<groupId>org.testng</groupId> <artifactId>testng</artifactId>
<artifactId>testng</artifactId> </dependency>
</dependency>
<dependency>
<dependency> <groupId>org.mockito</groupId>
<groupId>org.mockito</groupId> <artifactId>mockito-all</artifactId>
<artifactId>mockito-all</artifactId> </dependency>
</dependency> </dependencies>
</dependencies>
<build>
<build> <plugins>
<plugins> <plugin>
<plugin> <groupId>org.apache.maven.plugins</groupId>
<groupId>org.apache.maven.plugins</groupId> <artifactId>maven-compiler-plugin</artifactId>
<artifactId>maven-compiler-plugin</artifactId> <configuration>
<configuration> <source>1.7</source>
<source>1.7</source> <target>1.7</target>
<target>1.7</target> </configuration>
</configuration> </plugin>
</plugin> <plugin>
<plugin> <groupId>org.apache.maven.plugins</groupId>
<groupId>org.apache.maven.plugins</groupId> <artifactId>maven-jar-plugin</artifactId>
<artifactId>maven-jar-plugin</artifactId> <version>2.4</version>
<version>2.4</version> <configuration>
<configuration> <excludes>
<excludes> <exclude>**/log4j.xml</exclude>
<exclude>**/log4j.xml</exclude> </excludes>
</excludes> </configuration>
</configuration> </plugin>
</plugin> </plugins>
</plugins> </build>
</build>
</project> </project>
@@ -24,12 +24,45 @@
 */
package org.apache.hadoop.metadata;

import org.apache.hadoop.metadata.services.GraphBackedMetadataRepositoryService;
import org.apache.hadoop.metadata.services.GraphProvider;
import org.apache.hadoop.metadata.services.GraphService;
import org.apache.hadoop.metadata.services.GraphServiceConfigurator;
import org.apache.hadoop.metadata.services.MetadataRepositoryService;
import org.apache.hadoop.metadata.services.TitanGraphProvider;

import com.google.inject.throwingproviders.ThrowingProviderBinder;
import com.thinkaurelius.titan.core.TitanGraph;

/**
 * Guice module for Repository module.
 */
public class RepositoryMetadataModule extends com.google.inject.AbstractModule {

    // Graph Service implementation class
    private Class<? extends GraphService> graphServiceClass;

    // MetadataRepositoryService implementation class
    private Class<? extends MetadataRepositoryService> metadataRepoClass;

    public RepositoryMetadataModule() {
        GraphServiceConfigurator gsp = new GraphServiceConfigurator();
        this.graphServiceClass = gsp.getImplClass();
        this.metadataRepoClass = GraphBackedMetadataRepositoryService.class;
    }

    protected void configure() {
        // special wiring for Titan Graph
        ThrowingProviderBinder.create(binder())
                .bind(GraphProvider.class, TitanGraph.class)
                .to(TitanGraphProvider.class);

        // allow for dynamic binding of the metadata repo service & graph service

        // bind the MetadataRepositoryService interface to an implementation
        bind(MetadataRepositoryService.class).to(metadataRepoClass);

        // bind the GraphService interface to an implementation
        bind(GraphService.class).to(graphServiceClass);
    }
}
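For illustration only, a minimal bootstrap sketch of how this module can be used outside the test classes later in this commit. The RepositoryBootstrap class and its main() method are hypothetical and not part of this commit; the module and service types are the ones defined here, and the lookup mirrors what the tests below do with the injector.

package org.apache.hadoop.metadata;

import com.google.inject.Guice;
import com.google.inject.Injector;
import org.apache.hadoop.metadata.services.GraphBackedMetadataRepositoryService;

// Hypothetical example class, not part of this commit.
public class RepositoryBootstrap {

    public static void main(String[] args) throws Exception {
        // Build the injector once; RepositoryMetadataModule wires the GraphService,
        // the Titan GraphProvider, and the MetadataRepositoryService bindings.
        Injector injector = Guice.createInjector(new RepositoryMetadataModule());

        // Ask the injector for a fully wired repository service and start it,
        // the same way GraphBackedMetadataRepositoryServiceTest does below.
        GraphBackedMetadataRepositoryService repository =
                injector.getInstance(GraphBackedMetadataRepositoryService.class);
        repository.start();
    }
}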
@@ -22,6 +22,7 @@ import com.google.common.base.Preconditions;
import com.tinkerpop.blueprints.Graph;
import com.tinkerpop.blueprints.TransactionalGraph;
import com.tinkerpop.blueprints.Vertex;
import org.apache.hadoop.metadata.service.Services;
import org.apache.hadoop.metadata.util.GraphUtils;
import org.json.simple.JSONValue;
@@ -34,6 +35,8 @@ import java.util.List;
import java.util.Map;
import java.util.UUID;

import javax.inject.Inject;

/**
 * An implementation backed by Titan Graph DB.
 */
@@ -44,6 +47,11 @@ public class GraphBackedMetadataRepositoryService implements MetadataRepositoryService {

    public static final String NAME = GraphBackedMetadataRepositoryService.class.getSimpleName();

    private GraphService graphService;

    @Inject
    GraphBackedMetadataRepositoryService(GraphService service) {
        this.graphService = service;
    }

    /**
     * Name of the service.
@@ -62,11 +70,6 @@ public class GraphBackedMetadataRepositoryService implements MetadataRepositoryService {
     */
    @Override
    public void start() throws Exception {
    }

    /**
...
package org.apache.hadoop.metadata.services;
import org.apache.commons.configuration.ConfigurationException;
import com.google.inject.throwingproviders.CheckedProvider;
import com.tinkerpop.blueprints.Graph;
public interface GraphProvider<T extends Graph> extends CheckedProvider<T> {
@Override
T get() throws ConfigurationException;
}
package org.apache.hadoop.metadata.services;
public class GraphServiceConfigurator extends PropertyBasedConfigurator<GraphService> {
private static final String PROPERTY_NAME = "metadata.graph.impl.class";
private static final String DEFAULT_IMPL_CLASS = "no.default.graph.class";
private static final String CONFIG_PATH = "metadata.graph.properties";
public GraphServiceConfigurator() {
super("metadata.graph.propertyName", "metadata.graph.defaultImplClass",
"metadata.graph.configurationPath", PROPERTY_NAME,
DEFAULT_IMPL_CLASS, CONFIG_PATH);
}
}
package org.apache.hadoop.metadata.services;
import java.util.Properties;
import org.apache.commons.configuration.Configuration;
import org.apache.commons.configuration.ConfigurationException;
import org.apache.commons.configuration.PropertiesConfiguration;
public abstract class PropertyBasedConfigurator<T> {
private final String propertyName;
private final String defaultImplClass;
private final String configurationPath;
PropertyBasedConfigurator(String propertyNameProp, String defaultImplClassProp,
String configurationPathProp, String propertyNameDefaultProp,
String defaultImplClassDefaultProp, String configPathDefaultProp) {
Properties props = System.getProperties();
this.propertyName = props.getProperty(propertyNameProp,
propertyNameDefaultProp);
this.defaultImplClass = props.getProperty(defaultImplClassProp,
defaultImplClassDefaultProp);
this.configurationPath = props.getProperty(configurationPathProp,
configPathDefaultProp);
}
PropertyBasedConfigurator(String propertyNameProp, String defaultImplClassProp,
String configurationPathProp) {
Properties props = System.getProperties();
this.propertyName = props.getProperty(propertyNameProp);
this.defaultImplClass = props.getProperty(defaultImplClassProp);
this.configurationPath = props.getProperty(configurationPathProp);
}
public String getPropertyName() {
return propertyName;
}
public String getDefaultImplClass() {
return defaultImplClass;
}
public String getConfigurationPath() {
return configurationPath;
}
public Configuration getConfiguration() {
String path = getConfigurationPath();
Configuration config = null;
try {
config = new PropertiesConfiguration(path);
} catch (ConfigurationException e) {
config = new PropertiesConfiguration();
}
return config;
}
public String getClassName() {
Configuration config = getConfiguration();
String propName = getPropertyName();
String defaultClass = getDefaultImplClass();
return config.getString(propName, defaultClass);
}
@SuppressWarnings("unchecked")
public Class<? extends T> getImplClass() {
String className = getClassName();
Class<? extends T> ret = null;
try {
ret = (Class<? extends T>) PropertyBasedConfigurator.class
.getClassLoader().loadClass(className);
} catch (ClassNotFoundException e) {
throw new RuntimeException(e);
}
return ret;
}
}
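As a hedged illustration of how these configurator classes are meant to be used, the snippet below mirrors what RepositoryMetadataModule does in its constructor. The example class itself is hypothetical and not part of this commit; the property names in the comments are the defaults defined above.

package org.apache.hadoop.metadata.services;

// Hypothetical example, not part of this commit: resolves the GraphService
// implementation class the same way RepositoryMetadataModule does.
public class GraphServiceConfiguratorExample {

    public static void main(String[] args) {
        // With the defaults above, the configurator reads the optional system property
        // "metadata.graph.configurationPath" (falling back to the file name
        // "metadata.graph.properties"), loads that properties file, and returns the class
        // named by "metadata.graph.impl.class" (set to TitanGraphService in this commit's
        // test resources). If the named class cannot be loaded, getImplClass() throws a
        // RuntimeException.
        Class<? extends GraphService> implClass = new GraphServiceConfigurator().getImplClass();
        System.out.println("GraphService implementation: " + implClass.getName());
    }
}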
package org.apache.hadoop.metadata.services;
import javax.inject.Singleton;
import org.apache.commons.configuration.Configuration;
import org.apache.commons.configuration.ConfigurationException;
import org.apache.commons.configuration.PropertiesConfiguration;
import com.thinkaurelius.titan.core.TitanFactory;
import com.thinkaurelius.titan.core.TitanGraph;
public class TitanGraphProvider implements GraphProvider<TitanGraph> {
private static final String SYSTEM_PROP = "";
private static final String DEFAULT_PATH = "graph.properties";
private final String configPath;
public TitanGraphProvider() {
configPath = System.getProperties().getProperty(SYSTEM_PROP,
DEFAULT_PATH);
}
public Configuration getConfiguration() throws ConfigurationException {
return new PropertiesConfiguration(configPath);
}
@Override
@Singleton
public TitanGraph get() throws ConfigurationException {
TitanGraph graph = null;
Configuration config;
try {
config = getConfiguration();
} catch (ConfigurationException e) {
throw new RuntimeException(e);
}
graph = TitanFactory.open(config);
return graph;
}
}
@@ -41,109 +41,98 @@ import java.util.Iterator;
import java.util.List;
import java.util.Set;

import javax.inject.Inject;

/**
 * Default implementation for Graph service backed by Titan.
 */
public class TitanGraphService implements GraphService {

    private static final Logger LOG = LoggerFactory.getLogger(TitanGraphService.class);
    public static final String NAME = TitanGraphService.class.getSimpleName();

    /**
     * Constant for the configuration property that indicates the prefix.
     */
    private static final String INDEXER_PREFIX = "metadata.indexer.vertex.";
    private static final List<String> acceptedTypes = Arrays.asList("String", "Int", "Long");

    private final TitanGraph titanGraph;

    @Inject
    TitanGraphService(GraphProvider<TitanGraph> graph) throws ConfigurationException {
        this.titanGraph = graph.get();
        // TODO decouple from Service class and run start() here
        // can use a shutdown hook to run the stop() method
        // this.start();
    }

    /**
     * Name of the service.
     *
     * @return name of the service
     */
    @Override
    public String getName() {
        return NAME;
    }

    /**
     * Starts the service. This method blocks until the service has completely
     * started.
     *
     * @throws Exception
     */
    @Override
    public void start() throws Exception {
        createIndicesForVertexKeys();
        // todo - create Edge Cardinality Constraints
        LOG.info("Initialized titanGraph db: {}", titanGraph);

        Set<String> vertexIndexedKeys = getVertexIndexedKeys();
        LOG.info("Init vertex property keys: {}", vertexIndexedKeys);

        Set<String> edgeIndexedKeys = getEdgeIndexedKeys();
        LOG.info("Init edge property keys: {}", edgeIndexedKeys);
    }

    private static Configuration getConfiguration(String filename, String prefix)
            throws ConfigurationException {
        PropertiesConfiguration configProperties = new PropertiesConfiguration(filename);

        Configuration graphConfig = new PropertiesConfiguration();

        final Iterator<String> iterator = configProperties.getKeys();
        while (iterator.hasNext()) {
            String key = iterator.next();
            if (key.startsWith(prefix)) {
                String value = (String) configProperties.getProperty(key);
                key = key.substring(prefix.length());
                graphConfig.setProperty(key, value);
            }
        }

        return graphConfig;
    }

    protected TitanGraph initializeGraphDB(Configuration graphConfig) {
        LOG.info("Initializing titanGraph db");
        return TitanFactory.open(graphConfig);
    }

    protected void createIndicesForVertexKeys() throws ConfigurationException {
        if (!titanGraph.getIndexedKeys(Vertex.class).isEmpty()) {
            LOG.info("Indexes already exist for titanGraph");
            return;
        }

        LOG.info("Indexes do not exist, Creating indexes for titanGraph using indexer.properties.");
        Configuration indexConfig = getConfiguration("indexer.properties", INDEXER_PREFIX);

        TitanManagement mgmt = titanGraph.getManagementSystem();
        mgmt.buildIndex("mainIndex", Vertex.class).buildMixedIndex("search");
        TitanGraphIndex graphIndex = mgmt.getGraphIndex("mainIndex");
@@ -153,95 +142,110 @@ public class TitanGraphService implements GraphService {
        if (!indexConfig.isEmpty()) {
            // Get a list of property names to iterate through...
            List<String> propList = new ArrayList<String>();

            Iterator<String> it = indexConfig.getKeys("property.name");

            while (it.hasNext()) {
                propList.add(it.next());
            }

            it = propList.iterator();
            while (it.hasNext()) {
                // Pull the property name and index, so we can register the name
                // and look up the type.
                String prop = it.next().toString();
                String index = prop.substring(prop.lastIndexOf(".") + 1);
                String type = null;
                prop = indexConfig.getProperty(prop).toString();

                // Look up the type for the specified property name.
                if (indexConfig.containsKey("property.type." + index)) {
                    type = indexConfig.getProperty("property.type." + index).toString();
                } else {
                    throw new ConfigurationException(
                            "No type specified for property " + index + " in indexer.properties.");
                }

                // Is the type submitted one of the approved ones?
                if (!acceptedTypes.contains(type)) {
                    throw new ConfigurationException(
                            "The type provided in indexer.properties for property " + prop
                                    + " is not supported. Supported types are: " + acceptedTypes.toString());
                }

                // Add the key.
                LOG.info("Adding property: " + prop + " to index as type: " + type);
                mgmt.addIndexKey(graphIndex, mgmt.makePropertyKey(prop).dataType(type.getClass()).make());
            }

            mgmt.commit();
            LOG.info("Index creation complete.");
        }
    }

    /**
     * Stops the service. This method blocks until the service has completely
     * shut down.
     */
    @Override
    public void stop() {
        if (titanGraph != null) {
            titanGraph.shutdown();
        }
    }

    /**
     * A version of stop() that is designed to be usable in Java7 closure
     * clauses. Implementation classes MUST relay this directly to
     * {@link #stop()}
     *
     * @throws java.io.IOException never
     * @throws RuntimeException on any failure during the stop operation
     */
    @Override
    public void close() throws IOException {
        stop();
    }

    @Override
    public Graph getBlueprintsGraph() {
        return titanGraph;
    }

    @Override
    public KeyIndexableGraph getIndexableGraph() {
        return titanGraph;
    }

    @Override
    public TransactionalGraph getTransactionalGraph() {
        return titanGraph;
    }

    public TitanGraph getTitanGraph() {
        return titanGraph;
    }

    @Override
    public Set<String> getVertexIndexedKeys() {
        // this must use the graph API instead of setting this value as a class member - it can change after creation
        return getIndexableGraph().getIndexedKeys(Vertex.class);
    }

    @Override
    public Set<String> getEdgeIndexedKeys() {
        // this must use the graph API instead of setting this value as a class member - it can change after creation
        return getIndexableGraph().getIndexedKeys(Edge.class);
    }
}
package org.apache.hadoop.metadata;
import com.google.inject.Guice;
import com.google.inject.Injector;
import com.google.inject.Module;
public abstract class GuiceEnabledTestBase {
/*
* Guice.createInjector() takes your Modules, and returns a new Injector
* instance. Most applications will call this method exactly once, in their
* main() method.
*/
public final Injector injector;
GuiceEnabledTestBase() {
injector = Guice.createInjector();
}
GuiceEnabledTestBase(Module... modules) {
injector = Guice.createInjector(modules);
}
}
package org.apache.hadoop.metadata;
public abstract class RepositoryModuleBaseTest extends GuiceEnabledTestBase {
public RepositoryModuleBaseTest() {
super(new RepositoryMetadataModule());
}
}
package org.apache.hadoop.metadata;
import junit.framework.Assert;
import org.apache.hadoop.metadata.services.GraphService;
import org.testng.annotations.AfterClass;
import org.testng.annotations.BeforeClass;
import org.testng.annotations.Test;
/**
* Unit test for Guice injector service loading
*/
public class RepositoryServiceLoadingTest extends GuiceEnabledTestBase {
public RepositoryServiceLoadingTest() {
super(new RepositoryMetadataModule());
}
@BeforeClass
public void setUp() throws Exception {
}
@AfterClass
public void tearDown() throws Exception {
}
@Test
public void testGetGraphService() throws Exception {
/*
* Now that we've got the injector, we can build objects.
*/
GraphService gs = injector.getInstance(GraphService.class);
Assert.assertNotNull(gs);
}
}
package org.apache.hadoop.metadata.services;

import java.util.HashMap;
import java.util.List;
import java.util.Map;

import org.apache.hadoop.metadata.RepositoryModuleBaseTest;
import org.json.simple.JSONValue;
import org.testng.Assert;
import org.testng.annotations.AfterClass;
import org.testng.annotations.BeforeClass;
import org.testng.annotations.Test;

public class GraphBackedMetadataRepositoryServiceTest extends RepositoryModuleBaseTest {

    private static final String ENTITY_NAME = "clicks-table";
    private static final String ENTITY_TYPE = "hive-table";
    private static final String DATABASE_NAME = "ads";
    private static final String TABLE_NAME = "clicks-table";

    private TitanGraphService titanGraphService;
    private GraphBackedMetadataRepositoryService repositoryService;

    @BeforeClass
    public void setUp() throws Exception {
        titanGraphService = super.injector.getInstance(TitanGraphService.class);
        titanGraphService.start();
        //Services.get().register(titanGraphService);

        repositoryService = super.injector.getInstance(GraphBackedMetadataRepositoryService.class);
        repositoryService.start();
        //Services.get().register(repositoryService);
    }

    @AfterClass
    public void tearDown() throws Exception {
        /*Services.get().getService(GraphBackedMetadataRepositoryService.NAME).close();
        Services.get().getService(TitanGraphService.NAME).close();
        Services.get().reset();*/
    }

    @Test
    public void testGetName() throws Exception {
        Assert.assertEquals(GraphBackedMetadataRepositoryService.NAME,
                GraphBackedMetadataRepositoryService.class.getSimpleName());
        Assert.assertEquals(repositoryService.getName(), GraphBackedMetadataRepositoryService.NAME);
    }

    @Test
    public void testSubmitEntity() throws Exception {
        String entityStream = getTestEntityJSON();
        String guid = repositoryService.submitEntity(entityStream, ENTITY_TYPE);
        Assert.assertNotNull(guid);
    }

    private String getTestEntityJSON() {
        Map<String, String> props = new HashMap<>();
        props.put("entityName", ENTITY_NAME);
        props.put("entityType", ENTITY_TYPE);
        props.put("database", DATABASE_NAME);
        props.put("table", TABLE_NAME);
        return JSONValue.toJSONString(props);
    }

    @Test(dependsOnMethods = "testSubmitEntity")
    public void testGetEntityDefinition() throws Exception {
        String entity = repositoryService.getEntityDefinition(ENTITY_NAME, ENTITY_TYPE);
        Map<String, String> entityProperties =
                (Map<String, String>) JSONValue.parseWithException(entity);
        Assert.assertNotNull(entityProperties.get("guid"));
        Assert.assertEquals(entityProperties.get("entityName"), ENTITY_NAME);
        Assert.assertEquals(entityProperties.get("entityType"), ENTITY_TYPE);
        Assert.assertEquals(entityProperties.get("database"), DATABASE_NAME);
        Assert.assertEquals(entityProperties.get("table"), TABLE_NAME);
    }

    @Test
    public void testGetEntityDefinitionNonExistent() throws Exception {
        String entity = repositoryService.getEntityDefinition("blah", "blah");
        Assert.assertNull(entity);
    }

    @Test
    public void testGetEntityList() throws Exception {
        List<String> entityList = repositoryService.getEntityList(ENTITY_TYPE);
        Assert.assertNotNull(entityList);
        Assert.assertEquals(entityList.size(), 0); // as this is not implemented yet
    }

    /*@Test(expectedExceptions = RuntimeException.class)
    public void testStartWithOutGraphServiceRegistration() throws Exception {
        try {
            //Services.get().reset();
            GraphBackedMetadataRepositoryService repositoryService = new GraphBackedMetadataRepositoryService();
            repositoryService.start();
            Assert.fail("This should have thrown an exception");
        } finally {
            //Services.get().register(titanGraphService);
            //Services.get().register(repositoryService);
        }
    }*/
}
package org.apache.hadoop.metadata.services;

import org.apache.hadoop.metadata.RepositoryModuleBaseTest;
import org.testng.Assert;
import org.testng.annotations.AfterClass;
import org.testng.annotations.BeforeClass;
@@ -8,13 +9,14 @@ import org.testng.annotations.Test;

/**
 * Unit test for TitanGraphService.
 */
public class TitanGraphServiceTest extends RepositoryModuleBaseTest {

    private TitanGraphService titanGraphService;

    @BeforeClass
    public void setUp() throws Exception {
        titanGraphService = super.injector.getInstance(TitanGraphService.class);
        //titanGraphService = new TitanGraphService();
        titanGraphService.start();
    }
...
storage.backend=inmemory
# Graph Search Index
index.search.backend=elasticsearch
index.search.directory=target/data/es
index.search.elasticsearch.client-only=false
index.search.elasticsearch.local-mode=true
\ No newline at end of file
metadata.graph.impl.class=org.apache.hadoop.metadata.services.TitanGraphService
\ No newline at end of file
@@ -127,6 +127,16 @@
            <groupId>org.testng</groupId>
            <artifactId>testng</artifactId>
        </dependency>
        <dependency>
            <groupId>com.google.inject.extensions</groupId>
            <artifactId>guice-servlet</artifactId>
            <version>3.0</version>
        </dependency>
        <dependency>
            <groupId>com.sun.jersey.contribs</groupId>
            <artifactId>jersey-guice</artifactId>
            <version>1.18.3</version>
        </dependency>
    </dependencies>

    <build>
...
package org.apache.hadoop.metadata.web.listeners;
import java.util.HashMap;
import java.util.Map;
import org.apache.hadoop.metadata.RepositoryMetadataModule;
import com.google.inject.Guice;
import com.google.inject.Injector;
import com.google.inject.servlet.GuiceServletContextListener;
import com.sun.jersey.api.core.PackagesResourceConfig;
import com.sun.jersey.guice.JerseyServletModule;
import com.sun.jersey.guice.spi.container.servlet.GuiceContainer;
public class GuiceServletConfig extends GuiceServletContextListener {
private static final String GUICE_CTX_PARAM = "guice.packages";
@Override
protected Injector getInjector() {
/*
* More information on this can be found here:
* https://jersey.java.net/nonav/apidocs/1.11/contribs/jersey-guice/com/sun/jersey/guice/spi/container/servlet/package-summary.html
*/
return Guice.createInjector(
new RepositoryMetadataModule(),
new JerseyServletModule() {
@Override
protected void configureServlets() {
String packages = getServletContext().getInitParameter(GUICE_CTX_PARAM);
Map<String, String> params = new HashMap<String, String>();
params.put(PackagesResourceConfig.PROPERTY_PACKAGES, packages);
serve("/*").with(GuiceContainer.class, params);
}
});
}
}
@@ -23,6 +23,7 @@ import org.apache.hadoop.metadata.web.util.Servlets;
import org.codehaus.jettison.json.JSONException;
import org.codehaus.jettison.json.JSONObject;

import javax.inject.Singleton;
import javax.ws.rs.GET;
import javax.ws.rs.Path;
import javax.ws.rs.Produces;
@@ -34,6 +35,7 @@ import javax.ws.rs.core.Response;
 * Jersey Resource for admin operations.
 */
@Path("admin")
@Singleton
public class AdminResource {

    @GET
...
@@ -18,16 +18,11 @@
package org.apache.hadoop.metadata.web.resources;

import java.io.IOException;
import java.io.StringWriter;

import javax.inject.Inject;
import javax.inject.Singleton;
import javax.servlet.http.HttpServletRequest;
import javax.ws.rs.Consumes;
import javax.ws.rs.DELETE;
@@ -42,8 +37,15 @@ import javax.ws.rs.WebApplicationException;
import javax.ws.rs.core.Context;
import javax.ws.rs.core.MediaType;
import javax.ws.rs.core.Response;

import org.apache.commons.io.IOUtils;
import org.apache.hadoop.metadata.services.MetadataRepositoryService;
import org.apache.hadoop.metadata.web.util.Servlets;
import org.codehaus.jettison.json.JSONObject;
import org.json.simple.JSONValue;
import org.json.simple.parser.ParseException;

import com.google.common.base.Preconditions;

/**
 * Entity management operations as REST API.
@@ -52,15 +54,14 @@
 * of the Type they correspond with.
 */
@Path("entities")
@Singleton
public class EntityResource {

    private final MetadataRepositoryService repositoryService;

    @Inject
    public EntityResource(MetadataRepositoryService repositoryService) {
        this.repositoryService = repositoryService;
    }

    @POST
...
@@ -18,6 +18,7 @@
package org.apache.hadoop.metadata.web.resources;

import javax.inject.Singleton;
import javax.ws.rs.Path;

/**
@@ -30,5 +31,6 @@ import javax.ws.rs.Path;
 * 'search': find entities generated by Hive processes or that were generated by Sqoop, etc.
 */
@Path("discovery")
@Singleton
public class MetadataDiscoveryResource {
}
@@ -18,25 +18,12 @@
package org.apache.hadoop.metadata.web.resources;

import java.util.HashMap;
import java.util.Map;
import java.util.Set;

import javax.inject.Inject;
import javax.inject.Singleton;
import javax.ws.rs.DefaultValue;
import javax.ws.rs.GET;
import javax.ws.rs.Path;
@@ -46,9 +33,24 @@ import javax.ws.rs.QueryParam;
import javax.ws.rs.WebApplicationException;
import javax.ws.rs.core.MediaType;
import javax.ws.rs.core.Response;

import org.apache.commons.lang.StringUtils;
import org.apache.hadoop.metadata.services.GraphService;
import org.apache.hadoop.metadata.web.util.Servlets;
import org.codehaus.jettison.json.JSONArray;
import org.codehaus.jettison.json.JSONException;
import org.codehaus.jettison.json.JSONObject;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import com.tinkerpop.blueprints.Direction;
import com.tinkerpop.blueprints.Edge;
import com.tinkerpop.blueprints.Element;
import com.tinkerpop.blueprints.Graph;
import com.tinkerpop.blueprints.Vertex;
import com.tinkerpop.blueprints.VertexQuery;
import com.tinkerpop.blueprints.util.io.graphson.GraphSONMode;
import com.tinkerpop.blueprints.util.io.graphson.GraphSONUtility;

/**
 * Jersey Resource for lineage metadata operations.
@@ -59,22 +61,25 @@ import java.util.Set;
 * for accessing the backend graph.
 */
@Path("graph")
@Singleton
public class RexsterGraphResource {

    private static final Logger LOG = LoggerFactory.getLogger(RexsterGraphResource.class);

    public static final String RESULTS = "results";
    public static final String TOTAL_SIZE = "totalSize";

    private final GraphService graphService;

    @Inject
    public RexsterGraphResource(GraphService graphService) {
        this.graphService = graphService;
        /*graphService = Services.get().getService(TitanGraphService.NAME);
        if (graphService == null) {
            throw new WebApplicationException(Response
                    .status(Response.Status.INTERNAL_SERVER_ERROR)
                    .tag("graph service is not initialized")
                    .build());
        }*/
    }

    protected Graph getGraph() {
...
@@ -18,6 +18,7 @@
package org.apache.hadoop.metadata.web.resources;

import javax.inject.Singleton;
import javax.servlet.http.HttpServletRequest;
import javax.ws.rs.*;
import javax.ws.rs.core.Context;
@@ -31,6 +32,7 @@ import javax.ws.rs.core.Response;
 * e.g. a Hive table
 */
@Path("types")
@Singleton
public class TypesResource {

    @POST
...
<?xml version="1.0" encoding="UTF-8"?> <?xml version="1.0" encoding="UTF-8"?>
<!-- <!-- ~ Licensed to the Apache Software Foundation (ASF) under one ~ or more
~ Licensed to the Apache Software Foundation (ASF) under one contributor license agreements. See the NOTICE file ~ distributed with this
~ or more contributor license agreements. See the NOTICE file work for additional information ~ regarding copyright ownership. The ASF
~ distributed with this work for additional information licenses this file ~ to you under the Apache License, Version 2.0 (the ~
~ regarding copyright ownership. The ASF licenses this file "License"); you may not use this file except in compliance ~ with the License.
~ to you under the Apache License, Version 2.0 (the You may obtain a copy of the License at ~ ~ http://www.apache.org/licenses/LICENSE-2.0
~ "License"); you may not use this file except in compliance ~ ~ Unless required by applicable law or agreed to in writing, software ~
~ with the License. You may obtain a copy of the License at distributed under the License is distributed on an "AS IS" BASIS, ~ WITHOUT
~ WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. ~ See the
~ http://www.apache.org/licenses/LICENSE-2.0 License for the specific language governing permissions and ~ limitations
~ under the License. -->
~ Unless required by applicable law or agreed to in writing, software
~ distributed under the License is distributed on an "AS IS" BASIS,
~ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
~ See the License for the specific language governing permissions and
~ limitations under the License.
-->
<!DOCTYPE web-app PUBLIC "-//Sun Microsystems, Inc.//DTD Web Application 2.3//EN" <!DOCTYPE web-app PUBLIC "-//Sun Microsystems, Inc.//DTD Web Application 2.3//EN"
"http://java.sun.com/dtd/web-app_2_3.dtd"> "http://java.sun.com/dtd/web-app_2_3.dtd">
<web-app> <web-app>
<display-name>Apache Metadata Placeholder</display-name>
<description>Apache Metadata Placeholder</description>
<context-param>
<param-name>guice.packages</param-name>
<param-value>org.apache.hadoop.metadata.web.resources,org.apache.hadoop.metadata.web.params</param-value>
</context-param>
<!--
More information can be found here:
https://jersey.java.net/nonav/apidocs/1.11/contribs/jersey-guice/com/sun/jersey/guice/spi/container/servlet/package-summary.html
-->
<filter>
<filter-name>guiceFilter</filter-name>
<filter-class>com.google.inject.servlet.GuiceFilter</filter-class>
</filter>
<display-name>Apache Falcon Placeholder</display-name> <filter-mapping>
<description>Apache Falcon Placeholder</description> <filter-name>guiceFilter</filter-name>
<url-pattern>/*</url-pattern>
</filter-mapping>
<filter> <listener>
<filter-name>audit</filter-name> <listener-class>org.apache.hadoop.metadata.web.listeners.GuiceServletConfig</listener-class>
<filter-class>org.apache.hadoop.metadata.web.filters.AuditFilter</filter-class> </listener>
</filter>
<filter> <!--
<filter-name>authentication</filter-name> <filter>
<filter-class>org.apache.hadoop.metadata.web.filters.AuthenticationFilter</filter-class> <filter-name>audit</filter-name>
</filter> <filter-class>org.apache.hadoop.metadata.web.filters.AuditFilter</filter-class>
</filter>
<filter-mapping> <filter>
<filter-name>audit</filter-name> <filter-name>authentication</filter-name>
<servlet-name>MetadataRESTApi</servlet-name> <filter-class>org.apache.hadoop.metadata.web.filters.AuthenticationFilter</filter-class>
</filter-mapping> </filter>
<filter-mapping> <filter-mapping>
<filter-name>authentication</filter-name> <filter-name>audit</filter-name>
<servlet-name>MetadataRESTApi</servlet-name> <servlet-name>MetadataRESTApi</servlet-name>
</filter-mapping> </filter-mapping>
<listener> <filter-mapping>
<listener-class>org.apache.hadoop.metadata.web.listeners.ApplicationStartupListener</listener-class> <filter-name>authentication</filter-name>
</listener> <servlet-name>MetadataRESTApi</servlet-name>
</filter-mapping>
<servlet> <listener>
<servlet-name>MetadataRESTApi</servlet-name> <listener-class>org.apache.hadoop.metadata.web.listeners.ApplicationStartupListener</listener-class>
<servlet-class>com.sun.jersey.spi.container.servlet.ServletContainer</servlet-class> </listener>
<init-param>
<param-name>com.sun.jersey.config.property.resourceConfigClass</param-name>
<param-value>com.sun.jersey.api.core.PackagesResourceConfig</param-value>
</init-param>
<init-param>
<param-name>com.sun.jersey.config.property.packages</param-name>
<param-value>
org.apache.hadoop.metadata.web.resources,org.apache.hadoop.metadata.web.params
</param-value>
</init-param>
<load-on-startup>1</load-on-startup>
</servlet>
<servlet-mapping> <servlet>
<servlet-name>MetadataRESTApi</servlet-name> <servlet-name>MetadataRESTApi</servlet-name>
<url-pattern>/api/metadata/*</url-pattern> <servlet-class>com.sun.jersey.spi.container.servlet.ServletContainer</servlet-class>
</servlet-mapping> <init-param>
<param-name>com.sun.jersey.config.property.resourceConfigClass</param-name>
<param-value>com.sun.jersey.api.core.PackagesResourceConfig</param-value>
</init-param>
<init-param>
<param-name>com.sun.jersey.config.property.packages</param-name>
<param-value>
org.apache.hadoop.metadata.web.resources,org.apache.hadoop.metadata.web.params
</param-value>
</init-param>
<load-on-startup>1</load-on-startup>
</servlet>
<servlet-mapping>
<servlet-name>MetadataRESTApi</servlet-name>
<url-pattern>/api/metadata/*</url-pattern>
</servlet-mapping>
-->
</web-app> </web-app>
#
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
application.services=org.apache.hadoop.metadata.services.TitanGraphService,\
org.apache.hadoop.metadata.services.GraphBackedMetadataRepositoryService
# Graph implementation
#metadata.graph.blueprints.graph=com.thinkaurelius.titan.core.TitanFactory
# Graph Storage
metadata.graph.storage.backend=berkeleyje
metadata.graph.storage.directory=target/data/berkeley
# Graph Search Index
metadata.graph.index.search.backend=elasticsearch
metadata.graph.index.search.directory=target/data/es
metadata.graph.index.search.elasticsearch.client-only=false
metadata.graph.index.search.elasticsearch.local-mode=true
metadata.enableTLS=false
storage.backend=inmemory
# Graph Search Index
index.search.backend=elasticsearch
index.search.directory=target/data/es
index.search.elasticsearch.client-only=false
index.search.elasticsearch.local-mode=true
\ No newline at end of file
#
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# This is formatted as follows:
# metadata.indexer.vertex.property.name.<index>=<Property Name>
# metadata.indexer.vertex.property.type.<index>=<Data Type>
metadata.indexer.vertex.property.name.0=DESC
metadata.indexer.vertex.property.type.0=String
metadata.indexer.vertex.property.name.1=DB_LOCATION_URI
metadata.indexer.vertex.property.type.1=String
metadata.indexer.vertex.property.name.2=NAME
metadata.indexer.vertex.property.type.2=String
metadata.indexer.vertex.property.name.3=OWNER_NAME
metadata.indexer.vertex.property.type.3=String
metadata.indexer.vertex.property.name.4=TBL_NAME
metadata.indexer.vertex.property.type.4=String
metadata.indexer.vertex.property.name.5=COMMENT
metadata.indexer.vertex.property.type.5=String
metadata.indexer.vertex.property.name.6=COLUMN_NAME
metadata.indexer.vertex.property.type.6=String
metadata.indexer.vertex.property.name.7=TYPE_NAME
metadata.indexer.vertex.property.type.7=String
metadata.graph.impl.class=org.apache.hadoop.metadata.services.TitanGraphService
\ No newline at end of file