Commit 58288069 by Jon Maron

Merge branch 'master' into jmaron

parents 26e699dc 42578ccc
......@@ -222,6 +222,10 @@
<name>metadata.log.dir</name>
<value>${project.build.directory}/logs</value>
</systemProperty>
<systemProperty>
<name>metadata.properties.location</name>
<value>addons/hive-bridge/src/test/resources</value>
</systemProperty>
</systemProperties>
<stopKey>metadata-stop</stopKey>
<stopPort>41001</stopPort>
......
......@@ -77,7 +77,6 @@ import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.concurrent.LinkedBlockingQueue;
import java.util.concurrent.ThreadPoolExecutor;
import java.util.concurrent.TimeUnit;
......
......@@ -18,19 +18,23 @@
######### Graph Database Configs #########
# Graph Storage
metadata.graph.storage.backend=berkeleyje
metadata.graph.storage.directory=./target/data/berkeley
metadata.graph.storage.backend=inmemory
# Graph Search Index
metadata.graph.index.search.backend=elasticsearch
metadata.graph.index.search.directory=./target/data/es
metadata.graph.index.search.elasticsearch.client-only=false
metadata.graph.index.search.elasticsearch.local-mode=true
metadata.graph.index.search.backend=lucene
metadata.graph.index.search.directory=target/data/lucene
######### Hive Lineage Configs #########
metadata.lineage.hive.table.type.name=hive_table
metadata.lineage.hive.column.type.name=hive_column
metadata.lineage.hive.table.column.name=columns
metadata.lineage.hive.process.type.name=hive_process
metadata.lineage.hive.process.inputs.name=inputTables
metadata.lineage.hive.process.outputs.name=outputTables
######### Security Properties #########
# SSL config
metadata.enableTLS=false
######### Security Properties #########
......@@ -78,7 +78,7 @@
<jetty.version>6.1.26</jetty.version>
<jersey.version>1.9</jersey.version>
<tinkerpop.version>2.5.0</tinkerpop.version>
<titan.version>0.5.3</titan.version>
<titan.version>0.5.4</titan.version>
<hadoop.version>2.6.0</hadoop.version>
<!-- scala versions -->
......@@ -655,7 +655,7 @@
<plugin>
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-failsafe-plugin</artifactId>
<version>2.16</version>
<version>2.18.1</version>
</plugin>
<plugin>
......@@ -800,12 +800,12 @@
<plugin>
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-surefire-plugin</artifactId>
<version>2.16</version>
<version>2.18.1</version>
<configuration>
<!--<skipTests>true</skipTests>-->
<forkMode>always</forkMode>
<redirectTestOutputToFile>true</redirectTestOutputToFile>
<groups></groups>
<argLine>-Djava.awt.headless=true</argLine>
</configuration>
<dependencies>
<dependency>
......@@ -824,16 +824,17 @@
<plugin>
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-failsafe-plugin</artifactId>
<version>2.16</version>
<version>2.18.1</version>
<configuration>
<redirectTestOutputToFile>true</redirectTestOutputToFile>
<argLine>-Dproject.version=${project.version}
<argLine>-Djava.awt.headless=true -Dproject.version=${project.version}
-Dhadoop.tmp.dir=${project.build.directory}/tmp-hadoop-${user.name}
-Xmx1024m -XX:MaxPermSize=512m
</argLine>
<parallel>none</parallel>
<reuseForks>false</reuseForks>
<forkCount>1</forkCount>
<threadCount>1</threadCount>
<threadCount>5</threadCount>
</configuration>
<executions>
<execution>
......
......@@ -21,10 +21,17 @@ package org.apache.hadoop.metadata;
import org.apache.commons.configuration.ConfigurationException;
import org.apache.commons.configuration.PropertiesConfiguration;
import java.io.File;
public class PropertiesUtil {
private static final String APPLICATION_PROPERTIES = "/application.properties";
private static final String APPLICATION_PROPERTIES = "application.properties";
public static final PropertiesConfiguration getApplicationProperties() throws ConfigurationException {
return new PropertiesConfiguration(PropertiesUtil.class.getResource(APPLICATION_PROPERTIES));
String propertiesLocation = System.getProperty("metadata.properties.location");
if (propertiesLocation == null) {
return new PropertiesConfiguration(PropertiesUtil.class.getResource("/" + APPLICATION_PROPERTIES));
} else {
return new PropertiesConfiguration(new File(propertiesLocation, APPLICATION_PROPERTIES));
}
}
}
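For context, a minimal usage sketch of the lookup order introduced above. The class and the two property names come from this commit; the directory value is only illustrative and mirrors the jetty-maven-plugin system property added earlier in the diff.

import org.apache.commons.configuration.ConfigurationException;
import org.apache.commons.configuration.PropertiesConfiguration;
import org.apache.hadoop.metadata.PropertiesUtil;

public class PropertiesUtilExample {
    public static void main(String[] args) throws ConfigurationException {
        // When metadata.properties.location is set (as the jetty-maven-plugin now does for the
        // hive-bridge tests), application.properties is loaded from that directory.
        System.setProperty("metadata.properties.location", "addons/hive-bridge/src/test/resources");
        PropertiesConfiguration conf = PropertiesUtil.getApplicationProperties();
        System.out.println(conf.getString("metadata.graph.storage.backend"));

        // When the system property is not set, the classpath copy is used, as before.
        System.clearProperty("metadata.properties.location");
        System.out.println(PropertiesUtil.getApplicationProperties().getString("metadata.enableTLS"));
    }
}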
......@@ -18,6 +18,8 @@
package org.apache.hadoop.metadata.discovery;
import org.apache.hadoop.metadata.MetadataException;
import java.util.List;
import java.util.Map;
......
......@@ -21,6 +21,7 @@ package org.apache.hadoop.metadata.discovery;
import com.thinkaurelius.titan.core.TitanGraph;
import org.apache.commons.configuration.ConfigurationException;
import org.apache.commons.configuration.PropertiesConfiguration;
import org.apache.hadoop.metadata.PropertiesUtil;
import org.apache.hadoop.metadata.discovery.graph.DefaultGraphPersistenceStrategy;
import org.apache.hadoop.metadata.discovery.graph.GraphBackedDiscoveryService;
import org.apache.hadoop.metadata.query.Expressions;
......@@ -57,7 +58,7 @@ public class HiveLineageService implements LineageService {
static {
// todo - externalize this using type system - dog food
try {
PropertiesConfiguration conf = new PropertiesConfiguration("application.properties");
PropertiesConfiguration conf = PropertiesUtil.getApplicationProperties();
HIVE_TABLE_TYPE_NAME =
conf.getString("metadata.lineage.hive.table.type.name", "hive_table");
HIVE_TABLE_COLUMNS_ATTRIBUTE_NAME =
......
......@@ -18,13 +18,13 @@
package org.apache.hadoop.metadata.discovery.graph;
import com.google.gson.JsonArray;
import com.google.gson.JsonObject;
import com.thinkaurelius.titan.core.TitanGraph;
import com.thinkaurelius.titan.core.TitanIndexQuery;
import com.thinkaurelius.titan.core.TitanProperty;
import com.thinkaurelius.titan.core.TitanVertex;
import com.thinkaurelius.titan.core.attribute.Text;
import com.tinkerpop.blueprints.Vertex;
import org.apache.commons.lang.StringUtils;
import org.apache.hadoop.metadata.MetadataException;
import org.apache.hadoop.metadata.discovery.DiscoveryException;
import org.apache.hadoop.metadata.discovery.DiscoveryService;
import org.apache.hadoop.metadata.query.Expressions;
......@@ -37,6 +37,9 @@ import org.apache.hadoop.metadata.query.QueryProcessor;
import org.apache.hadoop.metadata.repository.Constants;
import org.apache.hadoop.metadata.repository.MetadataRepository;
import org.apache.hadoop.metadata.repository.graph.GraphProvider;
import org.codehaus.jettison.json.JSONArray;
import org.codehaus.jettison.json.JSONException;
import org.codehaus.jettison.json.JSONObject;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import scala.util.Either;
......@@ -49,6 +52,7 @@ import javax.script.ScriptEngine;
import javax.script.ScriptEngineManager;
import javax.script.ScriptException;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.HashMap;
import java.util.Iterator;
import java.util.List;
......@@ -72,23 +76,36 @@ public class GraphBackedDiscoveryService implements DiscoveryService {
this.graphPersistenceStrategy = new DefaultGraphPersistenceStrategy(metadataRepository);
}
//Refer http://s3.thinkaurelius.com/docs/titan/0.5.0/index-backends.html for indexed query
//http://www.elastic.co/guide/en/elasticsearch/reference/current/query-dsl-query-string-query.html#query-string-syntax for query syntax
@Override
public String searchByFullText(String query) throws DiscoveryException {
Iterator iterator = titanGraph.query()
.has(Constants.ENTITY_TEXT_PROPERTY_KEY, Text.CONTAINS, query)
.vertices()
.iterator();
JsonArray results = new JsonArray();
while (iterator.hasNext()) {
Vertex vertex = (Vertex) iterator.next();
JsonObject row = new JsonObject();
row.addProperty("guid", vertex.<String>getProperty(Constants.GUID_PROPERTY_KEY));
row.addProperty("typeName", vertex.<String>getProperty(Constants.ENTITY_TYPE_PROPERTY_KEY));
results.add(row);
String graphQuery = String.format("v.%s:(%s)", Constants.ENTITY_TEXT_PROPERTY_KEY, query);
LOG.debug("Full text query: {}", graphQuery);
Iterator<TitanIndexQuery.Result<Vertex>> results =
titanGraph.indexQuery(Constants.FULLTEXT_INDEX, graphQuery).vertices().iterator();
JSONArray response = new JSONArray();
while (results.hasNext()) {
TitanIndexQuery.Result<Vertex> result = results.next();
Vertex vertex = result.getElement();
JSONObject row = new JSONObject();
String guid = vertex.getProperty(Constants.GUID_PROPERTY_KEY);
if (guid != null) { //Filter non-class entities
try {
row.put("guid", guid);
row.put("typeName", vertex.<String>getProperty(Constants.ENTITY_TYPE_PROPERTY_KEY));
row.put("score", result.getScore());
} catch (JSONException e) {
LOG.error("Unable to create response", e);
throw new DiscoveryException("Unable to create response");
}
response.put(row);
}
}
JsonObject response = new JsonObject();
response.addProperty("query", query);
response.add("results", results);
return response.toString();
}
......
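A small hedged sketch of how the rewritten method is consumed; the service is assumed to be obtained via Guice injection, as in the repository test further down. The response is now a JSON array of guid/typeName/score rows rather than the earlier object wrapping the query.

import org.apache.hadoop.metadata.discovery.DiscoveryException;
import org.apache.hadoop.metadata.discovery.graph.GraphBackedDiscoveryService;
import org.codehaus.jettison.json.JSONArray;
import org.codehaus.jettison.json.JSONException;
import org.codehaus.jettison.json.JSONObject;

public final class FullTextSearchExample {
    // discoveryService is assumed to be injected elsewhere, e.g. via @Inject as in GraphBackedMetadataRepositoryTest.
    static void printMatches(GraphBackedDiscoveryService discoveryService, String query)
            throws DiscoveryException, JSONException {
        String response = discoveryService.searchByFullText(query);
        JSONArray results = new JSONArray(response);
        for (int i = 0; i < results.length(); i++) {
            JSONObject row = results.getJSONObject(i);
            // Each row carries the matched entity's guid, its type name and the index score.
            System.out.println(row.getString("guid") + " " + row.getString("typeName") + " " + row.get("score"));
        }
    }
}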
......@@ -54,7 +54,6 @@ public final class Constants {
* search backing index name.
*/
public static final String BACKING_INDEX = "search";
public static final String INDEX_NAME = "metadata";
/**
* search backing index name for vertex keys.
......@@ -66,6 +65,8 @@ public final class Constants {
*/
public static final String EDGE_INDEX = "edge_index";
public static final String FULLTEXT_INDEX = "fulltext_index";
private Constants() {
}
}
......@@ -23,6 +23,7 @@ import com.thinkaurelius.titan.core.EdgeLabel;
import com.thinkaurelius.titan.core.Order;
import com.thinkaurelius.titan.core.PropertyKey;
import com.thinkaurelius.titan.core.TitanGraph;
import com.thinkaurelius.titan.core.schema.Mapping;
import com.thinkaurelius.titan.core.schema.TitanGraphIndex;
import com.thinkaurelius.titan.core.schema.TitanManagement;
import com.tinkerpop.blueprints.Direction;
......@@ -91,11 +92,11 @@ public class GraphBackedSearchIndexer implements SearchIndexer {
// create a composite and mixed index for traitNames since it can be combined with other
// keys. Traits must be a set and not a list.
createCompositeAndMixedIndex(Constants.TRAIT_NAMES_INDEX,
Constants.TRAIT_NAMES_PROPERTY_KEY, String.class, false, Cardinality.SET);
createCompositeAndMixedIndex(Constants.TRAIT_NAMES_INDEX, Constants.TRAIT_NAMES_PROPERTY_KEY, String.class,
false, Cardinality.SET);
// Index for full text search
createVertexMixedIndex(Constants.ENTITY_TEXT_PROPERTY_KEY, String.class);
createFullTextIndex();
//Indexes for graph backed type system store
createTypeStoreIndexes();
......@@ -103,19 +104,27 @@ public class GraphBackedSearchIndexer implements SearchIndexer {
LOG.info("Index creation for global keys complete.");
}
private void createFullTextIndex() {
TitanManagement management = titanGraph.getManagementSystem();
PropertyKey fullText =
management.makePropertyKey(Constants.ENTITY_TEXT_PROPERTY_KEY).dataType(String.class).make();
management.buildIndex(Constants.FULLTEXT_INDEX, Vertex.class)
.addKey(fullText, com.thinkaurelius.titan.core.schema.Parameter.of("mapping", Mapping.TEXT))
.buildMixedIndex(Constants.BACKING_INDEX);
management.commit();
LOG.info("Created mixed index for {}", Constants.ENTITY_TEXT_PROPERTY_KEY);
}
private void createTypeStoreIndexes() {
//Create unique index on typeName
createCompositeIndex(Constants.TYPENAME_PROPERTY_KEY, Constants.TYPENAME_PROPERTY_KEY, String.class,
true, Cardinality.SINGLE);
//Create index on vertex type + typeName
//todo doesn't work, review
TitanManagement management = titanGraph.getManagementSystem();
PropertyKey vertexType = management.makePropertyKey(Constants.VERTEX_TYPE_PROPERTY_KEY).dataType(String.class).make();
PropertyKey typeName = management.getPropertyKey(Constants.TYPENAME_PROPERTY_KEY);
management.buildIndex("byTypeName", Vertex.class).addKey(vertexType).addKey(typeName).buildCompositeIndex();
management.commit();
LOG.debug("Created composite index on {} and {}", Constants.VERTEX_TYPE_PROPERTY_KEY, Constants.TYPENAME_PROPERTY_KEY);
//create index on vertex type
createCompositeIndex(Constants.VERTEX_TYPE_PROPERTY_KEY, Constants.VERTEX_TYPE_PROPERTY_KEY, String.class,
false, Cardinality.SINGLE);
}
/**
......
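A brief sketch of why Mapping.TEXT matters here: keys indexed this way are tokenized by the index backend, so the direct index query used in GraphBackedDiscoveryService matches individual terms. The constants and query format are the ones introduced in this commit; the search term is illustrative.

import com.thinkaurelius.titan.core.TitanGraph;
import com.thinkaurelius.titan.core.TitanIndexQuery;
import com.tinkerpop.blueprints.Vertex;
import org.apache.hadoop.metadata.repository.Constants;
import java.util.Iterator;

public final class FullTextIndexExample {
    static int countMatches(TitanGraph titanGraph, String term) {
        // "v." addresses vertex keys in Titan's direct index query syntax.
        String graphQuery = String.format("v.%s:(%s)", Constants.ENTITY_TEXT_PROPERTY_KEY, term);
        Iterator<TitanIndexQuery.Result<Vertex>> results =
                titanGraph.indexQuery(Constants.FULLTEXT_INDEX, graphQuery).vertices().iterator();
        int count = 0;
        while (results.hasNext()) {
            results.next();
            count++;
        }
        return count;
    }
}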
......@@ -219,6 +219,7 @@ public class GraphBackedTypeStore implements ITypeStore {
Vertex vertex = (Vertex) vertices.next();
DataTypes.TypeCategory typeCategory = vertex.getProperty(Constants.TYPE_CATEGORY_PROPERTY_KEY);
String typeName = vertex.getProperty(Constants.TYPENAME_PROPERTY_KEY);
LOG.info("Restoring type {}.{}", typeCategory, typeName);
switch(typeCategory) {
case ENUM:
enums.add(getEnumType(vertex));
......@@ -297,8 +298,7 @@ public class GraphBackedTypeStore implements ITypeStore {
private Vertex findVertex(DataTypes.TypeCategory category, String typeName) {
LOG.debug("Finding vertex for {}.{}", category, typeName);
Iterator results = titanGraph.query().has(Constants.VERTEX_TYPE_PROPERTY_KEY, VERTEX_TYPE)
.has(Constants.TYPENAME_PROPERTY_KEY, typeName).vertices().iterator();
Iterator results = titanGraph.query().has(Constants.TYPENAME_PROPERTY_KEY, typeName).vertices().iterator();
Vertex vertex = null;
if (results != null && results.hasNext()) {
//There should be just one vertex with the given typeName
......
......@@ -292,32 +292,4 @@ public class GraphBackedDiscoveryServiceTest {
Assert.assertNotEquals(name, "null");
}
}
@Test
public void testFullTextSearch() throws Exception {
//person in hr department whose name is john
String response = discoveryService.searchByFullText("john hr");
Assert.assertNotNull(response);
JSONObject jsonResponse = new JSONObject(response);
JSONArray results = jsonResponse.getJSONArray("results");
Assert.assertEquals(results.length(), 1);
JSONObject row = (JSONObject) results.get(0);
Assert.assertEquals(row.get("typeName"), "Person");
//person in hr department who lives in santa clara
response = discoveryService.searchByFullText("hr santa clara");
Assert.assertNotNull(response);
jsonResponse = new JSONObject(response);
results = jsonResponse.getJSONArray("results");
Assert.assertEquals(results.length(), 1);
row = (JSONObject) results.get(0);
Assert.assertEquals(row.get("typeName"), "Manager");
//search for hr should return the hr department and its 2 employees
response = discoveryService.searchByFullText("hr");
Assert.assertNotNull(response);
jsonResponse = new JSONObject(response);
results = jsonResponse.getJSONArray("results");
Assert.assertEquals(results.length(), 3);
}
}
\ No newline at end of file
......@@ -25,6 +25,7 @@ import com.tinkerpop.blueprints.GraphQuery;
import com.tinkerpop.blueprints.Vertex;
import org.apache.hadoop.metadata.RepositoryMetadataModule;
import org.apache.hadoop.metadata.TestUtils;
import org.apache.hadoop.metadata.discovery.graph.GraphBackedDiscoveryService;
import org.apache.hadoop.metadata.repository.Constants;
import org.apache.hadoop.metadata.repository.RepositoryException;
import org.apache.hadoop.metadata.typesystem.ITypedReferenceableInstance;
......@@ -43,6 +44,8 @@ import org.apache.hadoop.metadata.typesystem.types.StructTypeDefinition;
import org.apache.hadoop.metadata.typesystem.types.TraitType;
import org.apache.hadoop.metadata.typesystem.types.TypeSystem;
import org.apache.hadoop.metadata.typesystem.types.utils.TypesUtil;
import org.codehaus.jettison.json.JSONArray;
import org.codehaus.jettison.json.JSONObject;
import org.testng.Assert;
import org.testng.annotations.BeforeClass;
import org.testng.annotations.Guice;
......@@ -79,6 +82,9 @@ public class GraphBackedMetadataRepositoryTest {
@Inject
private GraphBackedMetadataRepository repositoryService;
@Inject
private GraphBackedDiscoveryService discoveryService;
private TypeSystem typeSystem;
private String guid;
......@@ -138,8 +144,8 @@ public class GraphBackedMetadataRepositoryTest {
@Test (dependsOnMethods = "testSubmitEntity")
public void testGetTraitLabel() throws Exception {
Assert.assertEquals(repositoryService.getTraitLabel(
typeSystem.getDataType(ClassType.class, TABLE_TYPE), CLASSIFICATION),
Assert.assertEquals(repositoryService.getTraitLabel(typeSystem.getDataType(ClassType.class, TABLE_TYPE),
CLASSIFICATION),
TABLE_TYPE + "." + CLASSIFICATION);
}
......@@ -297,8 +303,7 @@ public class GraphBackedMetadataRepositoryTest {
Id expected = new Id(guid,
tableVertex.<Integer>getProperty(Constants.VERSION_PROPERTY_KEY), TABLE_TYPE);
Assert.assertEquals(repositoryService.getIdFromVertex(TABLE_TYPE, tableVertex),
expected);
Assert.assertEquals(repositoryService.getIdFromVertex(TABLE_TYPE, tableVertex), expected);
}
@Test (dependsOnMethods = "testCreateEntity")
......@@ -307,6 +312,49 @@ public class GraphBackedMetadataRepositoryTest {
Assert.assertEquals(repositoryService.getTypeName(tableVertex), TABLE_TYPE);
}
/**
* Full text search requires GraphBackedSearchIndexer, and GraphBackedSearchIndexer can't be enabled in
* GraphBackedDiscoveryServiceTest because of its test data. So the full-text search test lives in
* GraphBackedMetadataRepositoryTest instead.
*/
@Test(dependsOnMethods = "testSubmitEntity")
public void testFullTextSearch() throws Exception {
//todo fix this
//With lucene the test passes without a sleep, but with elasticsearch it fails unless we wait,
//likely because elasticsearch only makes newly indexed documents searchable after its periodic index refresh.
long sleepInterval = 1000;
//person in hr department whose name is john
Thread.sleep(sleepInterval);
String response = discoveryService.searchByFullText("john");
Assert.assertNotNull(response);
JSONArray results = new JSONArray(response);
System.out.println("Found the following results");
for (int i = 0 ; i < results.length(); i++) {
JSONObject myrow = results.getJSONObject(i);
System.out.println(myrow.toString());
}
Assert.assertEquals(results.length(), 1);
JSONObject row = (JSONObject) results.get(0);
Assert.assertEquals(row.get("typeName"), "Person");
//person in hr department who lives in santa clara
response = discoveryService.searchByFullText("hr AND santa AND clara");
Assert.assertNotNull(response);
results = new JSONArray(response);
Assert.assertEquals(results.length(), 1);
row = (JSONObject) results.get(0);
Assert.assertEquals(row.get("typeName"), "Manager");
//person in hr department whose name is john or jahn
response = discoveryService.searchByFullText("hr AND (john OR jahn)");
Assert.assertNotNull(response);
results = new JSONArray(response);
Assert.assertEquals(results.length(), 1);
row = (JSONObject) results.get(0);
Assert.assertEquals(row.get("typeName"), "Person");
}
/*
private void dumpGraph() {
TitanGraph graph = titanGraphService.getTitanGraph();
......
......@@ -17,14 +17,17 @@
#
######### Graph Database Configs #########
#Refer http://s3.thinkaurelius.com/docs/titan/0.5.1/titan-config-ref.html
# Graph Storage
metadata.graph.storage.backend=inmemory
# Graph Search Index
metadata.graph.index.search.backend=elasticsearch
metadata.graph.index.search.directory=./target/data/es
metadata.graph.index.search.elasticsearch.client-only=false
metadata.graph.index.search.elasticsearch.local-mode=true
metadata.graph.index.search.elasticsearch.create.sleep=1000
######### Hive Lineage Configs #########
......
......@@ -19,14 +19,14 @@
######### Graph Database Configs #########
# Graph Storage
metadata.graph.storage.backend=berkeleyje
metadata.graph.storage.directory=./data/berkeley
metadata.graph.storage.directory=data/berkeley
# Graph Search Index
metadata.graph.index.search.backend=elasticsearch
metadata.graph.index.search.directory=./data/es
metadata.graph.index.search.directory=data/es
metadata.graph.index.search.elasticsearch.client-only=false
metadata.graph.index.search.elasticsearch.local-mode=true
metadata.graph.index.search.elasticsearch.create.sleep=2000
######### Hive Lineage Configs #########
# This models follows the quick-start guide
......
......@@ -33,8 +33,6 @@ import java.util.Collection;
import java.util.Date;
import java.util.Iterator;
import java.util.Map;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
public class DataTypes {
......
......@@ -19,7 +19,6 @@
package org.apache.hadoop.metadata.typesystem.types;
import com.google.common.collect.ImmutableCollection;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableMap;
import org.apache.hadoop.metadata.MetadataException;
......
......@@ -21,8 +21,6 @@ package org.apache.hadoop.metadata.typesystem.types;
import org.codehaus.jettison.json.JSONException;
import org.codehaus.jettison.json.JSONObject;
import java.util.Map;
public final class Multiplicity {
public static final Multiplicity OPTIONAL = new Multiplicity(0, 1, false);
......
......@@ -66,22 +66,22 @@
<logger name="org.apache.hadoop.metadata" additivity="false">
<level value="debug"/>
<appender-ref ref="FILE"/>
<appender-ref ref="console"/>
</logger>
<logger name="com.thinkaurelius.titan" additivity="false">
<level value="warn"/>
<appender-ref ref="FILE"/>
<appender-ref ref="console"/>
</logger>
<logger name="org.elasticsearch" additivity="false">
<level value="warn"/>
<appender-ref ref="FILE"/>
<appender-ref ref="console"/>
</logger>
<logger name="org.apache.lucene" additivity="false">
<level value="warn"/>
<appender-ref ref="FILE"/>
<appender-ref ref="console"/>
</logger>
<root>
......
......@@ -19,10 +19,13 @@
package org.apache.hadoop.metadata.typesystem.json;
import com.google.common.collect.ImmutableList;
import org.apache.hadoop.metadata.typesystem.types.BaseTest;
import org.apache.hadoop.metadata.MetadataException;
import org.apache.hadoop.metadata.typesystem.*;
import org.apache.hadoop.metadata.typesystem.ITypedInstance;
import org.apache.hadoop.metadata.typesystem.ITypedReferenceableInstance;
import org.apache.hadoop.metadata.typesystem.Referenceable;
import org.apache.hadoop.metadata.typesystem.Struct;
import org.apache.hadoop.metadata.typesystem.types.AttributeDefinition;
import org.apache.hadoop.metadata.typesystem.types.BaseTest;
import org.apache.hadoop.metadata.typesystem.types.ClassType;
import org.apache.hadoop.metadata.typesystem.types.DataTypes;
import org.apache.hadoop.metadata.typesystem.types.HierarchicalTypeDefinition;
......
......@@ -317,6 +317,10 @@
<name>truststore.file</name>
<value>${project.build.directory}/../../webapp/target/metadata.keystore</value>
</systemProperty>
<systemProperty>
<name>metadata.home</name>
<value>${project.build.directory}</value>
</systemProperty>
</systemProperties>
<stopKey>metadata-stop</stopKey>
<stopPort>41001</stopPort>
......
......@@ -38,6 +38,7 @@ public final class Main {
private static final Logger LOG = LoggerFactory.getLogger(Main.class);
private static final String APP_PATH = "app";
private static final String APP_PORT = "port";
private static final String METADATA_HOME = "metadata.home";
/**
* Prevent users from constructing this.
......@@ -70,6 +71,7 @@ public final class Main {
appPath = cmd.getOptionValue(APP_PATH);
}
setApplicationHome();
PropertiesConfiguration configuration = PropertiesUtil.getApplicationProperties();
final String enableTLSFlag = configuration.getString("metadata.enableTLS");
final int appPort = getApplicationPort(cmd, enableTLSFlag);
......@@ -81,7 +83,13 @@ public final class Main {
server.start();
}
private static String getProjectVersion(PropertiesConfiguration buildConfiguration) {
private static void setApplicationHome() {
if (System.getProperty(METADATA_HOME) == null) {
System.setProperty(METADATA_HOME, "target");
}
}
public static String getProjectVersion(PropertiesConfiguration buildConfiguration) {
return buildConfiguration.getString("project.version");
}
......
......@@ -33,12 +33,10 @@ import org.apache.hadoop.metadata.typesystem.TypesDef;
import org.apache.hadoop.metadata.typesystem.types.TypeSystem;
import org.apache.hadoop.metadata.web.filters.AuditFilter;
import org.apache.hadoop.metadata.web.filters.MetadataAuthenticationFilter;
import org.apache.hadoop.security.authentication.server.AuthenticationFilter;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import javax.servlet.ServletContextEvent;
import javax.servlet.ServletException;
import java.util.HashMap;
import java.util.Map;
......
......@@ -205,7 +205,7 @@ public class MetadataDiscoveryResource {
response.put(MetadataServiceClient.REQUEST_ID, Servlets.getRequestId());
response.put("query", query);
response.put("queryType", "full-text");
response.put(MetadataServiceClient.RESULTS, new JSONObject(jsonResult));
response.put(MetadataServiceClient.RESULTS, new JSONArray(jsonResult));
return Response.ok(response).build();
} catch (DiscoveryException e) {
......
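A hedged client-side sketch of the effect of this change: with the results placed as a JSONArray, callers such as MetadataServiceClient (used in the integration test later in this commit) can iterate the rows directly. The field names come from this commit; the client package path, exception signature, and query value are assumptions for illustration.

import org.apache.hadoop.metadata.MetadataServiceClient;
import org.codehaus.jettison.json.JSONArray;
import org.codehaus.jettison.json.JSONObject;

public final class FullTextEndpointExample {
    // serviceClient is assumed to be a configured MetadataServiceClient, as in the integration tests.
    static void dumpResults(MetadataServiceClient serviceClient) throws Exception {
        JSONObject response = serviceClient.searchByFullText("foundation_etl");
        JSONArray results = response.getJSONArray(MetadataServiceClient.RESULTS);
        for (int i = 0; i < results.length(); i++) {
            JSONObject row = results.getJSONObject(i);
            // Each row mirrors what GraphBackedDiscoveryService emits: guid, typeName and score.
            System.out.println(row.getString("typeName") + " -> " + row.getString("guid")
                    + " (score " + row.get("score") + ")");
        }
    }
}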
#
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
######### Graph Database Configs #########
# Graph Storage
metadata.graph.storage.backend=berkeleyje
metadata.graph.storage.directory=target/data/berkeley
# Graph Search Index
metadata.graph.index.search.backend=elasticsearch
metadata.graph.index.search.directory=target/data/es
metadata.graph.index.search.elasticsearch.client-only=false
metadata.graph.index.search.elasticsearch.local-mode=true
metadata.graph.index.search.elasticsearch.create.sleep=2000
######### Hive Lineage Configs #########
# This models follows the quick-start guide
metadata.lineage.hive.table.type.name=hive_table
metadata.lineage.hive.column.type.name=hive_column
metadata.lineage.hive.table.column.name=columns
metadata.lineage.hive.process.type.name=hive_process
metadata.lineage.hive.process.inputs.name=inputTables
metadata.lineage.hive.process.outputs.name=outputTables
######### Security Properties #########
# SSL config
metadata.enableTLS=false
######### Security Properties #########
<?xml version="1.0" encoding="UTF-8" ?>
<!--
~ Licensed to the Apache Software Foundation (ASF) under one
~ or more contributor license agreements. See the NOTICE file
~ distributed with this work for additional information
~ regarding copyright ownership. The ASF licenses this file
~ to you under the Apache License, Version 2.0 (the
~ "License"); you may not use this file except in compliance
~ with the License. You may obtain a copy of the License at
~
~ http://www.apache.org/licenses/LICENSE-2.0
~
~ Unless required by applicable law or agreed to in writing, software
~ distributed under the License is distributed on an "AS IS" BASIS,
~ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
~ See the License for the specific language governing permissions and
~ limitations under the License.
-->
<!DOCTYPE log4j:configuration SYSTEM "log4j.dtd">
<log4j:configuration xmlns:log4j="http://jakarta.apache.org/log4j/">
<appender name="console" class="org.apache.log4j.ConsoleAppender">
<param name="Target" value="System.out"/>
<layout class="org.apache.log4j.PatternLayout">
<param name="ConversionPattern" value="%d %-5p - [%t:%x] ~ %m (%c{1}:%L)%n"/>
</layout>
</appender>
<appender name="FILE" class="org.apache.log4j.DailyRollingFileAppender">
<param name="File" value="${metadata.log.dir}/application.log"/>
<param name="Append" value="true"/>
<param name="Threshold" value="debug"/>
<layout class="org.apache.log4j.PatternLayout">
<param name="ConversionPattern" value="%d %-5p - [%t:%x] ~ %m (%c{1}:%L)%n"/>
</layout>
</appender>
<appender name="AUDIT" class="org.apache.log4j.DailyRollingFileAppender">
<param name="File" value="${metadata.log.dir}/audit.log"/>
<param name="Append" value="true"/>
<param name="Threshold" value="debug"/>
<layout class="org.apache.log4j.PatternLayout">
<param name="ConversionPattern" value="%d %x %m%n"/>
</layout>
</appender>
<logger name="org.apache.hadoop.metadata" additivity="false">
<level value="debug"/>
<appender-ref ref="FILE"/>
</logger>
<logger name="com.thinkaurelius.titan" additivity="false">
<level value="info"/>
<appender-ref ref="FILE"/>
</logger>
<logger name="org.elasticsearch" additivity="false">
<level value="info"/>
<appender-ref ref="FILE"/>
</logger>
<logger name="org.apache.lucene" additivity="false">
<level value="info"/>
<appender-ref ref="FILE"/>
</logger>
<logger name="com.google" additivity="false">
<level value="info"/>
<appender-ref ref="FILE"/>
</logger>
<logger name="AUDIT">
<level value="info"/>
<appender-ref ref="AUDIT"/>
</logger>
<root>
<priority value="info"/>
<appender-ref ref="FILE"/>
</root>
</log4j:configuration>
......@@ -150,12 +150,20 @@ public class MetadataDiscoveryJerseyResourceIT extends BaseResourceIT {
@Test(enabled = false)
public void testSearchUsingFullText() throws Exception {
String query = "foo bar";
String query = "foundation_etl";
JSONObject response = serviceClient.searchByFullText(query);
Assert.assertNotNull(response.get(MetadataServiceClient.REQUEST_ID));
Assert.assertEquals(response.getString("query"), query);
Assert.assertEquals(response.getString("queryType"), "full-text");
JSONArray results = response.getJSONArray(MetadataServiceClient.RESULTS);
Assert.assertEquals(results.length(), 1);
JSONObject row = results.getJSONObject(0);
Assert.assertNotNull(row.get("guid"));
Assert.assertEquals(row.getString("typeName"), "dsl_test_type");
Assert.assertNotNull(row.get("score"));
}
private void createTypes() throws Exception {
......
......@@ -14,112 +14,17 @@
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.metadata.web.service;
import com.sun.jersey.api.client.Client;
import com.sun.jersey.api.client.WebResource;
import com.sun.jersey.api.client.config.DefaultClientConfig;
import org.apache.commons.configuration.PropertiesConfiguration;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.metadata.web.resources.*;
import org.apache.hadoop.security.alias.CredentialProvider;
import org.apache.hadoop.security.alias.CredentialProviderFactory;
import org.apache.hadoop.security.alias.JavaKeyStoreProvider;
import org.mortbay.jetty.webapp.WebAppContext;
import org.testng.Assert;
import org.testng.TestListenerAdapter;
import org.testng.TestNG;
import org.testng.annotations.AfterClass;
import org.testng.annotations.BeforeClass;
import org.testng.annotations.BeforeMethod;
import org.testng.annotations.Test;
import javax.ws.rs.core.UriBuilder;
import java.io.ByteArrayInputStream;
import java.io.File;
import java.io.IOException;
import java.io.InputStream;
import java.net.HttpURLConnection;
import java.net.URL;
import java.nio.file.Files;
import java.util.List;
/**
*
*/
public class SecureEmbeddedServerIT {
private SecureEmbeddedServer secureEmbeddedServer;
private String providerUrl;
private Path jksPath;
protected WebResource service;
static {
//for localhost testing only
javax.net.ssl.HttpsURLConnection.setDefaultHostnameVerifier(
new javax.net.ssl.HostnameVerifier(){
public boolean verify(String hostname,
javax.net.ssl.SSLSession sslSession) {
if (hostname.equals("localhost")) {
return true;
}
return false;
}
});
System.setProperty("javax.net.ssl.trustStore", SecureEmbeddedServer.DEFAULT_KEYSTORE_FILE_LOCATION);
System.setProperty("javax.net.ssl.trustStorePassword", "keypass");
System.setProperty("javax.net.ssl.trustStoreType", "JKS");
}
@BeforeClass
public void setupServerURI () throws Exception {
BaseResourceIT.baseUrl = "https://localhost:21443";
}
@AfterClass
public void resetServerURI() throws Exception {
BaseResourceIT.baseUrl = "http://localhost:21000";
}
@BeforeMethod
public void setup() throws Exception {
jksPath = new Path(Files.createTempDirectory("tempproviders").toString(), "test.jks");
providerUrl = JavaKeyStoreProvider.SCHEME_NAME + "://file" + jksPath.toUri();
String baseUrl = "https://localhost:21443/";
DefaultClientConfig config = new DefaultClientConfig();
Client client = Client.create(config);
client.resource(UriBuilder.fromUri(baseUrl).build());
service = client.resource(UriBuilder.fromUri(baseUrl).build());
}
@Test
public void testNoConfiguredCredentialProvider() throws Exception {
try {
secureEmbeddedServer = new SecureEmbeddedServer(21443, "webapp/target/metadata-governance");
WebAppContext webapp = new WebAppContext();
webapp.setContextPath("/");
webapp.setWar(System.getProperty("user.dir") + getWarPath());
secureEmbeddedServer.server.setHandler(webapp);
secureEmbeddedServer.server.start();
Assert.fail("Should have thrown an exception");
} catch (IOException e) {
assert e.getMessage().equals("No credential provider path configured for storage of certificate store passwords");
} finally {
secureEmbeddedServer.server.stop();
}
}
public class SecureEmbeddedServerIT extends SecureEmbeddedServerITBase {
@Test
public void testServerConfiguredUsingCredentialProvider() throws Exception {
// setup the configuration
......@@ -128,18 +33,15 @@ public class SecureEmbeddedServerIT {
// setup the credential provider
setupCredentials();
SecureEmbeddedServer secureEmbeddedServer = null;
try {
secureEmbeddedServer = new SecureEmbeddedServer(21443, "webapp/target/metadata-governance") {
String appPath = System.getProperty("user.dir") + getWarPath();
secureEmbeddedServer = new SecureEmbeddedServer(21443, appPath) {
@Override
protected PropertiesConfiguration getConfiguration() {
return configuration;
}
};
WebAppContext webapp = new WebAppContext();
webapp.setContextPath("/");
webapp.setWar(System.getProperty("user.dir") + getWarPath());
secureEmbeddedServer.server.setHandler(webapp);
secureEmbeddedServer.server.start();
URL url = new URL("https://localhost:21443/");
......@@ -148,110 +50,10 @@ public class SecureEmbeddedServerIT {
connection.connect();
// test to see whether server is up and root page can be served
assert connection.getResponseCode() == 200;
Assert.assertEquals(connection.getResponseCode(), 200);
} finally {
secureEmbeddedServer.server.stop();
}
}
@Test
public void testMissingEntriesInCredentialProvider() throws Exception {
// setup the configuration
final PropertiesConfiguration configuration = new PropertiesConfiguration();
configuration.setProperty(SecureEmbeddedServer.CERT_STORES_CREDENTIAL_PROVIDER_PATH, providerUrl);
try {
secureEmbeddedServer = new SecureEmbeddedServer(21443, "webapp/target/metadata-governance") {
@Override
protected PropertiesConfiguration getConfiguration() {
return configuration;
}
};
Assert.fail("No entries should generate an exception");
} catch (IOException e) {
assert e.getMessage().startsWith("No credential entry found for");
} finally {
secureEmbeddedServer.server.stop();
}
}
/**
* Runs the existing webapp test cases, this time against the initiated secure server instance.
* @throws Exception
*/
@Test
public void runOtherSuitesAgainstSecureServer() throws Exception {
final PropertiesConfiguration configuration = new PropertiesConfiguration();
configuration.setProperty(SecureEmbeddedServer.CERT_STORES_CREDENTIAL_PROVIDER_PATH, providerUrl);
// setup the credential provider
setupCredentials();
try {
secureEmbeddedServer = new SecureEmbeddedServer(21443, "webapp/target/metadata-governance") {
@Override
protected PropertiesConfiguration getConfiguration() {
return configuration;
}
};
WebAppContext webapp = new WebAppContext();
webapp.setContextPath("/");
webapp.setWar(System.getProperty("user.dir") + getWarPath());
secureEmbeddedServer.server.setHandler(webapp);
secureEmbeddedServer.server.start();
TestListenerAdapter tla = new TestListenerAdapter();
TestNG testng = new TestNG();
testng.setTestClasses(new Class[] { AdminJerseyResourceIT.class, EntityJerseyResourceIT.class,
MetadataDiscoveryJerseyResourceIT.class, RexsterGraphJerseyResourceIT.class, TypesJerseyResourceIT.class});
testng.addListener(tla);
testng.run();
} finally {
secureEmbeddedServer.server.stop();
}
}
protected String getWarPath() {
return String.format("/target/metadata-webapp-%s.war",
System.getProperty("project.version", "0.1-incubating-SNAPSHOT"));
}
private void setupCredentials()
throws Exception {
Configuration conf = new Configuration(false);
File file = new File(jksPath.toUri().getPath());
file.delete();
conf.set(CredentialProviderFactory.CREDENTIAL_PROVIDER_PATH, providerUrl);
CredentialProvider provider =
CredentialProviderFactory.getProviders(conf).get(0);
// create new aliases
try {
char[] storepass = {'k', 'e', 'y', 'p', 'a', 's', 's'};
provider.createCredentialEntry(
SecureEmbeddedServer.KEYSTORE_PASSWORD_KEY, storepass);
char[] trustpass = {'k', 'e', 'y', 'p', 'a', 's', 's'};
provider.createCredentialEntry(
SecureEmbeddedServer.TRUSTSTORE_PASSWORD_KEY, trustpass);
char[] certpass = {'k', 'e', 'y', 'p', 'a', 's', 's'};
provider.createCredentialEntry(
SecureEmbeddedServer.SERVER_CERT_PASSWORD_KEY, certpass);
// write out so that it can be found in checks
provider.flush();
} catch (Exception e) {
e.printStackTrace();
throw e;
}
}
}
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.metadata.web.service;
import com.sun.jersey.api.client.Client;
import com.sun.jersey.api.client.WebResource;
import com.sun.jersey.api.client.config.DefaultClientConfig;
import org.apache.commons.configuration.PropertiesConfiguration;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.metadata.web.resources.*;
import org.apache.hadoop.security.alias.CredentialProvider;
import org.apache.hadoop.security.alias.CredentialProviderFactory;
import org.apache.hadoop.security.alias.JavaKeyStoreProvider;
import org.mortbay.jetty.webapp.WebAppContext;
import org.testng.Assert;
import org.testng.TestListenerAdapter;
import org.testng.TestNG;
import org.testng.annotations.AfterClass;
import org.testng.annotations.BeforeClass;
import org.testng.annotations.BeforeMethod;
import org.testng.annotations.Test;
import javax.ws.rs.core.UriBuilder;
import java.io.ByteArrayInputStream;
import java.io.File;
import java.io.IOException;
import java.io.InputStream;
import java.net.HttpURLConnection;
import java.net.URL;
import java.nio.file.Files;
import java.util.List;
/**
*
*/
public class SecureEmbeddedServerITBase {
private SecureEmbeddedServer secureEmbeddedServer;
protected String providerUrl;
private Path jksPath;
protected WebResource service;
static {
//for localhost testing only
javax.net.ssl.HttpsURLConnection.setDefaultHostnameVerifier(
new javax.net.ssl.HostnameVerifier(){
public boolean verify(String hostname,
javax.net.ssl.SSLSession sslSession) {
if (hostname.equals("localhost")) {
return true;
}
return false;
}
});
System.setProperty("javax.net.ssl.trustStore", SecureEmbeddedServer.DEFAULT_KEYSTORE_FILE_LOCATION);
System.setProperty("javax.net.ssl.trustStorePassword", "keypass");
System.setProperty("javax.net.ssl.trustStoreType", "JKS");
}
@BeforeClass
public void setupServerURI () throws Exception {
BaseResourceIT.baseUrl = "https://localhost:21443";
}
@AfterClass
public void resetServerURI() throws Exception {
BaseResourceIT.baseUrl = "http://localhost:21000";
}
@BeforeMethod
public void setup() throws Exception {
jksPath = new Path(Files.createTempDirectory("tempproviders").toString(), "test.jks");
providerUrl = JavaKeyStoreProvider.SCHEME_NAME + "://file" + jksPath.toUri();
String baseUrl = "https://localhost:21443/";
DefaultClientConfig config = new DefaultClientConfig();
Client client = Client.create(config);
client.resource(UriBuilder.fromUri(baseUrl).build());
service = client.resource(UriBuilder.fromUri(baseUrl).build());
}
@Test
public void testNoConfiguredCredentialProvider() throws Exception {
try {
secureEmbeddedServer = new SecureEmbeddedServer(21443, "webapp/target/metadata-governance");
WebAppContext webapp = new WebAppContext();
webapp.setContextPath("/");
webapp.setWar(System.getProperty("user.dir") + getWarPath());
secureEmbeddedServer.server.setHandler(webapp);
secureEmbeddedServer.server.start();
Assert.fail("Should have thrown an exception");
} catch (IOException e) {
assert e.getMessage().equals("No credential provider path configured for storage of certificate store passwords");
} finally {
secureEmbeddedServer.server.stop();
}
}
@Test
public void testMissingEntriesInCredentialProvider() throws Exception {
// setup the configuration
final PropertiesConfiguration configuration = new PropertiesConfiguration();
configuration.setProperty(SecureEmbeddedServer.CERT_STORES_CREDENTIAL_PROVIDER_PATH, providerUrl);
try {
secureEmbeddedServer = new SecureEmbeddedServer(21443, "webapp/target/metadata-governance") {
@Override
protected PropertiesConfiguration getConfiguration() {
return configuration;
}
};
Assert.fail("No entries should generate an exception");
} catch (IOException e) {
assert e.getMessage().startsWith("No credential entry found for");
} finally {
secureEmbeddedServer.server.stop();
}
}
/**
* Runs the existing webapp test cases, this time against the initiated secure server instance.
* @throws Exception
*/
@Test
public void runOtherSuitesAgainstSecureServer() throws Exception {
final PropertiesConfiguration configuration = new PropertiesConfiguration();
configuration.setProperty(SecureEmbeddedServer.CERT_STORES_CREDENTIAL_PROVIDER_PATH, providerUrl);
// setup the credential provider
setupCredentials();
try {
secureEmbeddedServer = new SecureEmbeddedServer(21443, "webapp/target/metadata-governance") {
@Override
protected PropertiesConfiguration getConfiguration() {
return configuration;
}
};
WebAppContext webapp = new WebAppContext();
webapp.setContextPath("/");
webapp.setWar(System.getProperty("user.dir") + getWarPath());
secureEmbeddedServer.server.setHandler(webapp);
secureEmbeddedServer.server.start();
TestListenerAdapter tla = new TestListenerAdapter();
TestNG testng = new TestNG();
testng.setTestClasses(new Class[] { AdminJerseyResourceIT.class, EntityJerseyResourceIT.class,
MetadataDiscoveryJerseyResourceIT.class, RexsterGraphJerseyResourceIT.class, TypesJerseyResourceIT.class});
testng.addListener(tla);
testng.run();
} finally {
secureEmbeddedServer.server.stop();
}
}
protected String getWarPath() {
return String.format("/target/metadata-webapp-%s",
System.getProperty("project.version", "0.1-incubating-SNAPSHOT"));
}
protected void setupCredentials() throws Exception {
Configuration conf = new Configuration(false);
File file = new File(jksPath.toUri().getPath());
file.delete();
conf.set(CredentialProviderFactory.CREDENTIAL_PROVIDER_PATH, providerUrl);
CredentialProvider provider =
CredentialProviderFactory.getProviders(conf).get(0);
// create new aliases
try {
char[] storepass = {'k', 'e', 'y', 'p', 'a', 's', 's'};
provider.createCredentialEntry(
SecureEmbeddedServer.KEYSTORE_PASSWORD_KEY, storepass);
char[] trustpass = {'k', 'e', 'y', 'p', 'a', 's', 's'};
provider.createCredentialEntry(
SecureEmbeddedServer.TRUSTSTORE_PASSWORD_KEY, trustpass);
char[] certpass = {'k', 'e', 'y', 'p', 'a', 's', 's'};
provider.createCredentialEntry(
SecureEmbeddedServer.SERVER_CERT_PASSWORD_KEY, certpass);
// write out so that it can be found in checks
provider.flush();
} catch (Exception e) {
e.printStackTrace();
throw e;
}
}
}