Commit 1380771d by Venkatesh Seetharam

Merge remote-tracking branch 'origin/master'

Conflicts:
	addons/hive-bridge/src/site/twiki/Bridge-Hive.twiki
	webapp/pom.xml
	webapp/src/test/java/org/apache/hadoop/metadata/web/listeners/LoginProcessorIT.java
parents fcbce418 a15b6ef1
@@ -34,3 +34,4 @@ maven-eclipse.xml
#log files
logs
*.log
test-output
File mode changed from 100644 to 100755
File mode changed from 100644 to 100755
File mode changed from 100644 to 100755
File mode changed from 100644 to 100755
File mode changed from 100644 to 100755
File mode changed from 100644 to 100755
@@ -103,6 +103,20 @@
</dependency>
<dependency>
<groupId>org.apache.hadoop</groupId>
<artifactId>hadoop-client</artifactId>
<version>${hadoop.version}</version>
<scope>test</scope>
</dependency>
<dependency>
<groupId>org.apache.hadoop</groupId>
<artifactId>hadoop-client</artifactId>
<version>${hadoop.version}</version>
<scope>test</scope>
</dependency>
<dependency>
<groupId>org.testng</groupId>
<artifactId>testng</artifactId>
</dependency>
......
File mode changed from 100644 to 100755
@@ -62,7 +62,7 @@ public class HiveMetaStoreBridge {
}
}
- public static final String DGI_URL_PROPERTY = "hive.dgi.url";
+ public static final String DGI_URL_PROPERTY = "hive.hook.dgi.url";
private static final Logger LOG = LoggerFactory.getLogger(HiveMetaStoreBridge.class);
......
@@ -78,6 +78,8 @@ import java.util.Map;
import java.util.Set;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.concurrent.LinkedBlockingQueue;
import java.util.concurrent.ThreadPoolExecutor;
import java.util.concurrent.TimeUnit;
/**
@@ -91,6 +93,14 @@ public class HiveHook implements ExecuteWithHookContext, HiveSemanticAnalyzerHoo
private static final int WAIT_TIME = 3;
private static ExecutorService executor;
private static final String MIN_THREADS = "hive.hook.dgi.minThreads";
private static final String MAX_THREADS = "hive.hook.dgi.maxThreads";
private static final String KEEP_ALIVE_TIME = "hive.hook.dgi.keepAliveTime";
private static final int minThreadsDefault = 5;
private static final int maxThreadsDefault = 5;
private static final long keepAliveTimeDefault = 10;
static {
// anything shared should be initialized here and destroyed in the
// shutdown hook The hook contract is weird in that it creates a
@@ -99,7 +109,14 @@ public class HiveHook implements ExecuteWithHookContext, HiveSemanticAnalyzerHoo
// initialize the async facility to process hook calls. We don't
// want to do this inline since it adds plenty of overhead for the
// query.
- executor = Executors.newSingleThreadExecutor(
+ HiveConf hiveConf = new HiveConf();
int minThreads = hiveConf.getInt(MIN_THREADS, minThreadsDefault);
int maxThreads = hiveConf.getInt(MAX_THREADS, maxThreadsDefault);
long keepAliveTime = hiveConf.getLong(KEEP_ALIVE_TIME, keepAliveTimeDefault);
executor = new ThreadPoolExecutor(minThreads, maxThreads,
keepAliveTime, TimeUnit.MILLISECONDS,
new LinkedBlockingQueue<Runnable>(),
new ThreadFactoryBuilder()
.setDaemon(true)
.setNameFormat("DGI Logger %d")
@@ -199,8 +216,16 @@ public class HiveHook implements ExecuteWithHookContext, HiveSemanticAnalyzerHoo
}
private void registerCTAS(HiveMetaStoreBridge dgiBridge, HookContext hookContext, HiveConf conf) throws Exception {
LOG.debug("Registering CTAS");
Set<ReadEntity> inputs = hookContext.getInputs();
Set<WriteEntity> outputs = hookContext.getOutputs();
//Even explain CTAS has operation name as CREATETABLE_AS_SELECT
if (inputs.isEmpty() && outputs.isEmpty()) {
LOG.info("Explain statement. Skipping...");
}
String user = hookContext.getUserName();
HiveOperation operation = HiveOperation.valueOf(hookContext.getOperationName());
String queryId = null;
@@ -214,7 +239,6 @@ public class HiveHook implements ExecuteWithHookContext, HiveSemanticAnalyzerHoo
queryStartTime = plan.getQueryStartTime();
}
Referenceable processReferenceable = new Referenceable(HiveDataTypes.HIVE_PROCESS.getName());
processReferenceable.set("processName", operation.getOperationName());
processReferenceable.set("startTime", queryStartTime);
@@ -311,7 +335,7 @@ public class HiveHook implements ExecuteWithHookContext, HiveSemanticAnalyzerHoo
ExplainTask explain = new ExplainTask();
explain.initialize(conf, queryPlan, null);
- org.json.JSONObject explainPlan = explain.getJSONLogicalPlan(null, ew);
+ org.json.JSONObject explainPlan = explain.getJSONPlan(null, ew);
return explainPlan.toString();
}
......
@@ -20,12 +20,13 @@ Hive metadata can be modelled in DGI using its Type System. The default modellin
---++ Importing Hive Metadata
org.apache.hadoop.metadata.hive.bridge.HiveMetaStoreBridge imports the hive metadata into DGI using the typesystem defined in org.apache.hadoop.metadata.hive.model.HiveDataModelGenerator. import-hive.sh command can be used to facilitate this.
Set-up the following configs in <dgi package>/conf/hive-site.xml:
* Hive metastore configuration - Refer [[https://cwiki.apache.org/confluence/display/Hive/AdminManual+MetastoreAdmin][Hive Metastore Configuration documentation]]
* DGI endpoint - Add the following property with the DGI endpoint for your set-up
<verbatim>
<property>
- <name>hive.dgi.url</name>
+ <name>hive.hook.dgi.url</name>
<value>http://localhost:21000/</value>
</property>
</verbatim>
...@@ -34,13 +35,26 @@ Usage: <dgi package>/bin/import-hive.sh ...@@ -34,13 +35,26 @@ Usage: <dgi package>/bin/import-hive.sh
---++ Hive Hook ---++ Hive Hook
Hive supports listeners on hive command execution using hive hooks. This can be used to add/update/remove entities in DGI. Follow the following instructions in your hive set-up Hive supports listeners on hive command execution using hive hooks. This is used to add/update/remove entities in DGI using the model defined in org.apache.hadoop.metadata.hive.model.HiveDataModelGenerator.
* Add org.apache.hadoop.metadata.hive.hook.HiveHook as post execution hook in hive-ste.xml The hook submits the request to a thread pool executor to avoid blocking the command execution. Follow the these instructions in your hive set-up to add hive hook for DGI:
* Add org.apache.hadoop.metadata.hive.hook.HiveHook as post execution hook in hive-site.xml
<verbatim> <verbatim>
<property> <property>
<name>hive.exec.post.hooks</name> <name>hive.exec.post.hooks</name>
<value>org.apache.hadoop.metadata.hive.hook.HiveHook</value> <value>org.apache.hadoop.metadata.hive.hook.HiveHook</value>
</property> </property>
</verbatim> </verbatim>
* Add the following property in hive-ste.xml with the DGI endpoint for your set-up
<verbatim>
<property>
<name>hive.hook.dgi.url</name>
<value>http://localhost:21000/</value>
</property>
</verbatim>
* Add 'export HIVE_AUX_JARS_PATH=<dgi package>/hook/hive' in hive-env.sh
* Restart hive-server2
The following properties in hive-site.xml control the thread pool details:
* hive.hook.dgi.minThreads - core number of threads. default 5
* hive.hook.dgi.maxThreads - maximum number of threads. default 5
* hive.hook.dgi.keepAliveTime - keep alive time in msecs. default 10
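For example, to override the defaults, the pool can be tuned in hive-site.xml next to the hook property (the values shown are just the documented defaults and are illustrative):
<verbatim>
<property>
  <name>hive.hook.dgi.minThreads</name>
  <value>5</value>
</property>
<property>
  <name>hive.hook.dgi.maxThreads</name>
  <value>5</value>
</property>
<property>
  <name>hive.hook.dgi.keepAliveTime</name>
  <value>10</value>
</property>
</verbatim>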
@@ -69,6 +69,7 @@ public class HiveHookIT {
hiveConf.setBoolVar(HiveConf.ConfVars.HIVE_SUPPORT_CONCURRENCY, false);
hiveConf.setVar(HiveConf.ConfVars.METASTOREWAREHOUSE, System.getProperty("user.dir") + "/target/metastore");
hiveConf.set(HiveMetaStoreBridge.DGI_URL_PROPERTY, DGI_URL);
hiveConf.set("javax.jdo.option.ConnectionURL", "jdbc:derby:./target/metastore_db;create=true");
hiveConf.set("debug", "true");
return hiveConf;
}
......
File mode changed from 100644 to 100755 (repeated for 154 files)
@@ -29,7 +29,7 @@ import org.apache.hadoop.metadata.repository.graph.GraphBackedMetadataRepository
import org.apache.hadoop.metadata.repository.graph.GraphBackedSearchIndexer;
import org.apache.hadoop.metadata.repository.graph.GraphProvider;
import org.apache.hadoop.metadata.repository.graph.TitanGraphProvider;
- import org.apache.hadoop.metadata.repository.typestore.GraphTypeStore;
+ import org.apache.hadoop.metadata.repository.typestore.GraphBackedTypeStore;
import org.apache.hadoop.metadata.repository.typestore.ITypeStore;
import org.apache.hadoop.metadata.services.DefaultMetadataService;
import org.apache.hadoop.metadata.services.MetadataService;
@@ -55,7 +55,7 @@ public class RepositoryMetadataModule extends com.google.inject.AbstractModule {
// get the impl classes for the repo and the graph service
// this.graphServiceClass = gsp.getImplClass();
this.metadataRepoClass = GraphBackedMetadataRepository.class;
- this.typeStore = GraphTypeStore.class;
+ this.typeStore = GraphBackedTypeStore.class;
this.metadataService = DefaultMetadataService.class;
this.discoveryService = GraphBackedDiscoveryService.class;
this.searchIndexer = GraphBackedSearchIndexer.class;
......
@@ -27,6 +27,11 @@ import java.util.Map;
public interface DiscoveryService {
/**
* Full text search
*/
String searchByFullText(String query) throws DiscoveryException;
/**
* Search using query DSL.
*
* @param dslQuery query in DSL format.
......
@@ -18,9 +18,15 @@
package org.apache.hadoop.metadata.discovery.graph;
import com.google.gson.JsonArray;
import com.google.gson.JsonObject;
import com.thinkaurelius.titan.core.TitanGraph;
import com.thinkaurelius.titan.core.TitanGraphQuery;
import com.thinkaurelius.titan.core.TitanProperty;
import com.thinkaurelius.titan.core.TitanVertex;
import com.thinkaurelius.titan.core.attribute.Text;
import com.tinkerpop.blueprints.Vertex;
import org.apache.commons.lang.StringUtils;
import org.apache.hadoop.metadata.discovery.DiscoveryException;
import org.apache.hadoop.metadata.discovery.DiscoveryService;
import org.apache.hadoop.metadata.query.Expressions;
@@ -30,8 +36,12 @@ import org.apache.hadoop.metadata.query.GremlinQueryResult;
import org.apache.hadoop.metadata.query.GremlinTranslator;
import org.apache.hadoop.metadata.query.QueryParser;
import org.apache.hadoop.metadata.query.QueryProcessor;
import org.apache.hadoop.metadata.repository.Constants;
import org.apache.hadoop.metadata.repository.MetadataRepository;
import org.apache.hadoop.metadata.repository.graph.GraphProvider;
import org.apache.hadoop.metadata.typesystem.Referenceable;
import org.apache.hadoop.metadata.typesystem.json.InstanceSerialization;
import org.apache.hadoop.metadata.typesystem.types.TypeSystem;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import scala.util.Either;
@@ -44,8 +54,11 @@ import javax.script.ScriptEngineManager;
import javax.script.ScriptException;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.Set;
/**
* Graph backed implementation of Search.
@@ -64,6 +77,23 @@ public class GraphBackedDiscoveryService implements DiscoveryService {
this.graphPersistenceStrategy = new DefaultGraphPersistenceStrategy(metadataRepository);
}
@Override
public String searchByFullText(String query) throws DiscoveryException {
Iterator iterator = titanGraph.query().has(Constants.ENTITY_TEXT_PROPERTY_KEY, Text.CONTAINS, query).vertices().iterator();
JsonArray results = new JsonArray();
while (iterator.hasNext()) {
Vertex vertex = (Vertex) iterator.next();
JsonObject row = new JsonObject();
row.addProperty("guid", vertex.<String>getProperty(Constants.GUID_PROPERTY_KEY));
row.addProperty("typeName", vertex.<String>getProperty(Constants.ENTITY_TYPE_PROPERTY_KEY));
results.add(row);
}
JsonObject response = new JsonObject();
response.addProperty("query", query);
response.add("results", results);
return response.toString();
}
/**
* Search using query DSL.
* *
@@ -118,47 +148,48 @@ public class GraphBackedDiscoveryService implements DiscoveryService {
try {
Object o = engine.eval(gremlinQuery, bindings);
- if (!(o instanceof List)) {
- throw new DiscoveryException(
- String.format("Cannot process gremlin result %s", o.toString()));
- }
- // (the result-extraction loop, now in extractResult() below, previously ran inline here)
- return result;
+ return extractResult(o);
} catch (ScriptException se) {
throw new DiscoveryException(se);
}
}
private List<Map<String, String>> extractResult(Object o) throws DiscoveryException {
if (!(o instanceof List)) {
throw new DiscoveryException(String.format("Cannot process result %s", o.toString()));
}
List l = (List) o;
List<Map<String, String>> result = new ArrayList<>();
for (Object r : l) {
Map<String, String> oRow = new HashMap<>();
if (r instanceof Map) {
@SuppressWarnings("unchecked")
Map<Object, Object> iRow = (Map) r;
for (Map.Entry e : iRow.entrySet()) {
Object k = e.getKey();
Object v = e.getValue();
oRow.put(k.toString(), v.toString());
}
} else if (r instanceof TitanVertex) {
Iterable<TitanProperty> ps = ((TitanVertex) r).getProperties();
for (TitanProperty tP : ps) {
String pName = tP.getPropertyKey().getName();
Object pValue = ((TitanVertex) r).getProperty(pName);
if (pValue != null) {
oRow.put(pName, pValue.toString());
}
}
} else if (r instanceof String) {
oRow.put("", r.toString());
} else {
throw new DiscoveryException(String.format("Cannot process result %s", o.toString()));
}
result.add(oRow);
}
return result;
}
}
@@ -32,6 +32,8 @@ public final class Constants {
public static final String ENTITY_TYPE_PROPERTY_KEY = "typeName";
public static final String ENTITY_TYPE_INDEX = "type_index";
public static final String ENTITY_TEXT_PROPERTY_KEY = "entityText";
/**
* Properties for type store graph
*/
......
@@ -36,9 +36,12 @@ import org.apache.hadoop.metadata.typesystem.ITypedReferenceableInstance;
import org.apache.hadoop.metadata.typesystem.ITypedStruct;
import org.apache.hadoop.metadata.typesystem.persistence.Id;
import org.apache.hadoop.metadata.typesystem.persistence.MapIds;
import org.apache.hadoop.metadata.typesystem.persistence.StructInstance;
import org.apache.hadoop.metadata.typesystem.types.AttributeInfo;
import org.apache.hadoop.metadata.typesystem.types.ClassType;
import org.apache.hadoop.metadata.typesystem.types.DataTypes;
import org.apache.hadoop.metadata.typesystem.types.EnumType;
import org.apache.hadoop.metadata.typesystem.types.EnumValue;
import org.apache.hadoop.metadata.typesystem.types.IDataType;
import org.apache.hadoop.metadata.typesystem.types.Multiplicity;
import org.apache.hadoop.metadata.typesystem.types.ObjectGraphWalker;
@@ -67,6 +70,7 @@ public class GraphBackedMetadataRepository implements MetadataRepository {
private static final Logger LOG =
LoggerFactory.getLogger(GraphBackedMetadataRepository.class);
private static final String FULL_TEXT_DELIMITER = " ";
private final AtomicInteger ID_SEQ = new AtomicInteger(0);
@@ -422,7 +426,71 @@ public class GraphBackedMetadataRepository implements MetadataRepository {
List<ITypedReferenceableInstance> newTypedInstances = discoverInstances(entityProcessor);
entityProcessor.createVerticesForClassTypes(newTypedInstances);
- return addDiscoveredInstances(typedInstance, entityProcessor, newTypedInstances);
+ String guid = addDiscoveredInstances(typedInstance, entityProcessor, newTypedInstances);
addFullTextProperty(entityProcessor, newTypedInstances);
return guid;
}
private void addFullTextProperty(EntityProcessor entityProcessor, List<ITypedReferenceableInstance> newTypedInstances) throws MetadataException {
for (ITypedReferenceableInstance typedInstance : newTypedInstances) { // Traverse
Id id = typedInstance.getId();
Vertex instanceVertex = entityProcessor.idToVertexMap.get(id);
String fullText = getFullText(instanceVertex, true);
instanceVertex.setProperty(Constants.ENTITY_TEXT_PROPERTY_KEY, fullText);
}
}
private String getFullText(Vertex instanceVertex, boolean followReferences) throws MetadataException {
String guid = instanceVertex.getProperty(Constants.GUID_PROPERTY_KEY);
ITypedReferenceableInstance typedReference = graphToInstanceMapper.mapGraphToTypedInstance(guid, instanceVertex);
return getFullText(typedReference, followReferences);
}
private String getFullText(ITypedInstance typedInstance, boolean followReferences) throws MetadataException {
StringBuilder fullText = new StringBuilder();
for (AttributeInfo attributeInfo : typedInstance.fieldMapping().fields.values()) {
Object attrValue = typedInstance.get(attributeInfo.name);
if (attrValue == null) {
continue;
}
String attrFullText = null;
switch(attributeInfo.dataType().getTypeCategory()) {
case PRIMITIVE:
attrFullText = String.valueOf(attrValue);
break;
case ENUM:
attrFullText = ((EnumValue)attrValue).value;
break;
case ARRAY:
break;
case MAP:
break;
case CLASS:
if (followReferences) {
String refGuid = ((ITypedReferenceableInstance) attrValue).getId()._getId();
Vertex refVertex = getVertexForGUID(refGuid);
attrFullText = getFullText(refVertex, false);
}
break;
case STRUCT:
case TRAIT:
if (followReferences) {
attrFullText = getFullText((ITypedInstance) attrValue, false);
}
break;
}
if (attrFullText != null) {
fullText = fullText.append(FULL_TEXT_DELIMITER).append(attributeInfo.name)
.append(FULL_TEXT_DELIMITER).append(attrFullText);
}
}
return fullText.toString();
}
/**
@@ -518,6 +586,9 @@ public class GraphBackedMetadataRepository implements MetadataRepository {
IDataType dataType) throws MetadataException {
LOG.debug("mapping attributeInfo {}", attributeInfo);
final String propertyName = typedInstance.getTypeName() + "." + attributeInfo.name;
if (typedInstance.get(attributeInfo.name) == null) {
return;
}
switch (dataType.getTypeCategory()) {
case PRIMITIVE:
@@ -824,8 +895,7 @@ public class GraphBackedMetadataRepository implements MetadataRepository {
AttributeInfo attributeInfo) throws MetadataException {
LOG.debug("mapping attributeInfo = " + attributeInfo);
final IDataType dataType = attributeInfo.dataType();
- final String vertexPropertyName =
-         typedInstance.getTypeName() + "." + attributeInfo.name;
+ final String vertexPropertyName = typedInstance.getTypeName() + "." + attributeInfo.name;
switch (dataType.getTypeCategory()) {
case PRIMITIVE:
@@ -833,6 +903,10 @@ public class GraphBackedMetadataRepository implements MetadataRepository {
break; // add only if vertex has this attribute
case ENUM:
if (instanceVertex.getProperty(vertexPropertyName) == null) {
return;
}
typedInstance.setInt(attributeInfo.name,
instanceVertex.<Integer>getProperty(vertexPropertyName));
break;
@@ -1060,8 +1134,7 @@ public class GraphBackedMetadataRepository implements MetadataRepository {
ITypedInstance typedInstance,
AttributeInfo attributeInfo) throws MetadataException {
LOG.debug("Adding primitive {} from vertex {}", attributeInfo, instanceVertex);
- final String vertexPropertyName = typedInstance.getTypeName() + "." +
-         attributeInfo.name;
+ final String vertexPropertyName = typedInstance.getTypeName() + "." + attributeInfo.name;
if (instanceVertex.getProperty(vertexPropertyName) == null) {
return;
}
......
@@ -94,9 +94,30 @@ public class GraphBackedSearchIndexer implements SearchIndexer {
createCompositeAndMixedIndex(Constants.TRAIT_NAMES_INDEX,
Constants.TRAIT_NAMES_PROPERTY_KEY, String.class, false, Cardinality.SET);
//Index for full text search
createVertexMixedIndex(Constants.ENTITY_TEXT_PROPERTY_KEY, String.class, Cardinality.SINGLE);
//Indexes for graph backed type system store
createTypeStoreIndexes();
LOG.info("Index creation for global keys complete."); LOG.info("Index creation for global keys complete.");
} }
private void createTypeStoreIndexes() {
//Create unique index on typeName
createCompositeIndex(Constants.TYPENAME_PROPERTY_KEY, Constants.TYPENAME_PROPERTY_KEY, String.class,
true, Cardinality.SINGLE);
//Create index on vertex type + typeName
//todo doesn't work, review
TitanManagement management = titanGraph.getManagementSystem();
PropertyKey vertexType = management.makePropertyKey(Constants.VERTEX_TYPE_PROPERTY_KEY).dataType(String.class).make();
PropertyKey typeName = management.getPropertyKey(Constants.TYPENAME_PROPERTY_KEY);
management.buildIndex("byTypeName", Vertex.class).addKey(vertexType).addKey(typeName).buildCompositeIndex();
management.commit();
LOG.debug("Created composite index on {} and {}", Constants.VERTEX_TYPE_PROPERTY_KEY, Constants.TYPENAME_PROPERTY_KEY);
}
/**
* This is upon adding a new type to Store.
*
......
@@ -52,7 +52,8 @@ import java.util.ArrayList;
import java.util.Iterator;
import java.util.List;
- public class GraphTypeStore implements ITypeStore {
+ public class GraphBackedTypeStore implements ITypeStore {
public static final String VERTEX_TYPE = "typeSystem";
private static final String PROPERTY_PREFIX = "type.";
public static final String SUPERTYPE_EDGE_LABEL = PROPERTY_PREFIX + ".supertype";
public static final String SUBTYPE_EDGE_LABEL = PROPERTY_PREFIX + ".subtype";
@@ -60,12 +61,12 @@ public class GraphTypeStore implements ITypeStore {
private static final ImmutableList META_PROPERTIES = ImmutableList.of(Constants.VERTEX_TYPE_PROPERTY_KEY,
Constants.TYPE_CATEGORY_PROPERTY_KEY, Constants.TYPENAME_PROPERTY_KEY);
- private static Logger LOG = LoggerFactory.getLogger(GraphTypeStore.class);
+ private static Logger LOG = LoggerFactory.getLogger(GraphBackedTypeStore.class);
private final TitanGraph titanGraph;
@Inject
- public GraphTypeStore(GraphProvider<TitanGraph> graphProvider) {
+ public GraphBackedTypeStore(GraphProvider<TitanGraph> graphProvider) {
titanGraph = graphProvider.get();
}
@@ -82,7 +83,7 @@ public class GraphTypeStore implements ITypeStore {
for (String typeName : typeNames) {
if (!coreTypes.contains(typeName)) {
IDataType dataType = typeSystem.getDataType(IDataType.class, typeName);
- LOG.debug("Adding {}.{} to type store", dataType.getTypeCategory(), dataType.getName());
+ LOG.debug("Processing {}.{} in type store", dataType.getTypeCategory(), dataType.getName());
switch (dataType.getTypeCategory()) {
case ENUM:
storeInGraph((EnumType)dataType);
@@ -207,7 +208,7 @@ public class GraphTypeStore implements ITypeStore {
@Override
public TypesDef restore() throws MetadataException {
//Get all vertices for type system
- Iterator vertices = titanGraph.query().has(Constants.VERTEX_TYPE_PROPERTY_KEY, PROPERTY_PREFIX).vertices().iterator();
+ Iterator vertices = titanGraph.query().has(Constants.VERTEX_TYPE_PROPERTY_KEY, VERTEX_TYPE).vertices().iterator();
ImmutableList.Builder<EnumTypeDefinition> enums = ImmutableList.builder();
ImmutableList.Builder<StructTypeDefinition> structs = ImmutableList.builder();
@@ -294,10 +295,9 @@ public class GraphTypeStore implements ITypeStore {
* @return vertex
*/
private Vertex findVertex(DataTypes.TypeCategory category, String typeName) {
- LOG.debug("Finding vertex for ({} - {}), ({} - {})", Constants.TYPE_CATEGORY_PROPERTY_KEY, category,
-         Constants.TYPENAME_PROPERTY_KEY, typeName);
+ LOG.debug("Finding vertex for {}.{}", category, typeName);
- Iterator results = titanGraph.query().has(Constants.VERTEX_TYPE_PROPERTY_KEY, PROPERTY_PREFIX)
+ Iterator results = titanGraph.query().has(Constants.VERTEX_TYPE_PROPERTY_KEY, VERTEX_TYPE)
.has(Constants.TYPENAME_PROPERTY_KEY, typeName).vertices().iterator();
Vertex vertex = null;
if (results != null && results.hasNext()) {
@@ -312,7 +312,7 @@ public class GraphTypeStore implements ITypeStore {
if (vertex == null) {
LOG.debug("Adding vertex {}{}", PROPERTY_PREFIX, typeName);
vertex = titanGraph.addVertex(null);
- vertex.setProperty(Constants.VERTEX_TYPE_PROPERTY_KEY, PROPERTY_PREFIX); //Mark as type vertex
+ vertex.setProperty(Constants.VERTEX_TYPE_PROPERTY_KEY, VERTEX_TYPE); //Mark as type vertex
vertex.setProperty(Constants.TYPE_CATEGORY_PROPERTY_KEY, category);
vertex.setProperty(Constants.TYPENAME_PROPERTY_KEY, typeName);
}
......
@@ -37,6 +37,7 @@ import org.testng.Assert;
import static org.apache.hadoop.metadata.typesystem.types.utils.TypesUtil.createClassTypeDef;
import static org.apache.hadoop.metadata.typesystem.types.utils.TypesUtil.createOptionalAttrDef;
import static org.apache.hadoop.metadata.typesystem.types.utils.TypesUtil.createRequiredAttrDef;
import static org.apache.hadoop.metadata.typesystem.types.utils.TypesUtil.createStructTypeDef;
import static org.apache.hadoop.metadata.typesystem.types.utils.TypesUtil.createTraitTypeDef;
/**
@@ -61,10 +62,13 @@ public final class TestUtils {
new EnumTypeDefinition("OrgLevel", new EnumValue("L1", 1), new EnumValue("L2", 2));
ts.defineEnumType(orgLevelEnum);
StructTypeDefinition addressDetails = createStructTypeDef("Address",
createRequiredAttrDef("street", DataTypes.STRING_TYPE),
createRequiredAttrDef("city", DataTypes.STRING_TYPE));
HierarchicalTypeDefinition<ClassType> deptTypeDef =
createClassTypeDef("Department", ImmutableList.<String>of(),
createRequiredAttrDef("name", DataTypes.STRING_TYPE),
createOptionalAttrDef("orgLevel", ts.getDataType(EnumType.class, "OrgLevel")),
new AttributeDefinition("employees",
String.format("array<%s>", "Person"), Multiplicity.COLLECTION, true,
"department")
@@ -73,6 +77,8 @@ public final class TestUtils {
HierarchicalTypeDefinition<ClassType> personTypeDef = createClassTypeDef("Person",
ImmutableList.<String>of(),
createRequiredAttrDef("name", DataTypes.STRING_TYPE),
createOptionalAttrDef("orgLevel", ts.getDataType(EnumType.class, "OrgLevel")),
createOptionalAttrDef("address", "Address"),
new AttributeDefinition("department",
"Department", Multiplicity.REQUIRED, false, "employees"),
new AttributeDefinition("manager",
@@ -92,7 +98,7 @@ public final class TestUtils {
createRequiredAttrDef("level", DataTypes.INT_TYPE)
);
- ts.defineTypes(ImmutableList.<StructTypeDefinition>of(),
+ ts.defineTypes(ImmutableList.of(addressDetails),
ImmutableList.of(securityClearanceTypeDef),
ImmutableList.of(deptTypeDef, personTypeDef, managerTypeDef));
}
@@ -101,12 +107,21 @@ public final class TestUtils {
Referenceable hrDept = new Referenceable("Department");
Referenceable john = new Referenceable("Person");
Referenceable jane = new Referenceable("Manager", "SecurityClearance");
Referenceable johnAddr = new Referenceable("Address");
Referenceable janeAddr = new Referenceable("Address");
hrDept.set("name", "hr");
john.set("name", "John");
john.set("department", hrDept);
johnAddr.set("street", "Stewart Drive");
johnAddr.set("city", "Sunnyvale");
john.set("address", johnAddr);
jane.set("name", "Jane");
jane.set("department", hrDept);
janeAddr.set("street", "Great Americal Parkway");
janeAddr.set("city", "Santa Clara");
jane.set("address", janeAddr);
john.set("manager", jane);
......
@@ -292,4 +292,32 @@ public class GraphBackedDiscoveryServiceTest {
Assert.assertNotEquals(name, "null");
}
}
@Test
public void testFullTextSearch() throws Exception {
//person in hr department whose name is john
String response = discoveryService.searchByFullText("john hr");
Assert.assertNotNull(response);
JSONObject jsonResponse = new JSONObject(response);
JSONArray results = jsonResponse.getJSONArray("results");
Assert.assertEquals(results.length(), 1);
JSONObject row = (JSONObject) results.get(0);
Assert.assertEquals(row.get("typeName"), "Person");
//person in hr department who lives in santa clara
response = discoveryService.searchByFullText("hr santa clara");
Assert.assertNotNull(response);
jsonResponse = new JSONObject(response);
results = jsonResponse.getJSONArray("results");
Assert.assertEquals(results.length(), 1);
row = (JSONObject) results.get(0);
Assert.assertEquals(row.get("typeName"), "Manager");
//search for hr should return - hr department and its 2 employees
response = discoveryService.searchByFullText("hr");
Assert.assertNotNull(response);
jsonResponse = new JSONObject(response);
results = jsonResponse.getJSONArray("results");
Assert.assertEquals(results.length(), 3);
}
}
\ No newline at end of file
@@ -195,7 +195,7 @@ public class EnumTest extends BaseTest {
"\tj : \t1\n" +
"\tk : \t1\n" +
"\tenum3 : \tCOMMITTED\n" +
- "\tl : \t2014-12-10\n" +
+ "\tl : \t2014-12-11\n" +
"\tm : \t[1, 1]\n" +
"\tn : \t[1.1, 1.1]\n" +
"\to : \t{b=2.0, a=1.0}\n" +
@@ -227,7 +227,7 @@ public class EnumTest extends BaseTest {
"\tj : \t1\n" +
"\tk : \t1\n" +
"\tenum3 : \tCOMMITTED\n" +
- "\tl : \t2014-12-10\n" +
+ "\tl : \t2014-12-11\n" +
"\tm : \t[1, 1]\n" +
"\tn : \t[1.1, 1.1]\n" +
"\to : \t{b=2.0, a=1.0}\n" +
@@ -264,7 +264,7 @@ public class EnumTest extends BaseTest {
"\tj : \t1\n" +
"\tk : \t1\n" +
"\tenum3 : \tCOMMITTED\n" +
- "\tl : \t2014-12-10\n" +
+ "\tl : \t2014-12-11\n" +
"\tm : \t[1, 1]\n" +
"\tn : \t[1.1, 1.1]\n" +
"\to : \t{b=2.0, a=1.0}\n" +
......
@@ -58,7 +58,7 @@ public class StructTest extends BaseTest {
"\ti : \t1.0\n" +
"\tj : \t1\n" +
"\tk : \t1\n" +
- "\tl : \t2014-12-10\n" +
+ "\tl : \t2014-12-11\n" +
"\tm : \t[1, 1]\n" +
"\tn : \t[1.1, 1.1]\n" +
"\to : \t{b=2.0, a=1.0}\n" +
@@ -101,7 +101,7 @@ public class StructTest extends BaseTest {
"\ti : \t1.0\n" +
"\tj : \t1\n" +
"\tk : \t1\n" +
- "\tl : \t2014-12-10\n" +
+ "\tl : \t2014-12-11\n" +
"\tm : \t[1, 1]\n" +
"\tn : \t[1.100000000000000088817841970012523233890533447265625, 1" +
".100000000000000088817841970012523233890533447265625]\n" +
......
@@ -34,6 +34,7 @@ import org.apache.hadoop.metadata.typesystem.types.ClassType;
import org.apache.hadoop.metadata.typesystem.types.DataTypes;
import org.apache.hadoop.metadata.typesystem.types.EnumTypeDefinition;
import org.apache.hadoop.metadata.typesystem.types.HierarchicalTypeDefinition;
import org.apache.hadoop.metadata.typesystem.types.StructTypeDefinition;
import org.apache.hadoop.metadata.typesystem.types.TraitType;
import org.apache.hadoop.metadata.typesystem.types.TypeSystem;
import org.testng.annotations.BeforeClass;
@@ -44,7 +45,7 @@ import javax.inject.Inject;
import java.util.List;
@Guice(modules = RepositoryMetadataModule.class)
- public class GraphTypeStoreTest {
+ public class GraphBackedTypeStoreTest {
@Inject
private GraphProvider<TitanGraph> graphProvider;
@@ -85,7 +86,9 @@ public class GraphTypeStoreTest {
Assert.assertEquals(1, enumTypes.size());
//validate class
- Assert.assertTrue(types.structTypesAsJavaList().isEmpty());
+ List<StructTypeDefinition> structTypes = types.structTypesAsJavaList();
Assert.assertEquals(1, structTypes.size());
List<HierarchicalTypeDefinition<ClassType>> classTypes = types.classTypesAsJavaList();
Assert.assertEquals(3, classTypes.size());
for (HierarchicalTypeDefinition<ClassType> classType : classTypes) {
......
File mode changed from 100644 to 100755
File mode changed from 100644 to 100755
File mode changed from 100644 to 100755
File mode changed from 100644 to 100755
File mode changed from 100644 to 100755
File mode changed from 100644 to 100755
File mode changed from 100644 to 100755
File mode changed from 100644 to 100755
File mode changed from 100644 to 100755
File mode changed from 100644 to 100755
File mode changed from 100644 to 100755
@@ -19,11 +19,11 @@
######### Graph Database Configs #########
# Graph Storage
metadata.graph.storage.backend=berkeleyje
- metadata.graph.storage.directory=./data/berkeley
+ metadata.graph.storage.directory=./target/data/berkeley
# Graph Search Index
metadata.graph.index.search.backend=elasticsearch
- metadata.graph.index.search.directory=./data/es
+ metadata.graph.index.search.directory=./target/data/es
metadata.graph.index.search.elasticsearch.client-only=false
metadata.graph.index.search.elasticsearch.local-mode=true
......
File mode changed from 100644 to 100755
File mode changed from 100644 to 100755
File mode changed from 100644 to 100755
File mode changed from 100644 to 100755
File mode changed from 100644 to 100755
File mode changed from 100644 to 100755
File mode changed from 100644 to 100755
File mode changed from 100644 to 100755
File mode changed from 100644 to 100755
File mode changed from 100644 to 100755
File mode changed from 100644 to 100755
File mode changed from 100644 to 100755
File mode changed from 100644 to 100755
File mode changed from 100644 to 100755
@@ -35,13 +35,22 @@ import java.util.LinkedHashSet;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.TimeZone;
import java.util.concurrent.ConcurrentHashMap;
@Singleton
@InterfaceAudience.Private
public class TypeSystem {
private static final TypeSystem INSTANCE = new TypeSystem();
- public static SimpleDateFormat dateFormat = new SimpleDateFormat("yyyy-MM-dd");
+ public static ThreadLocal<DateFormat> dateFormat = new ThreadLocal() {
@Override
public DateFormat initialValue() {
DateFormat dateFormat = new SimpleDateFormat("yyyy-MM-dd");
dateFormat.setTimeZone(TimeZone.getTimeZone("UTC"));
return dateFormat;
}
};
private Map<String, IDataType> types;
private IdType idType;
@@ -277,7 +286,7 @@ public class TypeSystem {
}
public DateFormat getDateFormat() {
- return dateFormat;
+ return dateFormat.get();
}
public boolean allowNullsInCollections() {
......
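The switch above from a shared SimpleDateFormat to a ThreadLocal matters because SimpleDateFormat is not thread-safe; each thread now formats and parses with its own UTC-based instance. A minimal usage sketch follows; the getInstance() accessor name is an assumption, since the hunk only shows the INSTANCE field and getDateFormat():

    import java.util.Date;
    import org.apache.hadoop.metadata.typesystem.types.TypeSystem;

    public class DateFormatUsageSketch {
        public static void main(String[] args) {
            // Assumed singleton accessor; only the INSTANCE field is visible in the hunk.
            TypeSystem ts = TypeSystem.getInstance();
            // Per-thread, UTC "yyyy-MM-dd" formatter backed by the ThreadLocal above.
            System.out.println(ts.getDateFormat().format(new Date()));
        }
    }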
@@ -26,6 +26,7 @@ import org.apache.hadoop.metadata.typesystem.types.EnumValue;
import org.apache.hadoop.metadata.typesystem.types.HierarchicalTypeDefinition;
import org.apache.hadoop.metadata.typesystem.types.IDataType;
import org.apache.hadoop.metadata.typesystem.types.Multiplicity;
import org.apache.hadoop.metadata.typesystem.types.StructTypeDefinition;
import org.apache.hadoop.metadata.typesystem.types.TraitType;
/**
@@ -73,6 +74,10 @@ public class TypesUtil {
return new HierarchicalTypeDefinition<>(TraitType.class, name, superTypes, attrDefs);
}
public static StructTypeDefinition createStructTypeDef(String name, AttributeDefinition... attrDefs) {
return new StructTypeDefinition(name, attrDefs);
}
public static HierarchicalTypeDefinition<ClassType> createClassTypeDef(
String name, ImmutableList<String> superTypes, AttributeDefinition... attrDefs) {
return new HierarchicalTypeDefinition<>(ClassType.class, name, superTypes, attrDefs);
......
File mode changed from 100644 to 100755
File mode changed from 100644 to 100755
File mode changed from 100644 to 100755
File mode changed from 100644 to 100755
...@@ -60,6 +60,27 @@ ...@@ -60,6 +60,27 @@
</dependency> </dependency>
<dependency> <dependency>
<groupId>org.apache.hadoop</groupId>
<artifactId>hadoop-minikdc</artifactId>
<version>${hadoop.version}</version>
<scope>test</scope>
</dependency>
<dependency>
<groupId>org.apache.hadoop</groupId>
<artifactId>hadoop-hdfs</artifactId>
<version>${hadoop.version}</version>
<scope>test</scope>
</dependency>
<!-- supports simple auth handler -->
<dependency>
<groupId>org.apache.httpcomponents</groupId>
<artifactId>httpclient</artifactId>
<version>4.2.5</version>
</dependency>
<dependency>
<groupId>joda-time</groupId> <groupId>joda-time</groupId>
<artifactId>joda-time</artifactId> <artifactId>joda-time</artifactId>
</dependency> </dependency>
...@@ -158,18 +179,6 @@ ...@@ -158,18 +179,6 @@
</dependency> </dependency>
<dependency> <dependency>
<groupId>org.apache.hadoop</groupId>
<artifactId>hadoop-client</artifactId>
<version>${hadoop.version}</version>
</dependency>
<dependency>
<groupId>org.apache.hadoop</groupId>
<artifactId>hadoop-minikdc</artifactId>
<version>${hadoop.version}</version>
</dependency>
<dependency>
<groupId>commons-io</groupId> <groupId>commons-io</groupId>
<artifactId>commons-io</artifactId> <artifactId>commons-io</artifactId>
<version>2.4</version> <version>2.4</version>
...@@ -303,13 +312,13 @@ ...@@ -303,13 +312,13 @@
<!--only skip int tests --> <!--only skip int tests -->
<connectors> <connectors>
<!-- <!--
<connector implementation="org.mortbay.jetty.security.SslSocketConnector"> <connector implementation="org.mortbay.jetty.security.SslSocketConnector">
<port>21443</port> <port>21443</port>
<maxIdleTime>60000</maxIdleTime> <maxIdleTime>60000</maxIdleTime>
<keystore>${project.build.directory}/../../webapp/target/metadata.keystore</keystore> <keystore>${project.build.directory}/../../webapp/target/metadata.keystore</keystore>
<keyPassword>metadata-passwd</keyPassword> <keyPassword>metadata-passwd</keyPassword>
<password>metadata-passwd</password> <password>metadata-passwd</password>
</connector> </connector>
--> -->
<connector implementation="org.mortbay.jetty.nio.SelectChannelConnector"> <connector implementation="org.mortbay.jetty.nio.SelectChannelConnector">
<port>21000</port> <port>21000</port>
...@@ -358,14 +367,12 @@ ...@@ -358,14 +367,12 @@
</execution> </execution>
</executions> </executions>
</plugin> </plugin>
<plugin> <plugin>
<groupId>org.apache.felix</groupId> <groupId>org.apache.felix</groupId>
<artifactId>maven-bundle-plugin</artifactId> <artifactId>maven-bundle-plugin</artifactId>
<inherited>true</inherited> <inherited>true</inherited>
<extensions>true</extensions> <extensions>true</extensions>
</plugin> </plugin>
</plugins> </plugins>
</build> </build>
</project> </project>
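One quick way to confirm the dependency reshuffle above (hadoop-minikdc and hadoop-hdfs now test-scoped, httpclient 4.2.5 added, the compile-scoped hadoop-client/minikdc entries dropped) is the standard Maven dependency report; the module name below is assumed to be webapp:

mvn dependency:tree -pl webapp | grep -E 'minikdc|hadoop-hdfs|httpclient'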
File mode changed from 100644 to 100755
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.metadata.web.filters;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import javax.servlet.Filter;
import javax.servlet.FilterChain;
import javax.servlet.FilterConfig;
import javax.servlet.ServletException;
import javax.servlet.ServletRequest;
import javax.servlet.ServletResponse;
import java.io.IOException;
/**
* This enforces authentication as part of the filter before processing the request.
* todo: Subclass of {@link org.apache.hadoop.security.authentication.server.AuthenticationFilter}.
*/
public class AuthenticationFilter implements Filter {
private static final Logger LOG = LoggerFactory.getLogger(AuthenticationFilter.class);
@Override
public void init(FilterConfig filterConfig) throws ServletException {
LOG.info("AuthenticationFilter initialization started");
}
@Override
public void doFilter(ServletRequest request, ServletResponse response, FilterChain chain)
throws IOException, ServletException {
chain.doFilter(request, response);
}
@Override
public void destroy() {
// do nothing
}
}
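For reference, a servlet Filter like this can be registered either declaratively in web.xml or programmatically; this webapp wires its filters through Guice's ServletModule (see GuiceServletConfig below), so the web.xml sketch here is illustrative only:

<!-- Illustrative web.xml registration; the project actually uses Guice's
     filter("/*").through(...) wiring shown in GuiceServletConfig. -->
<filter>
    <filter-name>authentication</filter-name>
    <filter-class>org.apache.hadoop.metadata.web.filters.AuthenticationFilter</filter-class>
</filter>
<filter-mapping>
    <filter-name>authentication</filter-name>
    <url-pattern>/*</url-pattern>
</filter-mapping>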
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.metadata.web.filters;
import com.google.inject.Singleton;
import org.apache.commons.configuration.ConfigurationException;
import org.apache.commons.configuration.PropertiesConfiguration;
import org.apache.hadoop.security.SecurityUtil;
import org.apache.hadoop.security.authentication.server.AuthenticationFilter;
import org.apache.hadoop.security.authentication.server.KerberosAuthenticationHandler;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import javax.servlet.*;
import java.io.IOException;
import java.net.InetAddress;
import java.net.UnknownHostException;
import java.util.Enumeration;
import java.util.Iterator;
import java.util.Properties;
/**
* This enforces authentication as part of the filter before processing the request.
* todo: Subclass of {@link org.apache.hadoop.security.authentication.server.AuthenticationFilter}.
*/
@Singleton
public class MetadataAuthenticationFilter extends AuthenticationFilter {
private static final Logger LOG = LoggerFactory.getLogger(MetadataAuthenticationFilter.class);
static final String PREFIX = "metadata.http.authentication.";
static final String BIND_ADDRESS = "bind.address";
@Override
protected Properties getConfiguration(String configPrefix, FilterConfig filterConfig) throws ServletException {
PropertiesConfiguration configuration;
try {
configuration = new PropertiesConfiguration("application.properties");
} catch (ConfigurationException e) {
throw new ServletException(e);
}
Properties config = new Properties();
config.put(AuthenticationFilter.COOKIE_PATH, "/");
// add any config passed in as init parameters
Enumeration<String> enumeration = filterConfig.getInitParameterNames();
while (enumeration.hasMoreElements()) {
String name = enumeration.nextElement();
config.put(name, filterConfig.getInitParameter(name));
}
// transfer application.properties config items starting with defined prefix
Iterator<String> itor = configuration.getKeys();
while (itor.hasNext()) {
String name = itor.next();
if (name.startsWith(PREFIX)) {
String value = configuration.getString(name);
name = name.substring(PREFIX.length());
config.put(name, value);
}
}
//Resolve _HOST into bind address
String bindAddress = config.getProperty(BIND_ADDRESS);
if (bindAddress == null) {
LOG.info("No host name configured. Defaulting to local host name.");
try {
bindAddress = InetAddress.getLocalHost().getHostName();
} catch (UnknownHostException e) {
throw new ServletException("Unable to obtain host name", e);
}
}
String principal = config.getProperty(KerberosAuthenticationHandler.PRINCIPAL);
if (principal != null) {
try {
principal = SecurityUtil.getServerPrincipal(principal, bindAddress);
} catch (IOException ex) {
throw new RuntimeException("Could not resolve Kerberos principal name: " + ex.toString(), ex);
}
config.put(KerberosAuthenticationHandler.PRINCIPAL, principal);
}
return config;
}
}
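The prefix handling above means that application.properties entries such as the following (values are examples; the key names match those used by the integration tests further down) are handed to the underlying Hadoop AuthenticationFilter with the metadata.http.authentication. prefix stripped, and any _HOST token in the principal is resolved against the bind address via SecurityUtil.getServerPrincipal:

# Illustrative application.properties snippet; values are examples only.
metadata.http.authentication.enabled=true
metadata.http.authentication.type=kerberos
metadata.http.authentication.kerberos.principal=HTTP/_HOST@EXAMPLE.COM
metadata.http.authentication.kerberos.keytab=/etc/security/keytabs/spnego.service.keytab
metadata.http.authentication.kerberos.name.rules=RULE:[1:$1@$0](.*@EXAMPLE.COM)s/@.*//\nDEFAULT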
...@@ -24,16 +24,21 @@ import com.google.inject.servlet.GuiceServletContextListener; ...@@ -24,16 +24,21 @@ import com.google.inject.servlet.GuiceServletContextListener;
import com.sun.jersey.api.core.PackagesResourceConfig; import com.sun.jersey.api.core.PackagesResourceConfig;
import com.sun.jersey.guice.JerseyServletModule; import com.sun.jersey.guice.JerseyServletModule;
import com.sun.jersey.guice.spi.container.servlet.GuiceContainer; import com.sun.jersey.guice.spi.container.servlet.GuiceContainer;
import org.apache.commons.configuration.ConfigurationException;
import org.apache.commons.configuration.PropertiesConfiguration;
import org.apache.hadoop.metadata.MetadataException; import org.apache.hadoop.metadata.MetadataException;
import org.apache.hadoop.metadata.RepositoryMetadataModule; import org.apache.hadoop.metadata.RepositoryMetadataModule;
import org.apache.hadoop.metadata.repository.typestore.ITypeStore; import org.apache.hadoop.metadata.repository.typestore.ITypeStore;
import org.apache.hadoop.metadata.typesystem.TypesDef; import org.apache.hadoop.metadata.typesystem.TypesDef;
import org.apache.hadoop.metadata.typesystem.types.TypeSystem; import org.apache.hadoop.metadata.typesystem.types.TypeSystem;
import org.apache.hadoop.metadata.web.filters.AuditFilter; import org.apache.hadoop.metadata.web.filters.AuditFilter;
import org.apache.hadoop.metadata.web.filters.MetadataAuthenticationFilter;
import org.apache.hadoop.security.authentication.server.AuthenticationFilter;
import org.slf4j.Logger; import org.slf4j.Logger;
import org.slf4j.LoggerFactory; import org.slf4j.LoggerFactory;
import javax.servlet.ServletContextEvent; import javax.servlet.ServletContextEvent;
import javax.servlet.ServletException;
import java.util.HashMap; import java.util.HashMap;
import java.util.Map; import java.util.Map;
...@@ -42,6 +47,8 @@ public class GuiceServletConfig extends GuiceServletContextListener { ...@@ -42,6 +47,8 @@ public class GuiceServletConfig extends GuiceServletContextListener {
private static final Logger LOG = LoggerFactory.getLogger(GuiceServletConfig.class); private static final Logger LOG = LoggerFactory.getLogger(GuiceServletConfig.class);
private static final String GUICE_CTX_PARAM = "guice.packages"; private static final String GUICE_CTX_PARAM = "guice.packages";
static final String HTTP_AUTHENTICATION_ENABLED = "metadata.http.authentication.enabled";
private Injector injector;
@Override @Override
protected Injector getInjector() { protected Injector getInjector() {
...@@ -52,24 +59,39 @@ public class GuiceServletConfig extends GuiceServletContextListener { ...@@ -52,24 +59,39 @@ public class GuiceServletConfig extends GuiceServletContextListener {
* .11/contribs/jersey-guice/com/sun/jersey/guice/spi/container/servlet/package-summary * .11/contribs/jersey-guice/com/sun/jersey/guice/spi/container/servlet/package-summary
* .html * .html
*/ */
Injector injector = Guice.createInjector( if (injector == null) {
new RepositoryMetadataModule(), injector = Guice.createInjector(
new JerseyServletModule() { new RepositoryMetadataModule(),
@Override new JerseyServletModule() {
protected void configureServlets() { @Override
filter("/*").through(AuditFilter.class); protected void configureServlets() {
filter("/*").through(AuditFilter.class);
String packages = getServletContext().getInitParameter(GUICE_CTX_PARAM); try {
configureAuthenticationFilter();
LOG.info("Jersey loading from packages: " + packages); } catch (ConfigurationException e) {
LOG.warn("Unable to add and configure authentication filter", e);
Map<String, String> params = new HashMap<>(); }
params.put(PackagesResourceConfig.PROPERTY_PACKAGES, packages);
serve("/api/metadata/*").with(GuiceContainer.class, params); String packages = getServletContext().getInitParameter(GUICE_CTX_PARAM);
}
}); LOG.info("Jersey loading from packages: " + packages);
LOG.info("Guice modules loaded"); Map<String, String> params = new HashMap<>();
params.put(PackagesResourceConfig.PROPERTY_PACKAGES, packages);
serve("/api/metadata/*").with(GuiceContainer.class, params);
}
private void configureAuthenticationFilter() throws ConfigurationException {
PropertiesConfiguration configuration =
new PropertiesConfiguration("application.properties");
if (Boolean.valueOf(configuration.getString(HTTP_AUTHENTICATION_ENABLED))) {
filter("/*").through(MetadataAuthenticationFilter.class);
}
}
});
LOG.info("Guice modules loaded");
}
return injector; return injector;
} }
......
File mode changed from 100644 to 100755
File mode changed from 100644 to 100755
File mode changed from 100644 to 100755
File mode changed from 100644 to 100755
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.metadata.web;
import org.apache.commons.configuration.ConfigurationException;
import org.apache.commons.configuration.PropertiesConfiguration;
import org.apache.hadoop.minikdc.MiniKdc;
import org.apache.zookeeper.Environment;
import org.mortbay.jetty.Server;
import org.mortbay.jetty.webapp.WebAppContext;
import java.io.File;
import java.io.FileWriter;
import java.io.IOException;
import java.io.Writer;
import java.nio.file.Files;
import java.util.Locale;
import java.util.Properties;
/**
*
*/
public class BaseSecurityTest {
private static final String JAAS_ENTRY =
"%s { \n"
+ " %s required\n"
// kerberos module
+ " keyTab=\"%s\"\n"
+ " debug=true\n"
+ " principal=\"%s\"\n"
+ " useKeyTab=true\n"
+ " useTicketCache=false\n"
+ " doNotPrompt=true\n"
+ " storeKey=true;\n"
+ "}; \n";
protected MiniKdc kdc;
protected String getWarPath() {
return String.format("/target/metadata-webapp-%s.war",
System.getProperty("release.version", "0.1-incubating-SNAPSHOT"));
}
protected void generateTestProperties(Properties props) throws ConfigurationException, IOException {
PropertiesConfiguration config = new PropertiesConfiguration(System.getProperty("user.dir") +
"/../src/conf/application.properties");
for (String propName : props.stringPropertyNames()) {
config.setProperty(propName, props.getProperty(propName));
}
File file = new File(System.getProperty("user.dir"), "application.properties");
file.deleteOnExit();
Writer fileWriter = new FileWriter(file);
config.save(fileWriter);
}
protected void startEmbeddedServer(Server server) throws Exception {
WebAppContext webapp = new WebAppContext();
webapp.setContextPath("/");
webapp.setWar(System.getProperty("user.dir") + getWarPath());
server.setHandler(webapp);
server.start();
}
protected File startKDC() throws Exception {
File target = Files.createTempDirectory("sectest").toFile();
File kdcWorkDir = new File(target, "kdc");
Properties kdcConf = MiniKdc.createConf();
kdcConf.setProperty(MiniKdc.DEBUG, "true");
kdc = new MiniKdc(kdcConf, kdcWorkDir);
kdc.start();
assert kdc.getRealm() != null;
return kdcWorkDir;
}
public String createJAASEntry(
String context,
String principal,
File keytab) {
String keytabpath = keytab.getAbsolutePath();
// fix up for windows; no-op on unix
keytabpath = keytabpath.replace('\\', '/');
return String.format(
Locale.ENGLISH,
JAAS_ENTRY,
context,
getKerberosAuthModuleForJVM(),
keytabpath,
principal);
}
protected String getKerberosAuthModuleForJVM() {
if (System.getProperty("java.vendor").contains("IBM")) {
return "com.ibm.security.auth.module.Krb5LoginModule";
} else {
return "com.sun.security.auth.module.Krb5LoginModule";
}
}
protected void bindJVMtoJAASFile(File jaasFile) {
String path = jaasFile.getAbsolutePath();
System.setProperty(Environment.JAAS_CONF_KEY, path);
}
protected File createKeytab(MiniKdc kdc, File kdcWorkDir, String principal, String filename) throws Exception {
File keytab = new File(kdcWorkDir, filename);
kdc.createPrincipal(keytab,
principal,
principal + "/localhost",
principal + "/127.0.0.1");
return keytab;
}
}
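For context, a call like createJAASEntry("Client", "dgi", keytab) renders the JAAS_ENTRY template above into roughly the following on a non-IBM JVM (the keytab path is illustrative):

Client {
    com.sun.security.auth.module.Krb5LoginModule required
    keyTab="/tmp/sectest-xyz/kdc/dgi.keytab"
    debug=true
    principal="dgi"
    useKeyTab=true
    useTicketCache=false
    doNotPrompt=true
    storeKey=true;
};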
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.metadata.web.filters;
import org.apache.commons.configuration.ConfigurationException;
import org.apache.commons.io.FileUtils;
import org.apache.hadoop.hdfs.web.URLConnectionFactory;
import org.apache.hadoop.metadata.web.BaseSecurityTest;
import org.apache.hadoop.metadata.web.service.EmbeddedServer;
import org.mortbay.jetty.Server;
import org.testng.annotations.Test;
import javax.security.auth.Subject;
import javax.security.auth.callback.*;
import javax.security.auth.login.LoginContext;
import javax.security.auth.login.LoginException;
import java.io.File;
import java.io.IOException;
import java.net.HttpURLConnection;
import java.net.URL;
import java.security.PrivilegedExceptionAction;
import java.util.Properties;
/**
*
*/
public class MetadataAuthenticationKerberosFilterIT extends BaseSecurityTest {
public static final String TEST_USER_JAAS_SECTION = "TestUser";
public static final String TESTUSER = "testuser";
public static final String TESTPASS = "testpass";
private File userKeytabFile;
private File httpKeytabFile;
class TestEmbeddedServer extends EmbeddedServer {
public TestEmbeddedServer(int port, String path) throws IOException {
super(port, path);
}
Server getServer() {
return server;
}
}
@Test
public void testKerberosBasedLogin() throws Exception {
setupKDCAndPrincipals();
TestEmbeddedServer server = null;
try {
// setup the application.properties file
generateKerberosTestProperties();
// need to create the web application programmatically in order to control the injection of the test
// application properties
server = new TestEmbeddedServer(23000, "webapp/target/metadata-governance");
startEmbeddedServer(server.getServer());
final URLConnectionFactory connectionFactory = URLConnectionFactory.DEFAULT_SYSTEM_CONNECTION_FACTORY;
// attempt to hit server and get rejected
URL url = new URL("http://localhost:23000/");
HttpURLConnection connection = (HttpURLConnection) connectionFactory.openConnection(url, false);
connection.setRequestMethod("GET");
connection.connect();
assert connection.getResponseCode() == 401;
// need to populate the ticket cache with a local user, so logging in...
Subject subject = loginTestUser();
Subject.doAs(subject, new PrivilegedExceptionAction<Object>() {
@Override
public Object run() throws Exception {
// attempt to hit server and get rejected
URL url = new URL("http://localhost:23000/");
HttpURLConnection connection = (HttpURLConnection) connectionFactory.openConnection(url, true);
connection.setRequestMethod("GET");
connection.connect();
assert connection.getResponseCode() == 200;
return null;
}
});
} finally {
server.getServer().stop();
kdc.stop();
}
}
protected Subject loginTestUser() throws LoginException, IOException {
LoginContext lc = new LoginContext(TEST_USER_JAAS_SECTION, new CallbackHandler() {
@Override
public void handle(Callback[] callbacks) throws IOException, UnsupportedCallbackException {
for (int i = 0; i < callbacks.length; i++) {
if (callbacks[i] instanceof PasswordCallback) {
PasswordCallback passwordCallback = (PasswordCallback) callbacks[i];
passwordCallback.setPassword(TESTPASS.toCharArray());
}
if (callbacks[i] instanceof NameCallback) {
NameCallback nameCallback = (NameCallback) callbacks[i];
nameCallback.setName(TESTUSER);
}
}
}
});
// attempt authentication
lc.login();
return lc.getSubject();
}
protected void generateKerberosTestProperties() throws IOException, ConfigurationException {
Properties props = new Properties();
props.setProperty("metadata.http.authentication.enabled", "true");
props.setProperty("metadata.http.authentication.type", "kerberos");
props.setProperty("metadata.http.authentication.kerberos.principal", "HTTP/localhost@" + kdc.getRealm());
props.setProperty("metadata.http.authentication.kerberos.keytab", httpKeytabFile.getAbsolutePath());
props.setProperty("metadata.http.authentication.kerberos.name.rules",
"RULE:[1:$1@$0](.*@EXAMPLE.COM)s/@.*//\nDEFAULT");
generateTestProperties(props);
}
public void setupKDCAndPrincipals() throws Exception {
// set up the KDC
File kdcWorkDir = startKDC();
userKeytabFile = createKeytab(kdc, kdcWorkDir, "dgi", "dgi.keytab");
httpKeytabFile = createKeytab(kdc, kdcWorkDir, "HTTP", "spnego.service.keytab");
// create a test user principal
kdc.createPrincipal(TESTUSER, TESTPASS);
StringBuilder jaas = new StringBuilder(1024);
jaas.append("TestUser {\n" +
" com.sun.security.auth.module.Krb5LoginModule required\nuseTicketCache=true;\n" +
"};\n");
jaas.append(createJAASEntry("Client", "dgi", userKeytabFile));
jaas.append(createJAASEntry("Server", "HTTP", httpKeytabFile));
File jaasFile = new File(kdcWorkDir, "jaas.txt");
FileUtils.write(jaasFile, jaas.toString());
bindJVMtoJAASFile(jaasFile);
}
}
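Outside of this test, the same SPNEGO handshake can be exercised by hand using a Kerberos ticket and curl's built-in negotiate support (commands assume a working kinit environment and the test port used above):

kinit testuser
# --negotiate -u : tells curl to perform SPNEGO using the current ticket cache
curl --negotiate -u : -i http://localhost:23000/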
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.metadata.web.filters;
import org.apache.commons.configuration.ConfigurationException;
import org.apache.hadoop.metadata.web.BaseSecurityTest;
import org.apache.hadoop.metadata.web.service.EmbeddedServer;
import org.mortbay.jetty.Server;
import org.testng.annotations.Test;
import java.io.IOException;
import java.net.HttpURLConnection;
import java.net.URL;
import java.util.Properties;
/**
*
*/
public class MetadataAuthenticationSimpleFilterIT extends BaseSecurityTest {
class TestEmbeddedServer extends EmbeddedServer {
public TestEmbeddedServer(int port, String path) throws IOException {
super(port, path);
}
Server getServer() {
return server;
}
}
@Test
public void testSimpleLogin() throws Exception {
generateSimpleLoginConfiguration();
TestEmbeddedServer server = new TestEmbeddedServer(23001, "webapp/target/metadata-governance");
try {
startEmbeddedServer(server.getServer());
URL url = new URL("http://localhost:23001");
HttpURLConnection connection = (HttpURLConnection)url.openConnection();
connection.setRequestMethod("GET");
connection.connect();
try {
assert connection.getResponseCode() == 403;
} catch (Exception e) {
e.printStackTrace();
}
url = new URL("http://localhost:23001/?user.name=testuser");
connection = (HttpURLConnection)url.openConnection();
connection.setRequestMethod("GET");
connection.connect();
assert connection.getResponseCode() == 200;
} finally {
server.getServer().stop();
}
}
protected void generateSimpleLoginConfiguration() throws IOException, ConfigurationException {
Properties config = new Properties();
config.setProperty("metadata.http.authentication.enabled", "true");
config.setProperty("metadata.http.authentication.type", "simple");
generateTestProperties(config);
}
}
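The simple (pseudo) authentication handler accepts the caller's identity via the user.name query parameter, which is exactly what the test above exercises; the equivalent manual check looks like this:

curl -i http://localhost:23001/                         # no identity supplied: expect 403
curl -i 'http://localhost:23001/?user.name=testuser'    # identity supplied: expect 200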
...@@ -21,39 +21,21 @@ import org.apache.commons.configuration.PropertiesConfiguration; ...@@ -21,39 +21,21 @@ import org.apache.commons.configuration.PropertiesConfiguration;
import org.apache.commons.io.FileUtils; import org.apache.commons.io.FileUtils;
import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.CommonConfigurationKeysPublic; import org.apache.hadoop.fs.CommonConfigurationKeysPublic;
import org.apache.hadoop.minikdc.MiniKdc; import org.apache.hadoop.metadata.web.BaseSecurityTest;
import org.apache.hadoop.security.UserGroupInformation; import org.apache.hadoop.security.UserGroupInformation;
import org.apache.hadoop.util.Shell; import org.apache.hadoop.util.Shell;
import org.apache.zookeeper.Environment;
import org.testng.annotations.Test; import org.testng.annotations.Test;
import java.io.File; import java.io.File;
import java.nio.file.Files;
import java.util.Locale;
import java.util.Properties;
/** /**
* *
*/ */
public class LoginProcessorIT { public class LoginProcessorIT extends BaseSecurityTest {
private static final String JAAS_ENTRY =
"%s { \n"
+ " %s required\n"
// kerberos module
+ " keyTab=\"%s\"\n"
+ " debug=true\n"
+ " principal=\"%s\"\n"
+ " useKeyTab=true\n"
+ " useTicketCache=false\n"
+ " doNotPrompt=true\n"
+ " storeKey=true;\n"
+ "}; \n";
protected static final String kerberosRule = protected static final String kerberosRule =
"RULE:[1:$1@$0](.*@EXAMPLE.COM)s/@.*//\nDEFAULT"; "RULE:[1:$1@$0](.*@EXAMPLE.COM)s/@.*//\nDEFAULT";
private MiniKdc kdc;
@Test @Test
public void testDefaultSimpleLogin() throws Exception { public void testDefaultSimpleLogin() throws Exception {
LoginProcessor processor = new LoginProcessor() { LoginProcessor processor = new LoginProcessor() {
...@@ -111,12 +93,7 @@ public class LoginProcessorIT { ...@@ -111,12 +93,7 @@ public class LoginProcessorIT {
private File setupKDCAndPrincipals() throws Exception { private File setupKDCAndPrincipals() throws Exception {
// set up the KDC // set up the KDC
File target = Files.createTempDirectory("sectest").toFile(); File kdcWorkDir = startKDC();
File kdcWorkDir = new File(target, "kdc");
Properties kdcConf = MiniKdc.createConf();
kdcConf.setProperty(MiniKdc.DEBUG, "true");
kdc = new MiniKdc(kdcConf, kdcWorkDir);
kdc.start();
assert kdc.getRealm() != null; assert kdc.getRealm() != null;
...@@ -134,41 +111,4 @@ public class LoginProcessorIT { ...@@ -134,41 +111,4 @@ public class LoginProcessorIT {
return keytabFile; return keytabFile;
} }
private File createKeytab(MiniKdc kdc, File kdcWorkDir, String principal, String filename) throws Exception {
File keytab = new File(kdcWorkDir, filename);
kdc.createPrincipal(keytab,
principal,
principal + "/localhost",
principal + "/127.0.0.1");
return keytab;
}
public String createJAASEntry(
String context,
String principal,
File keytab) {
String keytabpath = keytab.getAbsolutePath();
// fix up for windows; no-op on unix
keytabpath = keytabpath.replace('\\', '/');
return String.format(
Locale.ENGLISH,
JAAS_ENTRY,
context,
getKerberosAuthModuleForJVM(),
keytabpath,
principal);
}
private String getKerberosAuthModuleForJVM() {
if (System.getProperty("java.vendor").contains("IBM")) {
return "com.ibm.security.auth.module.Krb5LoginModule";
} else {
return "com.sun.security.auth.module.Krb5LoginModule";
}
}
private void bindJVMtoJAASFile(File jaasFile) {
String path = jaasFile.getAbsolutePath();
System.setProperty(Environment.JAAS_CONF_KEY, path);
}
} }