Commit 9cea4f64 by dileep bhimineni

merge

merged conflicts

Merge branch 'master' into HDPDGI-34-1

Conflicts:
dashboard/v2/public/css/common.css
dashboard/v2/public/modules/details/views/attribute.html
dashboard/v2/public/modules/details/views/details.html
dashboard/v2/public/modules/details/views/schema.html
dashboard/v2/public/modules/home/views/header.html
dashboard/v2/public/modules/lineage/lineageController.js
dashboard/v2/public/modules/navigation/navigationController.js
dashboard/v2/public/modules/navigation/navigationModule.js
dashboard/v2/public/modules/search/searchController.js
dashboard/v2/public/modules/search/searchRoutes.js
dashboard/v2/public/modules/search/views/search.html
dashboard/v2/public/modules/search/views/searchResult.html
dashboard/v2/public/views/includes/foot.html
dashboard/v2/public/views/includes/head.html
parents dd61e708 cbf662db
@@ -33,32 +33,13 @@
<packaging>jar</packaging>
<properties>
<hive.version>1.1.0</hive.version>
<hive.version>1.2.0</hive.version>
<calcite.version>0.9.2-incubating</calcite.version>
<hadoop.version>2.6.0</hadoop.version>
</properties>
<dependencies>
<dependency>
<groupId>org.apache.hadoop.metadata</groupId>
<artifactId>metadata-client</artifactId>
<version>${version}</version>
<exclusions>
<exclusion>
<groupId>com.google.guava</groupId>
<artifactId>guava</artifactId>
</exclusion>
</exclusions>
<scope>runtime</scope>
<type>test-jar</type>
</dependency>
<dependency>
<groupId>org.apache.hadoop.metadata</groupId>
<artifactId>metadata-typesystem</artifactId>
</dependency>
<dependency>
<groupId>org.apache.hadoop</groupId>
<artifactId>hadoop-minikdc</artifactId>
<version>${hadoop.version}</version>
@@ -99,6 +80,25 @@
</dependency>
<dependency>
<groupId>org.apache.hadoop.metadata</groupId>
<artifactId>metadata-client</artifactId>
<version>${version}</version>
<exclusions>
<exclusion>
<groupId>com.google.guava</groupId>
<artifactId>guava</artifactId>
</exclusion>
</exclusions>
<scope>runtime</scope>
<type>test-jar</type>
</dependency>
<dependency>
<groupId>org.apache.hadoop.metadata</groupId>
<artifactId>metadata-typesystem</artifactId>
</dependency>
<dependency>
<groupId>org.apache.hadoop</groupId>
<artifactId>hadoop-client</artifactId>
<version>${hadoop.version}</version>
......
@@ -30,15 +30,13 @@ import org.apache.hadoop.hive.ql.metadata.Hive;
import org.apache.hadoop.hive.ql.metadata.Partition;
import org.apache.hadoop.hive.ql.metadata.Table;
import org.apache.hadoop.metadata.MetadataServiceClient;
import org.apache.hadoop.metadata.MetadataServiceException;
import org.apache.hadoop.metadata.hive.model.HiveDataModelGenerator;
import org.apache.hadoop.metadata.hive.model.HiveDataTypes;
import org.apache.hadoop.metadata.typesystem.ITypedReferenceableInstance;
import org.apache.hadoop.metadata.typesystem.Referenceable;
import org.apache.hadoop.metadata.typesystem.Struct;
import org.apache.hadoop.metadata.typesystem.json.InstanceSerialization;
import org.apache.hadoop.metadata.typesystem.json.Serialization;
import org.apache.hadoop.metadata.typesystem.persistence.Id;
import org.apache.hadoop.metadata.typesystem.types.TypeSystem;
import org.codehaus.jettison.json.JSONArray;
import org.codehaus.jettison.json.JSONException;
import org.codehaus.jettison.json.JSONObject;
@@ -55,20 +53,9 @@ import java.util.Set;
*/
public class HiveMetaStoreBridge {
private static final String DEFAULT_DGI_URL = "http://localhost:21000/";
public static final String HIVE_CLUSTER_NAME = "hive.cluster.name";
public static class Pair<S, T> {
public static final String DEFAULT_CLUSTER_NAME = "primary";
public S first;
private final String clusterName;
public T second;
public Pair(S first, T second) {
this.first = first;
this.second = second;
}
public static <S, T> Pair of(S first, T second) {
return new Pair(first, second);
}
}
public static final String DGI_URL_PROPERTY = "hive.hook.dgi.url";
@@ -82,6 +69,7 @@ public class HiveMetaStoreBridge {
* @param hiveConf
*/
public HiveMetaStoreBridge(HiveConf hiveConf) throws Exception {
clusterName = hiveConf.get(HIVE_CLUSTER_NAME, DEFAULT_CLUSTER_NAME);
hiveClient = Hive.get(hiveConf);
metadataServiceClient = new MetadataServiceClient(hiveConf.get(DGI_URL_PROPERTY, DEFAULT_DGI_URL));
}
@@ -104,35 +92,15 @@
}
}
/**
* Gets reference for the database
*
* @param dbName database name
* @return Reference for database if exists, else null
* @throws Exception
*/
private Referenceable getDatabaseReference(String dbName) throws Exception {
LOG.debug("Getting reference for database {}", dbName);
String typeName = HiveDataTypes.HIVE_DB.getName();
MetadataServiceClient dgiClient = getMetadataServiceClient();
JSONArray results = dgiClient.rawSearch(typeName, "name", dbName);
if (results.length() == 0) {
return null;
} else {
String guid = getGuidFromDSLResponse(results.getJSONObject(0));
return new Referenceable(guid, typeName, null);
}
}
public Referenceable registerDatabase(String databaseName) throws Exception {
Referenceable dbRef = getDatabaseReference(databaseName);
Referenceable dbRef = getDatabaseReference(databaseName, clusterName);
if (dbRef == null) {
LOG.info("Importing objects from databaseName : " + databaseName);
Database hiveDB = hiveClient.getDatabase(databaseName);
dbRef = new Referenceable(HiveDataTypes.HIVE_DB.getName());
dbRef.set("name", hiveDB.getName());
dbRef.set("clusterName", clusterName);
dbRef.set("description", hiveDB.getDescription());
dbRef.set("locationUri", hiveDB.getLocationUri());
dbRef.set("parameters", hiveDB.getParameters());
@@ -155,7 +123,7 @@ public class HiveMetaStoreBridge {
String entityJSON = InstanceSerialization.toJson(referenceable, true);
LOG.debug("Submitting new entity {} = {}", referenceable.getTypeName(), entityJSON);
JSONObject jsonObject = metadataServiceClient.createEntity(entityJSON);
String guid = jsonObject.getString(MetadataServiceClient.RESULTS);
String guid = jsonObject.getString(MetadataServiceClient.GUID);
LOG.debug("created instance for type " + typeName + ", guid: " + guid);
return new Referenceable(guid, referenceable.getTypeName(), null);
@@ -168,7 +136,7 @@
Referenceable tableReferenceable = registerTable(databaseReferenceable, databaseName, tableName);
// Import Partitions
Referenceable sdReferenceable = getSDForTable(databaseReferenceable, tableName);
Referenceable sdReferenceable = getSDForTable(databaseName, tableName);
importPartitions(databaseName, tableName, databaseReferenceable, tableReferenceable, sdReferenceable);
// Import Indexes
@@ -177,42 +145,97 @@
}
/**
* Gets reference for the database
*
*
* @param databaseName
* @param clusterName cluster name
* @return Reference for database if exists, else null
* @throws Exception
*/
private Referenceable getDatabaseReference(String databaseName, String clusterName) throws Exception {
LOG.debug("Getting reference for database {}", databaseName);
String typeName = HiveDataTypes.HIVE_DB.getName();
String dslQuery = String.format("%s where name = '%s' and clusterName = '%s'", HiveDataTypes.HIVE_DB.getName(),
databaseName, clusterName);
return getEntityReferenceFromDSL(typeName, dslQuery);
}
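// For illustration (not part of the patch): with databaseName "default" and the default cluster
// name "primary", the DSL built above would read roughly
//     hive_db where name = 'default' and clusterName = 'primary'
// (assuming HiveDataTypes.HIVE_DB.getName() resolves to "hive_db"), yielding a Referenceable that
// wraps the guid of the single matching entity, or null when the database is not yet registered.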
private Referenceable getEntityReferenceFromDSL(String typeName, String dslQuery) throws Exception {
MetadataServiceClient dgiClient = getMetadataServiceClient();
JSONArray results = dgiClient.searchByDSL(dslQuery);
if (results.length() == 0) {
return null;
} else {
String guid = getGuidFromDSLResponse(results.getJSONObject(0));
return new Referenceable(guid, typeName, null);
}
}
/**
* Gets reference for the table
*
* @param dbRef
* @param dbName
* @param tableName table name
* @return table reference if exists, else null
* @throws Exception
*/
private Referenceable getTableReference(Referenceable dbRef, String tableName) throws Exception {
private Referenceable getTableReference(String dbName, String tableName) throws Exception {
LOG.debug("Getting reference for table {}.{}", dbRef, tableName);
LOG.debug("Getting reference for table {}.{}", dbName, tableName);
String typeName = HiveDataTypes.HIVE_TABLE.getName();
MetadataServiceClient dgiClient = getMetadataServiceClient();
//todo DSL support for reference doesn't work. is the usage right?
// String dslQuery = String.format("%s as t where name = '%s' dbName where name = '%s' and "
// String query = String.format("%s where dbName = \"%s\" and tableName = \"%s\"", typeName, dbRef.getId().id,
// + "clusterName = '%s' select t",
// tableName);
// HiveDataTypes.HIVE_TABLE.getName(), tableName, dbName, clusterName);
String query = String.format("%s where name = \"%s\"", typeName, tableName);
String dbType = HiveDataTypes.HIVE_DB.getName();
JSONArray results = dgiClient.searchByDSL(query);
String gremlinQuery = String.format("g.V.has('__typeName', '%s').has('%s.name', '%s').as('t').out"
+ "('__%s.dbName').has('%s.name', '%s').has('%s.clusterName', '%s').back('t').toList()",
typeName, typeName, tableName, typeName, dbType, dbName, dbType, clusterName);
return getEntityReferenceFromGremlin(typeName, gremlinQuery);
}
private Referenceable getEntityReferenceFromGremlin(String typeName, String gremlinQuery) throws MetadataServiceException,
JSONException {
MetadataServiceClient client = getMetadataServiceClient();
JSONObject response = client.searchByGremlin(gremlinQuery);
JSONArray results = response.getJSONArray(MetadataServiceClient.RESULTS);
if (results.length() == 0) {
return null;
} else {
//There should be just one instance with the given name
String guid = getGuidFromDSLResponse(results.getJSONObject(0));
LOG.debug("Got reference for table {}.{} = {}", dbRef, tableName, guid);
return new Referenceable(guid, typeName, null);
}
String guid = results.getJSONObject(0).getString("__guid");
return new Referenceable(guid, typeName, null);
}
private Referenceable getPartitionReference(String dbName, String tableName, List<String> values) throws Exception {
String valuesStr = "['" + StringUtils.join(values, "', '") + "']";
LOG.debug("Getting reference for partition for {}.{} with values {}", dbName, tableName, valuesStr);
String typeName = HiveDataTypes.HIVE_PARTITION.getName();
// String dslQuery = String.format("%s as p where values = %s, tableName where name = '%s', "
// + "dbName where name = '%s' and clusterName = '%s' select p", typeName, valuesStr, tableName,
// dbName, clusterName);
String dbType = HiveDataTypes.HIVE_DB.getName();
String tableType = HiveDataTypes.HIVE_TABLE.getName();
String gremlinQuery = String.format("g.V.has('__typeName', '%s').has('%s.values', %s).as('p')."
+ "out('__%s.tableName').has('%s.name', '%s').out('__%s.dbName').has('%s.name', '%s')"
+ ".has('%s.clusterName', '%s').back('p').toList()", typeName, typeName, valuesStr, typeName,
tableType, tableName, tableType, dbType, dbName, dbType, clusterName);
return getEntityReferenceFromGremlin(typeName, gremlinQuery);
}
private String getGuidFromDSLResponse(JSONObject jsonObject) throws JSONException {
return jsonObject.getJSONObject("$id$").getString("id");
}
private Referenceable getSDForTable(Referenceable dbRef, String tableName) throws Exception {
private Referenceable getSDForTable(String dbName, String tableName) throws Exception {
Referenceable tableRef = getTableReference(dbRef, tableName);
Referenceable tableRef = getTableReference(dbName, tableName);
if (tableRef == null) {
throw new IllegalArgumentException("Table " + dbRef + "." + tableName + " doesn't exist");
throw new IllegalArgumentException("Table " + dbName + "." + tableName + " doesn't exist");
}
MetadataServiceClient dgiClient = getMetadataServiceClient();
@@ -228,7 +251,7 @@ public class HiveMetaStoreBridge {
public Referenceable registerTable(Referenceable dbReference, String dbName, String tableName) throws Exception {
LOG.info("Attempting to register table [" + tableName + "]");
Referenceable tableRef = getTableReference(dbReference, tableName);
Referenceable tableRef = getTableReference(dbName, tableName);
if (tableRef == null) {
LOG.info("Importing objects from " + dbName + "." + tableName);
@@ -302,31 +325,48 @@
}
}
//todo should be idempotent
public Referenceable registerPartition(Partition partition) throws Exception {
String dbName = partition.getTable().getDbName();
String tableName = partition.getTable().getTableName();
Referenceable dbRef = registerDatabase(dbName);
Referenceable tableRef = registerTable(dbName, tableName);
Referenceable sdRef = getSDForTable(dbName, tableName);
return importPartition(partition, dbRef, tableRef, sdRef);
}
private Referenceable importPartition(Partition hivePart,
Referenceable dbReferenceable,
Referenceable tableReferenceable,
Referenceable sdReferenceable) throws Exception {
LOG.info("Importing partition for {}.{} with values {}", dbReferenceable, tableReferenceable,
StringUtils.join(hivePart.getValues(), ","));
Referenceable partRef = new Referenceable(HiveDataTypes.HIVE_PARTITION.getName());
String dbName = hivePart.getTable().getDbName();
partRef.set("values", hivePart.getValues());
String tableName = hivePart.getTable().getTableName();
partRef.set("dbName", dbReferenceable);
Referenceable partRef = getPartitionReference(dbName, tableName, hivePart.getValues());
partRef.set("tableName", tableReferenceable);
if (partRef == null) {
partRef = new Referenceable(HiveDataTypes.HIVE_PARTITION.getName());
partRef.set("values", hivePart.getValues());
//todo fix
partRef.set("dbName", dbReferenceable);
partRef.set("createTime", hivePart.getLastAccessTime());
partRef.set("tableName", tableReferenceable);
partRef.set("lastAccessTime", hivePart.getLastAccessTime());
// sdStruct = fillStorageDescStruct(hivePart.getSd());
//todo fix
// Instead of creating copies of the sdstruct for partitions we are reusing existing
partRef.set("createTime", hivePart.getLastAccessTime());
// ones will fix to identify partitions with differing schema.
partRef.set("lastAccessTime", hivePart.getLastAccessTime());
partRef.set("sd", sdReferenceable);
partRef.set("parameters", hivePart.getParameters());
// sdStruct = fillStorageDescStruct(hivePart.getSd());
// Instead of creating copies of the sdstruct for partitions we are reusing existing
// ones will fix to identify partitions with differing schema.
partRef.set("sd", sdReferenceable);
return createInstance(partRef);
partRef.set("parameters", hivePart.getParameters());
partRef = createInstance(partRef);
} else {
LOG.info("Partition {}.{} with values {} is already registered with id {}", dbName, tableName,
StringUtils.join(hivePart.getValues(), ","), partRef.getId().id);
}
return partRef;
}
private void importIndexes(String db, String table,
......
@@ -37,7 +37,6 @@ package org.apache.hadoop.metadata.hive.hook;
import com.google.common.util.concurrent.ThreadFactoryBuilder;
import org.antlr.runtime.tree.Tree;
import org.apache.hadoop.hive.conf.HiveConf;
import org.apache.hadoop.hive.ql.QueryPlan;
import org.apache.hadoop.hive.ql.exec.ExplainTask;
@@ -48,33 +47,16 @@ import org.apache.hadoop.hive.ql.hooks.HookContext;
import org.apache.hadoop.hive.ql.hooks.ReadEntity;
import org.apache.hadoop.hive.ql.hooks.WriteEntity;
import org.apache.hadoop.hive.ql.metadata.Table;
import org.apache.hadoop.hive.ql.parse.ASTNode;
import org.apache.hadoop.hive.ql.parse.BaseSemanticAnalyzer;
import org.apache.hadoop.hive.ql.parse.HiveParser;
import org.apache.hadoop.hive.ql.parse.HiveSemanticAnalyzerHook;
import org.apache.hadoop.hive.ql.parse.HiveSemanticAnalyzerHookContext;
import org.apache.hadoop.hive.ql.parse.ParseDriver;
import org.apache.hadoop.hive.ql.parse.SemanticException;
import org.apache.hadoop.hive.ql.plan.ExplainWork;
import org.apache.hadoop.hive.ql.plan.HiveOperation;
import org.apache.hadoop.metadata.MetadataServiceClient;
import org.apache.hadoop.metadata.hive.bridge.HiveMetaStoreBridge;
import org.apache.hadoop.metadata.hive.model.HiveDataTypes;
import org.apache.hadoop.metadata.typesystem.Referenceable;
import org.codehaus.jettison.json.JSONArray;
import org.json.JSONObject;
import org.codehaus.jettison.json.JSONObject;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.io.BufferedWriter;
import java.io.File;
import java.io.FileWriter;
import java.io.Serializable;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.LinkedBlockingQueue;
@@ -84,7 +66,7 @@ import java.util.concurrent.TimeUnit;
/**
* DgiHook sends lineage information to the DgiSever.
*/
public class HiveHook implements ExecuteWithHookContext, HiveSemanticAnalyzerHook {
public class HiveHook implements ExecuteWithHookContext {
private static final Logger LOG = LoggerFactory.getLogger(HiveHook.class);
@@ -115,17 +97,12 @@ public class HiveHook implements ExecuteWithHookContext, HiveSemanticAnalyzerHoo
int maxThreads = hiveConf.getInt(MAX_THREADS, maxThreadsDefault);
long keepAliveTime = hiveConf.getLong(KEEP_ALIVE_TIME, keepAliveTimeDefault);
executor = new ThreadPoolExecutor(minThreads, maxThreads,
executor = new ThreadPoolExecutor(minThreads, maxThreads, keepAliveTime, TimeUnit.MILLISECONDS,
keepAliveTime, TimeUnit.MILLISECONDS,
new LinkedBlockingQueue<Runnable>(),
new ThreadFactoryBuilder()
new ThreadFactoryBuilder().setDaemon(true).setNameFormat("DGI Logger %d").build());
.setDaemon(true)
.setNameFormat("DGI Logger %d")
.build());
try {
Runtime.getRuntime().addShutdownHook(
Runtime.getRuntime().addShutdownHook(new Thread() {
new Thread() {
@Override
public void run() {
try {
@@ -137,8 +114,7 @@ public class HiveHook implements ExecuteWithHookContext, HiveSemanticAnalyzerHoo
}
// shutdown client
}
}
});
);
} catch (IllegalStateException is) {
LOG.info("Attempting to send msg while shutdown in progress.");
}
@@ -146,6 +122,19 @@ public class HiveHook implements ExecuteWithHookContext, HiveSemanticAnalyzerHoo
LOG.info("Created DGI Hook");
}
class HiveEvent {
public HiveConf conf;
public Set<ReadEntity> inputs;
public Set<WriteEntity> outputs;
public String user;
public HiveOperation operation;
public QueryPlan queryPlan;
public HookContext.HookType hookType;
public JSONObject jsonPlan;
}
@Override
public void run(final HookContext hookContext) throws Exception {
if (executor == null) {
@@ -154,104 +143,118 @@ public class HiveHook implements ExecuteWithHookContext, HiveSemanticAnalyzerHoo
}
// clone to avoid concurrent access
final HiveEvent event = new HiveEvent();
final HiveConf conf = new HiveConf(hookContext.getConf());
boolean debug = conf.get("hive.hook.dgi.synchronous", "false").equals("true");
event.conf = conf;
event.inputs = hookContext.getInputs();
event.outputs = hookContext.getOutputs();
event.user = hookContext.getUserName() == null ? hookContext.getUgi().getUserName() : hookContext.getUserName();
event.operation = HiveOperation.valueOf(hookContext.getOperationName());
event.queryPlan = hookContext.getQueryPlan();
event.hookType = hookContext.getHookType();
//todo throws NPE
// event.jsonPlan = getQueryPlan(event);
event.jsonPlan = new JSONObject();
if (debug) {
fireAndForget(hookContext, conf);
fireAndForget(event);
} else {
executor.submit(
executor.submit(new Runnable() {
new Runnable() {
@Override
public void run() {
try {
fireAndForget(hookContext, conf);
fireAndForget(event);
} catch (Throwable e) {
LOG.info("DGI hook failed", e);
}
}
}
});
);
}
}
private void fireAndForget(HookContext hookContext, HiveConf conf) throws Exception {
private void fireAndForget(HiveEvent event) throws Exception {
assert hookContext.getHookType() == HookContext.HookType.POST_EXEC_HOOK : "Non-POST_EXEC_HOOK not supported!";
assert event.hookType == HookContext.HookType.POST_EXEC_HOOK : "Non-POST_EXEC_HOOK not supported!";
LOG.info("Entered DGI hook for hook type {} operation {}", hookContext.getHookType(),
hookContext.getOperationName());
HiveOperation operation = HiveOperation.valueOf(hookContext.getOperationName());
HiveMetaStoreBridge dgiBridge = new HiveMetaStoreBridge(conf);
LOG.info("Entered DGI hook for hook type {} operation {}", event.hookType, event.operation);
HiveMetaStoreBridge dgiBridge = new HiveMetaStoreBridge(event.conf);
if (!typesRegistered) {
dgiBridge.registerHiveDataModel();
typesRegistered = true;
}
switch (operation) {
switch (event.operation) {
case CREATEDATABASE:
Set<WriteEntity> outputs = hookContext.getOutputs();
handleCreateDB(dgiBridge, event);
for (WriteEntity entity : outputs) {
break;
if (entity.getType() == Entity.Type.DATABASE) {
dgiBridge.registerDatabase(entity.getDatabase().getName());
case CREATETABLE:
}
handleCreateTable(dgiBridge, event);
}
break;
break;
case CREATETABLE_AS_SELECT:
case CREATETABLE:
case CREATEVIEW:
outputs = hookContext.getOutputs();
case LOAD:
for (WriteEntity entity : outputs) {
case EXPORT:
if (entity.getType() == Entity.Type.TABLE) {
case IMPORT:
case QUERY:
Table table = entity.getTable();
registerProcess(dgiBridge, event);
//TODO table.getDbName().toLowerCase() is required as hive stores in lowercase,
break;
// but table.getDbName() is not lowercase
Referenceable dbReferenceable = dgiBridge.registerDatabase(table.getDbName().toLowerCase());
default:
dgiBridge.registerTable(dbReferenceable, table.getDbName(), table.getTableName());
}
}
}
break;
private void handleCreateTable(HiveMetaStoreBridge dgiBridge, HiveEvent event) throws Exception {
for (WriteEntity entity : event.outputs) {
case CREATETABLE_AS_SELECT:
if (entity.getType() == Entity.Type.TABLE) {
registerCTAS(dgiBridge, hookContext, conf);
break;
Table table = entity.getTable();
//TODO table.getDbName().toLowerCase() is required as hive stores in lowercase,
default:
// but table.getDbName() is not lowercase
Referenceable dbReferenceable = dgiBridge.registerDatabase(table.getDbName().toLowerCase());
dgiBridge.registerTable(dbReferenceable, table.getDbName(), table.getTableName());
}
}
}
private void handleCreateDB(HiveMetaStoreBridge dgiBridge, HiveEvent event) throws Exception {
for (WriteEntity entity : event.outputs) {
if (entity.getType() == Entity.Type.DATABASE) {
dgiBridge.registerDatabase(entity.getDatabase().getName());
}
}
}
}
private void registerCTAS(HiveMetaStoreBridge dgiBridge, HookContext hookContext, HiveConf conf) throws Exception {
private void registerProcess(HiveMetaStoreBridge dgiBridge, HiveEvent event) throws Exception {
Set<ReadEntity> inputs = hookContext.getInputs();
Set<ReadEntity> inputs = event.inputs;
Set<WriteEntity> outputs = hookContext.getOutputs();
Set<WriteEntity> outputs = event.outputs;
//Even explain CTAS has operation name as CREATETABLE_AS_SELECT
if (inputs.isEmpty() && outputs.isEmpty()) {
LOG.info("Explain statement. Skipping...");
}
//todo hookContext.getUserName() is null in hdp sandbox 2.2.4
if (event.queryPlan == null) {
String user = hookContext.getUserName() == null ? System.getProperty("user.name") : hookContext.getUserName();
LOG.info("Query plan is missing. Skipping...");
HiveOperation operation = HiveOperation.valueOf(hookContext.getOperationName());
String queryId = null;
String queryStr = null;
long queryStartTime = 0;
QueryPlan plan = hookContext.getQueryPlan();
if (plan != null) {
queryId = plan.getQueryId();
queryStr = plan.getQueryString();
queryStartTime = plan.getQueryStartTime();
} }
String queryId = event.queryPlan.getQueryId();
String queryStr = event.queryPlan.getQueryStr();
long queryStartTime = event.queryPlan.getQueryStartTime();
LOG.debug("Registering CTAS query: {}", queryStr);
Referenceable processReferenceable = new Referenceable(HiveDataTypes.HIVE_PROCESS.getName());
processReferenceable.set("name", operation.getOperationName());
processReferenceable.set("name", event.operation.getOperationName());
processReferenceable.set("startTime", queryStartTime);
processReferenceable.set("userName", user);
processReferenceable.set("userName", event.user);
List<Referenceable> source = new ArrayList<>();
for (ReadEntity readEntity : inputs) {
if (readEntity.getTyp() == Entity.Type.TABLE) {
if (readEntity.getType() == Entity.Type.TABLE) {
Table table = readEntity.getTable();
String dbName = table.getDbName().toLowerCase();
source.add(dgiBridge.registerTable(dbName, table.getTableName()));
@@ -260,16 +263,19 @@ public class HiveHook implements ExecuteWithHookContext, HiveSemanticAnalyzerHoo
processReferenceable.set("inputTables", source);
List<Referenceable> target = new ArrayList<>();
for (WriteEntity writeEntity : outputs) {
if (writeEntity.getTyp() == Entity.Type.TABLE) {
if (writeEntity.getType() == Entity.Type.TABLE || writeEntity.getType() == Entity.Type.PARTITION) {
Table table = writeEntity.getTable();
String dbName = table.getDbName().toLowerCase();
target.add(dgiBridge.registerTable(dbName, table.getTableName()));
}
if (writeEntity.getType() == Entity.Type.PARTITION) {
dgiBridge.registerPartition(writeEntity.getPartition());
}
}
processReferenceable.set("outputTables", target);
processReferenceable.set("queryText", queryStr);
processReferenceable.set("queryId", queryId);
processReferenceable.set("queryPlan", getQueryPlan(hookContext, conf));
processReferenceable.set("queryPlan", event.jsonPlan.toString());
processReferenceable.set("endTime", System.currentTimeMillis());
//TODO set
@@ -278,234 +284,10 @@ public class HiveHook implements ExecuteWithHookContext, HiveSemanticAnalyzerHoo
}
private String getQueryPlan(HookContext hookContext, HiveConf conf) throws Exception {
private JSONObject getQueryPlan(HiveEvent event) throws Exception {
//We need to somehow get the sem associated with the plan and use it here.
MySemanticAnaylzer sem = new MySemanticAnaylzer(conf);
QueryPlan queryPlan = hookContext.getQueryPlan();
sem.setInputs(queryPlan.getInputs());
ExplainWork ew = new ExplainWork(null, null, queryPlan.getRootTasks(), queryPlan.getFetchTask(), null, sem,
false, true, false, false, false);
ExplainTask explain = new ExplainTask();
explain.initialize(conf, queryPlan, null);
explain.initialize(event.conf, event.queryPlan, null);
List<Task<?>> rootTasks = event.queryPlan.getRootTasks();
org.json.JSONObject explainPlan = explain.getJSONPlan(null, ew);
return explain.getJSONPlan(null, null, rootTasks, event.queryPlan.getFetchTask(), true, false, false);
return explainPlan.toString();
}
private void analyzeHiveParseTree(ASTNode ast) {
String astStr = ast.dump();
Tree tab = ast.getChild(0);
String fullTableName;
boolean isExternal = false;
boolean isTemporary = false;
String inputFormat = null;
String outputFormat = null;
String serde = null;
String storageHandler = null;
String likeTableName = null;
String comment = null;
Tree ctasNode = null;
Tree rowFormatNode = null;
String location = null;
Map<String, String> serdeProps = new HashMap<>();
try {
BufferedWriter fw = new BufferedWriter(
new FileWriter(new File("/tmp/dgi/", "ASTDump"), true));
fw.write("Full AST Dump" + astStr);
switch (ast.getToken().getType()) {
case HiveParser.TOK_CREATETABLE:
if (tab.getType() != HiveParser.TOK_TABNAME ||
(tab.getChildCount() != 1 && tab.getChildCount() != 2)) {
LOG.error("Ignoring malformed Create table statement");
}
if (tab.getChildCount() == 2) {
String dbName = BaseSemanticAnalyzer
.unescapeIdentifier(tab.getChild(0).getText());
String tableName = BaseSemanticAnalyzer
.unescapeIdentifier(tab.getChild(1).getText());
fullTableName = dbName + "." + tableName;
} else {
fullTableName = BaseSemanticAnalyzer
.unescapeIdentifier(tab.getChild(0).getText());
}
LOG.info("Creating table " + fullTableName);
int numCh = ast.getChildCount();
for (int num = 1; num < numCh; num++) {
ASTNode child = (ASTNode) ast.getChild(num);
// Handle storage format
switch (child.getToken().getType()) {
case HiveParser.TOK_TABLEFILEFORMAT:
if (child.getChildCount() < 2) {
throw new SemanticException(
"Incomplete specification of File Format. " +
"You must provide InputFormat, OutputFormat.");
}
inputFormat = BaseSemanticAnalyzer
.unescapeSQLString(child.getChild(0).getText());
outputFormat = BaseSemanticAnalyzer
.unescapeSQLString(child.getChild(1).getText());
if (child.getChildCount() == 3) {
serde = BaseSemanticAnalyzer
.unescapeSQLString(child.getChild(2).getText());
}
break;
case HiveParser.TOK_STORAGEHANDLER:
storageHandler = BaseSemanticAnalyzer
.unescapeSQLString(child.getChild(0).getText());
if (child.getChildCount() == 2) {
BaseSemanticAnalyzer.readProps(
(ASTNode) (child.getChild(1).getChild(0)),
serdeProps);
}
break;
case HiveParser.TOK_FILEFORMAT_GENERIC:
ASTNode grandChild = (ASTNode) child.getChild(0);
String name = (grandChild == null ? "" : grandChild.getText())
.trim().toUpperCase();
if (name.isEmpty()) {
LOG.error("File format in STORED AS clause is empty");
break;
}
break;
}
switch (child.getToken().getType()) {
case HiveParser.KW_EXTERNAL:
isExternal = true;
break;
case HiveParser.KW_TEMPORARY:
isTemporary = true;
break;
case HiveParser.TOK_LIKETABLE:
if (child.getChildCount() > 0) {
likeTableName = BaseSemanticAnalyzer
.getUnescapedName((ASTNode) child.getChild(0));
}
break;
case HiveParser.TOK_QUERY:
ctasNode = child;
break;
case HiveParser.TOK_TABLECOMMENT:
comment = BaseSemanticAnalyzer
.unescapeSQLString(child.getChild(0).getText());
break;
case HiveParser.TOK_TABLEPARTCOLS:
case HiveParser.TOK_TABCOLLIST:
case HiveParser.TOK_ALTERTABLE_BUCKETS:
break;
case HiveParser.TOK_TABLEROWFORMAT:
rowFormatNode = child;
break;
case HiveParser.TOK_TABLELOCATION:
location = BaseSemanticAnalyzer
.unescapeSQLString(child.getChild(0).getText());
break;
case HiveParser.TOK_TABLEPROPERTIES:
break;
case HiveParser.TOK_TABLESERIALIZER:
child = (ASTNode) child.getChild(0);
serde = BaseSemanticAnalyzer
.unescapeSQLString(child.getChild(0).getText());
break;
case HiveParser.TOK_TABLESKEWED:
break;
default:
throw new AssertionError("Unknown token: " + child.getToken());
}
}
StringBuilder sb = new StringBuilder(1024);
sb.append("Full table name: ").append(fullTableName).append('\n');
sb.append("\tisTemporary: ").append(isTemporary).append('\n');
sb.append("\tIsExternal: ").append(isExternal).append('\n');
if (inputFormat != null) {
sb.append("\tinputFormat: ").append(inputFormat).append('\n');
}
if (outputFormat != null) {
sb.append("\toutputFormat: ").append(outputFormat).append('\n');
}
if (serde != null) {
sb.append("\tserde: ").append(serde).append('\n');
}
if (storageHandler != null) {
sb.append("\tstorageHandler: ").append(storageHandler).append('\n');
}
if (likeTableName != null) {
sb.append("\tlikeTableName: ").append(likeTableName);
}
if (comment != null) {
sb.append("\tcomment: ").append(comment);
}
if (location != null) {
sb.append("\tlocation: ").append(location);
}
if (ctasNode != null) {
sb.append("\tctasNode: ").append(((ASTNode) ctasNode).dump());
}
if (rowFormatNode != null) {
sb.append("\trowFormatNode: ").append(((ASTNode) rowFormatNode).dump());
}
fw.write(sb.toString());
}
fw.flush();
fw.close();
} catch (Exception e) {
LOG.error("Unable to log logical plan to file", e);
}
}
private void parseQuery(String sqlText) throws Exception {
ParseDriver parseDriver = new ParseDriver();
ASTNode node = parseDriver.parse(sqlText);
analyzeHiveParseTree(node);
}
/**
* This is an attempt to use the parser. Sematnic issues are not handled here.
* <p/>
* Trying to recompile the query runs into some issues in the preExec
* hook but we need to make sure all the semantic issues are handled. May be we should save the AST in the
* Semantic analyzer and have it available in the preExec hook so that we walk with it freely.
*
* @param context
* @param ast
* @return
* @throws SemanticException
*/
@Override
public ASTNode preAnalyze(HiveSemanticAnalyzerHookContext context, ASTNode ast)
throws SemanticException {
analyzeHiveParseTree(ast);
return ast;
}
@Override
public void postAnalyze(HiveSemanticAnalyzerHookContext context,
List<Task<? extends Serializable>> rootTasks) throws SemanticException {
}
private class MySemanticAnaylzer extends BaseSemanticAnalyzer {
public MySemanticAnaylzer(HiveConf conf) throws SemanticException {
super(conf);
}
public void analyzeInternal(ASTNode ast) throws SemanticException {
throw new RuntimeException("Not implemented");
}
public void setInputs(HashSet<ReadEntity> inputs) {
this.inputs = inputs;
}
}
}
@@ -280,6 +280,8 @@ public class HiveDataModelGenerator {
AttributeDefinition[] attributeDefinitions = new AttributeDefinition[]{
new AttributeDefinition("name", DataTypes.STRING_TYPE.getName(),
Multiplicity.REQUIRED, false, null),
new AttributeDefinition("clusterName", DataTypes.STRING_TYPE.getName(),
Multiplicity.REQUIRED, false, null),
new AttributeDefinition("description", DataTypes.STRING_TYPE.getName(),
Multiplicity.OPTIONAL, false, null),
new AttributeDefinition("locationUri", DataTypes.STRING_TYPE.getName(),
@@ -322,8 +324,6 @@ public class HiveDataModelGenerator {
AttributeDefinition[] attributeDefinitions = new AttributeDefinition[]{
new AttributeDefinition("name", DataTypes.STRING_TYPE.getName(),
Multiplicity.REQUIRED, false, null),
//new AttributeDefinition("type", DefinedTypes.HIVE_TYPE.getName(), Multiplicity
// .REQUIRED, false, null),
new AttributeDefinition("type", DataTypes.STRING_TYPE.getName(),
Multiplicity.REQUIRED, false, null),
new AttributeDefinition("comment", DataTypes.STRING_TYPE.getName(),
@@ -452,8 +452,9 @@ public class HiveDataModelGenerator {
Multiplicity.REQUIRED, false, null),
new AttributeDefinition("functionType", HiveDataTypes.HIVE_FUNCTION_TYPE.getName(),
Multiplicity.REQUIRED, false, null),
new AttributeDefinition("resourceUris", HiveDataTypes.HIVE_RESOURCEURI.getName(),
new AttributeDefinition("resourceUris",
Multiplicity.COLLECTION, false, null),
DataTypes.arrayTypeName(HiveDataTypes.HIVE_RESOURCEURI.getName()), Multiplicity.OPTIONAL, false,
null),
};
HierarchicalTypeDefinition<ClassType> definition = new HierarchicalTypeDefinition<>(
......
@@ -29,6 +29,10 @@ hive conf directory:
<name>hive.hook.dgi.url</name>
<value>http://localhost:21000/</value>
</property>
<property>
<name>hive.cluster.name</name>
<value>primary</value>
</property>
</verbatim>
Usage: <dgi package>/bin/import-hive.sh. The logs are in <dgi package>/logs/import-hive.log
@@ -44,12 +48,16 @@ The hook submits the request to a thread pool executor to avoid blocking the com
<value>org.apache.hadoop.metadata.hive.hook.HiveHook</value>
</property>
</verbatim>
* Add the following property in hive-site.xml with the DGI endpoint for your set-up
* Add the following properties in hive-site.xml with the DGI endpoint for your set-up
<verbatim>
<property>
<name>hive.hook.dgi.url</name>
<value>http://localhost:21000/</value>
</property>
<property>
<name>hive.cluster.name</name>
<value>primary</value>
</property>
</verbatim>
* Add 'export HIVE_AUX_JARS_PATH=<dgi package>/hook/hive' in hive-env.sh
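For reference, a minimal hive-site.xml sketch combining the hook registration with the two properties above (hive.exec.post.hooks is Hive's standard post-execution hook setting; adjust the URL and cluster name for your set-up):
<verbatim>
<property>
<name>hive.exec.post.hooks</name>
<value>org.apache.hadoop.metadata.hive.hook.HiveHook</value>
</property>
<property>
<name>hive.hook.dgi.url</name>
<value>http://localhost:21000/</value>
</property>
<property>
<name>hive.cluster.name</name>
<value>primary</value>
</property>
</verbatim>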
......
@@ -24,7 +24,6 @@ import org.apache.hadoop.hive.ql.Driver;
import org.apache.hadoop.hive.ql.session.SessionState;
import org.apache.hadoop.metadata.MetadataServiceClient;
import org.apache.hadoop.metadata.hive.bridge.HiveMetaStoreBridge;
import org.apache.hadoop.metadata.hive.model.HiveDataModelGenerator;
import org.apache.hadoop.metadata.hive.model.HiveDataTypes;
import org.codehaus.jettison.json.JSONArray;
import org.codehaus.jettison.json.JSONObject;
@@ -32,8 +31,11 @@ import org.testng.Assert;
import org.testng.annotations.BeforeClass;
import org.testng.annotations.Test;
import java.io.File;
public class HiveHookIT {
private static final String DGI_URL = "http://localhost:21000/";
private static final String CLUSTER_NAME = "test";
private Driver driver;
private MetadataServiceClient dgiCLient;
private SessionState ss;
@@ -59,6 +61,10 @@ public class HiveHookIT {
hiveConf.set(HiveMetaStoreBridge.DGI_URL_PROPERTY, DGI_URL);
hiveConf.set("javax.jdo.option.ConnectionURL", "jdbc:derby:./target/metastore_db;create=true");
hiveConf.set("hive.hook.dgi.synchronous", "true");
hiveConf.set(HiveMetaStoreBridge.HIVE_CLUSTER_NAME, CLUSTER_NAME);
hiveConf.setBoolVar(HiveConf.ConfVars.HIVETESTMODE, true); //to not use hdfs
hiveConf.setVar(HiveConf.ConfVars.HIVETESTMODEPREFIX, "");
hiveConf.set("fs.pfile.impl", "org.apache.hadoop.fs.ProxyLocalFileSystem");
return hiveConf;
}
@@ -69,7 +75,7 @@ public class HiveHookIT {
@Test
public void testCreateDatabase() throws Exception {
String dbName = "db" + RandomStringUtils.randomAlphanumeric(5).toLowerCase();
String dbName = "db" + random();
runCommand("create database " + dbName);
assertDatabaseIsRegistered(dbName);
@@ -77,16 +83,16 @@
@Test
public void testCreateTable() throws Exception {
String dbName = "db" + RandomStringUtils.randomAlphanumeric(5).toLowerCase();
String dbName = "db" + random();
runCommand("create database " + dbName);
String tableName = "table" + RandomStringUtils.randomAlphanumeric(5).toLowerCase();
String tableName = "table" + random();
runCommand("create table " + dbName + "." + tableName + "(id int, name string)");
assertTableIsRegistered(tableName);
assertTableIsRegistered(dbName, tableName);
tableName = "table" + RandomStringUtils.randomAlphanumeric(5).toLowerCase();
tableName = "table" + random();
runCommand("create table " + tableName + "(id int, name string)");
runCommand("create table " + tableName + "(id int, name string) partitioned by(dt string)");
assertTableIsRegistered(tableName);
assertTableIsRegistered("default", tableName);
//Create table where database doesn't exist, will create database instance as well
assertDatabaseIsRegistered("default");
@@ -94,27 +100,137 @@
@Test
public void testCTAS() throws Exception {
String tableName = "table" + RandomStringUtils.randomAlphanumeric(5).toLowerCase();
String tableName = "table" + random();
runCommand("create table " + tableName + "(id int, name string)");
String ctasTableName = "table" + random();
String query = "create table " + ctasTableName + " as select * from " + tableName;
runCommand(query);
assertTableIsRegistered("default", ctasTableName);
assertProcessIsRegistered(query);
}
@Test
public void testCreateView() throws Exception {
String tableName = "table" + random();
runCommand("create table " + tableName + "(id int, name string)");
String viewName = "table" + random();
String query = "create view " + viewName + " as select * from " + tableName;
runCommand(query);
assertTableIsRegistered("default", viewName);
assertProcessIsRegistered(query);
}
@Test
public void testLoadData() throws Exception {
String tableName = "table" + random();
runCommand("create table " + tableName + "(id int, name string)");
String loadFile = file("load");
String query = "load data local inpath 'file://" + loadFile + "' into table " + tableName;
runCommand(query);
assertProcessIsRegistered(query);
}
@Test
public void testInsert() throws Exception {
String tableName = "table" + random();
runCommand("create table " + tableName + "(id int, name string) partitioned by(dt string)");
String insertTableName = "table" + random();
runCommand("create table " + insertTableName + "(name string) partitioned by(dt string)");
String query = "insert into " + insertTableName + " partition(dt = '2015-01-01') select name from "
+ tableName + " where dt = '2015-01-01'";
runCommand(query);
assertProcessIsRegistered(query);
assertPartitionIsRegistered("default", insertTableName, "2015-01-01");
}
private String random() {
return RandomStringUtils.randomAlphanumeric(5).toLowerCase();
}
private String file(String tag) throws Exception {
String filename = "./target/" + tag + "-data-" + random();
File file = new File(filename);
file.createNewFile();
return file.getAbsolutePath();
}
private String mkdir(String tag) throws Exception {
String filename = "./target/" + tag + "-data-" + random();
File file = new File(filename);
file.mkdirs();
return file.getAbsolutePath();
}
@Test
public void testExportImport() throws Exception {
String tableName = "table" + random();
runCommand("create table " + tableName + "(name string)");
String filename = "pfile://" + mkdir("export");
String query = "export table " + tableName + " to '" + filename + "'";
runCommand(query);
assertProcessIsRegistered(query);
tableName = "table" + random();
runCommand("create table " + tableName + "(name string)");
query = "import table " + tableName + " from '" + filename + "'";
runCommand(query);
assertProcessIsRegistered(query);
}
@Test
public void testSelect() throws Exception {
String tableName = "table" + random();
runCommand("create table " + tableName + "(id int, name string)");
String newTableName = "table" + RandomStringUtils.randomAlphanumeric(5).toLowerCase();
String query = "select * from " + tableName;
String query = "create table " + newTableName + " as select * from " + tableName;
runCommand(query);
assertProcessIsRegistered(query);
}
assertTableIsRegistered(newTableName);
private void assertProcessIsRegistered(String queryStr) throws Exception {
assertInstanceIsRegistered(HiveDataTypes.HIVE_PROCESS.getName(), "queryText", query);
String dslQuery = String.format("%s where queryText = \"%s\"", HiveDataTypes.HIVE_PROCESS.getName(), queryStr);
assertEntityIsRegistered(dslQuery);
}
private void assertTableIsRegistered(String tableName) throws Exception {
private void assertTableIsRegistered(String dbName, String tableName) throws Exception {
assertInstanceIsRegistered(HiveDataTypes.HIVE_TABLE.getName(), "name", tableName);
String query = String.format("%s where name = '%s', dbName where name = '%s' and clusterName = '%s'",
HiveDataTypes.HIVE_TABLE.getName(), tableName, dbName, CLUSTER_NAME);
assertEntityIsRegistered(query);
}
private void assertDatabaseIsRegistered(String dbName) throws Exception {
assertInstanceIsRegistered(HiveDataTypes.HIVE_DB.getName(), "name", dbName);
String query = String.format("%s where name = '%s' and clusterName = '%s'", HiveDataTypes.HIVE_DB.getName(),
dbName, CLUSTER_NAME);
assertEntityIsRegistered(query);
}
private void assertPartitionIsRegistered(String dbName, String tableName, String value) throws Exception {
String typeName = HiveDataTypes.HIVE_PARTITION.getName();
String dbType = HiveDataTypes.HIVE_DB.getName();
String tableType = HiveDataTypes.HIVE_TABLE.getName();
String gremlinQuery = String.format("g.V.has('__typeName', '%s').has('%s.values', ['%s']).as('p')."
+ "out('__%s.tableName').has('%s.name', '%s').out('__%s.dbName').has('%s.name', '%s')"
+ ".has('%s.clusterName', '%s').back('p').toList()", typeName, typeName, value, typeName,
tableType, tableName, tableType, dbType, dbName, dbType, CLUSTER_NAME);
JSONObject response = dgiCLient.searchByGremlin(gremlinQuery);
JSONArray results = response.getJSONArray(MetadataServiceClient.RESULTS);
Assert.assertEquals(results.length(), 1);
} }
private void assertInstanceIsRegistered(String typeName, String colName, String colValue) throws Exception{
private void assertEntityIsRegistered(String dslQuery) throws Exception{
JSONArray results = dgiCLient.rawSearch(typeName, colName, colValue);
JSONArray results = dgiCLient.searchByDSL(dslQuery);
Assert.assertEquals(results.length(), 1);
}
}
@@ -205,40 +205,6 @@ public class SSLAndKerberosHiveHookIT extends BaseSSLAndKerberosTest {
assertDatabaseIsRegistered(dbName);
}
@Test
public void testCreateTable() throws Exception {
String dbName = "db" + RandomStringUtils.randomAlphanumeric(5).toLowerCase();
runCommand("create database " + dbName);
String tableName = "table" + RandomStringUtils.randomAlphanumeric(5).toLowerCase();
runCommand("create table " + dbName + "." + tableName + "(id int, name string)");
assertTableIsRegistered(tableName);
tableName = "table" + RandomStringUtils.randomAlphanumeric(5).toLowerCase();
runCommand("create table " + tableName + "(id int, name string)");
assertTableIsRegistered(tableName);
//Create table where database doesn't exist, will create database instance as well
assertDatabaseIsRegistered("default");
}
@Test
public void testCTAS() throws Exception {
String tableName = "table" + RandomStringUtils.randomAlphanumeric(5).toLowerCase();
runCommand("create table " + tableName + "(id int, name string)");
String newTableName = "table" + RandomStringUtils.randomAlphanumeric(5).toLowerCase();
String query = "create table " + newTableName + " as select * from " + tableName;
runCommand(query);
assertTableIsRegistered(newTableName);
assertInstanceIsRegistered(HiveDataTypes.HIVE_PROCESS.getName(), "queryText", query);
}
private void assertTableIsRegistered(String tableName) throws Exception {
assertInstanceIsRegistered(HiveDataTypes.HIVE_TABLE.getName(), "name", tableName);
}
private void assertDatabaseIsRegistered(String dbName) throws Exception {
assertInstanceIsRegistered(HiveDataTypes.HIVE_DB.getName(), "name", dbName);
}
......
@@ -208,40 +208,6 @@ public class SSLHiveHookIT {
assertDatabaseIsRegistered(dbName);
}
@Test
public void testCreateTable() throws Exception {
String dbName = "db" + RandomStringUtils.randomAlphanumeric(5).toLowerCase();
runCommand("create database " + dbName);
String tableName = "table" + RandomStringUtils.randomAlphanumeric(5).toLowerCase();
runCommand("create table " + dbName + "." + tableName + "(id int, name string)");
assertTableIsRegistered(tableName);
tableName = "table" + RandomStringUtils.randomAlphanumeric(5).toLowerCase();
runCommand("create table " + tableName + "(id int, name string)");
assertTableIsRegistered(tableName);
//Create table where database doesn't exist, will create database instance as well
assertDatabaseIsRegistered("default");
}
@Test
public void testCTAS() throws Exception {
String tableName = "table" + RandomStringUtils.randomAlphanumeric(5).toLowerCase();
runCommand("create table " + tableName + "(id int, name string)");
String newTableName = "table" + RandomStringUtils.randomAlphanumeric(5).toLowerCase();
String query = "create table " + newTableName + " as select * from " + tableName;
runCommand(query);
assertTableIsRegistered(newTableName);
assertInstanceIsRegistered(HiveDataTypes.HIVE_PROCESS.getName(), "queryText", query);
}
private void assertTableIsRegistered(String tableName) throws Exception {
assertInstanceIsRegistered(HiveDataTypes.HIVE_TABLE.getName(), "name", tableName);
}
private void assertDatabaseIsRegistered(String dbName) throws Exception { private void assertDatabaseIsRegistered(String dbName) throws Exception {
assertInstanceIsRegistered(HiveDataTypes.HIVE_DB.getName(), "name", dbName); assertInstanceIsRegistered(HiveDataTypes.HIVE_DB.getName(), "name", dbName);
} }
......
...@@ -25,10 +25,8 @@ import com.sun.jersey.api.client.config.DefaultClientConfig; ...@@ -25,10 +25,8 @@ import com.sun.jersey.api.client.config.DefaultClientConfig;
import com.sun.jersey.client.urlconnection.URLConnectionClientHandler; import com.sun.jersey.client.urlconnection.URLConnectionClientHandler;
import org.apache.commons.configuration.PropertiesConfiguration; import org.apache.commons.configuration.PropertiesConfiguration;
import org.apache.hadoop.metadata.security.SecureClientUtils; import org.apache.hadoop.metadata.security.SecureClientUtils;
import org.apache.hadoop.metadata.typesystem.ITypedReferenceableInstance;
import org.apache.hadoop.metadata.typesystem.Referenceable; import org.apache.hadoop.metadata.typesystem.Referenceable;
import org.apache.hadoop.metadata.typesystem.json.InstanceSerialization; import org.apache.hadoop.metadata.typesystem.json.InstanceSerialization;
import org.apache.hadoop.metadata.typesystem.json.Serialization;
import org.codehaus.jettison.json.JSONArray; import org.codehaus.jettison.json.JSONArray;
import org.codehaus.jettison.json.JSONException; import org.codehaus.jettison.json.JSONException;
import org.codehaus.jettison.json.JSONObject; import org.codehaus.jettison.json.JSONObject;
...@@ -36,6 +34,7 @@ import org.slf4j.Logger; ...@@ -36,6 +34,7 @@ import org.slf4j.Logger;
import org.slf4j.LoggerFactory; import org.slf4j.LoggerFactory;
import javax.ws.rs.HttpMethod; import javax.ws.rs.HttpMethod;
import javax.ws.rs.POST;
import javax.ws.rs.core.MediaType; import javax.ws.rs.core.MediaType;
import javax.ws.rs.core.Response; import javax.ws.rs.core.Response;
import javax.ws.rs.core.UriBuilder; import javax.ws.rs.core.UriBuilder;
...@@ -49,14 +48,28 @@ import static org.apache.hadoop.metadata.security.SecurityProperties.TLS_ENABLED ...@@ -49,14 +48,28 @@ import static org.apache.hadoop.metadata.security.SecurityProperties.TLS_ENABLED
*/ */
public class MetadataServiceClient { public class MetadataServiceClient {
private static final Logger LOG = LoggerFactory.getLogger(MetadataServiceClient.class); private static final Logger LOG = LoggerFactory.getLogger(MetadataServiceClient.class);
public static final String NAME = "name";
public static final String GUID = "GUID";
public static final String TYPENAME = "typeName";
public static final String DEFINITION = "definition";
public static final String ERROR = "error";
public static final String STACKTRACE = "stackTrace";
public static final String REQUEST_ID = "requestId"; public static final String REQUEST_ID = "requestId";
public static final String RESULTS = "results"; public static final String RESULTS = "results";
public static final String TOTAL_SIZE = "totalSize"; public static final String COUNT = "count";
private static final String BASE_URI = "api/metadata/"; public static final String ROWS = "rows";
private static final String URI_TYPES = "types";
private static final String URI_ENTITIES = "entities"; public static final String BASE_URI = "api/metadata/";
private static final String URI_TRAITS = "traits"; public static final String TYPES = "types";
private static final String URI_SEARCH = "discovery/search"; public static final String URI_ENTITIES = "entities";
public static final String URI_TRAITS = "traits";
public static final String URI_SEARCH = "discovery/search";
public static final String QUERY = "query";
public static final String QUERY_TYPE = "queryType";
private WebResource service; private WebResource service;
...@@ -73,6 +86,7 @@ public class MetadataServiceClient { ...@@ -73,6 +86,7 @@ public class MetadataServiceClient {
} catch (Exception e) { } catch (Exception e) {
LOG.info("Error processing client configuration.", e); LOG.info("Error processing client configuration.", e);
} }
URLConnectionClientHandler handler = SecureClientUtils.getClientConnectionHandler(config, clientConfig); URLConnectionClientHandler handler = SecureClientUtils.getClientConnectionHandler(config, clientConfig);
Client client = new Client(handler, config); Client client = new Client(handler, config);
...@@ -86,17 +100,18 @@ public class MetadataServiceClient { ...@@ -86,17 +100,18 @@ public class MetadataServiceClient {
} }
static enum API { static enum API {
//Type operations //Type operations
CREATE_TYPE(BASE_URI + URI_TYPES, HttpMethod.POST), CREATE_TYPE(BASE_URI + TYPES, HttpMethod.POST),
GET_TYPE(BASE_URI + URI_TYPES, HttpMethod.GET), GET_TYPE(BASE_URI + TYPES, HttpMethod.GET),
LIST_TYPES(BASE_URI + URI_TYPES, HttpMethod.GET), LIST_TYPES(BASE_URI + TYPES, HttpMethod.GET),
LIST_TRAIT_TYPES(BASE_URI + URI_TYPES + "?type=trait", HttpMethod.GET), LIST_TRAIT_TYPES(BASE_URI + TYPES + "?type=trait", HttpMethod.GET),
//Entity operations //Entity operations
CREATE_ENTITY(BASE_URI + URI_ENTITIES, HttpMethod.POST), CREATE_ENTITY(BASE_URI + URI_ENTITIES, HttpMethod.POST),
GET_ENTITY(BASE_URI + URI_ENTITIES, HttpMethod.GET), GET_ENTITY(BASE_URI + URI_ENTITIES, HttpMethod.GET),
UPDATE_ENTITY(BASE_URI + URI_ENTITIES, HttpMethod.PUT), UPDATE_ENTITY(BASE_URI + URI_ENTITIES, HttpMethod.PUT),
LIST_ENTITY(BASE_URI + URI_ENTITIES + "?type=", HttpMethod.GET), LIST_ENTITY(BASE_URI + URI_ENTITIES, HttpMethod.GET),
//Trait operations //Trait operations
ADD_TRAITS(BASE_URI + URI_TRAITS, HttpMethod.POST), ADD_TRAITS(BASE_URI + URI_TRAITS, HttpMethod.POST),
...@@ -145,7 +160,7 @@ public class MetadataServiceClient { ...@@ -145,7 +160,7 @@ public class MetadataServiceClient {
WebResource resource = getResource(API.GET_TYPE, typeName); WebResource resource = getResource(API.GET_TYPE, typeName);
try { try {
JSONObject response = callAPIWithResource(API.GET_TYPE, resource); JSONObject response = callAPIWithResource(API.GET_TYPE, resource);
return response.getString("definition"); return response.getString(DEFINITION);
} catch (MetadataServiceException e) { } catch (MetadataServiceException e) {
if (e.getStatus() == ClientResponse.Status.NOT_FOUND) { if (e.getStatus() == ClientResponse.Status.NOT_FOUND) {
return null; return null;
...@@ -185,7 +200,7 @@ public class MetadataServiceClient { ...@@ -185,7 +200,7 @@ public class MetadataServiceClient {
public Referenceable getEntity(String guid) throws MetadataServiceException { public Referenceable getEntity(String guid) throws MetadataServiceException {
JSONObject jsonResponse = callAPI(API.GET_ENTITY, null, guid); JSONObject jsonResponse = callAPI(API.GET_ENTITY, null, guid);
try { try {
String entityInstanceDefinition = jsonResponse.getString(MetadataServiceClient.RESULTS); String entityInstanceDefinition = jsonResponse.getString(MetadataServiceClient.DEFINITION);
return InstanceSerialization.fromJsonReferenceable(entityInstanceDefinition, true); return InstanceSerialization.fromJsonReferenceable(entityInstanceDefinition, true);
} catch (JSONException e) { } catch (JSONException e) {
throw new MetadataServiceException(e); throw new MetadataServiceException(e);
...@@ -194,7 +209,7 @@ public class MetadataServiceClient { ...@@ -194,7 +209,7 @@ public class MetadataServiceClient {
public JSONObject searchEntity(String searchQuery) throws MetadataServiceException { public JSONObject searchEntity(String searchQuery) throws MetadataServiceException {
WebResource resource = getResource(API.SEARCH); WebResource resource = getResource(API.SEARCH);
resource = resource.queryParam("query", searchQuery); resource = resource.queryParam(QUERY, searchQuery);
return callAPIWithResource(API.SEARCH, resource); return callAPIWithResource(API.SEARCH, resource);
} }
...@@ -224,10 +239,10 @@ public class MetadataServiceClient { ...@@ -224,10 +239,10 @@ public class MetadataServiceClient {
*/ */
public JSONArray searchByDSL(String query) throws MetadataServiceException { public JSONArray searchByDSL(String query) throws MetadataServiceException {
WebResource resource = getResource(API.SEARCH_DSL); WebResource resource = getResource(API.SEARCH_DSL);
resource = resource.queryParam("query", query); resource = resource.queryParam(QUERY, query);
JSONObject result = callAPIWithResource(API.SEARCH_DSL, resource); JSONObject result = callAPIWithResource(API.SEARCH_DSL, resource);
try { try {
return result.getJSONObject("results").getJSONArray("rows"); return result.getJSONObject(RESULTS).getJSONArray(ROWS);
} catch (JSONException e) { } catch (JSONException e) {
throw new MetadataServiceException(e); throw new MetadataServiceException(e);
} }
...@@ -241,7 +256,7 @@ public class MetadataServiceClient { ...@@ -241,7 +256,7 @@ public class MetadataServiceClient {
*/ */
public JSONObject searchByGremlin(String gremlinQuery) throws MetadataServiceException { public JSONObject searchByGremlin(String gremlinQuery) throws MetadataServiceException {
WebResource resource = getResource(API.SEARCH_GREMLIN); WebResource resource = getResource(API.SEARCH_GREMLIN);
resource = resource.queryParam("query", gremlinQuery); resource = resource.queryParam(QUERY, gremlinQuery);
return callAPIWithResource(API.SEARCH_GREMLIN, resource); return callAPIWithResource(API.SEARCH_GREMLIN, resource);
} }
...@@ -253,7 +268,7 @@ public class MetadataServiceClient { ...@@ -253,7 +268,7 @@ public class MetadataServiceClient {
*/ */
public JSONObject searchByFullText(String query) throws MetadataServiceException { public JSONObject searchByFullText(String query) throws MetadataServiceException {
WebResource resource = getResource(API.SEARCH_FULL_TEXT); WebResource resource = getResource(API.SEARCH_FULL_TEXT);
resource = resource.queryParam("query", query); resource = resource.queryParam(QUERY, query);
return callAPIWithResource(API.SEARCH_FULL_TEXT, resource); return callAPIWithResource(API.SEARCH_FULL_TEXT, resource);
} }
...@@ -286,7 +301,9 @@ public class MetadataServiceClient { ...@@ -286,7 +301,9 @@ public class MetadataServiceClient {
.type(MediaType.APPLICATION_JSON) .type(MediaType.APPLICATION_JSON)
.method(api.getMethod(), ClientResponse.class, requestObject); .method(api.getMethod(), ClientResponse.class, requestObject);
if (clientResponse.getStatus() == Response.Status.OK.getStatusCode()) { Response.Status expectedStatus = (api.getMethod() == HttpMethod.POST)
? Response.Status.CREATED : Response.Status.OK;
if (clientResponse.getStatus() == expectedStatus.getStatusCode()) {
String responseAsString = clientResponse.getEntity(String.class); String responseAsString = clientResponse.getEntity(String.class);
try { try {
return new JSONObject(responseAsString); return new JSONObject(responseAsString);
......
...@@ -18,13 +18,11 @@ ...@@ -18,13 +18,11 @@
package org.apache.hadoop.metadata; package org.apache.hadoop.metadata;
import org.apache.commons.configuration.ConfigurationException;
import org.apache.commons.configuration.PropertiesConfiguration; import org.apache.commons.configuration.PropertiesConfiguration;
import org.slf4j.Logger; import org.slf4j.Logger;
import org.slf4j.LoggerFactory; import org.slf4j.LoggerFactory;
import java.io.File; import java.io.File;
import java.net.MalformedURLException;
import java.net.URL; import java.net.URL;
public class PropertiesUtil { public class PropertiesUtil {
...@@ -33,11 +31,11 @@ public class PropertiesUtil { ...@@ -33,11 +31,11 @@ public class PropertiesUtil {
private static final String APPLICATION_PROPERTIES = "application.properties"; private static final String APPLICATION_PROPERTIES = "application.properties";
public static final String CLIENT_PROPERTIES = "client.properties"; public static final String CLIENT_PROPERTIES = "client.properties";
public static final PropertiesConfiguration getApplicationProperties() throws MetadataException { public static PropertiesConfiguration getApplicationProperties() throws MetadataException {
return getPropertiesConfiguration(APPLICATION_PROPERTIES); return getPropertiesConfiguration(APPLICATION_PROPERTIES);
} }
public static final PropertiesConfiguration getClientProperties() throws MetadataException { public static PropertiesConfiguration getClientProperties() throws MetadataException {
return getPropertiesConfiguration(CLIENT_PROPERTIES); return getPropertiesConfiguration(CLIENT_PROPERTIES);
} }
......
...@@ -31,5 +31,6 @@ public interface SecurityProperties { ...@@ -31,5 +31,6 @@ public interface SecurityProperties {
public static final String SERVER_CERT_PASSWORD_KEY = "password"; public static final String SERVER_CERT_PASSWORD_KEY = "password";
public static final String CLIENT_AUTH_KEY = "client.auth.enabled"; public static final String CLIENT_AUTH_KEY = "client.auth.enabled";
public static final String CERT_STORES_CREDENTIAL_PROVIDER_PATH = "cert.stores.credential.provider.path"; public static final String CERT_STORES_CREDENTIAL_PROVIDER_PATH = "cert.stores.credential.provider.path";
String SSL_CLIENT_PROPERTIES = "ssl-client.xml"; public static final String SSL_CLIENT_PROPERTIES = "ssl-client.xml";
public static final String BIND_ADDRESS = "metadata.server.bind.address";
} }
...@@ -123,4 +123,7 @@ footer.navbar-bottom img { ...@@ -123,4 +123,7 @@ footer.navbar-bottom img {
#Details #Details
{ {
height: 800px; height: 800px;
}
.pagination {
float: right;
} }
\ No newline at end of file
...@@ -27,7 +27,7 @@ angular.module('dgc.details').controller('DetailsController', ['$scope', '$state ...@@ -27,7 +27,7 @@ angular.module('dgc.details').controller('DetailsController', ['$scope', '$state
$scope.details = data; $scope.details = data;
$scope.tableName = data.values.name; $scope.tableName = data.values.name;
}); });
$scope.isString = angular.isString; $scope.isString = angular.isString;
$scope.schemas = DetailsResource.get({ $scope.schemas = DetailsResource.get({
......
...@@ -30,4 +30,23 @@ angular.module('dgc.details').factory('DetailsResource', ['$resource', function( ...@@ -30,4 +30,23 @@ angular.module('dgc.details').factory('DetailsResource', ['$resource', function(
responseType: 'json' responseType: 'json'
} }
}); });
//$scope.getSchema= function (tableName) {
//
// $http.get('/api/metadata/lineage/hive/table/'+tableName +'/schema')
// .success(function (data) {
// $scope.iserror1=false;
// $scope.schema= angular.fromJson(data.results.rows);
// // console.log(tableName);
//
//
// })
// .error(function () {
// // alert("Sorry No response");
//
//
//
// });
//}
}]); }]);
...@@ -22,10 +22,9 @@ ...@@ -22,10 +22,9 @@
<!--{{value}}--> <!--{{value}}-->
<!--</div>--> <!--</div>-->
<div class="row" data-ng-repeat="(key1,value1) in value" ng-if="value1">
<div class="col-md-6" data-ng-if="!isString(value1)" data-ng-repeat="(key2,value2) in value1 track by $index"></div>
<div data-ng-if="isString(value2)" data-ng-repeat="(key3,value3) in value2"> {{key3}}: {{value3}}</div>
<div class="col-md-6" data-ng-if="isString(value1)"> {{key1}} : {{value1 | date:'medium'}}</div>
<div class="row" data-ng-repeat="(key1,value1) in value" ng-if="value1"> </div>
<div class="col-md-6" data-ng-if="!isString(value1)" data-ng-repeat="(key2,value2) in value1 track by $index"></div> \ No newline at end of file
<div data-ng-if="isString(value2)" data-ng-repeat="(key3,value3) in value2"> {{key3}}: {{value3}}</div>
<div class="col-md-6" data-ng-if="isString(value1)"> {{key1}}: {{value1 | date:'medium'}}</div>
</div>
<div data-ng-include="'/modules/lineage/views/lineage.html'"></div>
\ No newline at end of file
...@@ -51,4 +51,4 @@ ...@@ -51,4 +51,4 @@
</tr> </tr>
</tbody> </tbody>
</table> </table>
</div> </div>
\ No newline at end of file
...@@ -24,7 +24,7 @@ ...@@ -24,7 +24,7 @@
<span class="icon-bar"></span> <span class="icon-bar"></span>
<span class="icon-bar"></span> <span class="icon-bar"></span>
</button> </button>
<a data-ui-sref="search" data-ui-sref-active="active"><h1>DGI</h1></a> <a data-ui-sref="search" data-ui-sref-active="active"><h1>DGI</h1></a>
</div> </div>
<nav class="collapse navbar-collapse" data-collapse="isCollapsed" data-role="navigation"> <nav class="collapse navbar-collapse" data-collapse="isCollapsed" data-role="navigation">
<ul class="navbar-nav nav" data-ng-if="isLoggedIn()"> <ul class="navbar-nav nav" data-ng-if="isLoggedIn()">
......
...@@ -133,4 +133,4 @@ angular.module('dgc.lineage').controller('LineageController', ['$element', '$sco ...@@ -133,4 +133,4 @@ angular.module('dgc.lineage').controller('LineageController', ['$element', '$sco
} }
} }
]); ]);
\ No newline at end of file
...@@ -17,5 +17,9 @@ ...@@ -17,5 +17,9 @@
--> -->
<div data-ng-controller="LineageController"> <div data-ng-controller="LineageController">
<svg></svg>
<svg class="lineage-viz"><g></g></svg>
</div> </div>
...@@ -21,7 +21,8 @@ ...@@ -21,7 +21,8 @@
angular.module('dgc.navigation').controller('NavigationController', ['$scope', 'NavigationResource', angular.module('dgc.navigation').controller('NavigationController', ['$scope', 'NavigationResource',
function($scope, NavigationResource) { function($scope, NavigationResource) {
$scope.leftnav = NavigationResource.get();
$scope.leftnav= NavigationResource.get();
$scope.updateVar = function(event) { $scope.updateVar = function(event) {
$scope.$$prevSibling.query = angular.element(event.target).text(); $scope.$$prevSibling.query = angular.element(event.target).text();
......
...@@ -17,5 +17,4 @@ ...@@ -17,5 +17,4 @@
*/ */
'use strict'; 'use strict';
angular.module('dgc.navigation',[]);
angular.module('dgc.navigation', []);
...@@ -21,54 +21,86 @@ ...@@ -21,54 +21,86 @@
angular.module('dgc.search').controller('SearchController', ['$scope', '$location', '$http', '$state', '$stateParams', 'SearchResource', 'NotificationService', angular.module('dgc.search').controller('SearchController', ['$scope', '$location', '$http', '$state', '$stateParams', 'SearchResource', 'NotificationService',
function($scope, $location, $http, $state, $stateParams, SearchResource, NotificationService) { function($scope, $location, $http, $state, $stateParams, SearchResource, NotificationService) {
$scope.types = ['table', 'column', 'db', 'view', 'loadprocess', 'storagedesc']; $scope.types = ['table','column','db','view','loadprocess','storagedesc'];
$scope.results = []; $scope.results = [];
$scope.resultCount = 0; $scope.resultCount=0;
$scope.isCollapsed = true; $scope.isCollapsed = true;
$scope.currentPage = 1;
$scope.itemsPerPage = 10;
$scope.totalItems = 40;
$scope.filteredResults = [];
$scope.resultRows = [];
$scope.setPage = function (pageNo) {
$scope.currentPage = pageNo;
};
$scope.search = function(query) { $scope.search = function(query) {
$scope.results = []; $scope.results = [];
NotificationService.reset(); NotificationService.reset();
$scope.limit = 4; $scope.limit = 4;
SearchResource.search({ SearchResource.search({query:query}, function searchSuccess(response) {
query: query
}, function searchSuccess(response) { $scope.resultCount=response.count;
$scope.results = response.results; $scope.results = response.results;
$scope.resultCount = response.count; $scope.resultRows = $scope.results.rows;
if ($scope.results.length < 1) { $scope.totalItems = $scope.resultCount;
NotificationService.error('No Result found', false); $scope.$watch('currentPage + itemsPerPage', function() {
} var begin = (($scope.currentPage - 1) * $scope.itemsPerPage),
$state.go('search.results', { end = begin + $scope.itemsPerPage;
query: query $scope.searchTypesAvailable = $scope.typeAvailable();
}, { if ($scope.searchTypesAvailable) {
$scope.searchMessage = 'loading results...';
$scope.filteredResults = $scope.resultRows.slice(begin, end);
$scope.pageCount = function () {
return Math.ceil($scope.resultCount / $scope.itemsPerPage);
};
if ($scope.results.rows)
$scope.searchMessage = $scope.results.rows.length +' results matching your search query '+ $scope.query +' were found';
else
$scope.searchMessage = '0 results matching your search query '+ $scope.query +' were found';
if ($scope.results.length < 1) {
NotificationService.error('No Result found', false);
}
} else {
$scope.searchMessage = '0 results matching your search query '+ $scope.query +' were found';
}
});
$state.go('search.results', {query:query}, {
location: 'replace' location: 'replace'
}); });
}, function searchError(err) { }, function searchError(err) {
NotificationService.error('Error occurred during executing search query, error status code = ' + err.status + ', status text = ' + err.statusText, false); NotificationService.error('Error occurred during executing search query, error status code = ' + err.status + ', status text = ' + err.statusText, false);
}); });
}; };
$scope.typeAvailable = function() { $scope.typeAvailable = function() {
return $scope.types.indexOf(this.results.dataType.typeName && this.results.dataType.typeName.toLowerCase()) > -1;
};
/* $scope.$watch("currentPage + numPerPage", function() { if($scope.results.dataType) {
var begin = (($scope.currentPage - 1) * $scope.numPerPage); return $scope.types.indexOf($scope.results.dataType.typeName && $scope.results.dataType.typeName.toLowerCase()) > -1;
var end = begin + $scope.numPerPage; }
};
$scope.filteredResults = $scope.rows.slice(begin, end); $scope.doToggle = function($event,el) {
});*/ this.isCollapsed = !el;
};
$scope.filterSearchResults = function(items) { $scope.filterSearchResults = function(items) {
var res = {}; var res = {};
var count = 0;
angular.forEach(items, function(value, key) { angular.forEach(items, function(value, key) {
if ((typeof value !== 'object')) if(typeof value !== 'object') {
res[key] = value; res[key] = value;
count++;
}
}); });
$scope.keyLength = count;
return res; return res;
}; };
$scope.searchQuery = $location.search();
$scope.query = $stateParams.query; $scope.query=($location.search()).query;
if ($scope.query) { if ($scope.query) {
$scope.searchMessage = 'searching...';
$scope.search($scope.query); $scope.search($scope.query);
} }
} }
]); ]);
\ No newline at end of file
...@@ -21,7 +21,6 @@ ...@@ -21,7 +21,6 @@
//Setting up route //Setting up route
angular.module('dgc.search').config(['$stateProvider', angular.module('dgc.search').config(['$stateProvider',
function($stateProvider) { function($stateProvider) {
// states for my app // states for my app
$stateProvider.state('search', { $stateProvider.state('search', {
url: '/search', url: '/search',
...@@ -31,6 +30,10 @@ angular.module('dgc.search').config(['$stateProvider', ...@@ -31,6 +30,10 @@ angular.module('dgc.search').config(['$stateProvider',
url: '/:query', url: '/:query',
templateUrl: '/modules/search/views/searchResult.html', templateUrl: '/modules/search/views/searchResult.html',
controller: 'SearchController' controller: 'SearchController'
}); }).state('search.results', {
url: '?query',
templateUrl: '/modules/search/views/searchResult.html',
controller:'SearchController'
});
} }
]); ]);
...@@ -21,7 +21,7 @@ ...@@ -21,7 +21,7 @@
<form name="form" novalidate class="container"> <form name="form" novalidate class="container">
<div class="col-lg-7 col-lg-offset-3"> <div class="col-lg-7 col-lg-offset-3">
<div class="row input-group"> <div class="row input-group">
<input type="text" class="form-control" placeholder="Search" data-ng-model="query" data-typeahead="type for type in types" required/> <input type="text" class="form-control" placeholder="Search" data-ng-model="query" required/>
<span class="input-group-btn"> <span class="input-group-btn">
<button class="btn btn-success" type="submit" data-ng-disabled="form.$invalid" data-ng-click="search(query)"> <button class="btn btn-success" type="submit" data-ng-disabled="form.$invalid" data-ng-click="search(query)">
<i class="glyphicon glyphicon-search white "></i> <i class="glyphicon glyphicon-search white "></i>
...@@ -37,9 +37,9 @@ ...@@ -37,9 +37,9 @@
<div class="container"> <div class="container">
<div class="row"> <div class="row">
<div data-ng-include="'/modules/navigation/views/navigation.html'"></div> <div data-ng-include="'/modules/navigation/views/navigation.html'"></div>
<div class="col-lg-9" data-ui-view=""></div> <div class="col-lg-9" data-ui-view="" style="min-height: 1350px;"></div>
</div> </div>
</div> </div>
</div> </div>
\ No newline at end of file
...@@ -15,28 +15,33 @@ ...@@ -15,28 +15,33 @@
~ See the License for the specific language governing permissions and ~ See the License for the specific language governing permissions and
~ limitations under the License. ~ limitations under the License.
--> -->
<!--
<h4 ng-show='results.rows.length > 0'>{{results.rows.length}} results matching your search query "{{query}}" were found</h4>
<h4 ng-show='results.rows.length == "0"'>searching .....</h4>
<h4 ng-show='!(results.rows)'>0 results matching your search query "{{query}}" were found</h4>-->
<h4 ng-show="searchMessage">{{searchMessage}}</h4>
<h4>{{resultCount}} results matching your search query "{{query}}" were found</h4>
<ul class="list-unstyled"> <ul class="list-unstyled">
<li ng-repeat="result in results.rows" class="searchresults"> <li ng-repeat="result in filteredResults" class="searchresults">
<div data-ng-if="typeAvailable()"> <h4><a data-ui-sref="details({id:result['$id$'].id})">{{result.name}}</a></h4>
<h4><a data-ui-sref="details({id:result['$id$'].id})">{{result.name}}</a></h4> <p>{{result.description}}</p>
<p>{{result.description}}</p>
<span ng-repeat="(key, value) in filterSearchResults(result)" > <span ng-repeat="(key, value) in filterSearchResults(result)" >
<span ng-show="$index < 4"><b>{{key}}: </b>{{value}}{{$index+1 === limit ? '' : ', '}}</span> <span ng-show="$index < 4"><b>{{key}}: </b>{{value}}{{$index+1 === limit ? '' : ', '}}</span>
</span> </span>
<div collapse="isCollapsed"> <div collapse="isCollapsed">
<span class="well well-lg"><span ng-repeat="(key, value) in filterSearchResults(result)" > <span ng-repeat="(key, value) in filterSearchResults(result)" >
<span ng-show="$index > 4"><b>{{key}}: </b>{{value}}{{$last ? '' : ', '}}</span> <span ng-show="$index > 4"><b>{{key}}: </b>{{value}}{{$last ? '' : ', '}}</span>
</span></span> </span>
</div> </div>
<a href="javascript: void(0);" class="show-more" ng-click="isCollapsed = !isCollapsed">..show more</a> <a href ng-show="isCollapsed && (keyLength > 4)" ng-click="doToggle($event,isCollapsed)">..show more</a>
<!-- <a href="javascript: void(0);" bn-slide-show class="show-more" ng-click="doToggle(!isCollapsed)">..show more</a>--> <a href ng-show="!isCollapsed" ng-click="doToggle($event,isCollapsed)">..show less</a>
<h5>Tags : <a ng-repeat="(key, value) in result['$traits$']" data-ui-sref="search.results({query: key})">{{key}}</a> </h5>
</div>
<div data-ng-if="!typeAvailable()" data-ng-include="'/modules/search/views/types/guid.html'"></div> <h5>Tags : <a ng-repeat="(key, value) in result['$traits$']" data-ui-sref="search.results({query: key})">{{key}}</a> </h5>
</li> <div data-ng-if="!searchTypesAvailable" data-ng-include="'/modules/search/views/types/guid.html'"></div>
</li>
</ul> </ul>
<div ng-show='results.rows.length > 0'>
<pagination total-items="totalItems" items-per-page="itemsPerPage" ng-model="currentPage" ng-change="pageChanged()"></pagination>
<p>
</div>
...@@ -35,4 +35,4 @@ ...@@ -35,4 +35,4 @@
{% if (process.env.NODE_ENV == 'local') %} {% if (process.env.NODE_ENV == 'local') %}
<!-- Livereload script rendered --> <!-- Livereload script rendered -->
<script type="text/javascript" src="http://localhost:35730/livereload.js"></script> <script type="text/javascript" src="http://localhost:35730/livereload.js"></script>
{% endif %} {% endif %}
\ No newline at end of file
...@@ -17,33 +17,33 @@ ...@@ -17,33 +17,33 @@
--> -->
<head> <head>
<meta charset="utf-8"> <meta charset="utf-8">
<meta http-equiv="X-UA-Compatible" content="IE=edge,chrome=1"> <meta http-equiv="X-UA-Compatible" content="IE=edge,chrome=1">
<meta name="viewport" content="width=device-width,initial-scale=1"> <meta name="viewport" content="width=device-width,initial-scale=1">
<title>{{app.title}}</title> <title>{{app.title}}</title>
<meta http-equiv="Content-type" content="text/html;charset=UTF-8"> <meta http-equiv="Content-type" content="text/html;charset=UTF-8">
<link href="/img/favicon.ico" rel="shortcut icon" type="image/x-icon"> <link href="/img/favicon.ico" rel="shortcut icon" type="image/x-icon">
<link rel="stylesheet" href="/lib/bootstrap/dist/css/bootstrap.min.css"> <link rel="stylesheet" href="/lib/bootstrap/dist/css/bootstrap.min.css">
<link rel="stylesheet" href="/lib/font-awesome/css/font-awesome.min.css"> <link rel="stylesheet" href="/lib/font-awesome/css/font-awesome.min.css">
<link rel="stylesheet" href="/css/sticky-footer-navbar.css"> <link rel="stylesheet" href="/css/sticky-footer-navbar.css">
<link rel="stylesheet" href="/css/common.css"> <link rel="stylesheet" href="/css/common.css">
<link rel="stylesheet" href="/css/lineage.css"> <link rel="stylesheet" href="/css/lineage.css">
<link rel="stylesheet" href="http://maxcdn.bootstrapcdn.com/bootstrap/3.3.4/css/bootstrap.min.css"> <link rel="stylesheet" href="http://maxcdn.bootstrapcdn.com/bootstrap/3.3.4/css/bootstrap.min.css">
<script src="https://ajax.googleapis.com/ajax/libs/jquery/1.11.1/jquery.min.js"></script> <script src="https://ajax.googleapis.com/ajax/libs/jquery/1.11.1/jquery.min.js"></script>
<script src="http://maxcdn.bootstrapcdn.com/bootstrap/3.3.4/js/bootstrap.min.js"></script> <script src="http://maxcdn.bootstrapcdn.com/bootstrap/3.3.4/js/bootstrap.min.js"></script>
<!--[if lt IE 9]> <!--[if lt IE 9]>
<script src="http://html5shim.googlecode.com/svn/trunk/html5.js"></script> <script src="http://html5shim.googlecode.com/svn/trunk/html5.js"></script>
<![endif]--> <![endif]-->
{% block content %} {% block content %}
<script type="text/javascript"> <script type="text/javascript">
window.user = {{JSON.stringify(user)}}; window.user = {{JSON.stringify(user)}};
window.renderErrors = {{JSON.stringify(renderErrors)}}; window.renderErrors = {{JSON.stringify(renderErrors)}};
</script> </script>
{% endblock %} {% endblock %}
</head> </head>
\ No newline at end of file
---+ Metadata Discovery
---++ Introduction
---++ Details
---+ Search
Atlas exposes search over the metadata in two ways:
* Search using DSL
* Full-text search
---++ Search DSL Grammar
The DSL exposes an SQL-like query language for searching the metadata based on the type system.
The grammar for the DSL is given below.
<verbatim>
queryWithPath: query ~ opt(WITHPATH)
query: rep1sep(singleQuery, opt(COMMA))
singleQuery: singleQrySrc ~ opt(loopExpression) ~ opt(selectClause)
singleQrySrc = FROM ~ fromSrc ~ opt(WHERE) ~ opt(expr ^? notIdExpression) |
WHERE ~ (expr ^? notIdExpression) |
expr ^? notIdExpression |
fromSrc ~ opt(WHERE) ~ opt(expr ^? notIdExpression)
fromSrc: identifier ~ AS ~ alias | identifier
loopExpression: LOOP ~ (LPAREN ~> query <~ RPAREN) ~ opt(intConstant <~ TIMES) ~ opt(AS ~> alias)
selectClause: SELECT ~ rep1sep(selectExpression, COMMA)
selectExpression: expr ~ opt(AS ~> alias)
expr: compE ~ opt(rep(exprRight))
exprRight: (AND | OR) ~ compE
compE:
arithE ~ (LT | LTE | EQ | NEQ | GT | GTE) ~ arithE |
arithE ~ (ISA | IS) ~ ident |
arithE ~ HAS ~ ident |
arithE
arithE: multiE ~ opt(rep(arithERight))
arithERight: (PLUS | MINUS) ~ multiE
multiE: atomE ~ opt(rep(multiERight))
multiERight: (STAR | DIV) ~ atomE
atomE: literal | identifier | LPAREN ~> expr <~ RPAREN
identifier: rep1sep(ident, DOT)
alias: ident | stringLit
literal: booleanConstant |
intConstant |
longConstant |
floatConstant |
doubleConstant |
stringLit
</verbatim>
Grammar language:
{noformat}
opt(a) => a is optional
~ => a combinator. 'a ~ b' means a followed by b
rep => zero or more
rep1sep => one or more, separated by second arg.
{noformat}
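As an informal illustration of how the grammar reads (this is a hand decomposition of the second query from the DSL Examples below, not actual parser output):

{noformat}
DB where name="Reporting" select name, owner

singleQuery  : singleQrySrc ~ selectClause              (no loopExpression)
singleQrySrc : fromSrc ~ WHERE ~ expr                   fromSrc = identifier "DB"
expr         : arithE(identifier "name") ~ EQ ~ arithE(stringLit "Reporting")
selectClause : SELECT ~ selectExpression("name") ~ COMMA ~ selectExpression("owner")
{noformat}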
Language Notes:
* A *SingleQuery* expression can be used to search for entities of a _Trait_ or _Class_.
Entities can be filtered based on a 'Where Clause' and Entity Attributes can be retrieved based on a 'Select Clause'.
* An Entity Graph can be traversed/joined by combining one or more SingleQueries.
   * An attempt is made to make the expressions look SQL-like by accepting the keywords "SELECT",
     "FROM", and "WHERE"; these are optional, and users can simply think in terms of Entity Graph Traversals.
* The transitive closure of an Entity relationship can be expressed via the _Loop_ expression. A
_Loop_ expression can be any traversal (recursively a query) that represents a _Path_ that ends in an Entity of the same _Type_ as the starting Entity.
   * The _WithPath_ clause can be used with transitive closure queries to retrieve the Path that
     connects the two related Entities; an illustrative query is shown at the end of the DSL Examples section below. (We also provide a higher-level interface for Closure Queries; see the scaladoc for 'org.apache.metadata.query.ClosureQuery'.)
   * There are a couple of predicate functions that differ from SQL:
      * _is_ or _isa_ can be used to filter Entities that have a particular Trait.
* _has_ can be used to filter Entities that have a value for a particular Attribute.
---+++ DSL Examples
* from DB
* DB where name="Reporting" select name, owner
* DB has name
* DB is JdbcAccess
* Column where Column isa PII
* Table where name="sales_fact", columns
* Table where name="sales_fact", columns as column select column.name, column.dataType, column.comment
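The _Loop_ and _WithPath_ constructs from the Language Notes above are not covered by the examples; the sketch below shows how a transitive-closure query could look. It assumes a hypothetical model in which a =LoadProcess= entity connects an input Table to an output Table through an =outputTable= reference, so the type and attribute names here are illustrative only. The first query returns the Tables transitively reachable from each Table =t=; the second also returns the connecting Path.

<verbatim>
Table as t loop (LoadProcess outputTable)
Table as t loop (LoadProcess outputTable) withPath
</verbatim>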
---++ Full-text Search
Atlas also exposes a Lucene-style full-text search capability.
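A minimal sketch of driving both search flavors from Java using the =MetadataServiceClient= touched elsewhere in this change. The server URL/port and the exact shape of the full-text response are assumptions; treat this as illustrative rather than a documented API.

<verbatim>
import org.apache.hadoop.metadata.MetadataServiceClient;
import org.codehaus.jettison.json.JSONArray;
import org.codehaus.jettison.json.JSONObject;

public class SearchExample {
    public static void main(String[] args) throws Exception {
        // Assumed base URL of a locally running metadata server.
        MetadataServiceClient client = new MetadataServiceClient("http://localhost:21000/");

        // DSL search: searchByDSL() unwraps the response and returns the "rows" array.
        JSONArray rows = client.searchByDSL("DB where name=\"Reporting\" select name, owner");
        System.out.println("DSL rows: " + rows.length());

        // Full-text search: returns the raw JSON response; "results" holds the matches.
        JSONObject response = client.searchByFullText("Reporting");
        System.out.println(response.get(MetadataServiceClient.RESULTS));
    }
}
</verbatim>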
\ No newline at end of file
...@@ -44,7 +44,9 @@ allows integration with the whole enterprise data ecosystem. ...@@ -44,7 +44,9 @@ allows integration with the whole enterprise data ecosystem.
* [[Architecture][High Level Architecture]] * [[Architecture][High Level Architecture]]
* [[TypeSystem][Type System]] * [[TypeSystem][Type System]]
* [[Repository][Metadata Repository]] * [[Repository][Metadata Repository]]
* [[Discovery][Search]] * [[Search][Search]]
* [[security][security]]
* [[Configuration][Configuration]]
---++ API Documentation ---++ API Documentation
......
...@@ -78,7 +78,7 @@ ...@@ -78,7 +78,7 @@
<slf4j.version>1.7.7</slf4j.version> <slf4j.version>1.7.7</slf4j.version>
<jetty.version>6.1.26</jetty.version> <jetty.version>6.1.26</jetty.version>
<jersey.version>1.9</jersey.version> <jersey.version>1.9</jersey.version>
<tinkerpop.version>2.5.0</tinkerpop.version> <tinkerpop.version>2.6.0</tinkerpop.version>
<titan.version>0.5.4</titan.version> <titan.version>0.5.4</titan.version>
<hadoop.version>2.6.0</hadoop.version> <hadoop.version>2.6.0</hadoop.version>
...@@ -101,6 +101,9 @@ ...@@ -101,6 +101,9 @@
<StagingId>apache-staging</StagingId> <StagingId>apache-staging</StagingId>
<StagingName>Apache Release Distribution Repository</StagingName> <StagingName>Apache Release Distribution Repository</StagingName>
<StagingUrl>https://repository.apache.org/content/groups/staging</StagingUrl> <StagingUrl>https://repository.apache.org/content/groups/staging</StagingUrl>
<!-- skips checkstyle and find bugs -->
<skipCheck>false</skipCheck>
</properties> </properties>
<profiles> <profiles>
...@@ -611,6 +614,12 @@ ...@@ -611,6 +614,12 @@
<version>1.8.5</version> <version>1.8.5</version>
<scope>provided</scope> <scope>provided</scope>
</dependency> </dependency>
<dependency>
<groupId>org.apache.commons</groupId>
<artifactId>commons-lang3</artifactId>
<version>3.4</version>
</dependency>
</dependencies> </dependencies>
</dependencyManagement> </dependencyManagement>
...@@ -971,6 +980,7 @@ ...@@ -971,6 +980,7 @@
</goals> </goals>
<phase>verify</phase> <phase>verify</phase>
<configuration> <configuration>
<skip>${skipCheck}</skip>
<consoleOutput>true</consoleOutput> <consoleOutput>true</consoleOutput>
<includeTestSourceDirectory>true</includeTestSourceDirectory> <includeTestSourceDirectory>true</includeTestSourceDirectory>
<configLocation>src/build/checkstyle.xml</configLocation> <configLocation>src/build/checkstyle.xml</configLocation>
...@@ -988,6 +998,7 @@ ...@@ -988,6 +998,7 @@
<!--debug>true</debug --> <!--debug>true</debug -->
<xmlOutput>true</xmlOutput> <xmlOutput>true</xmlOutput>
<failOnError>false</failOnError> <failOnError>false</failOnError>
<skip>${skipCheck}</skip>
</configuration> </configuration>
<executions> <executions>
<execution> <execution>
......
...@@ -22,6 +22,7 @@ import com.thinkaurelius.titan.core.TitanVertex; ...@@ -22,6 +22,7 @@ import com.thinkaurelius.titan.core.TitanVertex;
import org.apache.hadoop.metadata.MetadataException; import org.apache.hadoop.metadata.MetadataException;
import org.apache.hadoop.metadata.query.Expressions; import org.apache.hadoop.metadata.query.Expressions;
import org.apache.hadoop.metadata.query.GraphPersistenceStrategies; import org.apache.hadoop.metadata.query.GraphPersistenceStrategies;
import org.apache.hadoop.metadata.query.GraphPersistenceStrategies$class;
import org.apache.hadoop.metadata.query.TypeUtils; import org.apache.hadoop.metadata.query.TypeUtils;
import org.apache.hadoop.metadata.repository.MetadataRepository; import org.apache.hadoop.metadata.repository.MetadataRepository;
import org.apache.hadoop.metadata.repository.Constants; import org.apache.hadoop.metadata.repository.Constants;
...@@ -71,7 +72,11 @@ public class DefaultGraphPersistenceStrategy implements GraphPersistenceStrategi ...@@ -71,7 +72,11 @@ public class DefaultGraphPersistenceStrategy implements GraphPersistenceStrategi
@Override @Override
public String fieldNameInVertex(IDataType<?> dataType, AttributeInfo aInfo) { public String fieldNameInVertex(IDataType<?> dataType, AttributeInfo aInfo) {
return metadataRepository.getFieldNameInVertex(dataType, aInfo); try {
return metadataRepository.getFieldNameInVertex(dataType, aInfo);
} catch (MetadataException e) {
throw new RuntimeException(e);
}
} }
@Override @Override
...@@ -164,33 +169,12 @@ public class DefaultGraphPersistenceStrategy implements GraphPersistenceStrategi ...@@ -164,33 +169,12 @@ public class DefaultGraphPersistenceStrategy implements GraphPersistenceStrategi
@Override @Override
public String gremlinCompOp(Expressions.ComparisonExpression op) { public String gremlinCompOp(Expressions.ComparisonExpression op) {
switch (op.symbol()) { return GraphPersistenceStrategies$class.gremlinCompOp(this, op);
case "=":
return "T.eq";
case "!=":
return "T.neq";
case ">":
return "T.gt";
case ">=":
return "T.gte";
case "<":
return "T.lt";
case "<=":
return "T.lte";
default:
throw new RuntimeException(("Comparison operator not supported in Gremlin: " + op));
}
} }
@Override @Override
public String loopObjectExpression(IDataType<?> dataType) { public String loopObjectExpression(IDataType<?> dataType) {
return "{it.object." + typeAttributeName() + " == '" + dataType.getName() + "'}"; return GraphPersistenceStrategies$class.loopObjectExpression(this, dataType);
} }
@Override @Override
...@@ -202,4 +186,9 @@ public class DefaultGraphPersistenceStrategy implements GraphPersistenceStrategi ...@@ -202,4 +186,9 @@ public class DefaultGraphPersistenceStrategy implements GraphPersistenceStrategi
@Override @Override
public String idAttributeName() { return metadataRepository.getIdAttributeName(); } public String idAttributeName() { return metadataRepository.getIdAttributeName(); }
@Override
public String typeTestExpression(String typeName) {
return GraphPersistenceStrategies$class.typeTestExpression(this, typeName);
}
} }
...@@ -23,6 +23,9 @@ import com.thinkaurelius.titan.core.TitanIndexQuery; ...@@ -23,6 +23,9 @@ import com.thinkaurelius.titan.core.TitanIndexQuery;
import com.thinkaurelius.titan.core.TitanProperty; import com.thinkaurelius.titan.core.TitanProperty;
import com.thinkaurelius.titan.core.TitanVertex; import com.thinkaurelius.titan.core.TitanVertex;
import com.tinkerpop.blueprints.Vertex; import com.tinkerpop.blueprints.Vertex;
import com.tinkerpop.gremlin.groovy.Gremlin;
import com.tinkerpop.gremlin.java.GremlinPipeline;
import org.apache.hadoop.metadata.MetadataServiceClient;
import org.apache.hadoop.metadata.discovery.DiscoveryException; import org.apache.hadoop.metadata.discovery.DiscoveryException;
import org.apache.hadoop.metadata.discovery.DiscoveryService; import org.apache.hadoop.metadata.discovery.DiscoveryService;
import org.apache.hadoop.metadata.query.Expressions; import org.apache.hadoop.metadata.query.Expressions;
...@@ -66,6 +69,8 @@ public class GraphBackedDiscoveryService implements DiscoveryService { ...@@ -66,6 +69,8 @@ public class GraphBackedDiscoveryService implements DiscoveryService {
private final TitanGraph titanGraph; private final TitanGraph titanGraph;
private final DefaultGraphPersistenceStrategy graphPersistenceStrategy; private final DefaultGraphPersistenceStrategy graphPersistenceStrategy;
public final static String SCORE = "score";
@Inject @Inject
GraphBackedDiscoveryService(GraphProvider<TitanGraph> graphProvider, GraphBackedDiscoveryService(GraphProvider<TitanGraph> graphProvider,
MetadataRepository metadataRepository) throws DiscoveryException { MetadataRepository metadataRepository) throws DiscoveryException {
...@@ -93,8 +98,8 @@ public class GraphBackedDiscoveryService implements DiscoveryService { ...@@ -93,8 +98,8 @@ public class GraphBackedDiscoveryService implements DiscoveryService {
if (guid != null) { //Filter non-class entities if (guid != null) { //Filter non-class entities
try { try {
row.put("guid", guid); row.put("guid", guid);
row.put("typeName", vertex.<String>getProperty(Constants.ENTITY_TYPE_PROPERTY_KEY)); row.put(MetadataServiceClient.TYPENAME, vertex.<String>getProperty(Constants.ENTITY_TYPE_PROPERTY_KEY));
row.put("score", result.getScore()); row.put(SCORE, result.getScore());
} catch (JSONException e) { } catch (JSONException e) {
LOG.error("Unable to create response", e); LOG.error("Unable to create response", e);
throw new DiscoveryException("Unable to create response"); throw new DiscoveryException("Unable to create response");
......
...@@ -23,41 +23,44 @@ public final class Constants { ...@@ -23,41 +23,44 @@ public final class Constants {
/** /**
* Globally Unique identifier property key. * Globally Unique identifier property key.
*/ */
public static final String GUID_PROPERTY_KEY = "guid";
public static final String INTERNAL_PROPERTY_KEY_PREFIX = "__";
public static final String GUID_PROPERTY_KEY = INTERNAL_PROPERTY_KEY_PREFIX + "guid";
public static final String GUID_INDEX = "guid_index"; public static final String GUID_INDEX = "guid_index";
/** /**
* Entity type name property key. * Entity type name property key.
*/ */
public static final String ENTITY_TYPE_PROPERTY_KEY = "typeName"; public static final String ENTITY_TYPE_PROPERTY_KEY = INTERNAL_PROPERTY_KEY_PREFIX + "typeName";
public static final String ENTITY_TYPE_INDEX = "type_index"; public static final String ENTITY_TYPE_INDEX = "type_index";
/** /**
* Entity type's super types property key. * Entity type's super types property key.
*/ */
public static final String SUPER_TYPES_PROPERTY_KEY = "superTypeNames"; public static final String SUPER_TYPES_PROPERTY_KEY = INTERNAL_PROPERTY_KEY_PREFIX + "superTypeNames";
public static final String SUPER_TYPES_INDEX = "super_types_index"; public static final String SUPER_TYPES_INDEX = "super_types_index";
/** /**
* Full-text for the entity for enabling full-text search. * Full-text for the entity for enabling full-text search.
*/ */
//weird issue in TitanDB if __ added to this property key. Not adding it for now
public static final String ENTITY_TEXT_PROPERTY_KEY = "entityText"; public static final String ENTITY_TEXT_PROPERTY_KEY = "entityText";
/** /**
* Properties for type store graph * Properties for type store graph
*/ */
public static final String TYPE_CATEGORY_PROPERTY_KEY = "type.category"; public static final String TYPE_CATEGORY_PROPERTY_KEY = INTERNAL_PROPERTY_KEY_PREFIX + "type.category";
public static final String VERTEX_TYPE_PROPERTY_KEY = "type"; public static final String VERTEX_TYPE_PROPERTY_KEY = INTERNAL_PROPERTY_KEY_PREFIX + "type";
public static final String TYPENAME_PROPERTY_KEY = "type.name"; public static final String TYPENAME_PROPERTY_KEY = INTERNAL_PROPERTY_KEY_PREFIX + "type.name";
/** /**
* Trait names property key and index name. * Trait names property key and index name.
*/ */
public static final String TRAIT_NAMES_PROPERTY_KEY = "traitNames"; public static final String TRAIT_NAMES_PROPERTY_KEY = INTERNAL_PROPERTY_KEY_PREFIX + "traitNames";
public static final String TRAIT_NAMES_INDEX = "trait_names_index"; public static final String TRAIT_NAMES_INDEX = "trait_names_index";
public static final String VERSION_PROPERTY_KEY = "version"; public static final String VERSION_PROPERTY_KEY = INTERNAL_PROPERTY_KEY_PREFIX + "version";
public static final String TIMESTAMP_PROPERTY_KEY = "timestamp"; public static final String TIMESTAMP_PROPERTY_KEY = INTERNAL_PROPERTY_KEY_PREFIX + "timestamp";
/** /**
* search backing index name. * search backing index name.
......
...@@ -18,6 +18,7 @@ ...@@ -18,6 +18,7 @@
package org.apache.hadoop.metadata.repository; package org.apache.hadoop.metadata.repository;
import org.apache.hadoop.metadata.MetadataException;
import org.apache.hadoop.metadata.typesystem.IReferenceableInstance; import org.apache.hadoop.metadata.typesystem.IReferenceableInstance;
import org.apache.hadoop.metadata.typesystem.ITypedReferenceableInstance; import org.apache.hadoop.metadata.typesystem.ITypedReferenceableInstance;
import org.apache.hadoop.metadata.typesystem.ITypedStruct; import org.apache.hadoop.metadata.typesystem.ITypedStruct;
...@@ -61,7 +62,7 @@ public interface MetadataRepository { ...@@ -61,7 +62,7 @@ public interface MetadataRepository {
* @param aInfo attribute info * @param aInfo attribute info
* @return property key used to store a given attribute * @return property key used to store a given attribute
*/ */
String getFieldNameInVertex(IDataType<?> dataType, AttributeInfo aInfo); String getFieldNameInVertex(IDataType<?> dataType, AttributeInfo aInfo) throws MetadataException;
/** /**
* Return the edge label for a given attribute in the repository. * Return the edge label for a given attribute in the repository.
......
...@@ -40,6 +40,7 @@ import org.apache.hadoop.metadata.typesystem.persistence.MapIds; ...@@ -40,6 +40,7 @@ import org.apache.hadoop.metadata.typesystem.persistence.MapIds;
import org.apache.hadoop.metadata.typesystem.types.AttributeInfo; import org.apache.hadoop.metadata.typesystem.types.AttributeInfo;
import org.apache.hadoop.metadata.typesystem.types.ClassType; import org.apache.hadoop.metadata.typesystem.types.ClassType;
import org.apache.hadoop.metadata.typesystem.types.DataTypes; import org.apache.hadoop.metadata.typesystem.types.DataTypes;
import org.apache.hadoop.metadata.typesystem.types.EnumType;
import org.apache.hadoop.metadata.typesystem.types.EnumValue; import org.apache.hadoop.metadata.typesystem.types.EnumValue;
import org.apache.hadoop.metadata.typesystem.types.HierarchicalType; import org.apache.hadoop.metadata.typesystem.types.HierarchicalType;
import org.apache.hadoop.metadata.typesystem.types.IDataType; import org.apache.hadoop.metadata.typesystem.types.IDataType;
...@@ -55,12 +56,7 @@ import javax.inject.Inject; ...@@ -55,12 +56,7 @@ import javax.inject.Inject;
import javax.inject.Singleton; import javax.inject.Singleton;
import java.math.BigDecimal; import java.math.BigDecimal;
import java.math.BigInteger; import java.math.BigInteger;
import java.util.ArrayList; import java.util.*;
import java.util.Collections;
import java.util.HashMap;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.concurrent.atomic.AtomicInteger; import java.util.concurrent.atomic.AtomicInteger;
/** /**
...@@ -122,8 +118,8 @@ public class GraphBackedMetadataRepository implements MetadataRepository { ...@@ -122,8 +118,8 @@ public class GraphBackedMetadataRepository implements MetadataRepository {
} }
@Override @Override
public String getFieldNameInVertex(IDataType<?> dataType, AttributeInfo aInfo) { public String getFieldNameInVertex(IDataType<?> dataType, AttributeInfo aInfo) throws MetadataException {
return dataType.getName() + "." + aInfo.name; return getQualifiedName(dataType, aInfo.name);
} }
@Override @Override
...@@ -693,7 +689,10 @@ public class GraphBackedMetadataRepository implements MetadataRepository { ...@@ -693,7 +689,10 @@ public class GraphBackedMetadataRepository implements MetadataRepository {
break; break;
case ENUM: case ENUM:
addProperty(instanceVertex, propertyName, typedInstance.getInt(attributeInfo.name)); //handles both int and string for enum
EnumValue enumValue = (EnumValue) dataType.convert(typedInstance.get(attributeInfo.name),
Multiplicity.REQUIRED);
addProperty(instanceVertex, propertyName, enumValue.value);
break; break;
case ARRAY: case ARRAY:
...@@ -745,18 +744,15 @@ public class GraphBackedMetadataRepository implements MetadataRepository { ...@@ -745,18 +744,15 @@ public class GraphBackedMetadataRepository implements MetadataRepository {
String propertyName = getQualifiedName(typedInstance, attributeInfo); String propertyName = getQualifiedName(typedInstance, attributeInfo);
IDataType elementType = ((DataTypes.ArrayType) attributeInfo.dataType()).getElemType(); IDataType elementType = ((DataTypes.ArrayType) attributeInfo.dataType()).getElemType();
StringBuilder buffer = new StringBuilder(); List<String> values = new ArrayList(list.size());
Object[] array = list.toArray(); for (int index = 0; index < list.size(); index++) {
for (int index = 0; index < array.length; index++) {
String entryId = mapCollectionEntryToVertex(id, instanceVertex, String entryId = mapCollectionEntryToVertex(id, instanceVertex,
attributeInfo, idToVertexMap, elementType, array[index], attributeInfo, idToVertexMap, elementType, list.get(index), propertyName);
propertyName, String.valueOf(index)); values.add(entryId);
buffer.append(entryId).append(",");
} }
buffer.setLength(buffer.length() - 1);
// for dereference on way out // for dereference on way out
addProperty(instanceVertex, propertyName, buffer.toString()); addProperty(instanceVertex, propertyName, values);
} }
private void mapMapCollectionToVertex(Id id, ITypedInstance typedInstance, private void mapMapCollectionToVertex(Id id, ITypedInstance typedInstance,
...@@ -774,33 +770,27 @@ public class GraphBackedMetadataRepository implements MetadataRepository { ...@@ -774,33 +770,27 @@ public class GraphBackedMetadataRepository implements MetadataRepository {
} }
String propertyName = getQualifiedName(typedInstance, attributeInfo); String propertyName = getQualifiedName(typedInstance, attributeInfo);
StringBuilder buffer = new StringBuilder();
IDataType elementType = ((DataTypes.MapType) attributeInfo.dataType()).getValueType(); IDataType elementType = ((DataTypes.MapType) attributeInfo.dataType()).getValueType();
for (Map.Entry entry : collection.entrySet()) { for (Map.Entry entry : collection.entrySet()) {
String entryId = mapCollectionEntryToVertex(id, instanceVertex, attributeInfo, String myPropertyName = propertyName + "." + entry.getKey().toString();
idToVertexMap, elementType, entry.getValue(), mapCollectionEntryToVertex(id, instanceVertex, attributeInfo,
propertyName, String.valueOf(entry.getKey())); idToVertexMap, elementType, entry.getValue(), myPropertyName);
buffer.append(entryId).append(",");
} }
buffer.setLength(buffer.length() - 1);
// for dereference on way out // for dereference on way out
addProperty(instanceVertex, propertyName, buffer.toString()); addProperty(instanceVertex, propertyName, new ArrayList(collection.keySet()));
} }
private String mapCollectionEntryToVertex(Id id, Vertex instanceVertex, private String mapCollectionEntryToVertex(Id id, Vertex instanceVertex,
AttributeInfo attributeInfo, AttributeInfo attributeInfo,
Map<Id, Vertex> idToVertexMap, Map<Id, Vertex> idToVertexMap,
IDataType elementType, Object value, IDataType elementType, Object value,
String propertyName, String propertyName) throws MetadataException {
String key) throws MetadataException {
final String propertyNameWithSuffix = propertyName + "." + key;
final String edgeLabel = EDGE_LABEL_PREFIX + propertyName; final String edgeLabel = EDGE_LABEL_PREFIX + propertyName;
switch (elementType.getTypeCategory()) { switch (elementType.getTypeCategory()) {
case PRIMITIVE: case PRIMITIVE:
case ENUM: case ENUM:
addProperty(instanceVertex, propertyNameWithSuffix, value); return value.toString();
return propertyNameWithSuffix;
case ARRAY: case ARRAY:
case MAP: case MAP:
...@@ -814,13 +804,12 @@ public class GraphBackedMetadataRepository implements MetadataRepository { ...@@ -814,13 +804,12 @@ public class GraphBackedMetadataRepository implements MetadataRepository {
// add an edge to the newly created vertex from the parent // add an edge to the newly created vertex from the parent
Edge structElementEdge = GraphHelper.addEdge( Edge structElementEdge = GraphHelper.addEdge(
titanGraph, instanceVertex, structInstanceVertex, edgeLabel); titanGraph, instanceVertex, structInstanceVertex, edgeLabel);
return propertyName + "." + key + ":" + structElementEdge.getId(); return structElementEdge.getId().toString();
case CLASS: case CLASS:
Id referenceId = (Id) value; Id referenceId = (Id) value;
String edgeId = mapClassReferenceAsEdge( return mapClassReferenceAsEdge(
instanceVertex, idToVertexMap, edgeLabel, referenceId); instanceVertex, idToVertexMap, edgeLabel, referenceId);
return propertyName + "." + key + ":" + edgeId;
default: default:
throw new IllegalArgumentException("Unknown type category: " throw new IllegalArgumentException("Unknown type category: "
...@@ -930,6 +919,8 @@ public class GraphBackedMetadataRepository implements MetadataRepository { ...@@ -930,6 +919,8 @@ public class GraphBackedMetadataRepository implements MetadataRepository {
propertyValue = typedInstance.getDouble(attributeInfo.name); propertyValue = typedInstance.getDouble(attributeInfo.name);
} else if (attributeInfo.dataType() == DataTypes.BIGDECIMAL_TYPE) { } else if (attributeInfo.dataType() == DataTypes.BIGDECIMAL_TYPE) {
propertyValue = typedInstance.getBigDecimal(attributeInfo.name); propertyValue = typedInstance.getBigDecimal(attributeInfo.name);
} else if (attributeInfo.dataType() == DataTypes.DATE_TYPE) {
propertyValue = typedInstance.getDate(attributeInfo.name);
} }
addProperty(instanceVertex, vertexPropertyName, propertyValue); addProperty(instanceVertex, vertexPropertyName, propertyValue);
} }
...@@ -1003,8 +994,8 @@ public class GraphBackedMetadataRepository implements MetadataRepository { ...@@ -1003,8 +994,8 @@ public class GraphBackedMetadataRepository implements MetadataRepository {
return; return;
} }
typedInstance.setInt(attributeInfo.name, typedInstance.set(attributeInfo.name, dataType.convert(instanceVertex.<String>getProperty
instanceVertex.<Integer>getProperty(vertexPropertyName)); (vertexPropertyName), Multiplicity.REQUIRED));
break; break;
case ARRAY: case ARRAY:
...@@ -1071,17 +1062,17 @@ public class GraphBackedMetadataRepository implements MetadataRepository { ...@@ -1071,17 +1062,17 @@ public class GraphBackedMetadataRepository implements MetadataRepository {
AttributeInfo attributeInfo, AttributeInfo attributeInfo,
String propertyName) throws MetadataException { String propertyName) throws MetadataException {
LOG.debug("mapping vertex {} to array {}", instanceVertex, attributeInfo.name); LOG.debug("mapping vertex {} to array {}", instanceVertex, attributeInfo.name);
String keys = instanceVertex.getProperty(propertyName); List list = instanceVertex.getProperty(propertyName);
if (keys == null || keys.length() == 0) { if (list == null || list.size() == 0) {
return; return;
} }
DataTypes.ArrayType arrayType = (DataTypes.ArrayType) attributeInfo.dataType(); DataTypes.ArrayType arrayType = (DataTypes.ArrayType) attributeInfo.dataType();
final IDataType elementType = arrayType.getElemType(); final IDataType elementType = arrayType.getElemType();
ArrayList values = new ArrayList(); ArrayList values = new ArrayList();
for (String propertyNameWithSuffix : keys.split(",")) { for (Object listElement : list) {
values.add(mapVertexToCollectionEntry(instanceVertex, attributeInfo, values.add(mapVertexToCollectionEntry(instanceVertex, attributeInfo, elementType, listElement,
elementType, propertyName, propertyNameWithSuffix)); propertyName));
} }
typedInstance.set(attributeInfo.name, values); typedInstance.set(attributeInfo.name, values);
...@@ -1089,20 +1080,13 @@ public class GraphBackedMetadataRepository implements MetadataRepository { ...@@ -1089,20 +1080,13 @@ public class GraphBackedMetadataRepository implements MetadataRepository {
public Object mapVertexToCollectionEntry(Vertex instanceVertex, public Object mapVertexToCollectionEntry(Vertex instanceVertex,
AttributeInfo attributeInfo, AttributeInfo attributeInfo,
IDataType elementType, IDataType elementType, Object value, String propertyName)
String propertyName,
String propertyNameWithSuffix)
throws MetadataException { throws MetadataException {
String edgeLabel = EDGE_LABEL_PREFIX + propertyName;
final String edgeLabel = EDGE_LABEL_PREFIX + propertyName;
final String edgeId = propertyNameWithSuffix
.substring(propertyNameWithSuffix.lastIndexOf(":") + 1, propertyNameWithSuffix.length());
switch (elementType.getTypeCategory()) { switch (elementType.getTypeCategory()) {
case PRIMITIVE: case PRIMITIVE:
return instanceVertex.getProperty(propertyNameWithSuffix);
case ENUM: case ENUM:
return instanceVertex.<Integer>getProperty(propertyNameWithSuffix); return value;
case ARRAY: case ARRAY:
case MAP: case MAP:
...@@ -1112,11 +1096,11 @@ public class GraphBackedMetadataRepository implements MetadataRepository { ...@@ -1112,11 +1096,11 @@ public class GraphBackedMetadataRepository implements MetadataRepository {
case STRUCT: case STRUCT:
return getStructInstanceFromVertex(instanceVertex, return getStructInstanceFromVertex(instanceVertex,
elementType, attributeInfo.name, edgeLabel, edgeId); elementType, attributeInfo.name, edgeLabel, (String) value);
case CLASS: case CLASS:
return mapClassReferenceToVertex( return mapClassReferenceToVertex(
instanceVertex, attributeInfo, edgeLabel, elementType, edgeId); instanceVertex, attributeInfo, edgeLabel, elementType, (String) value);
default: default:
break; break;
...@@ -1130,8 +1114,8 @@ public class GraphBackedMetadataRepository implements MetadataRepository { ...@@ -1130,8 +1114,8 @@ public class GraphBackedMetadataRepository implements MetadataRepository {
AttributeInfo attributeInfo, AttributeInfo attributeInfo,
String propertyName) throws MetadataException { String propertyName) throws MetadataException {
LOG.debug("mapping vertex {} to array {}", instanceVertex, attributeInfo.name); LOG.debug("mapping vertex {} to array {}", instanceVertex, attributeInfo.name);
String keys = instanceVertex.getProperty(propertyName); List<String> keys = instanceVertex.getProperty(propertyName);
if (keys == null || keys.length() == 0) { if (keys == null || keys.size() == 0) {
return; return;
} }
DataTypes.MapType mapType = (DataTypes.MapType) attributeInfo.dataType(); DataTypes.MapType mapType = (DataTypes.MapType) attributeInfo.dataType();
...@@ -1139,10 +1123,9 @@ public class GraphBackedMetadataRepository implements MetadataRepository { ...@@ -1139,10 +1123,9 @@ public class GraphBackedMetadataRepository implements MetadataRepository {
final IDataType valueType = mapType.getValueType(); final IDataType valueType = mapType.getValueType();
HashMap values = new HashMap(); HashMap values = new HashMap();
for (String propertyNameWithSuffix : keys.split(",")) { for (String key : keys) {
final String key = extractKey(propertyNameWithSuffix, keyType);
values.put(key, mapVertexToCollectionEntry(instanceVertex, attributeInfo, values.put(key, mapVertexToCollectionEntry(instanceVertex, attributeInfo,
valueType, propertyName, propertyNameWithSuffix)); valueType, propertyName, propertyName));
} }
typedInstance.set(attributeInfo.name, values); typedInstance.set(attributeInfo.name, values);
...@@ -1158,8 +1141,7 @@ public class GraphBackedMetadataRepository implements MetadataRepository { ...@@ -1158,8 +1141,7 @@ public class GraphBackedMetadataRepository implements MetadataRepository {
private ITypedStruct getStructInstanceFromVertex(Vertex instanceVertex, private ITypedStruct getStructInstanceFromVertex(Vertex instanceVertex,
IDataType elemType, IDataType elemType,
String attributeName, String attributeName, String relationshipLabel,
String relationshipLabel,
String edgeId) throws MetadataException { String edgeId) throws MetadataException {
LOG.debug("Finding edge for {} -> label {} ", instanceVertex, relationshipLabel); LOG.debug("Finding edge for {} -> label {} ", instanceVertex, relationshipLabel);
for (Edge edge : instanceVertex.getEdges(Direction.OUT, relationshipLabel)) { for (Edge edge : instanceVertex.getEdges(Direction.OUT, relationshipLabel)) {
...@@ -1310,6 +1292,9 @@ public class GraphBackedMetadataRepository implements MetadataRepository { ...@@ -1310,6 +1292,9 @@ public class GraphBackedMetadataRepository implements MetadataRepository {
} else if (attributeInfo.dataType() == DataTypes.BIGDECIMAL_TYPE) { } else if (attributeInfo.dataType() == DataTypes.BIGDECIMAL_TYPE) {
typedInstance.setBigDecimal(attributeInfo.name, typedInstance.setBigDecimal(attributeInfo.name,
instanceVertex.<BigDecimal>getProperty(vertexPropertyName)); instanceVertex.<BigDecimal>getProperty(vertexPropertyName));
} else if (attributeInfo.dataType() == DataTypes.DATE_TYPE) {
typedInstance.setDate(attributeInfo.name,
instanceVertex.<Date>getProperty(vertexPropertyName));
} }
} }
} }
......
...@@ -45,6 +45,7 @@ import org.slf4j.LoggerFactory; ...@@ -45,6 +45,7 @@ import org.slf4j.LoggerFactory;
import javax.inject.Inject; import javax.inject.Inject;
import java.math.BigDecimal; import java.math.BigDecimal;
import java.math.BigInteger; import java.math.BigInteger;
import java.util.Date;
import java.util.Map; import java.util.Map;
/** /**
...@@ -199,7 +200,7 @@ public class GraphBackedSearchIndexer implements SearchIndexer { ...@@ -199,7 +200,7 @@ public class GraphBackedSearchIndexer implements SearchIndexer {
break; break;
case ENUM: case ENUM:
createVertexMixedIndex(propertyName, Integer.class); createVertexMixedIndex(propertyName, String.class);
break; break;
case ARRAY: case ARRAY:
...@@ -249,8 +250,11 @@ public class GraphBackedSearchIndexer implements SearchIndexer { ...@@ -249,8 +250,11 @@ public class GraphBackedSearchIndexer implements SearchIndexer {
return Double.class; return Double.class;
} else if (dataType == DataTypes.BIGDECIMAL_TYPE) { } else if (dataType == DataTypes.BIGDECIMAL_TYPE) {
return BigDecimal.class; return BigDecimal.class;
} else if (dataType == DataTypes.DATE_TYPE) {
return Date.class;
} }
throw new IllegalArgumentException("unknown data type " + dataType); throw new IllegalArgumentException("unknown data type " + dataType);
} }
......
...@@ -53,8 +53,8 @@ import java.util.Iterator; ...@@ -53,8 +53,8 @@ import java.util.Iterator;
import java.util.List; import java.util.List;
public class GraphBackedTypeStore implements ITypeStore { public class GraphBackedTypeStore implements ITypeStore {
public static final String VERTEX_TYPE = "typeSystem"; public static final String VERTEX_TYPE = Constants.INTERNAL_PROPERTY_KEY_PREFIX + "typeSystem";
private static final String PROPERTY_PREFIX = "type."; private static final String PROPERTY_PREFIX = Constants.INTERNAL_PROPERTY_KEY_PREFIX + "type.";
public static final String SUPERTYPE_EDGE_LABEL = PROPERTY_PREFIX + ".supertype"; public static final String SUPERTYPE_EDGE_LABEL = PROPERTY_PREFIX + ".supertype";
public static final String SUBTYPE_EDGE_LABEL = PROPERTY_PREFIX + ".subtype"; public static final String SUBTYPE_EDGE_LABEL = PROPERTY_PREFIX + ".subtype";
...@@ -188,7 +188,7 @@ public class GraphBackedTypeStore implements ITypeStore { ...@@ -188,7 +188,7 @@ public class GraphBackedTypeStore implements ITypeStore {
break; break;
default: default:
throw new IllegalArgumentException("Unhandled type category " + attrDataType.getTypeCategory()); throw new IllegalArgumentException("Attribute cannot reference instances of type : " + attrDataType.getTypeCategory());
} }
for (IDataType attrType : attrDataTypes) { for (IDataType attrType : attrDataTypes) {
......
...@@ -20,8 +20,9 @@ package org.apache.hadoop.metadata.services; ...@@ -20,8 +20,9 @@ package org.apache.hadoop.metadata.services;
import com.google.common.base.Preconditions; import com.google.common.base.Preconditions;
import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableList;
import com.google.inject.Injector;
import org.apache.hadoop.metadata.MetadataException; import org.apache.hadoop.metadata.MetadataException;
import org.apache.hadoop.metadata.MetadataServiceClient;
import org.apache.hadoop.metadata.classification.InterfaceAudience;
import org.apache.hadoop.metadata.discovery.SearchIndexer; import org.apache.hadoop.metadata.discovery.SearchIndexer;
import org.apache.hadoop.metadata.listener.EntityChangeListener; import org.apache.hadoop.metadata.listener.EntityChangeListener;
import org.apache.hadoop.metadata.listener.TypesChangeListener; import org.apache.hadoop.metadata.listener.TypesChangeListener;
...@@ -33,14 +34,20 @@ import org.apache.hadoop.metadata.typesystem.Referenceable; ...@@ -33,14 +34,20 @@ import org.apache.hadoop.metadata.typesystem.Referenceable;
import org.apache.hadoop.metadata.typesystem.Struct; import org.apache.hadoop.metadata.typesystem.Struct;
import org.apache.hadoop.metadata.typesystem.TypesDef; import org.apache.hadoop.metadata.typesystem.TypesDef;
import org.apache.hadoop.metadata.typesystem.json.InstanceSerialization; import org.apache.hadoop.metadata.typesystem.json.InstanceSerialization;
import org.apache.hadoop.metadata.typesystem.json.Serialization$;
import org.apache.hadoop.metadata.typesystem.json.TypesSerialization; import org.apache.hadoop.metadata.typesystem.json.TypesSerialization;
import org.apache.hadoop.metadata.typesystem.types.*; import org.apache.hadoop.metadata.typesystem.types.AttributeDefinition;
import org.apache.hadoop.metadata.typesystem.types.ClassType;
import org.apache.hadoop.metadata.typesystem.types.DataTypes;
import org.apache.hadoop.metadata.typesystem.types.HierarchicalTypeDefinition;
import org.apache.hadoop.metadata.typesystem.types.IDataType;
import org.apache.hadoop.metadata.typesystem.types.Multiplicity;
import org.apache.hadoop.metadata.typesystem.types.TraitType;
import org.apache.hadoop.metadata.typesystem.types.TypeSystem;
import org.apache.hadoop.metadata.typesystem.types.utils.TypesUtil;
import org.codehaus.jettison.json.JSONException; import org.codehaus.jettison.json.JSONException;
import org.codehaus.jettison.json.JSONObject; import org.codehaus.jettison.json.JSONObject;
import org.slf4j.Logger; import org.slf4j.Logger;
import org.slf4j.LoggerFactory; import org.slf4j.LoggerFactory;
import scala.tools.cmd.Meta;
import javax.inject.Inject; import javax.inject.Inject;
import javax.inject.Singleton; import javax.inject.Singleton;
...@@ -83,12 +90,38 @@ public class DefaultMetadataService implements MetadataService { ...@@ -83,12 +90,38 @@ public class DefaultMetadataService implements MetadataService {
try { try {
TypesDef typesDef = typeStore.restore(); TypesDef typesDef = typeStore.restore();
typeSystem.defineTypes(typesDef); typeSystem.defineTypes(typesDef);
createSuperTypes();
} catch (MetadataException e) { } catch (MetadataException e) {
throw new RuntimeException(e); throw new RuntimeException(e);
} }
LOG.info("Restored type system from the store"); LOG.info("Restored type system from the store");
} }
private static final AttributeDefinition NAME_ATTRIBUTE =
TypesUtil.createRequiredAttrDef("name", DataTypes.STRING_TYPE);
private static final AttributeDefinition DESCRIPTION_ATTRIBUTE =
TypesUtil.createRequiredAttrDef("description", DataTypes.STRING_TYPE);
private static final String[] SUPER_TYPES = {
"DataSet",
"Process",
"Infrastructure",
};
@InterfaceAudience.Private
public void createSuperTypes() throws MetadataException {
if (typeSystem.isRegistered(SUPER_TYPES[0])) {
return; // this is already registered
}
for (String superTypeName : SUPER_TYPES) {
HierarchicalTypeDefinition<ClassType> superTypeDefinition =
TypesUtil.createClassTypeDef(superTypeName,
ImmutableList.<String>of(),
NAME_ATTRIBUTE, DESCRIPTION_ATTRIBUTE);
typeSystem.defineClassType(superTypeDefinition);
}
}
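// With these base types registered at startup, a domain model attaches to them simply by
// naming the super type when its class type is defined (this is what HiveLineageServiceTest
// does below, where hive_table extends DataSet and hive_process extends Process). A minimal
// illustrative sketch, reusing the TypesUtil/TypeSystem members already available in this
// class; the "hive_table" name and the "owner" attribute are examples only:
//
//     HierarchicalTypeDefinition<ClassType> tableDef =
//             TypesUtil.createClassTypeDef("hive_table", ImmutableList.of("DataSet"),
//                     TypesUtil.createRequiredAttrDef("owner", DataTypes.STRING_TYPE));
//     typeSystem.defineClassType(tableDef);
//     // "name" and "description" are inherited from the DataSet super type, so the
//     // subtype does not need to declare them again.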
/** /**
* Creates a new type based on the type system to enable adding * Creates a new type based on the type system to enable adding
...@@ -107,19 +140,15 @@ public class DefaultMetadataService implements MetadataService { ...@@ -107,19 +140,15 @@ public class DefaultMetadataService implements MetadataService {
if(typesDef.isEmpty()) if(typesDef.isEmpty())
throw new MetadataException("Invalid type definition"); throw new MetadataException("Invalid type definition");
Map<String, IDataType> typesAdded = typeSystem.defineTypes(typesDef); final Map<String, IDataType> typesAdded = typeSystem.defineTypes(typesDef);
//TODO how do we handle transaction - store failure?? //TODO how do we handle transaction - store failure??
typeStore.store(typeSystem, ImmutableList.copyOf(typesAdded.keySet())); typeStore.store(typeSystem, ImmutableList.copyOf(typesAdded.keySet()));
onTypesAddedToRepo(typesAdded); onTypesAddedToRepo(typesAdded);
return new JSONObject() {{
JSONObject response = new JSONObject(); put(MetadataServiceClient.TYPES, typesAdded.keySet());
for (Map.Entry<String, IDataType> entry : typesAdded.entrySet()) { }};
response.put(entry.getKey(), entry.getValue().getName());
}
return response;
} catch (JSONException e) { } catch (JSONException e) {
LOG.error("Unable to create response for types={}", typeDefinition, e); LOG.error("Unable to create response for types={}", typeDefinition, e);
throw new MetadataException("Unable to create response"); throw new MetadataException("Unable to create response");
......
...@@ -112,8 +112,19 @@ trait GraphPersistenceStrategies { ...@@ -112,8 +112,19 @@ trait GraphPersistenceStrategies {
} }
def loopObjectExpression(dataType: IDataType[_]) = { def loopObjectExpression(dataType: IDataType[_]) = {
s"{it.object.'${typeAttributeName}' == '${dataType.getName}'}" _typeTestExpression(dataType.getName, "it.object")
} }
def typeTestExpression(typeName : String) :String = {
_typeTestExpression(typeName, "it")
}
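// Builds a Gremlin closure that accepts a vertex either when its type attribute equals
// typeName, or when its super-type attribute (if present) contains typeName. For example,
// with typeName = "Table" this yields roughly (angle brackets stand for the trait's
// typeAttributeName / superTypeAttributeName values):
//   {(it.'<typeAttributeName>' == 'Table') | (it.'<superTypeAttributeName>' ? it.'<superTypeAttributeName>'.contains('Table') : false)}
// so a DSL query on a super type such as DataSet also matches vertices of its subtypes.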
private def _typeTestExpression(typeName: String, itRef: String): String = {
s"""{(${itRef}.'${typeAttributeName}' == '${typeName}') |
|(${itRef}.'${superTypeAttributeName}' ?
|${itRef}.'${superTypeAttributeName}'.contains('${typeName}') : false)}""".
stripMargin.replace(System.getProperty("line.separator"), "")
}
} }
object GraphPersistenceStrategy1 extends GraphPersistenceStrategies { object GraphPersistenceStrategy1 extends GraphPersistenceStrategies {
......
...@@ -184,8 +184,10 @@ class GremlinTranslator(expr: Expression, ...@@ -184,8 +184,10 @@ class GremlinTranslator(expr: Expression,
} }
private def genQuery(expr: Expression, inSelect: Boolean): String = expr match { private def genQuery(expr: Expression, inSelect: Boolean): String = expr match {
case ClassExpression(clsName) => s"""has("${gPersistenceBehavior.typeAttributeName}","$clsName")""" case ClassExpression(clsName) =>
case TraitExpression(clsName) => s"""has("${gPersistenceBehavior.typeAttributeName}","$clsName")""" s"""filter${gPersistenceBehavior.typeTestExpression(clsName)}"""
case TraitExpression(clsName) =>
s"""filter${gPersistenceBehavior.typeTestExpression(clsName)}"""
case fe@FieldExpression(fieldName, fInfo, child) if fe.dataType.getTypeCategory == TypeCategory.PRIMITIVE => { case fe@FieldExpression(fieldName, fInfo, child) if fe.dataType.getTypeCategory == TypeCategory.PRIMITIVE => {
val fN = "\"" + gPersistenceBehavior.fieldNameInVertex(fInfo.dataType, fInfo.attrInfo) + "\"" val fN = "\"" + gPersistenceBehavior.fieldNameInVertex(fInfo.dataType, fInfo.attrInfo) + "\""
child match { child match {
......
...@@ -29,7 +29,6 @@ import org.apache.hadoop.metadata.discovery.graph.GraphBackedDiscoveryService; ...@@ -29,7 +29,6 @@ import org.apache.hadoop.metadata.discovery.graph.GraphBackedDiscoveryService;
import org.apache.hadoop.metadata.query.HiveTitanSample; import org.apache.hadoop.metadata.query.HiveTitanSample;
import org.apache.hadoop.metadata.query.QueryTestsUtils; import org.apache.hadoop.metadata.query.QueryTestsUtils;
import org.apache.hadoop.metadata.repository.graph.GraphBackedMetadataRepository; import org.apache.hadoop.metadata.repository.graph.GraphBackedMetadataRepository;
import org.apache.hadoop.metadata.repository.graph.GraphBackedSearchIndexer;
import org.apache.hadoop.metadata.repository.graph.GraphHelper; import org.apache.hadoop.metadata.repository.graph.GraphHelper;
import org.apache.hadoop.metadata.repository.graph.GraphProvider; import org.apache.hadoop.metadata.repository.graph.GraphProvider;
import org.apache.hadoop.metadata.typesystem.ITypedReferenceableInstance; import org.apache.hadoop.metadata.typesystem.ITypedReferenceableInstance;
......
...@@ -57,6 +57,11 @@ import java.util.List; ...@@ -57,6 +57,11 @@ import java.util.List;
@Guice(modules = RepositoryMetadataModule.class) @Guice(modules = RepositoryMetadataModule.class)
public class HiveLineageServiceTest { public class HiveLineageServiceTest {
static {
// this must be the first thing to run; otherwise the reset would wipe out the super types created at service startup
TypeSystem.getInstance().reset();
}
@Inject @Inject
private DefaultMetadataService metadataService; private DefaultMetadataService metadataService;
...@@ -71,8 +76,6 @@ public class HiveLineageServiceTest { ...@@ -71,8 +76,6 @@ public class HiveLineageServiceTest {
@BeforeClass @BeforeClass
public void setUp() throws Exception { public void setUp() throws Exception {
TypeSystem.getInstance().reset();
setUpTypes(); setUpTypes();
setupInstances(); setupInstances();
...@@ -166,6 +169,23 @@ public class HiveLineageServiceTest { ...@@ -166,6 +169,23 @@ public class HiveLineageServiceTest {
} }
@Test @Test
public void testGetInputsGraph() throws Exception {
JSONObject results = new JSONObject(
hiveLineageService.getInputsGraph("sales_fact_monthly_mv"));
Assert.assertNotNull(results);
System.out.println("inputs graph = " + results);
JSONObject values = results.getJSONObject("values");
Assert.assertNotNull(values);
final JSONObject vertices = values.getJSONObject("vertices");
Assert.assertEquals(vertices.length(), 4);
final JSONObject edges = values.getJSONObject("edges");
Assert.assertEquals(edges.length(), 4);
}
@Test
public void testGetOutputs() throws Exception { public void testGetOutputs() throws Exception {
JSONObject results = new JSONObject(hiveLineageService.getOutputs("sales_fact")); JSONObject results = new JSONObject(hiveLineageService.getOutputs("sales_fact"));
Assert.assertNotNull(results); Assert.assertNotNull(results);
...@@ -179,6 +199,22 @@ public class HiveLineageServiceTest { ...@@ -179,6 +199,22 @@ public class HiveLineageServiceTest {
Assert.assertTrue(paths.length() > 0); Assert.assertTrue(paths.length() > 0);
} }
@Test
public void testGetOutputsGraph() throws Exception {
JSONObject results = new JSONObject(hiveLineageService.getOutputsGraph("sales_fact"));
Assert.assertNotNull(results);
System.out.println("outputs graph = " + results);
JSONObject values = results.getJSONObject("values");
Assert.assertNotNull(values);
final JSONObject vertices = values.getJSONObject("vertices");
Assert.assertEquals(vertices.length(), 3);
final JSONObject edges = values.getJSONObject("edges");
Assert.assertEquals(edges.length(), 4);
}
@DataProvider(name = "tableNamesProvider") @DataProvider(name = "tableNamesProvider")
private Object[][] tableNames() { private Object[][] tableNames() {
return new String[][] { return new String[][] {
...@@ -247,9 +283,7 @@ public class HiveLineageServiceTest { ...@@ -247,9 +283,7 @@ public class HiveLineageServiceTest {
); );
HierarchicalTypeDefinition<ClassType> tblClsDef = HierarchicalTypeDefinition<ClassType> tblClsDef =
TypesUtil.createClassTypeDef(HIVE_TABLE_TYPE, null, TypesUtil.createClassTypeDef(HIVE_TABLE_TYPE, ImmutableList.of("DataSet"),
attrDef("name", DataTypes.STRING_TYPE),
attrDef("description", DataTypes.STRING_TYPE),
attrDef("owner", DataTypes.STRING_TYPE), attrDef("owner", DataTypes.STRING_TYPE),
attrDef("createTime", DataTypes.INT_TYPE), attrDef("createTime", DataTypes.INT_TYPE),
attrDef("lastAccessTime", DataTypes.INT_TYPE), attrDef("lastAccessTime", DataTypes.INT_TYPE),
...@@ -265,8 +299,7 @@ public class HiveLineageServiceTest { ...@@ -265,8 +299,7 @@ public class HiveLineageServiceTest {
); );
HierarchicalTypeDefinition<ClassType> loadProcessClsDef = HierarchicalTypeDefinition<ClassType> loadProcessClsDef =
TypesUtil.createClassTypeDef(HIVE_PROCESS_TYPE, null, TypesUtil.createClassTypeDef(HIVE_PROCESS_TYPE, ImmutableList.of("Process"),
attrDef("name", DataTypes.STRING_TYPE),
attrDef("userName", DataTypes.STRING_TYPE), attrDef("userName", DataTypes.STRING_TYPE),
attrDef("startTime", DataTypes.INT_TYPE), attrDef("startTime", DataTypes.INT_TYPE),
attrDef("endTime", DataTypes.INT_TYPE), attrDef("endTime", DataTypes.INT_TYPE),
...@@ -368,7 +401,7 @@ public class HiveLineageServiceTest { ...@@ -368,7 +401,7 @@ public class HiveLineageServiceTest {
"sales fact daily materialized view", "sales fact daily materialized view",
reportingDB, sd, "Joe BI", "Managed", salesFactColumns, "Metric"); reportingDB, sd, "Joe BI", "Managed", salesFactColumns, "Metric");
loadProcess("loadSalesDaily", "John ETL", loadProcess("loadSalesDaily", "hive query for daily summary", "John ETL",
ImmutableList.of(salesFact, timeDim), ImmutableList.of(salesFactDaily), ImmutableList.of(salesFact, timeDim), ImmutableList.of(salesFactDaily),
"create table as select ", "plan", "id", "graph", "create table as select ", "plan", "id", "graph",
"ETL"); "ETL");
...@@ -401,7 +434,7 @@ public class HiveLineageServiceTest { ...@@ -401,7 +434,7 @@ public class HiveLineageServiceTest {
"sales fact monthly materialized view", "sales fact monthly materialized view",
reportingDB, sd, "Jane BI", "Managed", salesFactColumns, "Metric"); reportingDB, sd, "Jane BI", "Managed", salesFactColumns, "Metric");
loadProcess("loadSalesMonthly", "John ETL", loadProcess("loadSalesMonthly", "hive query for monthly summary", "John ETL",
ImmutableList.of(salesFactDaily), ImmutableList.of(salesFactMonthly), ImmutableList.of(salesFactDaily), ImmutableList.of(salesFactMonthly),
"create table as select ", "plan", "id", "graph", "create table as select ", "plan", "id", "graph",
"ETL"); "ETL");
...@@ -463,7 +496,7 @@ public class HiveLineageServiceTest { ...@@ -463,7 +496,7 @@ public class HiveLineageServiceTest {
return createInstance(referenceable); return createInstance(referenceable);
} }
Id loadProcess(String name, String user, Id loadProcess(String name, String description, String user,
List<Id> inputTables, List<Id> inputTables,
List<Id> outputTables, List<Id> outputTables,
String queryText, String queryPlan, String queryText, String queryPlan,
...@@ -471,6 +504,7 @@ public class HiveLineageServiceTest { ...@@ -471,6 +504,7 @@ public class HiveLineageServiceTest {
String... traitNames) throws Exception { String... traitNames) throws Exception {
Referenceable referenceable = new Referenceable(HIVE_PROCESS_TYPE, traitNames); Referenceable referenceable = new Referenceable(HIVE_PROCESS_TYPE, traitNames);
referenceable.set("name", name); referenceable.set("name", name);
referenceable.set("description", description);
referenceable.set("user", user); referenceable.set("user", user);
referenceable.set("startTime", System.currentTimeMillis()); referenceable.set("startTime", System.currentTimeMillis());
referenceable.set("endTime", System.currentTimeMillis() + 10000); referenceable.set("endTime", System.currentTimeMillis() + 10000);
......
...@@ -49,6 +49,8 @@ public abstract class BaseTest { ...@@ -49,6 +49,8 @@ public abstract class BaseTest {
public static final String STRUCT_TYPE_1 = "t1"; public static final String STRUCT_TYPE_1 = "t1";
public static final String STRUCT_TYPE_2 = "t2"; public static final String STRUCT_TYPE_2 = "t2";
public static final String TEST_DATE = "2014-12-11T02:35:58.440Z";
public static final long TEST_DATE_IN_LONG = 1418265358440L;
protected IRepository repo; protected IRepository repo;
public static Struct createStruct() throws MetadataException { public static Struct createStruct() throws MetadataException {
......
...@@ -23,9 +23,11 @@ import com.thinkaurelius.titan.core.TitanGraph; ...@@ -23,9 +23,11 @@ import com.thinkaurelius.titan.core.TitanGraph;
import com.tinkerpop.blueprints.Compare; import com.tinkerpop.blueprints.Compare;
import com.tinkerpop.blueprints.GraphQuery; import com.tinkerpop.blueprints.GraphQuery;
import com.tinkerpop.blueprints.Vertex; import com.tinkerpop.blueprints.Vertex;
import org.apache.commons.lang.RandomStringUtils;
import org.apache.hadoop.metadata.RepositoryMetadataModule; import org.apache.hadoop.metadata.RepositoryMetadataModule;
import org.apache.hadoop.metadata.TestUtils; import org.apache.hadoop.metadata.TestUtils;
import org.apache.hadoop.metadata.discovery.graph.GraphBackedDiscoveryService; import org.apache.hadoop.metadata.discovery.graph.GraphBackedDiscoveryService;
import org.apache.hadoop.metadata.repository.BaseTest;
import org.apache.hadoop.metadata.repository.Constants; import org.apache.hadoop.metadata.repository.Constants;
import org.apache.hadoop.metadata.repository.RepositoryException; import org.apache.hadoop.metadata.repository.RepositoryException;
import org.apache.hadoop.metadata.typesystem.ITypedReferenceableInstance; import org.apache.hadoop.metadata.typesystem.ITypedReferenceableInstance;
...@@ -53,11 +55,10 @@ import org.testng.annotations.Test; ...@@ -53,11 +55,10 @@ import org.testng.annotations.Test;
import scala.actors.threadpool.Arrays; import scala.actors.threadpool.Arrays;
import javax.inject.Inject; import javax.inject.Inject;
import java.util.ArrayList; import java.util.*;
import java.util.HashMap;
import java.util.Iterator; import static org.apache.hadoop.metadata.typesystem.types.utils.TypesUtil.createRequiredAttrDef;
import java.util.List; import static org.apache.hadoop.metadata.typesystem.types.utils.TypesUtil.createStructTypeDef;
import java.util.UUID;
/** /**
* GraphBackedMetadataRepository test * GraphBackedMetadataRepository test
...@@ -154,6 +155,7 @@ public class GraphBackedMetadataRepositoryTest { ...@@ -154,6 +155,7 @@ public class GraphBackedMetadataRepositoryTest {
Referenceable databaseInstance = new Referenceable(DATABASE_TYPE); Referenceable databaseInstance = new Referenceable(DATABASE_TYPE);
databaseInstance.set("name", DATABASE_NAME); databaseInstance.set("name", DATABASE_NAME);
databaseInstance.set("description", "foo database"); databaseInstance.set("description", "foo database");
databaseInstance.set("created", new Date(BaseTest.TEST_DATE_IN_LONG));
databaseInstance.set("namespace", "colo:cluster:hive:db"); databaseInstance.set("namespace", "colo:cluster:hive:db");
databaseInstance.set("cluster", "cluster-1"); databaseInstance.set("cluster", "cluster-1");
...@@ -180,6 +182,7 @@ public class GraphBackedMetadataRepositoryTest { ...@@ -180,6 +182,7 @@ public class GraphBackedMetadataRepositoryTest {
String guid = getGUID(); String guid = getGUID();
ITypedReferenceableInstance table = repositoryService.getEntityDefinition(guid); ITypedReferenceableInstance table = repositoryService.getEntityDefinition(guid);
Assert.assertEquals(table.getDate("created"), new Date(BaseTest.TEST_DATE_IN_LONG));
System.out.println("*** table = " + table); System.out.println("*** table = " + table);
} }
...@@ -349,6 +352,57 @@ public class GraphBackedMetadataRepositoryTest { ...@@ -349,6 +352,57 @@ public class GraphBackedMetadataRepositoryTest {
} }
} }
@Test(dependsOnMethods = "testSubmitEntity")
public void testSearchByDSLWithInheritance() throws Exception {
String dslQuery = "Person where name = 'Jane'";
System.out.println("Executing dslQuery = " + dslQuery);
String jsonResults = discoveryService.searchByDSL(dslQuery);
Assert.assertNotNull(jsonResults);
JSONObject results = new JSONObject(jsonResults);
Assert.assertEquals(results.length(), 3);
System.out.println("results = " + results);
Object query = results.get("query");
Assert.assertNotNull(query);
JSONObject dataType = results.getJSONObject("dataType");
Assert.assertNotNull(dataType);
String typeName = dataType.getString("typeName");
Assert.assertEquals(typeName, "Person");
JSONArray rows = results.getJSONArray("rows");
Assert.assertEquals(rows.length(), 1);
JSONObject row = rows.getJSONObject(0);
Assert.assertEquals(row.getString("$typeName$"), "Manager");
Assert.assertEquals(row.getString("name"), "Jane");
}
@Test(dependsOnMethods = "testCreateEntity")
public void testBug37860() throws Exception {
String dslQuery =
"hive_table as t where name = 'bar' " +
"database where name = 'foo' and description = 'foo database' select t";
System.out.println("Executing dslQuery = " + dslQuery);
String jsonResults = discoveryService.searchByDSL(dslQuery);
Assert.assertNotNull(jsonResults);
JSONObject results = new JSONObject(jsonResults);
Assert.assertEquals(results.length(), 3);
System.out.println("results = " + results);
Object query = results.get("query");
Assert.assertNotNull(query);
JSONObject dataType = results.getJSONObject("dataType");
Assert.assertNotNull(dataType);
JSONArray rows = results.getJSONArray("rows");
Assert.assertEquals(rows.length(), 1);
}
/** /**
* Full text search requires GraphBackedSearchIndexer, and GraphBackedSearchIndexer can't be enabled in * Full text search requires GraphBackedSearchIndexer, and GraphBackedSearchIndexer can't be enabled in
* GraphBackedDiscoveryServiceTest because of its test data. So, test for full text search is in * GraphBackedDiscoveryServiceTest because of its test data. So, test for full text search is in
...@@ -399,8 +453,10 @@ public class GraphBackedMetadataRepositoryTest { ...@@ -399,8 +453,10 @@ public class GraphBackedMetadataRepositoryTest {
TypesUtil.createClassTypeDef(DATABASE_TYPE, TypesUtil.createClassTypeDef(DATABASE_TYPE,
ImmutableList.of(SUPER_TYPE_NAME), ImmutableList.of(SUPER_TYPE_NAME),
TypesUtil.createUniqueRequiredAttrDef("name", DataTypes.STRING_TYPE), TypesUtil.createUniqueRequiredAttrDef("name", DataTypes.STRING_TYPE),
TypesUtil.createOptionalAttrDef("created", DataTypes.DATE_TYPE),
TypesUtil.createRequiredAttrDef("description", DataTypes.STRING_TYPE)); TypesUtil.createRequiredAttrDef("description", DataTypes.STRING_TYPE));
StructTypeDefinition structTypeDefinition = StructTypeDefinition structTypeDefinition =
new StructTypeDefinition("serdeType", new StructTypeDefinition("serdeType",
new AttributeDefinition[]{ new AttributeDefinition[]{
...@@ -434,6 +490,7 @@ public class GraphBackedMetadataRepositoryTest { ...@@ -434,6 +490,7 @@ public class GraphBackedMetadataRepositoryTest {
TypesUtil.createUniqueRequiredAttrDef("name", DataTypes.STRING_TYPE), TypesUtil.createUniqueRequiredAttrDef("name", DataTypes.STRING_TYPE),
TypesUtil.createRequiredAttrDef("description", DataTypes.STRING_TYPE), TypesUtil.createRequiredAttrDef("description", DataTypes.STRING_TYPE),
TypesUtil.createRequiredAttrDef("type", DataTypes.STRING_TYPE), TypesUtil.createRequiredAttrDef("type", DataTypes.STRING_TYPE),
TypesUtil.createOptionalAttrDef("created", DataTypes.DATE_TYPE),
// enum // enum
new AttributeDefinition("tableType", "tableType", new AttributeDefinition("tableType", "tableType",
Multiplicity.REQUIRED, false, null), Multiplicity.REQUIRED, false, null),
...@@ -497,6 +554,7 @@ public class GraphBackedMetadataRepositoryTest { ...@@ -497,6 +554,7 @@ public class GraphBackedMetadataRepositoryTest {
tableInstance.set("name", TABLE_NAME); tableInstance.set("name", TABLE_NAME);
tableInstance.set("description", "bar table"); tableInstance.set("description", "bar table");
tableInstance.set("type", "managed"); tableInstance.set("type", "managed");
tableInstance.set("created", new Date(BaseTest.TEST_DATE_IN_LONG));
tableInstance.set("tableType", 1); // enum tableInstance.set("tableType", 1); // enum
// super type // super type
...@@ -561,4 +619,25 @@ public class GraphBackedMetadataRepositoryTest { ...@@ -561,4 +619,25 @@ public class GraphBackedMetadataRepositoryTest {
ClassType tableType = typeSystem.getDataType(ClassType.class, TABLE_TYPE); ClassType tableType = typeSystem.getDataType(ClassType.class, TABLE_TYPE);
return tableType.convert(tableInstance, Multiplicity.REQUIRED); return tableType.convert(tableInstance, Multiplicity.REQUIRED);
} }
private String random() {
return RandomStringUtils.random(10);
}
@Test
public void testUTFValues() throws Exception {
Referenceable hrDept = new Referenceable("Department");
Referenceable john = new Referenceable("Person");
john.set("name", random());
john.set("department", hrDept);
hrDept.set("name", random());
hrDept.set("employees", ImmutableList.of(john));
ClassType deptType = typeSystem.getDataType(ClassType.class, "Department");
ITypedReferenceableInstance hrDept2 = deptType.convert(hrDept, Multiplicity.REQUIRED);
guid = repositoryService.createEntity(hrDept2);
Assert.assertNotNull(guid);
}
} }
...@@ -195,7 +195,7 @@ public class EnumTest extends BaseTest { ...@@ -195,7 +195,7 @@ public class EnumTest extends BaseTest {
"\tj : \t1\n" + "\tj : \t1\n" +
"\tk : \t1\n" + "\tk : \t1\n" +
"\tenum3 : \tCOMMITTED\n" + "\tenum3 : \tCOMMITTED\n" +
"\tl : \t2014-12-11\n" + "\tl : \t" + TEST_DATE + "\n" +
"\tm : \t[1, 1]\n" + "\tm : \t[1, 1]\n" +
"\tn : \t[1.1, 1.1]\n" + "\tn : \t[1.1, 1.1]\n" +
"\to : \t{b=2.0, a=1.0}\n" + "\to : \t{b=2.0, a=1.0}\n" +
...@@ -227,7 +227,7 @@ public class EnumTest extends BaseTest { ...@@ -227,7 +227,7 @@ public class EnumTest extends BaseTest {
"\tj : \t1\n" + "\tj : \t1\n" +
"\tk : \t1\n" + "\tk : \t1\n" +
"\tenum3 : \tCOMMITTED\n" + "\tenum3 : \tCOMMITTED\n" +
"\tl : \t2014-12-11\n" + "\tl : \t" + TEST_DATE + "\n" +
"\tm : \t[1, 1]\n" + "\tm : \t[1, 1]\n" +
"\tn : \t[1.1, 1.1]\n" + "\tn : \t[1.1, 1.1]\n" +
"\to : \t{b=2.0, a=1.0}\n" + "\to : \t{b=2.0, a=1.0}\n" +
...@@ -264,7 +264,7 @@ public class EnumTest extends BaseTest { ...@@ -264,7 +264,7 @@ public class EnumTest extends BaseTest {
"\tj : \t1\n" + "\tj : \t1\n" +
"\tk : \t1\n" + "\tk : \t1\n" +
"\tenum3 : \tCOMMITTED\n" + "\tenum3 : \tCOMMITTED\n" +
"\tl : \t2014-12-11\n" + "\tl : \t" + TEST_DATE + "\n" +
"\tm : \t[1, 1]\n" + "\tm : \t[1, 1]\n" +
"\tn : \t[1.1, 1.1]\n" + "\tn : \t[1.1, 1.1]\n" +
"\to : \t{b=2.0, a=1.0}\n" + "\to : \t{b=2.0, a=1.0}\n" +
...@@ -304,7 +304,7 @@ public class EnumTest extends BaseTest { ...@@ -304,7 +304,7 @@ public class EnumTest extends BaseTest {
"\tj : \t1\n" + "\tj : \t1\n" +
"\tk : \t1\n" + "\tk : \t1\n" +
"\tenum3 : \tCOMMITTED\n" + "\tenum3 : \tCOMMITTED\n" +
"\tl : \t2014-12-11\n" + "\tl : \t" + TEST_DATE + "\n" +
"\tm : \t[1, 1]\n" + "\tm : \t[1, 1]\n" +
"\tn : \t[1.100000000000000088817841970012523233890533447265625, 1" + "\tn : \t[1.100000000000000088817841970012523233890533447265625, 1" +
".100000000000000088817841970012523233890533447265625]\n" + ".100000000000000088817841970012523233890533447265625]\n" +
......
...@@ -58,7 +58,7 @@ public class StructTest extends BaseTest { ...@@ -58,7 +58,7 @@ public class StructTest extends BaseTest {
"\ti : \t1.0\n" + "\ti : \t1.0\n" +
"\tj : \t1\n" + "\tj : \t1\n" +
"\tk : \t1\n" + "\tk : \t1\n" +
"\tl : \t2014-12-11\n" + "\tl : \t" + TEST_DATE + "\n" +
"\tm : \t[1, 1]\n" + "\tm : \t[1, 1]\n" +
"\tn : \t[1.1, 1.1]\n" + "\tn : \t[1.1, 1.1]\n" +
"\to : \t{b=2.0, a=1.0}\n" + "\to : \t{b=2.0, a=1.0}\n" +
...@@ -101,7 +101,7 @@ public class StructTest extends BaseTest { ...@@ -101,7 +101,7 @@ public class StructTest extends BaseTest {
"\ti : \t1.0\n" + "\ti : \t1.0\n" +
"\tj : \t1\n" + "\tj : \t1\n" +
"\tk : \t1\n" + "\tk : \t1\n" +
"\tl : \t2014-12-11\n" + "\tl : \t" + TEST_DATE + "\n" +
"\tm : \t[1, 1]\n" + "\tm : \t[1, 1]\n" +
"\tn : \t[1.100000000000000088817841970012523233890533447265625, 1" + "\tn : \t[1.100000000000000088817841970012523233890533447265625, 1" +
".100000000000000088817841970012523233890533447265625]\n" + ".100000000000000088817841970012523233890533447265625]\n" +
......
...@@ -143,4 +143,11 @@ class GremlinTest extends FunSuite with BeforeAndAfterAll with BaseGremlinTest { ...@@ -143,4 +143,11 @@ class GremlinTest extends FunSuite with BeforeAndAfterAll with BaseGremlinTest {
val e = p("from blah").right.get val e = p("from blah").right.get
an [ExpressionException] should be thrownBy QueryProcessor.evaluate(e, g) an [ExpressionException] should be thrownBy QueryProcessor.evaluate(e, g)
} }
test("Bug37860") {
val p = new QueryParser
val e = p("Table as t where name = 'sales_fact' db where name = 'Sales' and owner = 'John ETL' select t").right.get
val r = QueryProcessor.evaluate(e, g)
validateJson(r)
}
} }
...@@ -109,7 +109,7 @@ object QueryTestsUtils extends GraphUtils { ...@@ -109,7 +109,7 @@ object QueryTestsUtils extends GraphUtils {
def viewClsDef = new HierarchicalTypeDefinition[ClassType](classOf[ClassType], "View", null, def viewClsDef = new HierarchicalTypeDefinition[ClassType](classOf[ClassType], "View", null,
Array( Array(
attrDef("name", DataTypes.STRING_TYPE), attrDef("name", DataTypes.STRING_TYPE),
new AttributeDefinition("inputTables", "Table", Multiplicity.COLLECTION, false, null) new AttributeDefinition("inputTables", DataTypes.arrayTypeName("Table"), Multiplicity.COLLECTION, false, null)
)) ))
def dimTraitDef = new HierarchicalTypeDefinition[TraitType](classOf[TraitType], "Dimension", null, def dimTraitDef = new HierarchicalTypeDefinition[TraitType](classOf[TraitType], "Dimension", null,
......
#!/bin/bash
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License. See accompanying LICENSE file.
#
DIR=$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )
source ${DIR}/metadata-config.sh
${JAVA_BIN} ${JAVA_PROPERTIES} -cp ${METADATACPPATH} org.apache.hadoop.metadata.util.CredentialProviderUtility
#!/bin/bash
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License. See accompanying LICENSE file.
#
# resolve links - $0 may be a softlink
PRG="${0}"
while [ -h "${PRG}" ]; do
ls=`ls -ld "${PRG}"`
link=`expr "$ls" : '.*-> \(.*\)$'`
if expr "$link" : '/.*' > /dev/null; then
PRG="$link"
else
PRG=`dirname "${PRG}"`/"$link"
fi
done
BASEDIR=`dirname ${PRG}`
BASEDIR=`cd ${BASEDIR}/..;pwd`
if [ -z "$METADATA_CONF" ]; then
METADATA_CONF=${BASEDIR}/conf
fi
export METADATA_CONF
if [ -f "${METADATA_CONF}/metadata-env.sh" ]; then
. "${METADATA_CONF}/metadata-env.sh"
fi
if test -z ${JAVA_HOME}
then
JAVA_BIN=`which java`
JAR_BIN=`which jar`
else
JAVA_BIN=${JAVA_HOME}/bin/java
JAR_BIN=${JAVA_HOME}/bin/jar
fi
export JAVA_BIN
if [ ! -e $JAVA_BIN ] || [ ! -e $JAR_BIN ]; then
echo "$JAVA_BIN and/or $JAR_BIN not found on the system. Please make sure java and jar commands are available."
exit 1
fi
# default the heap size to 1GB
DEFAULT_JAVA_HEAP_MAX=-Xmx1024m
METADATA_OPTS="$DEFAULT_JAVA_HEAP_MAX $METADATA_OPTS"
METADATACPPATH="$METADATA_CONF"
METADATA_EXPANDED_WEBAPP_DIR=${METADATA_EXPANDED_WEBAPP_DIR:-${BASEDIR}/server/webapp}
export METADATA_EXPANDED_WEBAPP_DIR
# set the server classpath
if [ ! -d ${METADATA_EXPANDED_WEBAPP_DIR}/metadata/WEB-INF ]; then
mkdir -p ${METADATA_EXPANDED_WEBAPP_DIR}/metadata
cd ${METADATA_EXPANDED_WEBAPP_DIR}/metadata
$JAR_BIN -xf ${BASEDIR}/server/webapp/metadata.war
cd -
fi
METADATACPPATH="${METADATACPPATH}:${METADATA_EXPANDED_WEBAPP_DIR}/metadata/WEB-INF/classes"
METADATACPPATH="${METADATACPPATH}:${METADATA_EXPANDED_WEBAPP_DIR}/metadata/WEB-INF/lib/*:${BASEDIR}/libext/*"
# log and pid dirs for applications
METADATA_LOG_DIR="${METADATA_LOG_DIR:-$BASEDIR/logs}"
export METADATA_LOG_DIR
METADATA_PID_DIR="${METADATA_PID_DIR:-$BASEDIR/logs}"
# create the pid dir if it is not there
[ -w "$METADATA_PID_DIR" ] || mkdir -p "$METADATA_PID_DIR"
export METADATA_PID_DIR
METADATA_PID_FILE=${METADATA_PID_DIR}/metadata.pid
export METADATA_PID_FILE
METADATA_DATA_DIR=${METADATA_DATA_DIR:-${BASEDIR}/data}
METADATA_HOME_DIR="${METADATA_HOME_DIR:-$BASEDIR}"
export METADATA_HOME_DIR
# make sure the process is not running
if [ -f $METADATA_PID_FILE ]; then
if kill -0 `cat $METADATA_PID_FILE` > /dev/null 2>&1; then
echo metadata running as process `cat $METADATA_PID_FILE`. Stop it first.
exit 1
fi
fi
mkdir -p $METADATA_LOG_DIR
pushd ${BASEDIR} > /dev/null
JAVA_PROPERTIES="$METADATA_OPTS $METADATA_PROPERTIES -Dmetadata.log.dir=$METADATA_LOG_DIR -Dmetadata.home=${METADATA_HOME_DIR} -Dmetadata.conf=${METADATA_CONF} -Dmetadata.log.file=application.log"
shift
while [[ ${1} =~ ^\-D ]]; do
JAVA_PROPERTIES="${JAVA_PROPERTIES} ${1}"
shift
done
TIME=`date +%Y%m%d%H%M%S`
#!/bin/bash
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License. See accompanying LICENSE file.
#
DIR=$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )
source ${DIR}/metadata-config.sh
nohup ${JAVA_BIN} ${JAVA_PROPERTIES} -cp ${METADATACPPATH} org.apache.hadoop.metadata.Main -app ${METADATA_EXPANDED_WEBAPP_DIR}/metadata $* > "${METADATA_LOG_DIR}/metadata-server.$TIME.out" 2>&1 &
echo $! > $METADATA_PID_FILE
popd > /dev/null
echo Metadata Server started!!!
#!/bin/bash
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License. See accompanying LICENSE file.
#
# resolve links - $0 may be a softlink
PRG="${0}"
while [ -h "${PRG}" ]; do
ls=`ls -ld "${PRG}"`
link=`expr "$ls" : '.*-> \(.*\)$'`
if expr "$link" : '/.*' > /dev/null; then
PRG="$link"
else
PRG=`dirname "${PRG}"`/"$link"
fi
done
BASEDIR=`dirname ${PRG}`
BASEDIR=`cd ${BASEDIR}/..;pwd`
METADATA_PID_DIR="${METADATA_PID_DIR:-$BASEDIR/logs}"
# create the pid dir if it is not there
[ -w "$METADATA_PID_DIR" ] || mkdir -p "$METADATA_PID_DIR"
export METADATA_PID_DIR
METADATA_PID_FILE=${METADATA_PID_DIR}/metadata.pid
export METADATA_PID_FILE
if [ -f $METADATA_PID_FILE ]
then
kill -15 `cat $METADATA_PID_FILE`
echo Metadata Server stopped
rm -rf $METADATA_PID_FILE
else
echo "pid file $METADATA_PID_FILE not present"
fi
...@@ -109,9 +109,10 @@ def executeEnvSh(confDir): ...@@ -109,9 +109,10 @@ def executeEnvSh(confDir):
proc.communicate() proc.communicate()
def java(classname, args, classpath, jvm_opts_list): def java(classname, args, classpath, jvm_opts_list, logdir=None):
if os.environ["JAVA_HOME"] is not None and os.environ["JAVA_HOME"]: java_home = os.environ.get("JAVA_HOME", None)
prg = os.path.join(os.environ["JAVA_HOME"], "bin", "java") if java_home:
prg = os.path.join(java_home, "bin", "java")
else: else:
prg = which("java") prg = which("java")
...@@ -121,11 +122,12 @@ def java(classname, args, classpath, jvm_opts_list): ...@@ -121,11 +122,12 @@ def java(classname, args, classpath, jvm_opts_list):
commandline.append(classpath) commandline.append(classpath)
commandline.append(classname) commandline.append(classname)
commandline.extend(args) commandline.extend(args)
return runProcess(commandline) return runProcess(commandline, logdir)
def jar(path): def jar(path):
if os.environ["JAVA_HOME"] is not None and os.environ["JAVA_HOME"]: java_home = os.environ.get("JAVA_HOME", None)
prg = os.path.join(os.environ["JAVA_HOME"], "bin", "jar") if java_home:
prg = os.path.join(java_home, "bin", "jar")
else: else:
prg = which("jar") prg = which("jar")
...@@ -153,7 +155,7 @@ def which(program): ...@@ -153,7 +155,7 @@ def which(program):
return None return None
def runProcess(commandline): def runProcess(commandline, logdir=None):
""" """
Run a process Run a process
:param commandline: command line :param commandline: command line
...@@ -161,7 +163,13 @@ def runProcess(commandline): ...@@ -161,7 +163,13 @@ def runProcess(commandline):
""" """
global finished global finished
debug ("Executing : %s" % commandline) debug ("Executing : %s" % commandline)
return subprocess.Popen(commandline) timestr = time.strftime("metadata.%Y%m%d-%H%M%S")
stdoutFile = None
stderrFile = None
if logdir:
stdoutFile = open(os.path.join(logdir, timestr + ".out"), "w")
stderrFile = open(os.path.join(logdir,timestr + ".err"), "w")
return subprocess.Popen(commandline, stdout=stdoutFile, stderr=stderrFile)
def print_output(name, src, toStdErr): def print_output(name, src, toStdErr):
""" """
......
...@@ -17,6 +17,7 @@ ...@@ -17,6 +17,7 @@
# limitations under the License. # limitations under the License.
import os import os
import sys import sys
import traceback
import metadata_config as mc import metadata_config as mc
...@@ -29,8 +30,8 @@ def main(): ...@@ -29,8 +30,8 @@ def main():
metadata_home = mc.metadataDir() metadata_home = mc.metadataDir()
confdir = mc.dirMustExist(mc.confDir(metadata_home)) confdir = mc.dirMustExist(mc.confDir(metadata_home))
logdir = mc.dirMustExist(mc.logDir(metadata_home))
mc.executeEnvSh(confdir) mc.executeEnvSh(confdir)
logdir = mc.dirMustExist(mc.logDir(metadata_home))
#create sys property for conf dirs #create sys property for conf dirs
jvm_opts_list = (METADATA_LOG_OPTS % logdir).split() jvm_opts_list = (METADATA_LOG_OPTS % logdir).split()
...@@ -64,7 +65,7 @@ def main(): ...@@ -64,7 +65,7 @@ def main():
args = ["-app", os.path.join(web_app_dir, "metadata")] args = ["-app", os.path.join(web_app_dir, "metadata")]
args.extend(sys.argv[1:]) args.extend(sys.argv[1:])
process = mc.java("org.apache.hadoop.metadata.Main", args, metadata_classpath, jvm_opts_list) process = mc.java("org.apache.hadoop.metadata.Main", args, metadata_classpath, jvm_opts_list, logdir)
mc.writePid(metadata_pid_file, process) mc.writePid(metadata_pid_file, process)
print "Metadata Server started!!!\n" print "Metadata Server started!!!\n"
...@@ -74,6 +75,7 @@ if __name__ == '__main__': ...@@ -74,6 +75,7 @@ if __name__ == '__main__':
returncode = main() returncode = main()
except Exception as e: except Exception as e:
print "Exception: %s " % str(e) print "Exception: %s " % str(e)
print traceback.format_exc()
returncode = -1 returncode = -1
sys.exit(returncode) sys.exit(returncode)
...@@ -18,6 +18,7 @@ ...@@ -18,6 +18,7 @@
import os import os
from signal import SIGTERM from signal import SIGTERM
import sys import sys
import traceback
import metadata_config as mc import metadata_config as mc
...@@ -25,8 +26,8 @@ def main(): ...@@ -25,8 +26,8 @@ def main():
metadata_home = mc.metadataDir() metadata_home = mc.metadataDir()
confdir = mc.dirMustExist(mc.confDir(metadata_home)) confdir = mc.dirMustExist(mc.confDir(metadata_home))
piddir = mc.dirMustExist(mc.logDir(metadata_home))
mc.executeEnvSh(confdir) mc.executeEnvSh(confdir)
piddir = mc.dirMustExist(mc.logDir(metadata_home))
metadata_pid_file = mc.pidFile(metadata_home) metadata_pid_file = mc.pidFile(metadata_home)
...@@ -52,6 +53,7 @@ if __name__ == '__main__': ...@@ -52,6 +53,7 @@ if __name__ == '__main__':
returncode = main() returncode = main()
except Exception as e: except Exception as e:
print "Exception: %s " % str(e) print "Exception: %s " % str(e)
print traceback.format_exc()
returncode = -1 returncode = -1
sys.exit(returncode) sys.exit(returncode)
#!/bin/bash
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License. See accompanying LICENSE file.
#
# resolve links - $0 may be a softlink
PRG="${0}"
while [ -h "${PRG}" ]; do
ls=`ls -ld "${PRG}"`
link=`expr "$ls" : '.*-> \(.*\)$'`
if expr "$link" : '/.*' > /dev/null; then
PRG="$link"
else
PRG=`dirname "${PRG}"`/"$link"
fi
done
BASEDIR=`dirname ${PRG}`
BASEDIR=`cd ${BASEDIR}/..;pwd`
if [ -z "$METADATA_CONF" ]; then
METADATA_CONF=${BASEDIR}/conf
fi
export METADATA_CONF
if [ -f "${METADATA_CONF}/metadata-env.sh" ]; then
. "${METADATA_CONF}/metadata-env.sh"
fi
if test -z ${JAVA_HOME}
then
JAVA_BIN=`which java`
JAR_BIN=`which jar`
else
JAVA_BIN=${JAVA_HOME}/bin/java
JAR_BIN=${JAVA_HOME}/bin/jar
fi
export JAVA_BIN
if [ ! -e $JAVA_BIN ] || [ ! -e $JAR_BIN ]; then
echo "$JAVA_BIN and/or $JAR_BIN not found on the system. Please make sure java and jar commands are available."
exit 1
fi
# default the heap size to 1GB
DEFAULT_JAVA_HEAP_MAX=-Xmx1024m
METADATA_OPTS="$DEFAULT_JAVA_HEAP_MAX $METADATA_OPTS"
METADATACPPATH="$METADATA_CONF"
METADATA_EXPANDED_WEBAPP_DIR=${METADATA_EXPANDED_WEBAPP_DIR:-${BASEDIR}/server/webapp}
export METADATA_EXPANDED_WEBAPP_DIR
METADATACPPATH="${METADATACPPATH}:${METADATA_EXPANDED_WEBAPP_DIR}/metadata/WEB-INF/classes"
METADATACPPATH="${METADATACPPATH}:${METADATA_EXPANDED_WEBAPP_DIR}/metadata/WEB-INF/lib/*:${BASEDIR}/libext/*"
# log and pid dirs for applications
METADATA_LOG_DIR="${METADATA_LOG_DIR:-$BASEDIR/logs}"
export METADATA_LOG_DIR
METADATA_HOME_DIR="${METADATA_HOME_DIR:-$BASEDIR}"
export METADATA_HOME_DIR
JAVA_PROPERTIES="$METADATA_OPTS $METADATA_PROPERTIES -Dmetadata.log.dir=$METADATA_LOG_DIR -Dmetadata.home=${METADATA_HOME_DIR}"
${JAVA_BIN} ${JAVA_PROPERTIES} -cp ${METADATACPPATH} org.apache.hadoop.metadata.examples.QuickStart
echo Example data added to Metadata Server!!!
...@@ -28,8 +28,8 @@ def main(): ...@@ -28,8 +28,8 @@ def main():
metadata_home = mc.metadataDir() metadata_home = mc.metadataDir()
confdir = mc.dirMustExist(mc.confDir(metadata_home)) confdir = mc.dirMustExist(mc.confDir(metadata_home))
logdir = mc.dirMustExist(mc.logDir(metadata_home))
mc.executeEnvSh(confdir) mc.executeEnvSh(confdir)
logdir = mc.dirMustExist(mc.logDir(metadata_home))
#create sys property for conf dirs #create sys property for conf dirs
jvm_opts_list = (METADATA_LOG_OPTS % logdir).split() jvm_opts_list = (METADATA_LOG_OPTS % logdir).split()
......
...@@ -29,14 +29,12 @@ metadata.graph.index.search.elasticsearch.local-mode=true ...@@ -29,14 +29,12 @@ metadata.graph.index.search.elasticsearch.local-mode=true
metadata.graph.index.search.elasticsearch.create.sleep=2000 metadata.graph.index.search.elasticsearch.create.sleep=2000
######### Hive Lineage Configs ######### ######### Hive Lineage Configs #########
# This models follows the quick-start guide # This model reflects the base super types for Data and Process
metadata.lineage.hive.table.type.name=hive_table metadata.lineage.hive.table.type.name=DataSet
metadata.lineage.hive.table.column.name=columns metadata.lineage.hive.table.column.name=columns
metadata.lineage.hive.process.type.name=hive_process metadata.lineage.hive.process.type.name=Process
metadata.lineage.hive.process.inputs.name=inputTables metadata.lineage.hive.process.inputs.name=inputTables
metadata.lineage.hive.process.outputs.name=outputTables metadata.lineage.hive.process.outputs.name=outputTables
#Currently unused
#metadata.lineage.hive.column.type.name=Column
######### Security Properties ######### ######### Security Properties #########
......
#
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
######### Security Properties #########
# SSL config
metadata.enableTLS=false
truststore.file=/path/to/truststore.jks
cert.stores.credential.provider.path=jceks://file/path/to/credentialstore.jceks
# following only required for 2-way SSL
keystore.file=/path/to/keystore.jks
# Authentication config
# enabled: true or false
metadata.http.authentication.enabled=false
# type: simple or kerberos
metadata.http.authentication.type=simple
######### Security Properties #########
...@@ -50,13 +50,13 @@ class TestMetadata(unittest.TestCase): ...@@ -50,13 +50,13 @@ class TestMetadata(unittest.TestCase):
'org.apache.hadoop.metadata.Main', 'org.apache.hadoop.metadata.Main',
['-app', 'metadata_home/server/webapp/metadata'], ['-app', 'metadata_home/server/webapp/metadata'],
'metadata_home/conf:metadata_home/server/webapp/metadata/WEB-INF/classes:metadata_home/server/webapp/metadata/WEB-INF/lib\\*:metadata_home/libext\\*', 'metadata_home/conf:metadata_home/server/webapp/metadata/WEB-INF/classes:metadata_home/server/webapp/metadata/WEB-INF/lib\\*:metadata_home/libext\\*',
['-Dmetadata.log.dir=metadata_home/logs', '-Dmetadata.log.file=application.log', '-Dmetadata.home=metadata_home', '-Dmetadata.conf=metadata_home/conf', '-Xmx1024m']) ['-Dmetadata.log.dir=metadata_home/logs', '-Dmetadata.log.file=application.log', '-Dmetadata.home=metadata_home', '-Dmetadata.conf=metadata_home/conf', '-Xmx1024m'], 'metadata_home/logs')
else: else:
java_mock.assert_called_with( java_mock.assert_called_with(
'org.apache.hadoop.metadata.Main', 'org.apache.hadoop.metadata.Main',
['-app', 'metadata_home/server/webapp/metadata'], ['-app', 'metadata_home/server/webapp/metadata'],
'metadata_home/conf:metadata_home/server/webapp/metadata/WEB-INF/classes:metadata_home/server/webapp/metadata/WEB-INF/lib/*:metadata_home/libext/*', 'metadata_home/conf:metadata_home/server/webapp/metadata/WEB-INF/classes:metadata_home/server/webapp/metadata/WEB-INF/lib/*:metadata_home/libext/*',
['-Dmetadata.log.dir=metadata_home/logs', '-Dmetadata.log.file=application.log', '-Dmetadata.home=metadata_home', '-Dmetadata.conf=metadata_home/conf', '-Xmx1024m']) ['-Dmetadata.log.dir=metadata_home/logs', '-Dmetadata.log.file=application.log', '-Dmetadata.home=metadata_home', '-Dmetadata.conf=metadata_home/conf', '-Xmx1024m'], 'metadata_home/logs')
pass pass
......
...@@ -120,7 +120,7 @@ class DSLTest { ...@@ -120,7 +120,7 @@ class DSLTest {
Assert.assertEquals(s"${i.o.asInstanceOf[java.util.Map[_, _]].keySet}", "[b, a]") Assert.assertEquals(s"${i.o.asInstanceOf[java.util.Map[_, _]].keySet}", "[b, a]")
// 5. Serialize mytype instance to Json // 5. Serialize mytype instance to Json
Assert.assertEquals(s"${pretty(render(i))}", "{\n \"$typeName$\":\"mytype\",\n \"e\":1," + "\n \"n\":[1,1.100000000000000088817841970012523233890533447265625],\n \"h\":1.0,\n \"b\":true,\n \"k\":1,\n \"j\":1,\n \"d\":2,\n \"m\":[1,1],\n \"g\":1,\n \"a\":1,\n \"i\":1.0,\n \"c\":1,\n \"l\":\"2014-12-03\",\n \"f\":1,\n \"o\":{\n \"b\":2.0,\n \"a\":1.0\n }\n}") Assert.assertEquals(s"${pretty(render(i))}", "{\n \"$typeName$\":\"mytype\",\n \"e\":1," + "\n \"n\":[1,1.100000000000000088817841970012523233890533447265625],\n \"h\":1.0,\n \"b\":true,\n \"k\":1,\n \"j\":1,\n \"d\":2,\n \"m\":[1,1],\n \"g\":1,\n \"a\":1,\n \"i\":1.0,\n \"c\":1,\n \"l\":\"2014-12-03T08:00:00.000Z\",\n \"f\":1,\n \"o\":{\n \"b\":2.0,\n \"a\":1.0\n }\n}")
} }
@Test def test2 { @Test def test2 {
......
...@@ -124,6 +124,11 @@ ...@@ -124,6 +124,11 @@
<groupId>com.google.inject</groupId> <groupId>com.google.inject</groupId>
<artifactId>guice</artifactId> <artifactId>guice</artifactId>
</dependency> </dependency>
<dependency>
<groupId>org.apache.commons</groupId>
<artifactId>commons-lang3</artifactId>
</dependency>
</dependencies> </dependencies>
<build> <build>
......
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.metadata;
import java.util.Arrays;
import java.util.Collection;
public class ParamChecker {
/**
* Check that a value is not null. If null throws an IllegalArgumentException.
*
* @param obj value.
* @param name parameter name for the exception message.
* @return the given value.
*/
public static <T> T notNull(T obj, String name) {
if (obj == null) {
throw new IllegalArgumentException(name + " cannot be null");
}
return obj;
}
/**
 * Check that a collection is not null and that none of its elements is null. If the collection is null, empty,
 * or contains a null element, throws an IllegalArgumentException.
* @param list the list of T.
* @param name parameter name for the exception message.
*/
public static <T> Collection<T> notNullElements(Collection<T> list, String name) {
notEmpty(list, name);
for (T ele : list) {
notNull(ele, String.format("Collection %s element %s", name, ele));
}
return list;
}
/**
 * Check that an array is not null and that none of its elements is null. If the array is null, empty,
 * or contains a null element, throws an IllegalArgumentException.
* @param array the array of T.
* @param name parameter name for the exception message.
*/
public static <T> T[] notNullElements(T[] array, String name) {
notEmpty(Arrays.asList(array), name);
for (T ele : array) {
notNull(ele, String.format("Collection %s element %s", name, ele));
}
return array;
}
/**
* Check that a list is not null and not empty.
* @param list the list of T.
* @param name parameter name for the exception message.
*/
public static <T> Collection<T> notEmpty(Collection<T> list, String name) {
notNull(list, name);
if (list.isEmpty()) {
throw new IllegalArgumentException(String.format("Collection %s is empty", name));
}
return list;
}
/**
 * Check that a string is not null and not empty. If null or empty, throws an IllegalArgumentException.
*
* @param value value.
* @param name parameter name for the exception message.
* @return the given value.
*/
public static String notEmpty(String value, String name) {
return notEmpty(value, name, null);
}
/**
 * Check that a string is not empty if it is not null.
*
* @param value value.
* @param name parameter name for the exception message.
* @return the given value.
*/
public static String notEmptyIfNotNull(String value, String name) {
return notEmptyIfNotNull(value, name, null);
}
/**
 * Check that a string is not empty if it is not null.
 *
 * @param value value.
 * @param name parameter name for the exception message.
 * @param info additional information to be printed with the exception message.
 * @return the given value.
*/
public static String notEmptyIfNotNull(String value, String name, String info) {
if (value == null) {
return value;
}
if (value.trim().length() == 0) {
throw new IllegalArgumentException(name + " cannot be empty" + (info == null ? "" : ", " + info));
}
return value.trim();
}
/**
 * Check that a string is not null and not empty. If null or empty, throws an IllegalArgumentException.
*
* @param value value.
* @param name parameter name for the exception message.
* @param info additional information to be printed with the exception message
* @return the given value.
*/
public static String notEmpty(String value, String name, String info) {
if (value == null) {
throw new IllegalArgumentException(name + " cannot be null" + (info == null ? "" : ", " + info));
}
return notEmptyIfNotNull(value, name, info);
}
/**
 * Check that a collection of strings is not null and not empty, and that none of its elements is null or empty.
 * Otherwise throws an IllegalArgumentException.
 * @param list the collection of strings.
* @param name parameter name for the exception message.
*/
public static Collection<String> notEmptyElements(Collection<String> list, String name) {
notEmpty(list, name);
for (String ele : list) {
notEmpty(ele, String.format("list %s element %s", name, ele));
}
return list;
}
}
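A minimal usage sketch of the new ParamChecker utility (the caller class and argument values are hypothetical). It illustrates that the checks return the validated value and fail fast with IllegalArgumentException.

import java.util.Arrays;

import org.apache.hadoop.metadata.ParamChecker;

public class ParamCheckerSketch {
    public static void main(String[] args) {
        // Passing checks return the validated (trimmed) value or the original collection/array.
        String typeName = ParamChecker.notEmpty("  Table  ", "Type name");           // "Table"
        String[] attrs = ParamChecker.notNullElements(
                new String[]{"name", "owner"}, "Attribute names");                   // same array back
        ParamChecker.notEmptyElements(Arrays.asList("db", "sd"), "Columns");         // no exception

        try {
            ParamChecker.notEmpty("", "Type name");
        } catch (IllegalArgumentException e) {
            System.out.println(e.getMessage());   // "Type name cannot be empty"
        }
        System.out.println(typeName + ", " + attrs.length);
    }
}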
...@@ -18,7 +18,7 @@ ...@@ -18,7 +18,7 @@
package org.apache.hadoop.metadata.typesystem.types; package org.apache.hadoop.metadata.typesystem.types;
import com.google.common.base.Preconditions; import org.apache.hadoop.metadata.ParamChecker;
public final class AttributeDefinition { public final class AttributeDefinition {
...@@ -44,16 +44,13 @@ public final class AttributeDefinition { ...@@ -44,16 +44,13 @@ public final class AttributeDefinition {
public AttributeDefinition(String name, String dataTypeName, public AttributeDefinition(String name, String dataTypeName,
Multiplicity multiplicity, boolean isComposite, boolean isUnique, Multiplicity multiplicity, boolean isComposite, boolean isUnique,
boolean isIndexable, String reverseAttributeName) { boolean isIndexable, String reverseAttributeName) {
Preconditions.checkNotNull(name); this.name = ParamChecker.notEmpty(name, "Attribute name");
Preconditions.checkNotNull(dataTypeName); this.dataTypeName = ParamChecker.notEmpty(dataTypeName, "Attribute type");
this.name = name;
this.dataTypeName = dataTypeName;
this.multiplicity = multiplicity; this.multiplicity = multiplicity;
this.isComposite = isComposite; this.isComposite = isComposite;
this.isUnique = isUnique; this.isUnique = isUnique;
this.isIndexable = isIndexable; this.isIndexable = isIndexable;
this.reverseAttributeName = reverseAttributeName; this.reverseAttributeName = ParamChecker.notEmptyIfNotNull(reverseAttributeName, "Reverse attribute name");
} }
@Override @Override
......
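To illustrate the switch from Preconditions.checkNotNull to ParamChecker in the AttributeDefinition constructor above, here is a hedged sketch (the caller class and values are hypothetical): an empty attribute name now fails with IllegalArgumentException, where previously only null was rejected.

import org.apache.hadoop.metadata.typesystem.types.AttributeDefinition;
import org.apache.hadoop.metadata.typesystem.types.Multiplicity;

public class AttributeDefinitionSketch {
    public static void main(String[] args) {
        // Valid: required, non-composite, non-unique, not indexed, no reverse attribute.
        AttributeDefinition ok = new AttributeDefinition(
                "name", "string", Multiplicity.REQUIRED, false, false, false, null);
        System.out.println(ok.name);

        try {
            // Previously only null was rejected; an empty name now throws as well.
            new AttributeDefinition("", "string", Multiplicity.REQUIRED, false, false, false, null);
        } catch (IllegalArgumentException e) {
            System.out.println(e.getMessage());   // "Attribute name cannot be empty"
        }
    }
}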
...@@ -38,7 +38,6 @@ public class AttributeInfo { ...@@ -38,7 +38,6 @@ public class AttributeInfo {
private IDataType dataType; private IDataType dataType;
AttributeInfo(TypeSystem t, AttributeDefinition def, Map<String, IDataType> tempTypes) throws MetadataException { AttributeInfo(TypeSystem t, AttributeDefinition def, Map<String, IDataType> tempTypes) throws MetadataException {
TypeUtils.validateName(def.name);
this.name = def.name; this.name = def.name;
this.dataType = (tempTypes != null && tempTypes.containsKey(def.dataTypeName)) ? this.dataType = (tempTypes != null && tempTypes.containsKey(def.dataTypeName)) ?
tempTypes.get(def.dataTypeName) : t.getDataType(IDataType.class, def.dataTypeName); tempTypes.get(def.dataTypeName) : t.getDataType(IDataType.class, def.dataTypeName);
......
...@@ -508,7 +508,7 @@ public class DataTypes { ...@@ -508,7 +508,7 @@ public class DataTypes {
TypeSystem.getInstance().allowNullsInCollections() TypeSystem.getInstance().allowNullsInCollections()
? Multiplicity.OPTIONAL : Multiplicity.REQUIRED)); ? Multiplicity.OPTIONAL : Multiplicity.REQUIRED));
} }
return b.build(); return m.isUnique ? b.build().asList() : b.build();
} else { } else {
try { try {
return ImmutableList.of(elemType.convert(val, return ImmutableList.of(elemType.convert(val,
......
...@@ -21,6 +21,7 @@ package org.apache.hadoop.metadata.typesystem.types; ...@@ -21,6 +21,7 @@ package org.apache.hadoop.metadata.typesystem.types;
import com.google.common.collect.ImmutableCollection; import com.google.common.collect.ImmutableCollection;
import com.google.common.collect.ImmutableMap; import com.google.common.collect.ImmutableMap;
import org.apache.hadoop.metadata.MetadataException; import org.apache.hadoop.metadata.MetadataException;
import scala.math.BigInt;
public class EnumType extends AbstractDataType<EnumValue> { public class EnumType extends AbstractDataType<EnumValue> {
...@@ -54,7 +55,7 @@ public class EnumType extends AbstractDataType<EnumValue> { ...@@ -54,7 +55,7 @@ public class EnumType extends AbstractDataType<EnumValue> {
EnumValue e = null; EnumValue e = null;
if (val instanceof EnumValue) { if (val instanceof EnumValue) {
e = valueMap.get(((EnumValue)val).value); e = valueMap.get(((EnumValue)val).value);
} else if ( val instanceof Integer) { } else if ( val instanceof Integer || val instanceof BigInt) {
e = ordinalMap.get(val); e = ordinalMap.get(val);
} else if ( val instanceof String) { } else if ( val instanceof String) {
e = valueMap.get(val); e = valueMap.get(val);
......
...@@ -18,6 +18,8 @@ ...@@ -18,6 +18,8 @@
package org.apache.hadoop.metadata.typesystem.types; package org.apache.hadoop.metadata.typesystem.types;
import org.apache.hadoop.metadata.ParamChecker;
import java.util.Arrays; import java.util.Arrays;
public final class EnumTypeDefinition { public final class EnumTypeDefinition {
...@@ -26,8 +28,8 @@ public final class EnumTypeDefinition { ...@@ -26,8 +28,8 @@ public final class EnumTypeDefinition {
public final EnumValue[] enumValues; public final EnumValue[] enumValues;
public EnumTypeDefinition(String name, EnumValue... enumValues) { public EnumTypeDefinition(String name, EnumValue... enumValues) {
this.name = name; this.name = ParamChecker.notEmpty(name, "Enum type name");
this.enumValues = enumValues; this.enumValues = ParamChecker.notNullElements(enumValues, "Enum values");
} }
@Override @Override
......
...@@ -18,13 +18,15 @@ ...@@ -18,13 +18,15 @@
package org.apache.hadoop.metadata.typesystem.types; package org.apache.hadoop.metadata.typesystem.types;
import org.apache.hadoop.metadata.ParamChecker;
public class EnumValue { public class EnumValue {
public final String value; public final String value;
public final int ordinal; public final int ordinal;
public EnumValue(String value, int ordinal) { public EnumValue(String value, int ordinal) {
this.value = value; this.value = ParamChecker.notEmpty(value, "Enum value");
this.ordinal = ordinal; this.ordinal = ordinal;
} }
......
...@@ -47,7 +47,7 @@ public class HierarchicalTypeDefinition<T extends HierarchicalType> extends Stru ...@@ -47,7 +47,7 @@ public class HierarchicalTypeDefinition<T extends HierarchicalType> extends Stru
public HierarchicalTypeDefinition(Class<T> hierarchicalMetaType, public HierarchicalTypeDefinition(Class<T> hierarchicalMetaType,
String typeName, ImmutableList<String> superTypes, String typeName, ImmutableList<String> superTypes,
AttributeDefinition[] attributeDefinitions) { AttributeDefinition[] attributeDefinitions) {
super(typeName, attributeDefinitions); super(typeName, false, attributeDefinitions);
hierarchicalMetaTypeName = hierarchicalMetaType.getName(); hierarchicalMetaTypeName = hierarchicalMetaType.getName();
this.superTypes = superTypes == null ? ImmutableList.<String>of() : superTypes; this.superTypes = superTypes == null ? ImmutableList.<String>of() : superTypes;
} }
......
...@@ -18,6 +18,8 @@ ...@@ -18,6 +18,8 @@
package org.apache.hadoop.metadata.typesystem.types; package org.apache.hadoop.metadata.typesystem.types;
import org.apache.hadoop.metadata.ParamChecker;
import java.util.Arrays; import java.util.Arrays;
public class StructTypeDefinition { public class StructTypeDefinition {
...@@ -25,12 +27,19 @@ public class StructTypeDefinition { ...@@ -25,12 +27,19 @@ public class StructTypeDefinition {
public final String typeName; public final String typeName;
public final AttributeDefinition[] attributeDefinitions; public final AttributeDefinition[] attributeDefinitions;
public StructTypeDefinition(String typeName, protected StructTypeDefinition(String typeName, boolean validate, AttributeDefinition... attributeDefinitions) {
AttributeDefinition[] attributeDefinitions) { this.typeName = ParamChecker.notEmpty(typeName, "Struct type name");
this.typeName = typeName; if (attributeDefinitions != null && attributeDefinitions.length != 0) {
ParamChecker.notNullElements(attributeDefinitions, "Attribute definitions");
}
this.attributeDefinitions = attributeDefinitions; this.attributeDefinitions = attributeDefinitions;
} }
public StructTypeDefinition(String typeName, AttributeDefinition[] attributeDefinitions) {
this.typeName = ParamChecker.notEmpty(typeName, "Struct type name");
this.attributeDefinitions = ParamChecker.notNullElements(attributeDefinitions, "Attribute definitions");
}
@Override @Override
public boolean equals(Object o) { public boolean equals(Object o) {
if (this == o) return true; if (this == o) return true;
......
...@@ -47,7 +47,7 @@ public class TypeSystem { ...@@ -47,7 +47,7 @@ public class TypeSystem {
private static ThreadLocal<SimpleDateFormat> dateFormat = new ThreadLocal() { private static ThreadLocal<SimpleDateFormat> dateFormat = new ThreadLocal() {
@Override @Override
public SimpleDateFormat initialValue() { public SimpleDateFormat initialValue() {
SimpleDateFormat dateFormat = new SimpleDateFormat("yyyy-MM-dd"); SimpleDateFormat dateFormat = new SimpleDateFormat("yyyy-MM-dd'T'HH:mm:ss.SSSX");
dateFormat.setTimeZone(TimeZone.getTimeZone("UTC")); dateFormat.setTimeZone(TimeZone.getTimeZone("UTC"));
return dateFormat; return dateFormat;
} }
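The thread-local format above moves from date-only values to full ISO 8601 timestamps. A small standalone sketch (not part of the patch) of what the new pattern produces and parses, using the date constants introduced in BaseTest:

import java.text.SimpleDateFormat;
import java.util.Date;
import java.util.TimeZone;

public class DateFormatSketch {
    public static void main(String[] args) throws Exception {
        SimpleDateFormat fmt = new SimpleDateFormat("yyyy-MM-dd'T'HH:mm:ss.SSSX");
        fmt.setTimeZone(TimeZone.getTimeZone("UTC"));

        Date d = fmt.parse("2014-12-11T02:35:58.440Z");   // the TEST_DATE constant used in the tests
        System.out.println(fmt.format(d));                 // 2014-12-11T02:35:58.440Z
        System.out.println(d.getTime());                   // 1418265358440 (TEST_DATE_IN_LONG)
    }
}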
...@@ -291,6 +291,7 @@ public class TypeSystem { ...@@ -291,6 +291,7 @@ public class TypeSystem {
throw new MetadataException( throw new MetadataException(
String.format("Redefinition of type %s not supported", eDef.name)); String.format("Redefinition of type %s not supported", eDef.name));
} }
EnumType eT = new EnumType(this, eDef.name, eDef.enumValues); EnumType eT = new EnumType(this, eDef.name, eDef.enumValues);
types.put(eDef.name, eT); types.put(eDef.name, eT);
typeCategoriesToTypeNamesMap.put(DataTypes.TypeCategory.ENUM, eDef.name); typeCategoriesToTypeNamesMap.put(DataTypes.TypeCategory.ENUM, eDef.name);
...@@ -352,7 +353,6 @@ public class TypeSystem { ...@@ -352,7 +353,6 @@ public class TypeSystem {
private void step1() throws MetadataException { private void step1() throws MetadataException {
for (StructTypeDefinition sDef : structDefs) { for (StructTypeDefinition sDef : structDefs) {
assert sDef.typeName != null; assert sDef.typeName != null;
TypeUtils.validateName(sDef.typeName);
if (dataType(sDef.typeName) != null) { if (dataType(sDef.typeName) != null) {
throw new MetadataException( throw new MetadataException(
String.format("Cannot redefine type %s", sDef.typeName)); String.format("Cannot redefine type %s", sDef.typeName));
...@@ -365,7 +365,6 @@ public class TypeSystem { ...@@ -365,7 +365,6 @@ public class TypeSystem {
for (HierarchicalTypeDefinition<TraitType> traitDef : traitDefs) { for (HierarchicalTypeDefinition<TraitType> traitDef : traitDefs) {
assert traitDef.typeName != null; assert traitDef.typeName != null;
TypeUtils.validateName(traitDef.typeName);
if (types.containsKey(traitDef.typeName)) { if (types.containsKey(traitDef.typeName)) {
throw new MetadataException( throw new MetadataException(
String.format("Cannot redefine type %s", traitDef.typeName)); String.format("Cannot redefine type %s", traitDef.typeName));
...@@ -380,7 +379,6 @@ public class TypeSystem { ...@@ -380,7 +379,6 @@ public class TypeSystem {
for (HierarchicalTypeDefinition<ClassType> classDef : classDefs) { for (HierarchicalTypeDefinition<ClassType> classDef : classDefs) {
assert classDef.typeName != null; assert classDef.typeName != null;
TypeUtils.validateName(classDef.typeName);
if (types.containsKey(classDef.typeName)) { if (types.containsKey(classDef.typeName)) {
throw new MetadataException( throw new MetadataException(
String.format("Cannot redefine type %s", classDef.typeName)); String.format("Cannot redefine type %s", classDef.typeName));
...@@ -464,6 +462,14 @@ public class TypeSystem { ...@@ -464,6 +462,14 @@ public class TypeSystem {
} }
} }
if (info.multiplicity.upper > 1 && !(
info.dataType().getTypeCategory() == DataTypes.TypeCategory.MAP ||
info.dataType().getTypeCategory() == DataTypes.TypeCategory.ARRAY)) {
throw new MetadataException(
String.format("A multiplicty of more than one requires a collection type for attribute '%s'",
info.name));
}
return info; return info;
} }
......
...@@ -50,13 +50,6 @@ public class TypeUtils { ...@@ -50,13 +50,6 @@ public class TypeUtils {
} }
} }
public static void validateName(String name) throws MetadataException {
if (!NAME_PATTERN.matcher(name).matches()) {
throw new MetadataException(
String.format("Unsupported name for an attribute '%s'", name));
}
}
public static String parseAsArrayType(String typeName) { public static String parseAsArrayType(String typeName) {
Matcher m = ARRAY_TYPE_NAME_PATTERN.matcher(typeName); Matcher m = ARRAY_TYPE_NAME_PATTERN.matcher(typeName);
return m.matches() ? m.group(1) : null; return m.matches() ? m.group(1) : null;
......
...@@ -85,7 +85,7 @@ ...@@ -85,7 +85,7 @@
</logger> </logger>
<root> <root>
<priority value="debug"/> <priority value="info"/>
<appender-ref ref="console"/> <appender-ref ref="console"/>
</root> </root>
......
...@@ -126,7 +126,7 @@ object TypesSerialization { ...@@ -126,7 +126,7 @@ object TypesSerialization {
private def convertAttributeInfoToAttributeDef(aInfo: AttributeInfo) = { private def convertAttributeInfoToAttributeDef(aInfo: AttributeInfo) = {
new AttributeDefinition(aInfo.name, aInfo.dataType().getName, aInfo.multiplicity, new AttributeDefinition(aInfo.name, aInfo.dataType().getName, aInfo.multiplicity,
aInfo.isComposite, aInfo.reverseAttributeName) aInfo.isComposite, aInfo.isUnique, aInfo.isIndexable, aInfo.reverseAttributeName)
} }
private def convertEnumTypeToEnumTypeDef(et: EnumType) = { private def convertEnumTypeToEnumTypeDef(et: EnumType) = {
......
...@@ -37,6 +37,8 @@ public abstract class BaseTest { ...@@ -37,6 +37,8 @@ public abstract class BaseTest {
public static final String STRUCT_TYPE_1 = "t1"; public static final String STRUCT_TYPE_1 = "t1";
public static final String STRUCT_TYPE_2 = "t2"; public static final String STRUCT_TYPE_2 = "t2";
public static final String TEST_DATE = "2014-12-11T02:35:58.440Z";
public static final long TEST_DATE_IN_LONG=1418265358440L;
public static Struct createStruct() throws MetadataException { public static Struct createStruct() throws MetadataException {
StructType structType = TypeSystem.getInstance().getDataType( StructType structType = TypeSystem.getInstance().getDataType(
......
...@@ -179,7 +179,7 @@ public class EnumTest extends BaseTest { ...@@ -179,7 +179,7 @@ public class EnumTest extends BaseTest {
"\tj : \t1\n" + "\tj : \t1\n" +
"\tk : \t1\n" + "\tk : \t1\n" +
"\tenum3 : \tCOMMITTED\n" + "\tenum3 : \tCOMMITTED\n" +
"\tl : \t2014-12-11\n" + "\tl : \t" + TEST_DATE + "\n" +
"\tm : \t[1, 1]\n" + "\tm : \t[1, 1]\n" +
"\tn : \t[1.1, 1.1]\n" + "\tn : \t[1.1, 1.1]\n" +
"\to : \t{b=2.0, a=1.0}\n" + "\to : \t{b=2.0, a=1.0}\n" +
...@@ -211,7 +211,7 @@ public class EnumTest extends BaseTest { ...@@ -211,7 +211,7 @@ public class EnumTest extends BaseTest {
"\tj : \t1\n" + "\tj : \t1\n" +
"\tk : \t1\n" + "\tk : \t1\n" +
"\tenum3 : \tCOMMITTED\n" + "\tenum3 : \tCOMMITTED\n" +
"\tl : \t2014-12-11\n" + "\tl : \t" + TEST_DATE + "\n" +
"\tm : \t[1, 1]\n" + "\tm : \t[1, 1]\n" +
"\tn : \t[1.1, 1.1]\n" + "\tn : \t[1.1, 1.1]\n" +
"\to : \t{b=2.0, a=1.0}\n" + "\to : \t{b=2.0, a=1.0}\n" +
......
...@@ -54,7 +54,7 @@ public class StructTest extends BaseTest { ...@@ -54,7 +54,7 @@ public class StructTest extends BaseTest {
"\ti : \t1.0\n" + "\ti : \t1.0\n" +
"\tj : \t1\n" + "\tj : \t1\n" +
"\tk : \t1\n" + "\tk : \t1\n" +
"\tl : \t2014-12-11\n" + "\tl : \t" + TEST_DATE + "\n" +
"\tm : \t[1, 1]\n" + "\tm : \t[1, 1]\n" +
"\tn : \t[1.1, 1.1]\n" + "\tn : \t[1.1, 1.1]\n" +
"\to : \t{b=2.0, a=1.0}\n" + "\to : \t{b=2.0, a=1.0}\n" +
......
...@@ -19,6 +19,7 @@ ...@@ -19,6 +19,7 @@
package org.apache.hadoop.metadata.typesystem.types; package org.apache.hadoop.metadata.typesystem.types;
import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableList;
import org.apache.commons.lang3.RandomStringUtils;
import org.apache.hadoop.metadata.typesystem.types.utils.TypesUtil; import org.apache.hadoop.metadata.typesystem.types.utils.TypesUtil;
import org.testng.Assert; import org.testng.Assert;
import org.testng.annotations.AfterMethod; import org.testng.annotations.AfterMethod;
...@@ -29,6 +30,11 @@ import scala.actors.threadpool.Arrays; ...@@ -29,6 +30,11 @@ import scala.actors.threadpool.Arrays;
import java.util.Collections; import java.util.Collections;
import java.util.List; import java.util.List;
import static org.apache.hadoop.metadata.typesystem.types.utils.TypesUtil.createClassTypeDef;
import static org.apache.hadoop.metadata.typesystem.types.utils.TypesUtil.createRequiredAttrDef;
import static org.apache.hadoop.metadata.typesystem.types.utils.TypesUtil.createStructTypeDef;
import static org.apache.hadoop.metadata.typesystem.types.utils.TypesUtil.createTraitTypeDef;
public class TypeSystemTest extends BaseTest { public class TypeSystemTest extends BaseTest {
@BeforeClass @BeforeClass
...@@ -100,4 +106,34 @@ public class TypeSystemTest extends BaseTest { ...@@ -100,4 +106,34 @@ public class TypeSystemTest extends BaseTest {
Assert.assertFalse(Collections.disjoint(traitsNames, traits)); Assert.assertFalse(Collections.disjoint(traitsNames, traits));
} }
private String random() {
return RandomStringUtils.random(10);
}
@Test
public void testUTFNames() throws Exception {
TypeSystem ts = getTypeSystem();
String enumType = random();
EnumTypeDefinition orgLevelEnum =
new EnumTypeDefinition(enumType, new EnumValue(random(), 1), new EnumValue(random(), 2));
ts.defineEnumType(orgLevelEnum);
String structName = random();
String attrType = random();
StructTypeDefinition structType = createStructTypeDef(structName,
createRequiredAttrDef(attrType, DataTypes.STRING_TYPE));
String className = random();
HierarchicalTypeDefinition<ClassType> classType =
createClassTypeDef(className, ImmutableList.<String>of(),
createRequiredAttrDef(attrType, DataTypes.STRING_TYPE));
String traitName = random();
HierarchicalTypeDefinition<TraitType> traitType = createTraitTypeDef(traitName,
ImmutableList.<String>of(), createRequiredAttrDef(attrType, DataTypes.INT_TYPE));
ts.defineTypes(ImmutableList.of(structType), ImmutableList.of(traitType), ImmutableList.of(classType));
}
} }
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.metadata.typesystem.types;
import com.google.common.collect.ImmutableList;
import org.apache.hadoop.metadata.typesystem.types.utils.TypesUtil;
import org.testng.annotations.DataProvider;
import org.testng.annotations.Test;
public class ValidationTest {
@DataProvider(name = "attributeData")
private Object[][] createAttributeData() {
return new String[][]{
{null, "type"}, {"", "type"}, {"name", null}, {"name", ""}};
}
@Test (dataProvider = "attributeData", expectedExceptions = {IllegalArgumentException.class})
public void testAttributes(String name, String type) {
TypesUtil.createRequiredAttrDef(name, type);
}
@DataProvider(name = "enumValueData")
private Object[][] createEnumValueData() {
return new String[][]{{null}, {""}};
}
@Test (dataProvider = "enumValueData", expectedExceptions = {IllegalArgumentException.class})
public void testEnumValue(String name) {
new EnumValue(name, 1);
}
@DataProvider(name = "enumTypeData")
private Object[][] createEnumTypeData() {
EnumValue value = new EnumValue("name", 1);
return new Object[][]{{null, value}, {"", value}, {"name"}};
}
@Test (dataProvider = "enumTypeData", expectedExceptions = {IllegalArgumentException.class})
public void testEnumType(String name, EnumValue... values) {
new EnumTypeDefinition(name, values);
}
@DataProvider(name = "structTypeData")
private Object[][] createStructTypeData() {
AttributeDefinition value = TypesUtil.createRequiredAttrDef("name", "type");
return new Object[][]{{null, value}, {"", value}, {"name"}};
}
@Test (dataProvider = "structTypeData", expectedExceptions = {IllegalArgumentException.class})
public void testStructType(String name, AttributeDefinition... values) {
new StructTypeDefinition(name, values);
}
@DataProvider(name = "classTypeData")
private Object[][] createClassTypeData() {
return new Object[][]{{null}, {""}};
}
@Test (dataProvider = "classTypeData", expectedExceptions = {IllegalArgumentException.class})
public void testClassType(String name) {
AttributeDefinition value = TypesUtil.createRequiredAttrDef("name", "type");
TypesUtil.createClassTypeDef(name, ImmutableList.of("super"), value);
}
@Test (dataProvider = "classTypeData", expectedExceptions = {IllegalArgumentException.class})
public void testTraitType(String name) {
AttributeDefinition value = TypesUtil.createRequiredAttrDef("name", "type");
TypesUtil.createTraitTypeDef(name, ImmutableList.of("super"), value);
}
@Test
public void testValidTypes() {
AttributeDefinition attribute = TypesUtil.createRequiredAttrDef("name", "type");
//class with no attributes
TypesUtil.createClassTypeDef("name", ImmutableList.of("super"));
//class with no super types
TypesUtil.createClassTypeDef("name", ImmutableList.<String>of(), attribute);
//trait with no attributes
TypesUtil.createTraitTypeDef("name", ImmutableList.of("super"));
//trait with no super types
TypesUtil.createTraitTypeDef("name", ImmutableList.<String>of(), attribute);
}
}
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.metadata.typesystem.builders
import org.apache.hadoop.metadata.MetadataException
import org.apache.hadoop.metadata.typesystem.types.{Multiplicity, ClassType, TypeSystem}
import org.scalatest.{BeforeAndAfterAll, FunSuite}
class MultiplicityTest extends FunSuite with BeforeAndAfterAll {
override def beforeAll() = {
TypeSystem.getInstance().reset()
val b = new TypesBuilder
import b._
val tDef = types {
_trait("Dimension") {}
_trait("PII") {}
_trait("Metric") {}
_trait("ETL") {}
_trait("JdbcAccess") {}
_class("DB") {
"name" ~ (string, required, indexed, unique)
"owner" ~ (string)
"createTime" ~ (int)
}
_class("StorageDesc") {
"inputFormat" ~ (string, required)
"outputFormat" ~ (string, required)
}
_class("Column") {
"name" ~ (string, required)
"dataType" ~ (string, required)
"sd" ~ ("StorageDesc", required)
}
_class("Table", List()) {
"name" ~ (string, required, indexed)
"db" ~ ("DB", required)
"sd" ~ ("StorageDesc", required)
}
_class("LoadProcess") {
"name" ~ (string, required)
"inputTables" ~ (array("Table"), collection)
"outputTable" ~ ("Table", required)
}
_class("View") {
"name" ~ (string, required)
"inputTables" ~ (array("Table"), collection)
}
_class("AT") {
"name" ~ (string, required)
"stringSet" ~ (array("string"), multiplicty(0, Int.MaxValue, true))
}
}
TypeSystem.getInstance().defineTypes(tDef)
}
test("test1") {
val b = new InstanceBuilder
import b._
val instances = b create {
val a = instance("AT") { // use instance to create Referenceables. use closure to
// set attributes of instance
'name ~ "A1" // use '~' to set attributes. Use a Symbol (names starting with ') for
'stringSet ~ Seq("a", "a")
}
}
val ts = TypeSystem.getInstance()
import scala.collection.JavaConversions._
val typedInstances = instances.map { i =>
val iTyp = ts.getDataType(classOf[ClassType], i.getTypeName)
iTyp.convert(i, Multiplicity.REQUIRED)
}
typedInstances.foreach { i =>
println(i)
}
}
test("WrongMultiplicity") {
val b = new TypesBuilder
import b._
val tDef = types {
_class("Wrong") {
"name" ~ (string, required)
"stringSet" ~ (string, multiplicty(0, Int.MaxValue, true))
}
}
val me = intercept[MetadataException] {
TypeSystem.getInstance().defineTypes(tDef)
}
assert("A multiplicty of more than one requires a collection type for attribute 'stringSet'" == me.getMessage)
}
}
...@@ -44,18 +44,18 @@ class SerializationTest extends BaseTest { ...@@ -44,18 +44,18 @@ class SerializationTest extends BaseTest {
val s: Struct = BaseTest.createStruct() val s: Struct = BaseTest.createStruct()
val ts: ITypedStruct = structType.convert(s, Multiplicity.REQUIRED) val ts: ITypedStruct = structType.convert(s, Multiplicity.REQUIRED)
Assert.assertEquals(ts.toString, "{\n\ta : \t1\n\tb : \ttrue\n\tc : \t1\n\td : \t2\n\te : \t1\n\tf : \t1\n\tg : \t1\n\th : \t1.0\n\ti : \t1.0\n\tj : \t1\n\tk : \t1\n\tl : \t2014-12-11\n\tm : \t[1, 1]\n\tn : \t[1.1, 1.1]\n\to : \t{b=2.0, a=1.0}\n}") Assert.assertEquals(ts.toString, "{\n\ta : \t1\n\tb : \ttrue\n\tc : \t1\n\td : \t2\n\te : \t1\n\tf : \t1\n\tg : \t1\n\th : \t1.0\n\ti : \t1.0\n\tj : \t1\n\tk : \t1\n\tl : \t" + BaseTest.TEST_DATE + "\n\tm : \t[1, 1]\n\tn : \t[1.1, 1.1]\n\to : \t{b=2.0, a=1.0}\n}")
implicit val formats = org.json4s.native.Serialization.formats(NoTypeHints) + new TypedStructSerializer + implicit val formats = org.json4s.native.Serialization.formats(NoTypeHints) + new TypedStructSerializer +
new BigDecimalSerializer + new BigIntegerSerializer new BigDecimalSerializer + new BigIntegerSerializer
//Json representation //Json representation
val ser = swrite(ts) val ser = swrite(ts)
Assert.assertEquals(ser, "{\"$typeName$\":\"t1\",\"e\":1,\"n\":[1.1,1.1],\"h\":1.0,\"b\":true,\"k\":1,\"j\":1,\"d\":2,\"m\":[1,1],\"g\":1,\"a\":1,\"i\":1.0,\"c\":1,\"l\":\"2014-12-11T02:35:58.440Z\",\"f\":1,\"o\":{\"b\":2.0,\"a\":1.0}}") Assert.assertEquals(ser, "{\"$typeName$\":\"t1\",\"e\":1,\"n\":[1.1,1.1],\"h\":1.0,\"b\":true,\"k\":1,\"j\":1,\"d\":2,\"m\":[1,1],\"g\":1,\"a\":1,\"i\":1.0,\"c\":1,\"l\":\"" + BaseTest.TEST_DATE + "\",\"f\":1,\"o\":{\"b\":2.0,\"a\":1.0}}")
// Typed Struct read back // Typed Struct read back
val ts1 = read[StructInstance](ser) val ts1 = read[StructInstance](ser)
Assert.assertEquals(ts1.toString, "{\n\ta : \t1\n\tb : \ttrue\n\tc : \t1\n\td : \t2\n\te : \t1\n\tf : \t1\n\tg : \t1\n\th : \t1.0\n\ti : \t1.0\n\tj : \t1\n\tk : \t1\n\tl : \t2014-12-11\n\tm : \t[1, 1]\n\tn : \t[1.100000000000000088817841970012523233890533447265625, 1.100000000000000088817841970012523233890533447265625]\n\to : \t{b=2.0, a=1.0}\n}") Assert.assertEquals(ts1.toString, "{\n\ta : \t1\n\tb : \ttrue\n\tc : \t1\n\td : \t2\n\te : \t1\n\tf : \t1\n\tg : \t1\n\th : \t1.0\n\ti : \t1.0\n\tj : \t1\n\tk : \t1\n\tl : \t" + BaseTest.TEST_DATE + "\n\tm : \t[1, 1]\n\tn : \t[1.100000000000000088817841970012523233890533447265625, 1.100000000000000088817841970012523233890533447265625]\n\to : \t{b=2.0, a=1.0}\n}")
} }
@Test def test2 { @Test def test2 {
...@@ -70,7 +70,7 @@ class SerializationTest extends BaseTest { ...@@ -70,7 +70,7 @@ class SerializationTest extends BaseTest {
{"$typeName$":"t1","e":1,"n":[1.1,1.1],"h":1.0,"b":true,"k":1,"j":1,"d":2,"m":[1,1],"g":1,"a":1,"i":1.0, {"$typeName$":"t1","e":1,"n":[1.1,1.1],"h":1.0,"b":true,"k":1,"j":1,"d":2,"m":[1,1],"g":1,"a":1,"i":1.0,
"c":1,"l":"2014-12-03T19:38:55.053Z","f":1,"o":{"b":2.0,"a":1.0}}""") "c":1,"l":"2014-12-03T19:38:55.053Z","f":1,"o":{"b":2.0,"a":1.0}}""")
// Typed Struct read from string // Typed Struct read from string
Assert.assertEquals(ts1.toString, "{\n\ta : \t1\n\tb : \ttrue\n\tc : \t1\n\td : \t2\n\te : \t1\n\tf : \t1\n\tg : \t1\n\th : \t1.0\n\ti : \t1.0\n\tj : \t1\n\tk : \t1\n\tl : \t2014-12-03\n\tm : \t[1, 1]\n\tn : \t[1.100000000000000088817841970012523233890533447265625, 1.100000000000000088817841970012523233890533447265625]\n\to : \t{b=2.0, a=1.0}\n}") Assert.assertEquals(ts1.toString, "{\n\ta : \t1\n\tb : \ttrue\n\tc : \t1\n\td : \t2\n\te : \t1\n\tf : \t1\n\tg : \t1\n\th : \t1.0\n\ti : \t1.0\n\tj : \t1\n\tk : \t1\n\tl : \t2014-12-03T19:38:55.053Z\n\tm : \t[1, 1]\n\tn : \t[1.100000000000000088817841970012523233890533447265625, 1.100000000000000088817841970012523233890533447265625]\n\to : \t{b=2.0, a=1.0}\n}")
} }
@Test def testTrait { @Test def testTrait {
......
...@@ -104,6 +104,11 @@ ...@@ -104,6 +104,11 @@
</dependency> </dependency>
<dependency> <dependency>
<groupId>org.apache.commons</groupId>
<artifactId>commons-lang3</artifactId>
</dependency>
<dependency>
<groupId>com.tinkerpop.blueprints</groupId> <groupId>com.tinkerpop.blueprints</groupId>
<artifactId>blueprints-core</artifactId> <artifactId>blueprints-core</artifactId>
</dependency> </dependency>
......
...@@ -39,6 +39,7 @@ public final class Main { ...@@ -39,6 +39,7 @@ public final class Main {
private static final String APP_PATH = "app"; private static final String APP_PATH = "app";
private static final String APP_PORT = "port"; private static final String APP_PORT = "port";
private static final String METADATA_HOME = "metadata.home"; private static final String METADATA_HOME = "metadata.home";
private static final String METADATA_LOG_DIR = "metadata.log.dir";
/** /**
* Prevent users from constructing this. * Prevent users from constructing this.
...@@ -87,6 +88,9 @@ public final class Main { ...@@ -87,6 +88,9 @@ public final class Main {
if (System.getProperty(METADATA_HOME) == null) { if (System.getProperty(METADATA_HOME) == null) {
System.setProperty(METADATA_HOME, "target"); System.setProperty(METADATA_HOME, "target");
} }
if (System.getProperty(METADATA_LOG_DIR) == null) {
System.setProperty(METADATA_LOG_DIR, "target/logs");
}
} }
public static String getProjectVersion(PropertiesConfiguration buildConfiguration) { public static String getProjectVersion(PropertiesConfiguration buildConfiguration) {
......
...@@ -128,9 +128,7 @@ public class QuickStart { ...@@ -128,9 +128,7 @@ public class QuickStart {
); );
HierarchicalTypeDefinition<ClassType> tblClsDef = HierarchicalTypeDefinition<ClassType> tblClsDef =
TypesUtil.createClassTypeDef(TABLE_TYPE, null, TypesUtil.createClassTypeDef(TABLE_TYPE, ImmutableList.of("DataSet"),
attrDef("name", DataTypes.STRING_TYPE),
attrDef("description", DataTypes.STRING_TYPE),
new AttributeDefinition("db", DATABASE_TYPE, new AttributeDefinition("db", DATABASE_TYPE,
Multiplicity.REQUIRED, false, null), Multiplicity.REQUIRED, false, null),
new AttributeDefinition("sd", STORAGE_DESC_TYPE, new AttributeDefinition("sd", STORAGE_DESC_TYPE,
...@@ -149,8 +147,7 @@ public class QuickStart { ...@@ -149,8 +147,7 @@ public class QuickStart {
); );
HierarchicalTypeDefinition<ClassType> loadProcessClsDef = HierarchicalTypeDefinition<ClassType> loadProcessClsDef =
TypesUtil.createClassTypeDef(LOAD_PROCESS_TYPE, null, TypesUtil.createClassTypeDef(LOAD_PROCESS_TYPE, ImmutableList.of("Process"),
attrDef("name", DataTypes.STRING_TYPE),
attrDef("userName", DataTypes.STRING_TYPE), attrDef("userName", DataTypes.STRING_TYPE),
attrDef("startTime", DataTypes.INT_TYPE), attrDef("startTime", DataTypes.INT_TYPE),
attrDef("endTime", DataTypes.INT_TYPE), attrDef("endTime", DataTypes.INT_TYPE),
...@@ -273,7 +270,7 @@ public class QuickStart { ...@@ -273,7 +270,7 @@ public class QuickStart {
"sales fact daily materialized view", reportingDB, sd, "sales fact daily materialized view", reportingDB, sd,
"Joe BI", "Managed", salesFactColumns, "Metric"); "Joe BI", "Managed", salesFactColumns, "Metric");
loadProcess("loadSalesDaily", "John ETL", loadProcess("loadSalesDaily", "hive query for daily summary", "John ETL",
ImmutableList.of(salesFact, timeDim), ImmutableList.of(salesFactDaily), ImmutableList.of(salesFact, timeDim), ImmutableList.of(salesFactDaily),
"create table as select ", "plan", "id", "graph", "create table as select ", "plan", "id", "graph",
"ETL"); "ETL");
...@@ -288,7 +285,7 @@ public class QuickStart { ...@@ -288,7 +285,7 @@ public class QuickStart {
"sales fact monthly materialized view", "sales fact monthly materialized view",
reportingDB, sd, "Jane BI", "Managed", salesFactColumns, "Metric"); reportingDB, sd, "Jane BI", "Managed", salesFactColumns, "Metric");
loadProcess("loadSalesMonthly", "John ETL", loadProcess("loadSalesMonthly", "hive query for monthly summary", "John ETL",
ImmutableList.of(salesFactDaily), ImmutableList.of(salesFactMonthly), ImmutableList.of(salesFactDaily), ImmutableList.of(salesFactMonthly),
"create table as select ", "plan", "id", "graph", "create table as select ", "plan", "id", "graph",
"ETL"); "ETL");
...@@ -300,7 +297,7 @@ public class QuickStart { ...@@ -300,7 +297,7 @@ public class QuickStart {
String entityJSON = InstanceSerialization.toJson(referenceable, true); String entityJSON = InstanceSerialization.toJson(referenceable, true);
System.out.println("Submitting new entity= " + entityJSON); System.out.println("Submitting new entity= " + entityJSON);
JSONObject jsonObject = metadataServiceClient.createEntity(entityJSON); JSONObject jsonObject = metadataServiceClient.createEntity(entityJSON);
String guid = jsonObject.getString(MetadataServiceClient.RESULTS); String guid = jsonObject.getString(MetadataServiceClient.GUID);
System.out.println("created instance for type " + typeName + ", guid: " + guid); System.out.println("created instance for type " + typeName + ", guid: " + guid);
// return the Id for created instance with guid // return the Id for created instance with guid
...@@ -362,7 +359,7 @@ public class QuickStart { ...@@ -362,7 +359,7 @@ public class QuickStart {
return createInstance(referenceable); return createInstance(referenceable);
} }
Id loadProcess(String name, String user, Id loadProcess(String name, String description, String user,
List<Id> inputTables, List<Id> inputTables,
List<Id> outputTables, List<Id> outputTables,
String queryText, String queryPlan, String queryText, String queryPlan,
...@@ -370,6 +367,7 @@ public class QuickStart { ...@@ -370,6 +367,7 @@ public class QuickStart {
String... traitNames) throws Exception { String... traitNames) throws Exception {
Referenceable referenceable = new Referenceable(LOAD_PROCESS_TYPE, traitNames); Referenceable referenceable = new Referenceable(LOAD_PROCESS_TYPE, traitNames);
referenceable.set("name", name); referenceable.set("name", name);
referenceable.set("description", description);
referenceable.set("user", user); referenceable.set("user", user);
referenceable.set("startTime", System.currentTimeMillis()); referenceable.set("startTime", System.currentTimeMillis());
referenceable.set("endTime", System.currentTimeMillis() + 10000); referenceable.set("endTime", System.currentTimeMillis() + 10000);
...@@ -465,6 +463,8 @@ public class QuickStart { ...@@ -465,6 +463,8 @@ public class QuickStart {
*/ */
"Table where name=\"sales_fact\", columns", "Table where name=\"sales_fact\", columns",
"Table where name=\"sales_fact\", columns as column select column.name, column.dataType, column.comment", "Table where name=\"sales_fact\", columns as column select column.name, column.dataType, column.comment",
"from DataSet",
"from Process",
}; };
} }
......
...@@ -21,6 +21,7 @@ package org.apache.hadoop.metadata.web.filters; ...@@ -21,6 +21,7 @@ package org.apache.hadoop.metadata.web.filters;
import com.google.inject.Singleton; import com.google.inject.Singleton;
import org.apache.commons.configuration.ConfigurationException; import org.apache.commons.configuration.ConfigurationException;
import org.apache.commons.configuration.PropertiesConfiguration; import org.apache.commons.configuration.PropertiesConfiguration;
import org.apache.hadoop.metadata.security.SecurityProperties;
import org.apache.hadoop.metadata.PropertiesUtil; import org.apache.hadoop.metadata.PropertiesUtil;
import org.apache.hadoop.security.SecurityUtil; import org.apache.hadoop.security.SecurityUtil;
import org.apache.hadoop.security.authentication.server.AuthenticationFilter; import org.apache.hadoop.security.authentication.server.AuthenticationFilter;
...@@ -44,7 +45,6 @@ import java.util.Properties; ...@@ -44,7 +45,6 @@ import java.util.Properties;
public class MetadataAuthenticationFilter extends AuthenticationFilter { public class MetadataAuthenticationFilter extends AuthenticationFilter {
private static final Logger LOG = LoggerFactory.getLogger(MetadataAuthenticationFilter.class); private static final Logger LOG = LoggerFactory.getLogger(MetadataAuthenticationFilter.class);
static final String PREFIX = "metadata.http.authentication."; static final String PREFIX = "metadata.http.authentication.";
static final String BIND_ADDRESS = "metadata.server.bind.address";
@Override @Override
protected Properties getConfiguration(String configPrefix, FilterConfig filterConfig) throws ServletException { protected Properties getConfiguration(String configPrefix, FilterConfig filterConfig) throws ServletException {
...@@ -77,7 +77,7 @@ public class MetadataAuthenticationFilter extends AuthenticationFilter { ...@@ -77,7 +77,7 @@ public class MetadataAuthenticationFilter extends AuthenticationFilter {
} }
//Resolve _HOST into bind address //Resolve _HOST into bind address
String bindAddress = config.getProperty(BIND_ADDRESS); String bindAddress = configuration.getString(SecurityProperties.BIND_ADDRESS);
if (bindAddress == null) { if (bindAddress == null) {
LOG.info("No host name configured. Defaulting to local host name."); LOG.info("No host name configured. Defaulting to local host name.");
try { try {
......
...@@ -31,6 +31,7 @@ import com.sun.jersey.guice.spi.container.servlet.GuiceContainer; ...@@ -31,6 +31,7 @@ import com.sun.jersey.guice.spi.container.servlet.GuiceContainer;
import org.apache.commons.configuration.ConfigurationException; import org.apache.commons.configuration.ConfigurationException;
import org.apache.commons.configuration.PropertiesConfiguration; import org.apache.commons.configuration.PropertiesConfiguration;
import org.apache.hadoop.metadata.MetadataException; import org.apache.hadoop.metadata.MetadataException;
import org.apache.hadoop.metadata.MetadataServiceClient;
import org.apache.hadoop.metadata.PropertiesUtil; import org.apache.hadoop.metadata.PropertiesUtil;
import org.apache.hadoop.metadata.RepositoryMetadataModule; import org.apache.hadoop.metadata.RepositoryMetadataModule;
import org.apache.hadoop.metadata.repository.typestore.ITypeStore; import org.apache.hadoop.metadata.repository.typestore.ITypeStore;
...@@ -83,7 +84,7 @@ public class GuiceServletConfig extends GuiceServletContextListener { ...@@ -83,7 +84,7 @@ public class GuiceServletConfig extends GuiceServletContextListener {
Map<String, String> params = new HashMap<>(); Map<String, String> params = new HashMap<>();
params.put(PackagesResourceConfig.PROPERTY_PACKAGES, packages); params.put(PackagesResourceConfig.PROPERTY_PACKAGES, packages);
serve("/api/metadata/*").with(GuiceContainer.class, params); serve("/" + MetadataServiceClient.BASE_URI + "*").with(GuiceContainer.class, params);
} }
private void configureAuthenticationFilter() throws ConfigurationException { private void configureAuthenticationFilter() throws ConfigurationException {
......
...@@ -21,14 +21,17 @@ import org.apache.commons.configuration.PropertiesConfiguration; ...@@ -21,14 +21,17 @@ import org.apache.commons.configuration.PropertiesConfiguration;
import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.metadata.MetadataException; import org.apache.hadoop.metadata.MetadataException;
import org.apache.hadoop.metadata.PropertiesUtil; import org.apache.hadoop.metadata.PropertiesUtil;
import org.apache.hadoop.metadata.security.SecurityProperties;
import org.apache.hadoop.security.SecurityUtil; import org.apache.hadoop.security.SecurityUtil;
import org.apache.hadoop.security.UserGroupInformation; import org.apache.hadoop.security.UserGroupInformation;
import org.apache.hadoop.util.Shell; import org.apache.hadoop.util.Shell;
import org.slf4j.Logger; import org.slf4j.Logger;
import org.slf4j.LoggerFactory; import org.slf4j.LoggerFactory;
import javax.servlet.ServletException;
import java.io.IOException; import java.io.IOException;
import java.net.InetAddress; import java.net.InetAddress;
import java.net.UnknownHostException;
/** /**
* A class capable of performing a simple or kerberos login. * A class capable of performing a simple or kerberos login.
...@@ -73,17 +76,30 @@ public class LoginProcessor { ...@@ -73,17 +76,30 @@ public class LoginProcessor {
if (authenticationMethod == UserGroupInformation.AuthenticationMethod.SIMPLE) { if (authenticationMethod == UserGroupInformation.AuthenticationMethod.SIMPLE) {
UserGroupInformation.loginUserFromSubject(null); UserGroupInformation.loginUserFromSubject(null);
} else if (authenticationMethod == UserGroupInformation.AuthenticationMethod.KERBEROS) { } else if (authenticationMethod == UserGroupInformation.AuthenticationMethod.KERBEROS) {
String bindAddress = getHostname(configuration);
UserGroupInformation.loginUserFromKeytab( UserGroupInformation.loginUserFromKeytab(
getServerPrincipal(configuration.getString(AUTHENTICATION_PRINCIPAL)), getServerPrincipal(configuration.getString(AUTHENTICATION_PRINCIPAL), bindAddress),
configuration.getString(AUTHENTICATION_KEYTAB)); configuration.getString(AUTHENTICATION_KEYTAB));
} }
LOG.info("Logged in user {}", UserGroupInformation.getLoginUser()); LOG.info("Logged in user {}", UserGroupInformation.getLoginUser());
} catch (IOException e) { } catch (IOException e) {
throw new IllegalStateException(String.format("Unable to perform %s login.", authenticationMethod), e); throw new IllegalStateException(String.format("Unable to perform %s login.", authenticationMethod), e);
} }
} }
private String getHostname(PropertiesConfiguration configuration) {
String bindAddress = configuration.getString(SecurityProperties.BIND_ADDRESS);
if (bindAddress == null) {
LOG.info("No host name configured. Defaulting to local host name.");
try {
bindAddress = InetAddress.getLocalHost().getHostName();
} catch (UnknownHostException e) {
throw new IllegalStateException(e);
}
}
return bindAddress;
}
protected void setupHadoopConfiguration(Configuration hadoopConfig, PropertiesConfiguration configuration) { protected void setupHadoopConfiguration(Configuration hadoopConfig, PropertiesConfiguration configuration) {
String authMethod; String authMethod;
authMethod = configuration != null ? configuration.getString(AUTHENTICATION_METHOD) : null; authMethod = configuration != null ? configuration.getString(AUTHENTICATION_METHOD) : null;
...@@ -104,8 +120,8 @@ public class LoginProcessor { ...@@ -104,8 +120,8 @@ public class LoginProcessor {
* @return the service principal. * @return the service principal.
* @throws IOException * @throws IOException
*/ */
private String getServerPrincipal(String principal) throws IOException { private String getServerPrincipal(String principal, String host) throws IOException {
return SecurityUtil.getServerPrincipal(principal, InetAddress.getLocalHost().getHostName()); return SecurityUtil.getServerPrincipal(principal, host);
} }
/** /**
......
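For context on the _HOST resolution added above, a standalone sketch (hostname and principal are made-up values) of what Hadoop's SecurityUtil.getServerPrincipal does with the configured bind address:

import org.apache.hadoop.security.SecurityUtil;

public class PrincipalSketch {
    public static void main(String[] args) throws Exception {
        // "_HOST" in the configured principal is replaced with the resolved bind address.
        String principal = SecurityUtil.getServerPrincipal(
                "metadata/_HOST@EXAMPLE.COM", "metadata-server.example.com");
        System.out.println(principal);   // metadata/metadata-server.example.com@EXAMPLE.COM
    }
}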
...@@ -33,12 +33,12 @@ import javax.inject.Inject; ...@@ -33,12 +33,12 @@ import javax.inject.Inject;
import javax.inject.Singleton; import javax.inject.Singleton;
import javax.servlet.http.HttpServletRequest; import javax.servlet.http.HttpServletRequest;
import javax.ws.rs.*; import javax.ws.rs.*;
import javax.ws.rs.core.Context; import javax.ws.rs.core.*;
import javax.ws.rs.core.MediaType;
import javax.ws.rs.core.Response;
import java.io.IOException; import java.io.IOException;
import java.net.URI;
import java.util.List; import java.util.List;
/** /**
* Entity management operations as REST API. * Entity management operations as REST API.
* *
...@@ -50,12 +50,13 @@ import java.util.List; ...@@ -50,12 +50,13 @@ import java.util.List;
public class EntityResource { public class EntityResource {
private static final Logger LOG = LoggerFactory.getLogger(EntityResource.class); private static final Logger LOG = LoggerFactory.getLogger(EntityResource.class);
private static final String GUID = "GUID";
private static final String TRAIT_NAME = "traitName"; private static final String TRAIT_NAME = "traitName";
private final MetadataService metadataService; private final MetadataService metadataService;
@Context
UriInfo uriInfo;
/** /**
* Created by the Guice ServletModule and injected with the * Created by the Guice ServletModule and injected with the
* configured MetadataService. * configured MetadataService.
...@@ -79,11 +80,16 @@ public class EntityResource { ...@@ -79,11 +80,16 @@ public class EntityResource {
LOG.debug("submitting entity {} ", entity); LOG.debug("submitting entity {} ", entity);
final String guid = metadataService.createEntity(entity); final String guid = metadataService.createEntity(entity);
UriBuilder ub = uriInfo.getAbsolutePathBuilder();
URI locationURI = ub.path(guid).build();
JSONObject response = new JSONObject(); JSONObject response = new JSONObject();
response.put(MetadataServiceClient.REQUEST_ID, Servlets.getRequestId()); response.put(MetadataServiceClient.REQUEST_ID, Servlets.getRequestId());
response.put(MetadataServiceClient.RESULTS, guid); response.put(MetadataServiceClient.GUID, guid);
response.put(MetadataServiceClient.DEFINITION, entity);
return Response.ok(response).build(); return Response.created(locationURI).entity(response).build();
} catch (MetadataException | IOException | IllegalArgumentException e) { } catch (MetadataException | IOException | IllegalArgumentException e) {
LOG.error("Unable to persist entity instance", e); LOG.error("Unable to persist entity instance", e);
throw new WebApplicationException( throw new WebApplicationException(
...@@ -104,55 +110,46 @@ public class EntityResource { ...@@ -104,55 +110,46 @@ public class EntityResource {
@Path("{guid}") @Path("{guid}")
@Produces(MediaType.APPLICATION_JSON) @Produces(MediaType.APPLICATION_JSON)
public Response getEntityDefinition(@PathParam("guid") String guid) { public Response getEntityDefinition(@PathParam("guid") String guid) {
Preconditions.checkNotNull(guid, "Entity GUID cannot be null");
try { try {
LOG.debug("Fetching entity definition for guid={} ", guid); LOG.debug("Fetching entity definition for guid={} ", guid);
final String entityDefinition = metadataService.getEntityDefinition(guid); final String entityDefinition = metadataService.getEntityDefinition(guid);
JSONObject response = new JSONObject(); JSONObject response = new JSONObject();
response.put(MetadataServiceClient.REQUEST_ID, Servlets.getRequestId()); response.put(MetadataServiceClient.REQUEST_ID, Servlets.getRequestId());
response.put(GUID, guid); response.put(MetadataServiceClient.GUID, guid);
Response.Status status = Response.Status.NOT_FOUND; Response.Status status = Response.Status.NOT_FOUND;
if (entityDefinition != null) { if (entityDefinition != null) {
response.put(MetadataServiceClient.RESULTS, entityDefinition); response.put(MetadataServiceClient.DEFINITION, entityDefinition);
status = Response.Status.OK; status = Response.Status.OK;
} else {
response.put(MetadataServiceClient.ERROR, JSONObject.quote(String.format("An entity with GUID={%s} does not exist", guid)));
} }
return Response.status(status).entity(response).build(); return Response.status(status).entity(response).build();
} catch (MetadataException | IllegalArgumentException e) { } catch (MetadataException | IllegalArgumentException e) {
LOG.error("An entity with GUID={} does not exist", guid, e); LOG.error("An entity with GUID={} does not exist", guid, e);
throw new WebApplicationException(e, Response throw new WebApplicationException(
.status(Response.Status.NOT_FOUND) Servlets.getErrorResponse(e, Response.Status.NOT_FOUND));
.entity(e.getMessage())
.type(MediaType.APPLICATION_JSON)
.build());
} catch (JSONException e) { } catch (JSONException e) {
LOG.error("Unable to get instance definition for GUID {}", guid, e); LOG.error("Unable to get instance definition for GUID {}", guid, e);
throw new WebApplicationException(e, Response throw new WebApplicationException(
.status(Response.Status.INTERNAL_SERVER_ERROR) Servlets.getErrorResponse(e, Response.Status.INTERNAL_SERVER_ERROR));
.entity(e.getMessage())
.type(MediaType.APPLICATION_JSON)
.build());
} }
} }
/** /**
* Gets the list of entities for a given entity type. * Gets the list of entities for a given entity type.
* *
* @param entityType name of a type which is unique * @param entityType name of a type which is unique
* @param offset starting offset for pagination
* @param resultsPerPage number of results for pagination
*/ */
@GET @GET
@Produces(MediaType.APPLICATION_JSON) @Produces(MediaType.APPLICATION_JSON)
public Response getEntityListByType(@QueryParam("type") String entityType, public Response getEntityListByType(@QueryParam("type") String entityType) {
@DefaultValue("0") @QueryParam("offset") Integer offset,
@QueryParam("numResults") Integer resultsPerPage) {
Preconditions.checkNotNull(entityType, "Entity type cannot be null");
try { try {
Preconditions.checkNotNull(entityType, "Entity type cannot be null");
LOG.debug("Fetching entity list for type={} ", entityType); LOG.debug("Fetching entity list for type={} ", entityType);
final List<String> entityList = metadataService.getEntityList(entityType); final List<String> entityList = metadataService.getEntityList(entityType);
...@@ -160,9 +157,13 @@ public class EntityResource { ...@@ -160,9 +157,13 @@ public class EntityResource {
response.put(MetadataServiceClient.REQUEST_ID, Servlets.getRequestId()); response.put(MetadataServiceClient.REQUEST_ID, Servlets.getRequestId());
response.put("type", entityType); response.put("type", entityType);
response.put(MetadataServiceClient.RESULTS, new JSONArray(entityList)); response.put(MetadataServiceClient.RESULTS, new JSONArray(entityList));
response.put(MetadataServiceClient.TOTAL_SIZE, entityList.size()); response.put(MetadataServiceClient.COUNT, entityList.size());
return Response.ok(response).build(); return Response.ok(response).build();
} catch (NullPointerException e) {
LOG.error("Entity type cannot be null", e);
throw new WebApplicationException(
Servlets.getErrorResponse(e, Response.Status.BAD_REQUEST));
} catch (MetadataException | IllegalArgumentException e) { } catch (MetadataException | IllegalArgumentException e) {
LOG.error("Unable to get entity list for type {}", entityType, e); LOG.error("Unable to get entity list for type {}", entityType, e);
throw new WebApplicationException( throw new WebApplicationException(
...@@ -215,17 +216,15 @@ public class EntityResource { ...@@ -215,17 +216,15 @@ public class EntityResource {
@Path("{guid}/traits") @Path("{guid}/traits")
@Produces(MediaType.APPLICATION_JSON) @Produces(MediaType.APPLICATION_JSON)
public Response getTraitNames(@PathParam("guid") String guid) { public Response getTraitNames(@PathParam("guid") String guid) {
Preconditions.checkNotNull(guid, "Entity GUID cannot be null");
try { try {
LOG.debug("Fetching trait names for entity={}", guid); LOG.debug("Fetching trait names for entity={}", guid);
final List<String> traitNames = metadataService.getTraitNames(guid); final List<String> traitNames = metadataService.getTraitNames(guid);
JSONObject response = new JSONObject(); JSONObject response = new JSONObject();
response.put(MetadataServiceClient.REQUEST_ID, Servlets.getRequestId()); response.put(MetadataServiceClient.REQUEST_ID, Servlets.getRequestId());
response.put(GUID, guid); response.put(MetadataServiceClient.GUID, guid);
response.put(MetadataServiceClient.RESULTS, new JSONArray(traitNames)); response.put(MetadataServiceClient.RESULTS, new JSONArray(traitNames));
response.put(MetadataServiceClient.TOTAL_SIZE, traitNames.size()); response.put(MetadataServiceClient.COUNT, traitNames.size());
return Response.ok(response).build(); return Response.ok(response).build();
} catch (MetadataException | IllegalArgumentException e) { } catch (MetadataException | IllegalArgumentException e) {
...@@ -250,19 +249,20 @@ public class EntityResource { ...@@ -250,19 +249,20 @@ public class EntityResource {
@Produces(MediaType.APPLICATION_JSON) @Produces(MediaType.APPLICATION_JSON)
public Response addTrait(@Context HttpServletRequest request, public Response addTrait(@Context HttpServletRequest request,
@PathParam("guid") String guid) { @PathParam("guid") String guid) {
Preconditions.checkNotNull(guid, "Entity GUID cannot be null");
try { try {
final String traitDefinition = Servlets.getRequestPayload(request); final String traitDefinition = Servlets.getRequestPayload(request);
LOG.debug("Adding trait={} for entity={} ", traitDefinition, guid); LOG.debug("Adding trait={} for entity={} ", traitDefinition, guid);
metadataService.addTrait(guid, traitDefinition); metadataService.addTrait(guid, traitDefinition);
UriBuilder ub = uriInfo.getAbsolutePathBuilder();
URI locationURI = ub.path(guid).build();
JSONObject response = new JSONObject(); JSONObject response = new JSONObject();
response.put(MetadataServiceClient.REQUEST_ID, Servlets.getRequestId()); response.put(MetadataServiceClient.REQUEST_ID, Servlets.getRequestId());
response.put(GUID, guid); response.put(MetadataServiceClient.GUID, guid);
response.put("traitInstance", traitDefinition); response.put(MetadataServiceClient.DEFINITION, traitDefinition);
return Response.ok(response).build(); return Response.created(locationURI).entity(response).build();
} catch (MetadataException | IOException | IllegalArgumentException e) { } catch (MetadataException | IOException | IllegalArgumentException e) {
LOG.error("Unable to add trait for entity={}", guid, e); LOG.error("Unable to add trait for entity={}", guid, e);
throw new WebApplicationException( throw new WebApplicationException(
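For illustration, a hedged sketch of how a caller might observe the new 201 Created plus Location behaviour of the trait endpoint above, again with the Jersey 1.x client used elsewhere in this commit. The host, port and base path are assumptions, and the trait payload is a placeholder rather than a schema taken from this diff.

import com.sun.jersey.api.client.Client;
import com.sun.jersey.api.client.ClientResponse;
import com.sun.jersey.api.client.WebResource;
import javax.ws.rs.core.MediaType;

public class AddTraitClientSketch {
    public static void main(String[] args) {
        String guid = "some-entity-guid";                        // placeholder guid
        String traitInstanceAsJSON = "{\"typeName\": \"PII\"}";  // placeholder payload

        WebResource resource = Client.create()
                .resource("http://localhost:21000/api/metadata/entities") // assumed base path
                .path(guid)
                .path("traits");

        ClientResponse clientResponse = resource
                .accept(MediaType.APPLICATION_JSON)
                .type(MediaType.APPLICATION_JSON)
                .post(ClientResponse.class, traitInstanceAsJSON);

        // The resource now answers 201 Created and sets a Location header
        // derived from the request path plus the guid.
        System.out.println(clientResponse.getStatus());     // expected: 201
        System.out.println(clientResponse.getLocation());   // URI of the created resource
    }
}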
...@@ -287,16 +287,13 @@ public class EntityResource { ...@@ -287,16 +287,13 @@ public class EntityResource {
public Response deleteTrait(@Context HttpServletRequest request, public Response deleteTrait(@Context HttpServletRequest request,
@PathParam("guid") String guid, @PathParam("guid") String guid,
@PathParam(TRAIT_NAME) String traitName) { @PathParam(TRAIT_NAME) String traitName) {
Preconditions.checkNotNull(guid, "Entity GUID cannot be null");
Preconditions.checkNotNull(traitName, "Trait name cannot be null");
LOG.debug("Deleting trait={} from entity={} ", traitName, guid); LOG.debug("Deleting trait={} from entity={} ", traitName, guid);
try { try {
metadataService.deleteTrait(guid, traitName); metadataService.deleteTrait(guid, traitName);
JSONObject response = new JSONObject(); JSONObject response = new JSONObject();
response.put(MetadataServiceClient.REQUEST_ID, Servlets.getRequestId()); response.put(MetadataServiceClient.REQUEST_ID, Servlets.getRequestId());
response.put(GUID, guid); response.put(MetadataServiceClient.GUID, guid);
response.put(TRAIT_NAME, traitName); response.put(TRAIT_NAME, traitName);
return Response.ok(response).build(); return Response.ok(response).build();
......
...@@ -176,7 +176,7 @@ public class HiveLineageResource { ...@@ -176,7 +176,7 @@ public class HiveLineageResource {
LOG.info("Fetching lineage outputs graph for tableName={}", tableName); LOG.info("Fetching lineage outputs graph for tableName={}", tableName);
try { try {
final String jsonResult = lineageService.getOutputs(tableName); final String jsonResult = lineageService.getOutputsGraph(tableName);
JSONObject response = new JSONObject(); JSONObject response = new JSONObject();
response.put(MetadataServiceClient.REQUEST_ID, Servlets.getRequestId()); response.put(MetadataServiceClient.REQUEST_ID, Servlets.getRequestId());
......
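The lineage endpoint now returns the graph form of the result (getOutputsGraph instead of getOutputs). Based on the new integration tests later in this commit (testInputsGraph/testOutputsGraph), the payload nests a values object holding vertices and edges; below is a small jettison sketch of walking that structure, with the sample body invented for illustration and the literal key names assumed from the constants used in this diff.

import org.codehaus.jettison.json.JSONObject;

public class LineageGraphSketch {
    public static void main(String[] args) throws Exception {
        // Nesting mirrors what the new tests assert: results -> values -> vertices/edges.
        String body = "{"
                + "\"requestId\": \"pool-1-thread-1\","
                + "\"results\": {\"values\": {"
                + "  \"vertices\": {\"1\": {\"name\": \"sales_fact\"}},"
                + "  \"edges\": {\"1\": [\"2\"]}"
                + "}}}";

        JSONObject response = new JSONObject(body);
        JSONObject values = response.getJSONObject("results").getJSONObject("values");
        System.out.println(values.getJSONObject("vertices").length());  // vertex count
        System.out.println(values.getJSONObject("edges").length());     // edge count
    }
}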
...@@ -6,9 +6,9 @@ ...@@ -6,9 +6,9 @@
* to you under the Apache License, Version 2.0 (the * to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance * "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at * with the License. You may obtain a copy of the License at
* * <p/>
* http://www.apache.org/licenses/LICENSE-2.0 * http://www.apache.org/licenses/LICENSE-2.0
* * <p/>
* Unless required by applicable law or agreed to in writing, software * Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, * distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
...@@ -49,6 +49,9 @@ import java.util.Map; ...@@ -49,6 +49,9 @@ import java.util.Map;
public class MetadataDiscoveryResource { public class MetadataDiscoveryResource {
private static final Logger LOG = LoggerFactory.getLogger(EntityResource.class); private static final Logger LOG = LoggerFactory.getLogger(EntityResource.class);
private static final String QUERY_TYPE_DSL = "dsl";
private static final String QUERY_TYPE_GREMLIN = "gremlin";
private static final String QUERY_TYPE_FULLTEXT = "full-text";
private final DiscoveryService discoveryService; private final DiscoveryService discoveryService;
...@@ -79,38 +82,36 @@ public class MetadataDiscoveryResource { ...@@ -79,38 +82,36 @@ public class MetadataDiscoveryResource {
return searchUsingGremlinQuery(query); return searchUsingGremlinQuery(query);
} }
try { JSONObject response;
JSONObject response = new JSONObject();
response.put(MetadataServiceClient.REQUEST_ID, Servlets.getRequestId());
response.put("query", query);
try { // fall back to dsl
final String jsonResult = discoveryService.searchByDSL(query);
response.put("queryType", "dsl");
response.put(MetadataServiceClient.RESULTS, new JSONObject(jsonResult));
} catch (Throwable throwable) {
LOG.error("Unable to get entity list for query {} using dsl", query, throwable);
try { //fall back to full-text
final String jsonResult = discoveryService.searchByFullText(query);
response.put("queryType", "full-text");
response.put(MetadataServiceClient.RESULTS, new JSONObject(jsonResult));
} catch (DiscoveryException e) {
LOG.error("Unable to get entity list for query {}", query, e);
throw new WebApplicationException(Servlets.getErrorResponse(e, Response.Status.BAD_REQUEST));
} catch (JSONException e) {
LOG.error("Unable to get entity list for query {}", query, e);
throw new WebApplicationException(
Servlets.getErrorResponse(e, Response.Status.INTERNAL_SERVER_ERROR));
}
}
return Response.ok(response).build(); try { // fall back to dsl
} catch (JSONException e) { final String jsonResultStr = discoveryService.searchByDSL(query);
LOG.error("Unable to get entity list for query {}", query, e); response = new DSLJSONResponseBuilder().results(jsonResultStr)
throw new WebApplicationException(Servlets.getErrorResponse(e, Response.Status.INTERNAL_SERVER_ERROR)); .query(query)
.build();
} catch (Throwable throwable) {
LOG.error("Unable to get entity list for query {} using dsl", query, throwable);
try { //fall back to full-text
final String jsonResultStr = discoveryService.searchByFullText(query);
response = new FullTextJSonResponseBuilder().results(jsonResultStr)
.query(query)
.build();
} catch (DiscoveryException e) {
LOG.error("Unable to get entity list for query {}", query, e);
throw new WebApplicationException(Servlets.getErrorResponse(e, Response.Status.BAD_REQUEST));
} catch (JSONException e) {
LOG.error("Unable to get entity list for query {}", query, e);
throw new WebApplicationException(
Servlets.getErrorResponse(e, Response.Status.INTERNAL_SERVER_ERROR));
}
} }
return Response.ok(response)
.build();
} }
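The generic search method above now tries DSL first and silently falls back to full-text; one way for a caller to see which strategy answered is to read the queryType field of the returned envelope. A hedged sketch follows; the discovery path, the query parameter name and the literal JSON key names ("queryType" and so on) are assumptions based on the constants referenced in this diff.

import com.sun.jersey.api.client.Client;
import com.sun.jersey.api.client.ClientResponse;
import com.sun.jersey.api.client.WebResource;
import javax.ws.rs.core.MediaType;
import org.codehaus.jettison.json.JSONObject;

public class SearchFallbackSketch {
    public static void main(String[] args) throws Exception {
        WebResource resource = Client.create()
                .resource("http://localhost:21000/api/metadata/discovery/search"); // assumed path

        ClientResponse clientResponse = resource
                .queryParam("query", "hive_table")   // free-form: DSL or plain text
                .accept(MediaType.APPLICATION_JSON)
                .get(ClientResponse.class);

        JSONObject envelope = new JSONObject(clientResponse.getEntity(String.class));
        // "dsl" when the DSL parser accepted the query, "full-text" after the fallback.
        System.out.println(envelope.getString("queryType"));
    }
}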
/** /**
...@@ -126,23 +127,22 @@ public class MetadataDiscoveryResource { ...@@ -126,23 +127,22 @@ public class MetadataDiscoveryResource {
Preconditions.checkNotNull(dslQuery, "dslQuery cannot be null"); Preconditions.checkNotNull(dslQuery, "dslQuery cannot be null");
try { try {
final String jsonResult = discoveryService.searchByDSL(dslQuery); final String jsonResultStr = discoveryService.searchByDSL(dslQuery);
JSONObject response = new JSONObject(); JSONObject response = new DSLJSONResponseBuilder().results(jsonResultStr)
response.put(MetadataServiceClient.REQUEST_ID, Servlets.getRequestId()); .query(dslQuery)
response.put("query", dslQuery); .build();
response.put("queryType", "dsl");
response.put(MetadataServiceClient.RESULTS, new JSONObject(jsonResult));
return Response.ok(response).build(); return Response.ok(response)
.build();
} catch (DiscoveryException e) { } catch (DiscoveryException e) {
LOG.error("Unable to get entity list for dslQuery {}", dslQuery, e); LOG.error("Unable to get entity list for dslQuery {}", dslQuery, e);
throw new WebApplicationException( throw new WebApplicationException(
Servlets.getErrorResponse(e, Response.Status.BAD_REQUEST)); Servlets.getErrorResponse(e, Response.Status.BAD_REQUEST));
} catch (JSONException e) { } catch (JSONException e) {
LOG.error("Unable to get entity list for dslQuery {}", dslQuery, e); LOG.error("Unable to get entity list for dslQuery {}", dslQuery, e);
throw new WebApplicationException( throw new WebApplicationException(
Servlets.getErrorResponse(e, Response.Status.INTERNAL_SERVER_ERROR)); Servlets.getErrorResponse(e, Response.Status.INTERNAL_SERVER_ERROR));
} }
} }
...@@ -160,29 +160,30 @@ public class MetadataDiscoveryResource { ...@@ -160,29 +160,30 @@ public class MetadataDiscoveryResource {
try { try {
final List<Map<String, String>> results = discoveryService final List<Map<String, String>> results = discoveryService
.searchByGremlin(gremlinQuery); .searchByGremlin(gremlinQuery);
JSONObject response = new JSONObject(); JSONObject response = new JSONObject();
response.put(MetadataServiceClient.REQUEST_ID, Servlets.getRequestId()); response.put(MetadataServiceClient.REQUEST_ID, Servlets.getRequestId());
response.put("query", gremlinQuery); response.put(MetadataServiceClient.QUERY, gremlinQuery);
response.put("queryType", "gremlin"); response.put(MetadataServiceClient.QUERY_TYPE, QUERY_TYPE_GREMLIN);
JSONArray list = new JSONArray(); JSONArray list = new JSONArray();
for (Map<String, String> result : results) { for (Map<String, String> result : results) {
list.put(new JSONObject(result)); list.put(new JSONObject(result));
} }
response.put(MetadataServiceClient.RESULTS, list); response.put(MetadataServiceClient.RESULTS, list);
response.put(MetadataServiceClient.TOTAL_SIZE, list.length()); response.put(MetadataServiceClient.COUNT, list.length());
return Response.ok(response).build(); return Response.ok(response)
.build();
} catch (DiscoveryException e) { } catch (DiscoveryException e) {
LOG.error("Unable to get entity list for gremlinQuery {}", gremlinQuery, e); LOG.error("Unable to get entity list for gremlinQuery {}", gremlinQuery, e);
throw new WebApplicationException( throw new WebApplicationException(
Servlets.getErrorResponse(e, Response.Status.BAD_REQUEST)); Servlets.getErrorResponse(e, Response.Status.BAD_REQUEST));
} catch (JSONException e) { } catch (JSONException e) {
LOG.error("Unable to get entity list for gremlinQuery {}", gremlinQuery, e); LOG.error("Unable to get entity list for gremlinQuery {}", gremlinQuery, e);
throw new WebApplicationException( throw new WebApplicationException(
Servlets.getErrorResponse(e, Response.Status.INTERNAL_SERVER_ERROR)); Servlets.getErrorResponse(e, Response.Status.INTERNAL_SERVER_ERROR));
} }
} }
...@@ -199,15 +200,14 @@ public class MetadataDiscoveryResource { ...@@ -199,15 +200,14 @@ public class MetadataDiscoveryResource {
Preconditions.checkNotNull(query, "query cannot be null"); Preconditions.checkNotNull(query, "query cannot be null");
try { try {
final String jsonResult = discoveryService.searchByFullText(query); final String jsonResultStr = discoveryService.searchByFullText(query);
JSONArray rowsJsonArr = new JSONArray(jsonResultStr);
JSONObject response = new JSONObject();
response.put(MetadataServiceClient.REQUEST_ID, Servlets.getRequestId());
response.put("query", query);
response.put("queryType", "full-text");
response.put(MetadataServiceClient.RESULTS, new JSONArray(jsonResult));
return Response.ok(response).build(); JSONObject response = new FullTextJSonResponseBuilder().results(rowsJsonArr)
.query(query)
.build();
return Response.ok(response)
.build();
} catch (DiscoveryException e) { } catch (DiscoveryException e) {
LOG.error("Unable to get entity list for query {}", query, e); LOG.error("Unable to get entity list for query {}", query, e);
throw new WebApplicationException(Servlets.getErrorResponse(e, Response.Status.BAD_REQUEST)); throw new WebApplicationException(Servlets.getErrorResponse(e, Response.Status.BAD_REQUEST));
...@@ -215,6 +215,104 @@ public class MetadataDiscoveryResource { ...@@ -215,6 +215,104 @@ public class MetadataDiscoveryResource {
LOG.error("Unable to get entity list for query {}", query, e); LOG.error("Unable to get entity list for query {}", query, e);
throw new WebApplicationException(Servlets.getErrorResponse(e, Response.Status.INTERNAL_SERVER_ERROR)); throw new WebApplicationException(Servlets.getErrorResponse(e, Response.Status.INTERNAL_SERVER_ERROR));
} }
}
private class JsonResponseBuilder {
protected int count = 0;
protected String query;
protected String queryType;
protected JSONObject response;
JsonResponseBuilder() {
this.response = new JSONObject();
}
protected JsonResponseBuilder count(int count) {
this.count = count;
return this;
}
public JsonResponseBuilder query(String query) {
this.query = query;
return this;
}
public JsonResponseBuilder queryType(String queryType) {
this.queryType = queryType;
return this;
}
protected JSONObject build() throws JSONException {
Preconditions.checkNotNull(query, "Query cannot be null");
Preconditions.checkNotNull(queryType, "Query Type must be specified");
Preconditions.checkArgument(count >= 0, "Search result count should be >= 0");
response.put(MetadataServiceClient.REQUEST_ID, Servlets.getRequestId());
response.put(MetadataServiceClient.QUERY, query);
response.put(MetadataServiceClient.QUERY_TYPE, queryType);
response.put(MetadataServiceClient.COUNT, count);
return response;
}
}
private class DSLJSONResponseBuilder extends JsonResponseBuilder {
DSLJSONResponseBuilder() {
super();
}
private JSONObject dslResults;
public DSLJSONResponseBuilder results(JSONObject dslResults) {
this.dslResults = dslResults;
return this;
}
public DSLJSONResponseBuilder results(String dslResults) throws JSONException {
return results(new JSONObject(dslResults));
}
@Override
public JSONObject build() throws JSONException {
Preconditions.checkNotNull(dslResults);
JSONArray rowsJsonArr = dslResults.getJSONArray(MetadataServiceClient.ROWS);
count(rowsJsonArr.length());
queryType(QUERY_TYPE_DSL);
JSONObject response = super.build();
response.put(MetadataServiceClient.RESULTS, dslResults);
return response;
}
}
private class FullTextJSonResponseBuilder extends JsonResponseBuilder {
private JSONArray fullTextResults;
public FullTextJSonResponseBuilder results(JSONArray fullTextResults) {
this.fullTextResults = fullTextResults;
return this;
}
public FullTextJSonResponseBuilder results(String dslResults) throws JSONException {
return results(new JSONArray(dslResults));
}
public FullTextJSonResponseBuilder() {
super();
}
@Override
public JSONObject build() throws JSONException {
Preconditions.checkNotNull(fullTextResults);
count(fullTextResults.length());
queryType(QUERY_TYPE_FULLTEXT);
JSONObject response = super.build();
response.put(MetadataServiceClient.RESULTS, fullTextResults);
return response;
}
} }
} }
\ No newline at end of file
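A small sketch of the envelope the JsonResponseBuilder hierarchy above assembles, decoded with the same jettison types used throughout this commit. The literal key names and the sample body are assumptions for illustration; the resource classes themselves only reference the MetadataServiceClient constants.

import org.codehaus.jettison.json.JSONArray;
import org.codehaus.jettison.json.JSONObject;

public class SearchEnvelopeSketch {
    public static void main(String[] args) throws Exception {
        // Shape assembled by JsonResponseBuilder + DSLJSONResponseBuilder above
        // (key names assumed to be the values behind the MetadataServiceClient constants).
        String body = "{"
                + "\"requestId\": \"pool-1-thread-1\","
                + "\"query\": \"from hive_table\","
                + "\"queryType\": \"dsl\","
                + "\"count\": 1,"
                + "\"results\": {\"rows\": [{\"name\": \"sales_fact\"}]}"
                + "}";

        JSONObject envelope = new JSONObject(body);
        JSONArray rows = envelope.getJSONObject("results").getJSONArray("rows");
        System.out.println(envelope.getInt("count") == rows.length()); // count mirrors the row count
    }
}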
...@@ -165,7 +165,7 @@ public class RexsterGraphResource { ...@@ -165,7 +165,7 @@ public class RexsterGraphResource {
JSONObject response = new JSONObject(); JSONObject response = new JSONObject();
response.put(MetadataServiceClient.RESULTS, new JSONObject(vertexProperties)); response.put(MetadataServiceClient.RESULTS, new JSONObject(vertexProperties));
response.put(MetadataServiceClient.TOTAL_SIZE, vertexProperties.size()); response.put(MetadataServiceClient.COUNT, vertexProperties.size());
return Response.ok(response).build(); return Response.ok(response).build();
} catch (JSONException e) { } catch (JSONException e) {
throw new WebApplicationException( throw new WebApplicationException(
...@@ -276,7 +276,7 @@ public class RexsterGraphResource { ...@@ -276,7 +276,7 @@ public class RexsterGraphResource {
if (!countOnly) { if (!countOnly) {
response.put(MetadataServiceClient.RESULTS, elementArray); response.put(MetadataServiceClient.RESULTS, elementArray);
} }
response.put(MetadataServiceClient.TOTAL_SIZE, counter); response.put(MetadataServiceClient.COUNT, counter);
return Response.ok(response).build(); return Response.ok(response).build();
} }
...@@ -298,7 +298,7 @@ public class RexsterGraphResource { ...@@ -298,7 +298,7 @@ public class RexsterGraphResource {
String message = "Edge with [" + edgeId + "] cannot be found."; String message = "Edge with [" + edgeId + "] cannot be found.";
LOG.info(message); LOG.info(message);
throw new WebApplicationException(Response.status(Response.Status.NOT_FOUND) throw new WebApplicationException(Response.status(Response.Status.NOT_FOUND)
.entity(JSONObject.quote(message)).build()); .entity(Servlets.escapeJsonString(message)).build());
} }
JSONObject response = new JSONObject(); JSONObject response = new JSONObject();
...@@ -323,7 +323,7 @@ public class RexsterGraphResource { ...@@ -323,7 +323,7 @@ public class RexsterGraphResource {
JSONObject response = new JSONObject(); JSONObject response = new JSONObject();
response.put(MetadataServiceClient.RESULTS, vertexArray); response.put(MetadataServiceClient.RESULTS, vertexArray);
response.put(MetadataServiceClient.TOTAL_SIZE, counter); response.put(MetadataServiceClient.COUNT, counter);
return response; return response;
} }
...@@ -389,7 +389,7 @@ public class RexsterGraphResource { ...@@ -389,7 +389,7 @@ public class RexsterGraphResource {
countOnly = false; countOnly = false;
} else { } else {
throw new WebApplicationException(Response.status(Response.Status.BAD_REQUEST) throw new WebApplicationException(Response.status(Response.Status.BAD_REQUEST)
.entity(JSONObject.quote(directionSegment + " segment was invalid.")) .entity(Servlets.escapeJsonString(directionSegment + " segment was invalid."))
.build()); .build());
} }
} }
......
...@@ -7,7 +7,7 @@ ...@@ -7,7 +7,7 @@
* "License"); you may not use this file except in compliance * "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at * with the License. You may obtain a copy of the License at
* *
* http://www.apache.org/licenses/LICENSE-2.0 * http://www.apache.org/licenses/LICENSE-2.0
* *
* Unless required by applicable law or agreed to in writing, software * Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, * distributed under the License is distributed on an "AS IS" BASIS,
...@@ -18,7 +18,7 @@ ...@@ -18,7 +18,7 @@
package org.apache.hadoop.metadata.web.resources; package org.apache.hadoop.metadata.web.resources;
import com.google.common.base.Preconditions; import com.sun.jersey.api.client.ClientResponse;
import org.apache.hadoop.metadata.MetadataException; import org.apache.hadoop.metadata.MetadataException;
import org.apache.hadoop.metadata.MetadataServiceClient; import org.apache.hadoop.metadata.MetadataServiceClient;
import org.apache.hadoop.metadata.services.MetadataService; import org.apache.hadoop.metadata.services.MetadataService;
...@@ -33,11 +33,22 @@ import org.slf4j.LoggerFactory; ...@@ -33,11 +33,22 @@ import org.slf4j.LoggerFactory;
import javax.inject.Inject; import javax.inject.Inject;
import javax.inject.Singleton; import javax.inject.Singleton;
import javax.servlet.http.HttpServletRequest; import javax.servlet.http.HttpServletRequest;
import javax.ws.rs.*; import javax.ws.rs.Consumes;
import javax.ws.rs.DefaultValue;
import javax.ws.rs.GET;
import javax.ws.rs.POST;
import javax.ws.rs.Path;
import javax.ws.rs.PathParam;
import javax.ws.rs.Produces;
import javax.ws.rs.QueryParam;
import javax.ws.rs.WebApplicationException;
import javax.ws.rs.core.Context; import javax.ws.rs.core.Context;
import javax.ws.rs.core.MediaType; import javax.ws.rs.core.MediaType;
import javax.ws.rs.core.Response; import javax.ws.rs.core.Response;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List; import java.util.List;
import java.util.Map;
/** /**
* This class provides RESTful API for Types. * This class provides RESTful API for Types.
...@@ -74,13 +85,22 @@ public class TypesResource { ...@@ -74,13 +85,22 @@ public class TypesResource {
final String typeDefinition = Servlets.getRequestPayload(request); final String typeDefinition = Servlets.getRequestPayload(request);
LOG.debug("creating type with definition {} ", typeDefinition); LOG.debug("creating type with definition {} ", typeDefinition);
JSONObject typesAdded = metadataService.createType(typeDefinition); JSONObject typesJson = metadataService.createType(typeDefinition);
final JSONArray typesJsonArray = typesJson.getJSONArray(MetadataServiceClient.TYPES);
List<Map<String, String>> typesAddedList = new ArrayList<>();
for (int i = 0; i < typesJsonArray.length(); i++) {
final String name = typesJsonArray.getString(i);
typesAddedList.add(
new HashMap<String, String>() {{
put(MetadataServiceClient.NAME, name);
}});
}
JSONObject response = new JSONObject(); JSONObject response = new JSONObject();
response.put("types", typesAdded);
response.put(MetadataServiceClient.REQUEST_ID, Servlets.getRequestId()); response.put(MetadataServiceClient.REQUEST_ID, Servlets.getRequestId());
response.put(MetadataServiceClient.TYPES, typesAddedList);
return Response.ok(response).build(); return Response.status(ClientResponse.Status.CREATED).entity(response).build();
} catch (Exception e) { } catch (Exception e) {
LOG.error("Unable to persist types", e); LOG.error("Unable to persist types", e);
throw new WebApplicationException( throw new WebApplicationException(
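A hedged sketch of the reshaped create-type response built above: a 201 status whose body carries a list of {name: ...} entries instead of the raw types object. The sample body and the literal key names are assumed for illustration.

import org.codehaus.jettison.json.JSONArray;
import org.codehaus.jettison.json.JSONObject;

public class CreateTypeResponseSketch {
    public static void main(String[] args) throws Exception {
        // Shape now assembled by the submit handler above (key names assumed).
        String body = "{"
                + "\"requestId\": \"pool-1-thread-1\","
                + "\"types\": [{\"name\": \"hive_table\"}, {\"name\": \"hive_db\"}]"
                + "}";

        JSONObject response = new JSONObject(body);
        JSONArray typesAdded = response.getJSONArray("types");
        for (int i = 0; i < typesAdded.length(); i++) {
            System.out.println(typesAdded.getJSONObject(i).getString("name"));
        }
    }
}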
...@@ -103,7 +123,7 @@ public class TypesResource { ...@@ -103,7 +123,7 @@ public class TypesResource {
JSONObject response = new JSONObject(); JSONObject response = new JSONObject();
response.put("typeName", typeName); response.put("typeName", typeName);
response.put("definition", typeDefinition); response.put(MetadataServiceClient.DEFINITION, typeDefinition);
response.put(MetadataServiceClient.REQUEST_ID, Servlets.getRequestId()); response.put(MetadataServiceClient.REQUEST_ID, Servlets.getRequestId());
return Response.ok(response).build(); return Response.ok(response).build();
...@@ -120,13 +140,18 @@ public class TypesResource { ...@@ -120,13 +140,18 @@ public class TypesResource {
/** /**
* Gets the list of trait type names registered in the type system. * Gets the list of trait type names registered in the type system.
*
* @param type the name of an org.apache.hadoop.metadata.typesystem.types.DataTypes.TypeCategory
* enum value; typically one of all, TRAIT, CLASS, ENUM, STRUCT
* @return entity names response payload as json
*/ */
@GET @GET
@Produces(MediaType.APPLICATION_JSON) @Produces(MediaType.APPLICATION_JSON)
public Response getTypesByFilter(@Context HttpServletRequest request, public Response getTypesByFilter(@Context HttpServletRequest request,
@DefaultValue(TYPE_ALL) @QueryParam("type") String type) { @DefaultValue(TYPE_ALL) @QueryParam("type") String type) {
try { try {
List<String> result = null; List<String> result;
if (TYPE_ALL.equals(type)) { if (TYPE_ALL.equals(type)) {
result = metadataService.getTypeNamesList(); result = metadataService.getTypeNamesList();
} else { } else {
...@@ -136,11 +161,11 @@ public class TypesResource { ...@@ -136,11 +161,11 @@ public class TypesResource {
JSONObject response = new JSONObject(); JSONObject response = new JSONObject();
response.put(MetadataServiceClient.RESULTS, new JSONArray(result)); response.put(MetadataServiceClient.RESULTS, new JSONArray(result));
response.put(MetadataServiceClient.TOTAL_SIZE, result.size()); response.put(MetadataServiceClient.COUNT, result.size());
response.put(MetadataServiceClient.REQUEST_ID, Servlets.getRequestId()); response.put(MetadataServiceClient.REQUEST_ID, Servlets.getRequestId());
return Response.ok(response).build(); return Response.ok(response).build();
} catch(IllegalArgumentException ie) { } catch (IllegalArgumentException ie) {
LOG.error("Unsupported typeName while retrieving type list {}", type); LOG.error("Unsupported typeName while retrieving type list {}", type);
throw new WebApplicationException( throw new WebApplicationException(
Servlets.getErrorResponse("Unsupported type " + type, Response.Status.BAD_REQUEST)); Servlets.getErrorResponse("Unsupported type " + type, Response.Status.BAD_REQUEST));
......
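For illustration, a hedged sketch of the type-list endpoint described by the updated javadoc above, using the Jersey 1.x client from the integration tests. The base URL and the "api/metadata/types" path are assumptions not confirmed by this diff.

import com.sun.jersey.api.client.Client;
import com.sun.jersey.api.client.ClientResponse;
import com.sun.jersey.api.client.WebResource;
import javax.ws.rs.core.MediaType;

public class TypeListClientSketch {
    public static void main(String[] args) {
        WebResource resource = Client.create()
                .resource("http://localhost:21000/api/metadata/types"); // assumed path

        ClientResponse clientResponse = resource
                .queryParam("type", "TRAIT")          // one of all, TRAIT, CLASS, ENUM, STRUCT
                .accept(MediaType.APPLICATION_JSON)
                .get(ClientResponse.class);

        System.out.println(clientResponse.getStatus());               // 200 on success
        System.out.println(clientResponse.getEntity(String.class));   // results + count + requestId
    }
}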
...@@ -18,14 +18,22 @@ ...@@ -18,14 +18,22 @@
package org.apache.hadoop.metadata.web.util; package org.apache.hadoop.metadata.web.util;
import com.google.common.base.Preconditions;
import org.apache.commons.io.IOUtils; import org.apache.commons.io.IOUtils;
import org.apache.commons.lang.StringUtils; import org.apache.commons.lang3.StringEscapeUtils;
import org.apache.commons.lang3.StringUtils;
import org.apache.hadoop.metadata.MetadataServiceClient;
import org.apache.hadoop.metadata.ParamChecker;
import org.codehaus.jettison.json.JSONException;
import org.codehaus.jettison.json.JSONObject; import org.codehaus.jettison.json.JSONObject;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import javax.servlet.http.HttpServletRequest; import javax.servlet.http.HttpServletRequest;
import javax.ws.rs.core.MediaType; import javax.ws.rs.core.MediaType;
import javax.ws.rs.core.Response; import javax.ws.rs.core.Response;
import java.io.IOException; import java.io.IOException;
import java.io.PrintWriter;
import java.io.StringWriter; import java.io.StringWriter;
/** /**
...@@ -33,6 +41,9 @@ import java.io.StringWriter; ...@@ -33,6 +41,9 @@ import java.io.StringWriter;
*/ */
public final class Servlets { public final class Servlets {
public static final String QUOTE = "\"";
private static final Logger LOG = LoggerFactory.getLogger(Servlets.class);
private Servlets() { private Servlets() {
/* singleton */ /* singleton */
} }
...@@ -93,13 +104,34 @@ public final class Servlets { ...@@ -93,13 +104,34 @@ public final class Servlets {
} }
public static Response getErrorResponse(Throwable e, Response.Status status) { public static Response getErrorResponse(Throwable e, Response.Status status) {
return getErrorResponse(e.getMessage(), status); Response response = getErrorResponse(e.getMessage(), status);
JSONObject responseJson = (JSONObject) response.getEntity();
try {
responseJson.put(MetadataServiceClient.STACKTRACE, printStackTrace(e));
} catch (JSONException e1) {
LOG.warn("Could not construct error Json rensponse", e1);
}
return response;
}
private static String printStackTrace(Throwable t) {
StringWriter sw = new StringWriter();
t.printStackTrace(new PrintWriter(sw));
return sw.toString();
} }
public static Response getErrorResponse(String message, Response.Status status) { public static Response getErrorResponse(String message, Response.Status status) {
JSONObject errorJson = new JSONObject();
Object errorEntity = Servlets.escapeJsonString(message);
try {
errorJson.put(MetadataServiceClient.ERROR, errorEntity);
errorEntity = errorJson;
} catch (JSONException jsonE) {
LOG.warn("Could not construct error Json rensponse", jsonE);
}
return Response return Response
.status(status) .status(status)
.entity(JSONObject.quote(message)) .entity(errorEntity)
.type(MediaType.APPLICATION_JSON) .type(MediaType.APPLICATION_JSON)
.build(); .build();
} }
...@@ -113,4 +145,9 @@ public final class Servlets { ...@@ -113,4 +145,9 @@ public final class Servlets {
public static String getRequestId() { public static String getRequestId() {
return Thread.currentThread().getName(); return Thread.currentThread().getName();
} }
public static String escapeJsonString(String inputStr) {
ParamChecker.notNull(inputStr, "Input String cannot be null");
return StringEscapeUtils.escapeJson(inputStr);
}
} }
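With the Servlets changes above, error responses become a JSON object instead of a bare quoted string, carrying the escaped message and the server-side stack trace. A hedged sketch of what a caller might now parse; the key names are assumed to be the values behind MetadataServiceClient.ERROR and STACKTRACE, and the sample body is invented.

import org.codehaus.jettison.json.JSONObject;

public class ErrorEnvelopeSketch {
    public static void main(String[] args) throws Exception {
        // Previously the entity was just a JSON-quoted message string;
        // now it is an object with the escaped message plus the stack trace.
        String body = "{"
                + "\"error\": \"Unsupported type xyz\","
                + "\"stackTrace\": \"java.lang.IllegalArgumentException: ...\\n\\tat ...\""
                + "}";

        JSONObject error = new JSONObject(body);
        System.out.println(error.getString("error"));
        System.out.println(error.has("stackTrace"));
    }
}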
...@@ -29,7 +29,6 @@ metadata.graph.index.search.elasticsearch.local-mode=true ...@@ -29,7 +29,6 @@ metadata.graph.index.search.elasticsearch.local-mode=true
metadata.graph.index.search.elasticsearch.create.sleep=2000 metadata.graph.index.search.elasticsearch.create.sleep=2000
######### Hive Lineage Configs ######### ######### Hive Lineage Configs #########
# This models follows the quick-start guide
metadata.lineage.hive.table.type.name=hive_table metadata.lineage.hive.table.type.name=hive_table
metadata.lineage.hive.column.type.name=hive_column metadata.lineage.hive.column.type.name=hive_column
metadata.lineage.hive.table.column.name=columns metadata.lineage.hive.table.column.name=columns
......
...@@ -76,7 +76,7 @@ public abstract class BaseResourceIT { ...@@ -76,7 +76,7 @@ public abstract class BaseResourceIT {
.accept(MediaType.APPLICATION_JSON) .accept(MediaType.APPLICATION_JSON)
.type(MediaType.APPLICATION_JSON) .type(MediaType.APPLICATION_JSON)
.method(HttpMethod.POST, ClientResponse.class, typesAsJSON); .method(HttpMethod.POST, ClientResponse.class, typesAsJSON);
Assert.assertEquals(clientResponse.getStatus(), Response.Status.OK.getStatusCode()); Assert.assertEquals(clientResponse.getStatus(), Response.Status.CREATED.getStatusCode());
String responseAsString = clientResponse.getEntity(String.class); String responseAsString = clientResponse.getEntity(String.class);
Assert.assertNotNull(responseAsString); Assert.assertNotNull(responseAsString);
...@@ -93,7 +93,7 @@ public abstract class BaseResourceIT { ...@@ -93,7 +93,7 @@ public abstract class BaseResourceIT {
String entityJSON = InstanceSerialization.toJson(referenceable, true); String entityJSON = InstanceSerialization.toJson(referenceable, true);
System.out.println("Submitting new entity= " + entityJSON); System.out.println("Submitting new entity= " + entityJSON);
JSONObject jsonObject = serviceClient.createEntity(entityJSON); JSONObject jsonObject = serviceClient.createEntity(entityJSON);
String guid = jsonObject.getString(MetadataServiceClient.RESULTS); String guid = jsonObject.getString(MetadataServiceClient.GUID);
System.out.println("created instance for type " + typeName + ", guid: " + guid); System.out.println("created instance for type " + typeName + ", guid: " + guid);
// return the reference to created instance with guid // return the reference to created instance with guid
......
...@@ -21,6 +21,7 @@ package org.apache.hadoop.metadata.web.resources; ...@@ -21,6 +21,7 @@ package org.apache.hadoop.metadata.web.resources;
import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableList;
import com.sun.jersey.api.client.ClientResponse; import com.sun.jersey.api.client.ClientResponse;
import com.sun.jersey.api.client.WebResource; import com.sun.jersey.api.client.WebResource;
import org.apache.commons.lang.RandomStringUtils;
import org.apache.hadoop.metadata.MetadataServiceClient; import org.apache.hadoop.metadata.MetadataServiceClient;
import org.apache.hadoop.metadata.typesystem.Referenceable; import org.apache.hadoop.metadata.typesystem.Referenceable;
import org.apache.hadoop.metadata.typesystem.Struct; import org.apache.hadoop.metadata.typesystem.Struct;
...@@ -147,7 +148,7 @@ public class EntityJerseyResourceIT extends BaseResourceIT { ...@@ -147,7 +148,7 @@ public class EntityJerseyResourceIT extends BaseResourceIT {
JSONObject response = new JSONObject(responseAsString); JSONObject response = new JSONObject(responseAsString);
Assert.assertNotNull(response.get(MetadataServiceClient.REQUEST_ID)); Assert.assertNotNull(response.get(MetadataServiceClient.REQUEST_ID));
final String definition = response.getString(MetadataServiceClient.RESULTS); final String definition = response.getString(MetadataServiceClient.DEFINITION);
Assert.assertNotNull(definition); Assert.assertNotNull(definition);
LOG.debug("tableInstanceAfterGet = " + definition); LOG.debug("tableInstanceAfterGet = " + definition);
InstanceSerialization.fromJsonReferenceable(definition, true); InstanceSerialization.fromJsonReferenceable(definition, true);
...@@ -176,7 +177,7 @@ public class EntityJerseyResourceIT extends BaseResourceIT { ...@@ -176,7 +177,7 @@ public class EntityJerseyResourceIT extends BaseResourceIT {
private String getEntityDefinition(ClientResponse clientResponse) throws Exception { private String getEntityDefinition(ClientResponse clientResponse) throws Exception {
Assert.assertEquals(clientResponse.getStatus(), Response.Status.OK.getStatusCode()); Assert.assertEquals(clientResponse.getStatus(), Response.Status.OK.getStatusCode());
JSONObject response = new JSONObject(clientResponse.getEntity(String.class)); JSONObject response = new JSONObject(clientResponse.getEntity(String.class));
final String definition = response.getString(MetadataServiceClient.RESULTS); final String definition = response.getString(MetadataServiceClient.DEFINITION);
Assert.assertNotNull(definition); Assert.assertNotNull(definition);
return definition; return definition;
...@@ -196,6 +197,10 @@ public class EntityJerseyResourceIT extends BaseResourceIT { ...@@ -196,6 +197,10 @@ public class EntityJerseyResourceIT extends BaseResourceIT {
String responseAsString = clientResponse.getEntity(String.class); String responseAsString = clientResponse.getEntity(String.class);
Assert.assertNotNull(responseAsString); Assert.assertNotNull(responseAsString);
JSONObject response = new JSONObject(responseAsString);
Assert.assertNotNull(response.get(MetadataServiceClient.ERROR));
Assert.assertNotNull(response.get(MetadataServiceClient.STACKTRACE));
} }
@Test(dependsOnMethods = "testSubmitEntity") @Test(dependsOnMethods = "testSubmitEntity")
...@@ -227,11 +232,14 @@ public class EntityJerseyResourceIT extends BaseResourceIT { ...@@ -227,11 +232,14 @@ public class EntityJerseyResourceIT extends BaseResourceIT {
.accept(MediaType.APPLICATION_JSON) .accept(MediaType.APPLICATION_JSON)
.type(MediaType.APPLICATION_JSON) .type(MediaType.APPLICATION_JSON)
.method(HttpMethod.GET, ClientResponse.class); .method(HttpMethod.GET, ClientResponse.class);
Assert.assertEquals(clientResponse.getStatus(), Assert.assertEquals(clientResponse.getStatus(), Response.Status.BAD_REQUEST.getStatusCode());
Response.Status.BAD_REQUEST.getStatusCode());
String responseAsString = clientResponse.getEntity(String.class); String responseAsString = clientResponse.getEntity(String.class);
Assert.assertNotNull(responseAsString); Assert.assertNotNull(responseAsString);
JSONObject response = new JSONObject(responseAsString);
Assert.assertNotNull(response.get(MetadataServiceClient.ERROR));
Assert.assertNotNull(response.get(MetadataServiceClient.STACKTRACE));
} }
@Test @Test
...@@ -311,15 +319,15 @@ public class EntityJerseyResourceIT extends BaseResourceIT { ...@@ -311,15 +319,15 @@ public class EntityJerseyResourceIT extends BaseResourceIT {
.accept(MediaType.APPLICATION_JSON) .accept(MediaType.APPLICATION_JSON)
.type(MediaType.APPLICATION_JSON) .type(MediaType.APPLICATION_JSON)
.method(HttpMethod.POST, ClientResponse.class, traitInstanceAsJSON); .method(HttpMethod.POST, ClientResponse.class, traitInstanceAsJSON);
Assert.assertEquals(clientResponse.getStatus(), Response.Status.OK.getStatusCode()); Assert.assertEquals(clientResponse.getStatus(), Response.Status.CREATED.getStatusCode());
String responseAsString = clientResponse.getEntity(String.class); String responseAsString = clientResponse.getEntity(String.class);
Assert.assertNotNull(responseAsString); Assert.assertNotNull(responseAsString);
JSONObject response = new JSONObject(responseAsString); JSONObject response = new JSONObject(responseAsString);
Assert.assertNotNull(response.get(MetadataServiceClient.REQUEST_ID)); Assert.assertNotNull(response.get(MetadataServiceClient.REQUEST_ID));
Assert.assertNotNull(response.get("GUID")); Assert.assertNotNull(response.get(MetadataServiceClient.GUID));
Assert.assertNotNull(response.get("traitInstance")); Assert.assertNotNull(response.get(MetadataServiceClient.DEFINITION));
} }
@Test @Test
...@@ -341,8 +349,7 @@ public class EntityJerseyResourceIT extends BaseResourceIT { ...@@ -341,8 +349,7 @@ public class EntityJerseyResourceIT extends BaseResourceIT {
.accept(MediaType.APPLICATION_JSON) .accept(MediaType.APPLICATION_JSON)
.type(MediaType.APPLICATION_JSON) .type(MediaType.APPLICATION_JSON)
.method(HttpMethod.POST, ClientResponse.class, traitInstanceAsJSON); .method(HttpMethod.POST, ClientResponse.class, traitInstanceAsJSON);
Assert.assertEquals(clientResponse.getStatus(), Assert.assertEquals(clientResponse.getStatus(), Response.Status.BAD_REQUEST.getStatusCode());
Response.Status.BAD_REQUEST.getStatusCode());
} }
@Test (dependsOnMethods = "testAddTrait") @Test (dependsOnMethods = "testAddTrait")
...@@ -381,8 +388,43 @@ public class EntityJerseyResourceIT extends BaseResourceIT { ...@@ -381,8 +388,43 @@ public class EntityJerseyResourceIT extends BaseResourceIT {
.accept(MediaType.APPLICATION_JSON) .accept(MediaType.APPLICATION_JSON)
.type(MediaType.APPLICATION_JSON) .type(MediaType.APPLICATION_JSON)
.method(HttpMethod.DELETE, ClientResponse.class); .method(HttpMethod.DELETE, ClientResponse.class);
Assert.assertEquals(clientResponse.getStatus(), Assert.assertEquals(clientResponse.getStatus(), Response.Status.BAD_REQUEST.getStatusCode());
Response.Status.BAD_REQUEST.getStatusCode());
String responseAsString = clientResponse.getEntity(String.class);
Assert.assertNotNull(responseAsString);
JSONObject response = new JSONObject(responseAsString);
Assert.assertNotNull(response.get(MetadataServiceClient.ERROR));
Assert.assertEquals(response.getString(MetadataServiceClient.ERROR), "trait=" + traitName + " should be defined in type system before it can be deleted");
Assert.assertNotNull(response.get(MetadataServiceClient.STACKTRACE));
}
private String random() {
return RandomStringUtils.random(10);
}
@Test
public void testUTF8() throws Exception {
String classType = random();
String attrName = random();
String attrValue = random();
HierarchicalTypeDefinition<ClassType> classTypeDefinition =
TypesUtil.createClassTypeDef(classType, ImmutableList.<String>of(),
TypesUtil.createUniqueRequiredAttrDef(attrName, DataTypes.STRING_TYPE));
TypesDef typesDef = TypeUtils.getTypesDef(ImmutableList.<EnumTypeDefinition>of(),
ImmutableList.<StructTypeDefinition>of(), ImmutableList.<HierarchicalTypeDefinition<TraitType>>of(),
ImmutableList.of(classTypeDefinition));
createType(typesDef);
Referenceable instance = new Referenceable(classType);
instance.set(attrName, attrValue);
Id guid = createInstance(instance);
ClientResponse response = getEntityDefinition(guid._getId());
String definition = getEntityDefinition(response);
Referenceable getReferenceable = InstanceSerialization.fromJsonReferenceable(definition, true);
Assert.assertEquals(getReferenceable.get(attrName), attrValue);
} }
private void createHiveTypes() throws Exception { private void createHiveTypes() throws Exception {
......
...@@ -95,6 +95,40 @@ public class HiveLineageJerseyResourceIT extends BaseResourceIT { ...@@ -95,6 +95,40 @@ public class HiveLineageJerseyResourceIT extends BaseResourceIT {
} }
@Test @Test
public void testInputsGraph() throws Exception {
WebResource resource = service
.path(BASE_URI)
.path("sales_fact_monthly_mv")
.path("inputs")
.path("graph");
ClientResponse clientResponse = resource
.accept(MediaType.APPLICATION_JSON)
.type(MediaType.APPLICATION_JSON)
.method(HttpMethod.GET, ClientResponse.class);
Assert.assertEquals(clientResponse.getStatus(), Response.Status.OK.getStatusCode());
String responseAsString = clientResponse.getEntity(String.class);
Assert.assertNotNull(responseAsString);
System.out.println("inputs graph = " + responseAsString);
JSONObject response = new JSONObject(responseAsString);
Assert.assertNotNull(response.get(MetadataServiceClient.REQUEST_ID));
JSONObject results = response.getJSONObject(MetadataServiceClient.RESULTS);
Assert.assertNotNull(results);
JSONObject values = results.getJSONObject("values");
Assert.assertNotNull(values);
final JSONObject vertices = values.getJSONObject("vertices");
Assert.assertEquals(vertices.length(), 4);
final JSONObject edges = values.getJSONObject("edges");
Assert.assertEquals(edges.length(), 4);
}
@Test
public void testOutputs() throws Exception { public void testOutputs() throws Exception {
WebResource resource = service WebResource resource = service
.path(BASE_URI) .path(BASE_URI)
...@@ -126,6 +160,40 @@ public class HiveLineageJerseyResourceIT extends BaseResourceIT { ...@@ -126,6 +160,40 @@ public class HiveLineageJerseyResourceIT extends BaseResourceIT {
} }
@Test @Test
public void testOutputsGraph() throws Exception {
WebResource resource = service
.path(BASE_URI)
.path("sales_fact")
.path("outputs")
.path("graph");
ClientResponse clientResponse = resource
.accept(MediaType.APPLICATION_JSON)
.type(MediaType.APPLICATION_JSON)
.method(HttpMethod.GET, ClientResponse.class);
Assert.assertEquals(clientResponse.getStatus(), Response.Status.OK.getStatusCode());
String responseAsString = clientResponse.getEntity(String.class);
Assert.assertNotNull(responseAsString);
System.out.println("outputs graph= " + responseAsString);
JSONObject response = new JSONObject(responseAsString);
Assert.assertNotNull(response.get(MetadataServiceClient.REQUEST_ID));
JSONObject results = response.getJSONObject(MetadataServiceClient.RESULTS);
Assert.assertNotNull(results);
JSONObject values = results.getJSONObject("values");
Assert.assertNotNull(values);
final JSONObject vertices = values.getJSONObject("vertices");
Assert.assertEquals(vertices.length(), 3);
final JSONObject edges = values.getJSONObject("edges");
Assert.assertEquals(edges.length(), 4);
}
@Test
public void testSchema() throws Exception { public void testSchema() throws Exception {
WebResource resource = service WebResource resource = service
.path(BASE_URI) .path(BASE_URI)
......
...@@ -83,8 +83,12 @@ public class MetadataDiscoveryJerseyResourceIT extends BaseResourceIT { ...@@ -83,8 +83,12 @@ public class MetadataDiscoveryJerseyResourceIT extends BaseResourceIT {
JSONObject results = response.getJSONObject(MetadataServiceClient.RESULTS); JSONObject results = response.getJSONObject(MetadataServiceClient.RESULTS);
Assert.assertNotNull(results); Assert.assertNotNull(results);
JSONArray rows = results.getJSONArray("rows"); JSONArray rows = results.getJSONArray(MetadataServiceClient.ROWS);
Assert.assertEquals(rows.length(), 1); Assert.assertEquals(rows.length(), 1);
int numRows = response.getInt(MetadataServiceClient.COUNT);
Assert.assertEquals(numRows, 1);
} }
@Test @Test
...@@ -164,6 +168,9 @@ public class MetadataDiscoveryJerseyResourceIT extends BaseResourceIT { ...@@ -164,6 +168,9 @@ public class MetadataDiscoveryJerseyResourceIT extends BaseResourceIT {
Assert.assertNotNull(row.get("guid")); Assert.assertNotNull(row.get("guid"));
Assert.assertEquals(row.getString("typeName"), "dsl_test_type"); Assert.assertEquals(row.getString("typeName"), "dsl_test_type");
Assert.assertNotNull(row.get("score")); Assert.assertNotNull(row.get("score"));
int numRows = response.getInt(MetadataServiceClient.COUNT);
Assert.assertEquals(numRows, 1);
} }
private void createTypes() throws Exception { private void createTypes() throws Exception {
......
...@@ -22,6 +22,7 @@ import com.google.common.collect.ImmutableList; ...@@ -22,6 +22,7 @@ import com.google.common.collect.ImmutableList;
import com.sun.jersey.api.client.ClientResponse; import com.sun.jersey.api.client.ClientResponse;
import com.sun.jersey.api.client.WebResource; import com.sun.jersey.api.client.WebResource;
import org.apache.hadoop.metadata.MetadataServiceClient; import org.apache.hadoop.metadata.MetadataServiceClient;
import org.apache.hadoop.metadata.typesystem.TypesDef;
import org.apache.hadoop.metadata.typesystem.json.TypesSerialization; import org.apache.hadoop.metadata.typesystem.json.TypesSerialization;
import org.apache.hadoop.metadata.typesystem.json.TypesSerialization$; import org.apache.hadoop.metadata.typesystem.json.TypesSerialization$;
import org.apache.hadoop.metadata.typesystem.types.AttributeDefinition; import org.apache.hadoop.metadata.typesystem.types.AttributeDefinition;
...@@ -76,13 +77,13 @@ public class TypesJerseyResourceIT extends BaseResourceIT { ...@@ -76,13 +77,13 @@ public class TypesJerseyResourceIT extends BaseResourceIT {
.accept(MediaType.APPLICATION_JSON) .accept(MediaType.APPLICATION_JSON)
.type(MediaType.APPLICATION_JSON) .type(MediaType.APPLICATION_JSON)
.method(HttpMethod.POST, ClientResponse.class, typesAsJSON); .method(HttpMethod.POST, ClientResponse.class, typesAsJSON);
Assert.assertEquals(clientResponse.getStatus(), Response.Status.OK.getStatusCode()); Assert.assertEquals(clientResponse.getStatus(), Response.Status.CREATED.getStatusCode());
String responseAsString = clientResponse.getEntity(String.class); String responseAsString = clientResponse.getEntity(String.class);
Assert.assertNotNull(responseAsString); Assert.assertNotNull(responseAsString);
JSONObject response = new JSONObject(responseAsString); JSONObject response = new JSONObject(responseAsString);
Assert.assertNotNull(response.get("types")); Assert.assertNotNull(response.get(MetadataServiceClient.TYPES));
Assert.assertNotNull(response.get(MetadataServiceClient.REQUEST_ID)); Assert.assertNotNull(response.get(MetadataServiceClient.REQUEST_ID));
} }
} }
...@@ -104,10 +105,21 @@ public class TypesJerseyResourceIT extends BaseResourceIT { ...@@ -104,10 +105,21 @@ public class TypesJerseyResourceIT extends BaseResourceIT {
String responseAsString = clientResponse.getEntity(String.class); String responseAsString = clientResponse.getEntity(String.class);
Assert.assertNotNull(responseAsString); Assert.assertNotNull(responseAsString);
JSONObject response = new JSONObject(responseAsString); JSONObject response = new JSONObject(responseAsString);
Assert.assertNotNull(response.get("definition")); Assert.assertNotNull(response.get(MetadataServiceClient.DEFINITION));
Assert.assertNotNull(response.get(MetadataServiceClient.REQUEST_ID)); Assert.assertNotNull(response.get(MetadataServiceClient.REQUEST_ID));
String typesJson = response.getString(MetadataServiceClient.DEFINITION);
final TypesDef typesDef = TypesSerialization.fromJson(typesJson);
List<HierarchicalTypeDefinition<ClassType>> hierarchicalTypeDefinitions = typesDef.classTypesAsJavaList();
for(HierarchicalTypeDefinition<ClassType> classType : hierarchicalTypeDefinitions) {
for(AttributeDefinition attrDef : classType.attributeDefinitions) {
if("name".equals(attrDef.name)) {
Assert.assertEquals(attrDef.isIndexable, true);
Assert.assertEquals(attrDef.isUnique, true);
}
}
}
} }
} }
...@@ -196,14 +208,14 @@ public class TypesJerseyResourceIT extends BaseResourceIT { ...@@ -196,14 +208,14 @@ public class TypesJerseyResourceIT extends BaseResourceIT {
HierarchicalTypeDefinition<ClassType> databaseTypeDefinition = HierarchicalTypeDefinition<ClassType> databaseTypeDefinition =
TypesUtil.createClassTypeDef("database", TypesUtil.createClassTypeDef("database",
ImmutableList.<String>of(), ImmutableList.<String>of(),
TypesUtil.createRequiredAttrDef("name", DataTypes.STRING_TYPE), TypesUtil.createUniqueRequiredAttrDef("name", DataTypes.STRING_TYPE),
TypesUtil.createRequiredAttrDef("description", DataTypes.STRING_TYPE)); TypesUtil.createRequiredAttrDef("description", DataTypes.STRING_TYPE));
typeDefinitions.add(databaseTypeDefinition); typeDefinitions.add(databaseTypeDefinition);
HierarchicalTypeDefinition<ClassType> tableTypeDefinition = TypesUtil.createClassTypeDef( HierarchicalTypeDefinition<ClassType> tableTypeDefinition = TypesUtil.createClassTypeDef(
"table", "table",
ImmutableList.<String>of(), ImmutableList.<String>of(),
TypesUtil.createRequiredAttrDef("name", DataTypes.STRING_TYPE), TypesUtil.createUniqueRequiredAttrDef("name", DataTypes.STRING_TYPE),
TypesUtil.createRequiredAttrDef("description", DataTypes.STRING_TYPE), TypesUtil.createRequiredAttrDef("description", DataTypes.STRING_TYPE),
TypesUtil.createRequiredAttrDef("type", DataTypes.STRING_TYPE), TypesUtil.createRequiredAttrDef("type", DataTypes.STRING_TYPE),
new AttributeDefinition("database", new AttributeDefinition("database",
......