Commit 9cea4f64 by dileep bhimineni

merge

merged conflicts

Merge branch 'master' into HDPDGI-34-1

Conflicts:
	dashboard/v2/public/css/common.css
	dashboard/v2/public/modules/details/views/attribute.html
	dashboard/v2/public/modules/details/views/details.html
	dashboard/v2/public/modules/details/views/schema.html
	dashboard/v2/public/modules/home/views/header.html
	dashboard/v2/public/modules/lineage/lineageController.js
	dashboard/v2/public/modules/navigation/navigationController.js
	dashboard/v2/public/modules/navigation/navigationModule.js
	dashboard/v2/public/modules/search/searchController.js
	dashboard/v2/public/modules/search/searchRoutes.js
	dashboard/v2/public/modules/search/views/search.html
	dashboard/v2/public/modules/search/views/searchResult.html
	dashboard/v2/public/views/includes/foot.html
	dashboard/v2/public/views/includes/head.html
parents dd61e708 cbf662db
......@@ -33,32 +33,13 @@
<packaging>jar</packaging>
<properties>
<hive.version>1.1.0</hive.version>
<hive.version>1.2.0</hive.version>
<calcite.version>0.9.2-incubating</calcite.version>
<hadoop.version>2.6.0</hadoop.version>
</properties>
<dependencies>
<dependency>
<groupId>org.apache.hadoop.metadata</groupId>
<artifactId>metadata-client</artifactId>
<version>${version}</version>
<exclusions>
<exclusion>
<groupId>com.google.guava</groupId>
<artifactId>guava</artifactId>
</exclusion>
</exclusions>
<scope>runtime</scope>
<type>test-jar</type>
</dependency>
<dependency>
<groupId>org.apache.hadoop.metadata</groupId>
<artifactId>metadata-typesystem</artifactId>
</dependency>
<dependency>
<groupId>org.apache.hadoop</groupId>
<artifactId>hadoop-minikdc</artifactId>
<version>${hadoop.version}</version>
......@@ -99,6 +80,25 @@
</dependency>
<dependency>
<groupId>org.apache.hadoop.metadata</groupId>
<artifactId>metadata-client</artifactId>
<version>${version}</version>
<exclusions>
<exclusion>
<groupId>com.google.guava</groupId>
<artifactId>guava</artifactId>
</exclusion>
</exclusions>
<scope>runtime</scope>
<type>test-jar</type>
</dependency>
<dependency>
<groupId>org.apache.hadoop.metadata</groupId>
<artifactId>metadata-typesystem</artifactId>
</dependency>
<dependency>
<groupId>org.apache.hadoop</groupId>
<artifactId>hadoop-client</artifactId>
<version>${hadoop.version}</version>
......
......@@ -280,6 +280,8 @@ public class HiveDataModelGenerator {
AttributeDefinition[] attributeDefinitions = new AttributeDefinition[]{
new AttributeDefinition("name", DataTypes.STRING_TYPE.getName(),
Multiplicity.REQUIRED, false, null),
new AttributeDefinition("clusterName", DataTypes.STRING_TYPE.getName(),
Multiplicity.REQUIRED, false, null),
new AttributeDefinition("description", DataTypes.STRING_TYPE.getName(),
Multiplicity.OPTIONAL, false, null),
new AttributeDefinition("locationUri", DataTypes.STRING_TYPE.getName(),
......@@ -322,8 +324,6 @@ public class HiveDataModelGenerator {
AttributeDefinition[] attributeDefinitions = new AttributeDefinition[]{
new AttributeDefinition("name", DataTypes.STRING_TYPE.getName(),
Multiplicity.REQUIRED, false, null),
//new AttributeDefinition("type", DefinedTypes.HIVE_TYPE.getName(), Multiplicity
// .REQUIRED, false, null),
new AttributeDefinition("type", DataTypes.STRING_TYPE.getName(),
Multiplicity.REQUIRED, false, null),
new AttributeDefinition("comment", DataTypes.STRING_TYPE.getName(),
......@@ -452,8 +452,9 @@ public class HiveDataModelGenerator {
Multiplicity.REQUIRED, false, null),
new AttributeDefinition("functionType", HiveDataTypes.HIVE_FUNCTION_TYPE.getName(),
Multiplicity.REQUIRED, false, null),
new AttributeDefinition("resourceUris", HiveDataTypes.HIVE_RESOURCEURI.getName(),
Multiplicity.COLLECTION, false, null),
new AttributeDefinition("resourceUris",
DataTypes.arrayTypeName(HiveDataTypes.HIVE_RESOURCEURI.getName()), Multiplicity.OPTIONAL, false,
null),
};
HierarchicalTypeDefinition<ClassType> definition = new HierarchicalTypeDefinition<>(
......
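The resourceUris attribute now refers to an array of resource URIs rather than a single one. Assuming DataTypes.arrayTypeName wraps the element type name as array<...>, the effective attribute type name is:
<verbatim>
DataTypes.arrayTypeName(HiveDataTypes.HIVE_RESOURCEURI.getName())  // e.g. "array<hive_resourceuri>"
</verbatim>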
......@@ -29,6 +29,10 @@ hive conf directory:
<name>hive.hook.dgi.url</name>
<value>http://localhost:21000/</value>
</property>
<property>
<name>hive.cluster.name</name>
<value>primary</value>
</property>
</verbatim>
Usage: <dgi package>/bin/import-hive.sh. The logs are in <dgi package>/logs/import-hive.log
......@@ -44,12 +48,16 @@ The hook submits the request to a thread pool executor to avoid blocking the com
<value>org.apache.hadoop.metadata.hive.hook.HiveHook</value>
</property>
</verbatim>
* Add the following property in hive-site.xml with the DGI endpoint for your set-up
* Add the following properties in hive-site.xml with the DGI endpoint for your set-up
<verbatim>
<property>
<name>hive.hook.dgi.url</name>
<value>http://localhost:21000/</value>
</property>
<property>
<name>hive.cluster.name</name>
<value>primary</value>
</property>
</verbatim>
* Add 'export HIVE_AUX_JARS_PATH=<dgi package>/hook/hive' in hive-env.sh
......
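For illustration, a hook can read the new property from HiveConf, assuming HiveMetaStoreBridge.HIVE_CLUSTER_NAME names the "hive.cluster.name" key (as exercised in the tests below) and falling back to the documented default:
<verbatim>
// Sketch: resolve the cluster name the hook stamps on registered entities.
String clusterName = hiveConf.get(HiveMetaStoreBridge.HIVE_CLUSTER_NAME, "primary");
</verbatim>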
......@@ -24,7 +24,6 @@ import org.apache.hadoop.hive.ql.Driver;
import org.apache.hadoop.hive.ql.session.SessionState;
import org.apache.hadoop.metadata.MetadataServiceClient;
import org.apache.hadoop.metadata.hive.bridge.HiveMetaStoreBridge;
import org.apache.hadoop.metadata.hive.model.HiveDataModelGenerator;
import org.apache.hadoop.metadata.hive.model.HiveDataTypes;
import org.codehaus.jettison.json.JSONArray;
import org.codehaus.jettison.json.JSONObject;
......@@ -32,8 +31,11 @@ import org.testng.Assert;
import org.testng.annotations.BeforeClass;
import org.testng.annotations.Test;
import java.io.File;
public class HiveHookIT {
private static final String DGI_URL = "http://localhost:21000/";
private static final String CLUSTER_NAME = "test";
private Driver driver;
private MetadataServiceClient dgiCLient;
private SessionState ss;
......@@ -59,6 +61,10 @@ public class HiveHookIT {
hiveConf.set(HiveMetaStoreBridge.DGI_URL_PROPERTY, DGI_URL);
hiveConf.set("javax.jdo.option.ConnectionURL", "jdbc:derby:./target/metastore_db;create=true");
hiveConf.set("hive.hook.dgi.synchronous", "true");
hiveConf.set(HiveMetaStoreBridge.HIVE_CLUSTER_NAME, CLUSTER_NAME);
hiveConf.setBoolVar(HiveConf.ConfVars.HIVETESTMODE, true); //to not use hdfs
hiveConf.setVar(HiveConf.ConfVars.HIVETESTMODEPREFIX, "");
hiveConf.set("fs.pfile.impl", "org.apache.hadoop.fs.ProxyLocalFileSystem");
return hiveConf;
}
......@@ -69,7 +75,7 @@ public class HiveHookIT {
@Test
public void testCreateDatabase() throws Exception {
String dbName = "db" + RandomStringUtils.randomAlphanumeric(5).toLowerCase();
String dbName = "db" + random();
runCommand("create database " + dbName);
assertDatabaseIsRegistered(dbName);
......@@ -77,16 +83,16 @@ public class HiveHookIT {
@Test
public void testCreateTable() throws Exception {
String dbName = "db" + RandomStringUtils.randomAlphanumeric(5).toLowerCase();
String dbName = "db" + random();
runCommand("create database " + dbName);
String tableName = "table" + RandomStringUtils.randomAlphanumeric(5).toLowerCase();
String tableName = "table" + random();
runCommand("create table " + dbName + "." + tableName + "(id int, name string)");
assertTableIsRegistered(tableName);
assertTableIsRegistered(dbName, tableName);
tableName = "table" + RandomStringUtils.randomAlphanumeric(5).toLowerCase();
runCommand("create table " + tableName + "(id int, name string)");
assertTableIsRegistered(tableName);
tableName = "table" + random();
runCommand("create table " + tableName + "(id int, name string) partitioned by(dt string)");
assertTableIsRegistered("default", tableName);
//Create table where database doesn't exist, will create database instance as well
assertDatabaseIsRegistered("default");
......@@ -94,27 +100,137 @@ public class HiveHookIT {
@Test
public void testCTAS() throws Exception {
String tableName = "table" + RandomStringUtils.randomAlphanumeric(5).toLowerCase();
String tableName = "table" + random();
runCommand("create table " + tableName + "(id int, name string)");
String ctasTableName = "table" + random();
String query = "create table " + ctasTableName + " as select * from " + tableName;
runCommand(query);
assertTableIsRegistered("default", ctasTableName);
assertProcessIsRegistered(query);
}
@Test
public void testCreateView() throws Exception {
String tableName = "table" + random();
runCommand("create table " + tableName + "(id int, name string)");
String viewName = "table" + random();
String query = "create view " + viewName + " as select * from " + tableName;
runCommand(query);
assertTableIsRegistered("default", viewName);
assertProcessIsRegistered(query);
}
@Test
public void testLoadData() throws Exception {
String tableName = "table" + random();
runCommand("create table " + tableName + "(id int, name string)");
String loadFile = file("load");
String query = "load data local inpath 'file://" + loadFile + "' into table " + tableName;
runCommand(query);
assertProcessIsRegistered(query);
}
@Test
public void testInsert() throws Exception {
String tableName = "table" + random();
runCommand("create table " + tableName + "(id int, name string) partitioned by(dt string)");
String insertTableName = "table" + random();
runCommand("create table " + insertTableName + "(name string) partitioned by(dt string)");
String query = "insert into " + insertTableName + " partition(dt = '2015-01-01') select name from "
+ tableName + " where dt = '2015-01-01'";
runCommand(query);
assertProcessIsRegistered(query);
assertPartitionIsRegistered("default", insertTableName, "2015-01-01");
}
private String random() {
return RandomStringUtils.randomAlphanumeric(5).toLowerCase();
}
private String file(String tag) throws Exception {
String filename = "./target/" + tag + "-data-" + random();
File file = new File(filename);
file.createNewFile();
return file.getAbsolutePath();
}
private String mkdir(String tag) throws Exception {
String filename = "./target/" + tag + "-data-" + random();
File file = new File(filename);
file.mkdirs();
return file.getAbsolutePath();
}
@Test
public void testExportImport() throws Exception {
String tableName = "table" + random();
runCommand("create table " + tableName + "(name string)");
String filename = "pfile://" + mkdir("export");
String query = "export table " + tableName + " to '" + filename + "'";
runCommand(query);
assertProcessIsRegistered(query);
tableName = "table" + random();
runCommand("create table " + tableName + "(name string)");
query = "import table " + tableName + " from '" + filename + "'";
runCommand(query);
assertProcessIsRegistered(query);
}
@Test
public void testSelect() throws Exception {
String tableName = "table" + random();
runCommand("create table " + tableName + "(id int, name string)");
String newTableName = "table" + RandomStringUtils.randomAlphanumeric(5).toLowerCase();
String query = "create table " + newTableName + " as select * from " + tableName;
String query = "select * from " + tableName;
runCommand(query);
assertProcessIsRegistered(query);
}
assertTableIsRegistered(newTableName);
assertInstanceIsRegistered(HiveDataTypes.HIVE_PROCESS.getName(), "queryText", query);
private void assertProcessIsRegistered(String queryStr) throws Exception {
String dslQuery = String.format("%s where queryText = \"%s\"", HiveDataTypes.HIVE_PROCESS.getName(), queryStr);
assertEntityIsRegistered(dslQuery);
}
private void assertTableIsRegistered(String tableName) throws Exception {
assertInstanceIsRegistered(HiveDataTypes.HIVE_TABLE.getName(), "name", tableName);
private void assertTableIsRegistered(String dbName, String tableName) throws Exception {
String query = String.format("%s where name = '%s', dbName where name = '%s' and clusterName = '%s'",
HiveDataTypes.HIVE_TABLE.getName(), tableName, dbName, CLUSTER_NAME);
assertEntityIsRegistered(query);
}
private void assertDatabaseIsRegistered(String dbName) throws Exception {
assertInstanceIsRegistered(HiveDataTypes.HIVE_DB.getName(), "name", dbName);
String query = String.format("%s where name = '%s' and clusterName = '%s'", HiveDataTypes.HIVE_DB.getName(),
dbName, CLUSTER_NAME);
assertEntityIsRegistered(query);
}
private void assertPartitionIsRegistered(String dbName, String tableName, String value) throws Exception {
String typeName = HiveDataTypes.HIVE_PARTITION.getName();
String dbType = HiveDataTypes.HIVE_DB.getName();
String tableType = HiveDataTypes.HIVE_TABLE.getName();
String gremlinQuery = String.format("g.V.has('__typeName', '%s').has('%s.values', ['%s']).as('p')."
+ "out('__%s.tableName').has('%s.name', '%s').out('__%s.dbName').has('%s.name', '%s')"
+ ".has('%s.clusterName', '%s').back('p').toList()", typeName, typeName, value, typeName,
tableType, tableName, tableType, dbType, dbName, dbType, CLUSTER_NAME);
JSONObject response = dgiCLient.searchByGremlin(gremlinQuery);
JSONArray results = response.getJSONArray(MetadataServiceClient.RESULTS);
Assert.assertEquals(results.length(), 1);
}
private void assertInstanceIsRegistered(String typeName, String colName, String colValue) throws Exception{
JSONArray results = dgiCLient.rawSearch(typeName, colName, colValue);
private void assertEntityIsRegistered(String dslQuery) throws Exception{
JSONArray results = dgiCLient.searchByDSL(dslQuery);
Assert.assertEquals(results.length(), 1);
}
}
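For illustration, the assertions above now search via DSL rather than raw attribute search. With CLUSTER_NAME = "test", the helpers generate queries along these lines (assuming the hive type names resolve to hive_db and hive_table; the random suffixes are made up):
<verbatim>
hive_db where name = 'db4k2pa' and clusterName = 'test'
hive_table where name = 'table9x1fx', dbName where name = 'default' and clusterName = 'test'
</verbatim>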
......@@ -205,40 +205,6 @@ public class SSLAndKerberosHiveHookIT extends BaseSSLAndKerberosTest {
assertDatabaseIsRegistered(dbName);
}
@Test
public void testCreateTable() throws Exception {
String dbName = "db" + RandomStringUtils.randomAlphanumeric(5).toLowerCase();
runCommand("create database " + dbName);
String tableName = "table" + RandomStringUtils.randomAlphanumeric(5).toLowerCase();
runCommand("create table " + dbName + "." + tableName + "(id int, name string)");
assertTableIsRegistered(tableName);
tableName = "table" + RandomStringUtils.randomAlphanumeric(5).toLowerCase();
runCommand("create table " + tableName + "(id int, name string)");
assertTableIsRegistered(tableName);
//Create table where database doesn't exist, will create database instance as well
assertDatabaseIsRegistered("default");
}
@Test
public void testCTAS() throws Exception {
String tableName = "table" + RandomStringUtils.randomAlphanumeric(5).toLowerCase();
runCommand("create table " + tableName + "(id int, name string)");
String newTableName = "table" + RandomStringUtils.randomAlphanumeric(5).toLowerCase();
String query = "create table " + newTableName + " as select * from " + tableName;
runCommand(query);
assertTableIsRegistered(newTableName);
assertInstanceIsRegistered(HiveDataTypes.HIVE_PROCESS.getName(), "queryText", query);
}
private void assertTableIsRegistered(String tableName) throws Exception {
assertInstanceIsRegistered(HiveDataTypes.HIVE_TABLE.getName(), "name", tableName);
}
private void assertDatabaseIsRegistered(String dbName) throws Exception {
assertInstanceIsRegistered(HiveDataTypes.HIVE_DB.getName(), "name", dbName);
}
......
......@@ -208,40 +208,6 @@ public class SSLHiveHookIT {
assertDatabaseIsRegistered(dbName);
}
@Test
public void testCreateTable() throws Exception {
String dbName = "db" + RandomStringUtils.randomAlphanumeric(5).toLowerCase();
runCommand("create database " + dbName);
String tableName = "table" + RandomStringUtils.randomAlphanumeric(5).toLowerCase();
runCommand("create table " + dbName + "." + tableName + "(id int, name string)");
assertTableIsRegistered(tableName);
tableName = "table" + RandomStringUtils.randomAlphanumeric(5).toLowerCase();
runCommand("create table " + tableName + "(id int, name string)");
assertTableIsRegistered(tableName);
//Create table where database doesn't exist, will create database instance as well
assertDatabaseIsRegistered("default");
}
@Test
public void testCTAS() throws Exception {
String tableName = "table" + RandomStringUtils.randomAlphanumeric(5).toLowerCase();
runCommand("create table " + tableName + "(id int, name string)");
String newTableName = "table" + RandomStringUtils.randomAlphanumeric(5).toLowerCase();
String query = "create table " + newTableName + " as select * from " + tableName;
runCommand(query);
assertTableIsRegistered(newTableName);
assertInstanceIsRegistered(HiveDataTypes.HIVE_PROCESS.getName(), "queryText", query);
}
private void assertTableIsRegistered(String tableName) throws Exception {
assertInstanceIsRegistered(HiveDataTypes.HIVE_TABLE.getName(), "name", tableName);
}
private void assertDatabaseIsRegistered(String dbName) throws Exception {
assertInstanceIsRegistered(HiveDataTypes.HIVE_DB.getName(), "name", dbName);
}
......
......@@ -25,10 +25,8 @@ import com.sun.jersey.api.client.config.DefaultClientConfig;
import com.sun.jersey.client.urlconnection.URLConnectionClientHandler;
import org.apache.commons.configuration.PropertiesConfiguration;
import org.apache.hadoop.metadata.security.SecureClientUtils;
import org.apache.hadoop.metadata.typesystem.ITypedReferenceableInstance;
import org.apache.hadoop.metadata.typesystem.Referenceable;
import org.apache.hadoop.metadata.typesystem.json.InstanceSerialization;
import org.apache.hadoop.metadata.typesystem.json.Serialization;
import org.codehaus.jettison.json.JSONArray;
import org.codehaus.jettison.json.JSONException;
import org.codehaus.jettison.json.JSONObject;
......@@ -36,6 +34,7 @@ import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import javax.ws.rs.HttpMethod;
import javax.ws.rs.POST;
import javax.ws.rs.core.MediaType;
import javax.ws.rs.core.Response;
import javax.ws.rs.core.UriBuilder;
......@@ -49,14 +48,28 @@ import static org.apache.hadoop.metadata.security.SecurityProperties.TLS_ENABLED
*/
public class MetadataServiceClient {
private static final Logger LOG = LoggerFactory.getLogger(MetadataServiceClient.class);
public static final String NAME = "name";
public static final String GUID = "GUID";
public static final String TYPENAME = "typeName";
public static final String DEFINITION = "definition";
public static final String ERROR = "error";
public static final String STACKTRACE = "stackTrace";
public static final String REQUEST_ID = "requestId";
public static final String RESULTS = "results";
public static final String TOTAL_SIZE = "totalSize";
private static final String BASE_URI = "api/metadata/";
private static final String URI_TYPES = "types";
private static final String URI_ENTITIES = "entities";
private static final String URI_TRAITS = "traits";
private static final String URI_SEARCH = "discovery/search";
public static final String COUNT = "count";
public static final String ROWS = "rows";
public static final String BASE_URI = "api/metadata/";
public static final String TYPES = "types";
public static final String URI_ENTITIES = "entities";
public static final String URI_TRAITS = "traits";
public static final String URI_SEARCH = "discovery/search";
public static final String QUERY = "query";
public static final String QUERY_TYPE = "queryType";
private WebResource service;
......@@ -73,6 +86,7 @@ public class MetadataServiceClient {
} catch (Exception e) {
LOG.info("Error processing client configuration.", e);
}
URLConnectionClientHandler handler = SecureClientUtils.getClientConnectionHandler(config, clientConfig);
Client client = new Client(handler, config);
......@@ -86,17 +100,18 @@ public class MetadataServiceClient {
}
static enum API {
//Type operations
CREATE_TYPE(BASE_URI + URI_TYPES, HttpMethod.POST),
GET_TYPE(BASE_URI + URI_TYPES, HttpMethod.GET),
LIST_TYPES(BASE_URI + URI_TYPES, HttpMethod.GET),
LIST_TRAIT_TYPES(BASE_URI + URI_TYPES + "?type=trait", HttpMethod.GET),
CREATE_TYPE(BASE_URI + TYPES, HttpMethod.POST),
GET_TYPE(BASE_URI + TYPES, HttpMethod.GET),
LIST_TYPES(BASE_URI + TYPES, HttpMethod.GET),
LIST_TRAIT_TYPES(BASE_URI + TYPES + "?type=trait", HttpMethod.GET),
//Entity operations
CREATE_ENTITY(BASE_URI + URI_ENTITIES, HttpMethod.POST),
GET_ENTITY(BASE_URI + URI_ENTITIES, HttpMethod.GET),
UPDATE_ENTITY(BASE_URI + URI_ENTITIES, HttpMethod.PUT),
LIST_ENTITY(BASE_URI + URI_ENTITIES + "?type=", HttpMethod.GET),
LIST_ENTITY(BASE_URI + URI_ENTITIES, HttpMethod.GET),
//Trait operations
ADD_TRAITS(BASE_URI + URI_TRAITS, HttpMethod.POST),
......@@ -145,7 +160,7 @@ public class MetadataServiceClient {
WebResource resource = getResource(API.GET_TYPE, typeName);
try {
JSONObject response = callAPIWithResource(API.GET_TYPE, resource);
return response.getString("definition");
return response.getString(DEFINITION);
} catch (MetadataServiceException e) {
if (e.getStatus() == ClientResponse.Status.NOT_FOUND) {
return null;
......@@ -185,7 +200,7 @@ public class MetadataServiceClient {
public Referenceable getEntity(String guid) throws MetadataServiceException {
JSONObject jsonResponse = callAPI(API.GET_ENTITY, null, guid);
try {
String entityInstanceDefinition = jsonResponse.getString(MetadataServiceClient.RESULTS);
String entityInstanceDefinition = jsonResponse.getString(MetadataServiceClient.DEFINITION);
return InstanceSerialization.fromJsonReferenceable(entityInstanceDefinition, true);
} catch (JSONException e) {
throw new MetadataServiceException(e);
......@@ -194,7 +209,7 @@ public class MetadataServiceClient {
public JSONObject searchEntity(String searchQuery) throws MetadataServiceException {
WebResource resource = getResource(API.SEARCH);
resource = resource.queryParam("query", searchQuery);
resource = resource.queryParam(QUERY, searchQuery);
return callAPIWithResource(API.SEARCH, resource);
}
......@@ -224,10 +239,10 @@ public class MetadataServiceClient {
*/
public JSONArray searchByDSL(String query) throws MetadataServiceException {
WebResource resource = getResource(API.SEARCH_DSL);
resource = resource.queryParam("query", query);
resource = resource.queryParam(QUERY, query);
JSONObject result = callAPIWithResource(API.SEARCH_DSL, resource);
try {
return result.getJSONObject("results").getJSONArray("rows");
return result.getJSONObject(RESULTS).getJSONArray(ROWS);
} catch (JSONException e) {
throw new MetadataServiceException(e);
}
......@@ -241,7 +256,7 @@ public class MetadataServiceClient {
*/
public JSONObject searchByGremlin(String gremlinQuery) throws MetadataServiceException {
WebResource resource = getResource(API.SEARCH_GREMLIN);
resource = resource.queryParam("query", gremlinQuery);
resource = resource.queryParam(QUERY, gremlinQuery);
return callAPIWithResource(API.SEARCH_GREMLIN, resource);
}
......@@ -253,7 +268,7 @@ public class MetadataServiceClient {
*/
public JSONObject searchByFullText(String query) throws MetadataServiceException {
WebResource resource = getResource(API.SEARCH_FULL_TEXT);
resource = resource.queryParam("query", query);
resource = resource.queryParam(QUERY, query);
return callAPIWithResource(API.SEARCH_FULL_TEXT, resource);
}
......@@ -286,7 +301,9 @@ public class MetadataServiceClient {
.type(MediaType.APPLICATION_JSON)
.method(api.getMethod(), ClientResponse.class, requestObject);
if (clientResponse.getStatus() == Response.Status.OK.getStatusCode()) {
Response.Status expectedStatus = (api.getMethod() == HttpMethod.POST)
? Response.Status.CREATED : Response.Status.OK;
if (clientResponse.getStatus() == expectedStatus.getStatusCode()) {
String responseAsString = clientResponse.getEntity(String.class);
try {
return new JSONObject(responseAsString);
......
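A minimal usage sketch of the reworked client; the endpoint and DSL query are illustrative, and the single-argument constructor taking the server base URL is assumed from the test setup:
<verbatim>
import org.apache.hadoop.metadata.MetadataServiceClient;
import org.codehaus.jettison.json.JSONArray;

public class ClientSketch {
    public static void main(String[] args) throws Exception {
        // Assumed: constructor taking the base URL, matching DGI_URL usage in the hive hook tests.
        MetadataServiceClient client = new MetadataServiceClient("http://localhost:21000/");

        // searchByDSL unwraps results.rows from the JSON response, per the method above.
        JSONArray rows = client.searchByDSL("hive_db where name = 'default'");
        System.out.println("matches = " + rows.length());
    }
}
</verbatim>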
......@@ -18,13 +18,11 @@
package org.apache.hadoop.metadata;
import org.apache.commons.configuration.ConfigurationException;
import org.apache.commons.configuration.PropertiesConfiguration;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.io.File;
import java.net.MalformedURLException;
import java.net.URL;
public class PropertiesUtil {
......@@ -33,11 +31,11 @@ public class PropertiesUtil {
private static final String APPLICATION_PROPERTIES = "application.properties";
public static final String CLIENT_PROPERTIES = "client.properties";
public static final PropertiesConfiguration getApplicationProperties() throws MetadataException {
public static PropertiesConfiguration getApplicationProperties() throws MetadataException {
return getPropertiesConfiguration(APPLICATION_PROPERTIES);
}
public static final PropertiesConfiguration getClientProperties() throws MetadataException {
public static PropertiesConfiguration getClientProperties() throws MetadataException {
return getPropertiesConfiguration(CLIENT_PROPERTIES);
}
......
......@@ -31,5 +31,6 @@ public interface SecurityProperties {
public static final String SERVER_CERT_PASSWORD_KEY = "password";
public static final String CLIENT_AUTH_KEY = "client.auth.enabled";
public static final String CERT_STORES_CREDENTIAL_PROVIDER_PATH = "cert.stores.credential.provider.path";
String SSL_CLIENT_PROPERTIES = "ssl-client.xml";
public static final String SSL_CLIENT_PROPERTIES = "ssl-client.xml";
public static final String BIND_ADDRESS = "metadata.server.bind.address";
}
......@@ -124,3 +124,6 @@ footer.navbar-bottom img {
{
height: 800px;
}
.pagination {
float: right;
}
\ No newline at end of file
......@@ -30,4 +30,23 @@ angular.module('dgc.details').factory('DetailsResource', ['$resource', function(
responseType: 'json'
}
});
//$scope.getSchema= function (tableName) {
//
// $http.get('/api/metadata/lineage/hive/table/'+tableName +'/schema')
// .success(function (data) {
// $scope.iserror1=false;
// $scope.schema= angular.fromJson(data.results.rows);
// // console.log(tableName);
//
//
// })
// .error(function () {
// // alert("Sorry No response");
//
//
//
// });
//}
}]);
......@@ -22,10 +22,9 @@
<!--{{value}}-->
<!--</div>-->
<div class="row" data-ng-repeat="(key1,value1) in value" ng-if="value1">
<div class="row" data-ng-repeat="(key1,value1) in value" ng-if="value1">
<div class="col-md-6" data-ng-if="!isString(value1)" data-ng-repeat="(key2,value2) in value1 track by $index"></div>
<div data-ng-if="isString(value2)" data-ng-repeat="(key3,value3) in value2"> {{key3}}: {{value3}}</div>
<div class="col-md-6" data-ng-if="isString(value1)"> {{key1}}: {{value1 | date:'medium'}}</div>
<div class="col-md-6" data-ng-if="isString(value1)"> {{key1}} : {{value1 | date:'medium'}}</div>
</div>
</div>
\ No newline at end of file
<div data-ng-include="'/modules/lineage/views/lineage.html'"></div>
\ No newline at end of file
......@@ -17,5 +17,9 @@
-->
<div data-ng-controller="LineageController">
<svg></svg>
<svg class="lineage-viz"><g></svg>
</div>
......@@ -21,7 +21,8 @@
angular.module('dgc.navigation').controller('NavigationController', ['$scope', 'NavigationResource',
function($scope, NavigationResource) {
$scope.leftnav = NavigationResource.get();
$scope.leftnav= NavigationResource.get();
$scope.updateVar = function(event) {
$scope.$$prevSibling.query = angular.element(event.target).text();
......
......@@ -17,5 +17,4 @@
*/
'use strict';
angular.module('dgc.navigation', []);
angular.module('dgc.navigation',[]);
......@@ -21,53 +21,85 @@
angular.module('dgc.search').controller('SearchController', ['$scope', '$location', '$http', '$state', '$stateParams', 'SearchResource', 'NotificationService',
function($scope, $location, $http, $state, $stateParams, SearchResource, NotificationService) {
$scope.types = ['table', 'column', 'db', 'view', 'loadprocess', 'storagedesc'];
$scope.types = ['table','column','db','view','loadprocess','storagedesc'];
$scope.results = [];
$scope.resultCount = 0;
$scope.resultCount=0;
$scope.isCollapsed = true;
$scope.currentPage = 1;
$scope.itemsPerPage = 10;
$scope.totalItems = 40;
$scope.filteredResults = [];
$scope.resultRows = [];
$scope.setPage = function (pageNo) {
$scope.currentPage = pageNo;
};
$scope.search = function(query) {
$scope.results = [];
NotificationService.reset();
$scope.limit = 4;
SearchResource.search({
query: query
}, function searchSuccess(response) {
SearchResource.search({query:query}, function searchSuccess(response) {
$scope.resultCount=response.count;
$scope.results = response.results;
$scope.resultCount = response.count;
$scope.resultRows = $scope.results.rows;
$scope.totalItems = $scope.resultCount;
$scope.$watch('currentPage + itemsPerPage', function() {
var begin = (($scope.currentPage - 1) * $scope.itemsPerPage),
end = begin + $scope.itemsPerPage;
$scope.searchTypesAvailable = $scope.typeAvailable();
if ($scope.searchTypesAvailable) {
$scope.searchMessage = 'loading results...';
$scope.filteredResults = $scope.resultRows.slice(begin, end);
$scope.pageCount = function () {
return Math.ceil($scope.resultCount / $scope.itemsPerPage);
};
if ($scope.results.rows)
$scope.searchMessage = $scope.results.rows.length +' results matching your search query '+ $scope.query +' were found';
else
$scope.searchMessage = '0 results matching your search query '+ $scope.query +' were found';
if ($scope.results.length < 1) {
NotificationService.error('No Result found', false);
}
$state.go('search.results', {
query: query
}, {
} else {
$scope.searchMessage = '0 results matching your search query '+ $scope.query +' were found';
}
});
$state.go('search.results', {query:query}, {
location: 'replace'
});
}, function searchError(err) {
NotificationService.error('Error occurred during executing search query, error status code = ' + err.status + ', status text = ' + err.statusText, false);
});
};
$scope.typeAvailable = function() {
return $scope.types.indexOf(this.results.dataType.typeName && this.results.dataType.typeName.toLowerCase()) > -1;
};
/* $scope.$watch("currentPage + numPerPage", function() {
var begin = (($scope.currentPage - 1) * $scope.numPerPage);
var end = begin + $scope.numPerPage;
$scope.filteredResults = $scope.rows.slice(begin, end);
});*/
if($scope.results.dataType) {
return $scope.types.indexOf($scope.results.dataType.typeName && $scope.results.dataType.typeName.toLowerCase()) > -1;
}
};
$scope.doToggle = function($event,el) {
this.isCollapsed = !el;
};
$scope.filterSearchResults = function(items) {
var res = {};
var count = 0;
angular.forEach(items, function(value, key) {
if ((typeof value !== 'object'))
if(typeof value !== 'object') {
res[key] = value;
count++;
}
});
$scope.keyLength = count;
return res;
};
$scope.query = $stateParams.query;
$scope.searchQuery = $location.search();
$scope.query=($location.search()).query;
if ($scope.query) {
$scope.searchMessage = 'searching...';
$scope.search($scope.query);
}
}
......
......@@ -21,7 +21,6 @@
//Setting up route
angular.module('dgc.search').config(['$stateProvider',
function($stateProvider) {
// states for my app
$stateProvider.state('search', {
url: '/search',
......@@ -31,6 +30,10 @@ angular.module('dgc.search').config(['$stateProvider',
url: '/:query',
templateUrl: '/modules/search/views/searchResult.html',
controller: 'SearchController'
}).state('search.results', {
url: '?query',
templateUrl: '/modules/search/views/searchResult.html',
controller:'SearchController'
});
}
]);
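With the added state, the query rides along as a URL search parameter, e.g. (illustrative):
<verbatim>
/search?query=sales_fact
</verbatim>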
......@@ -21,7 +21,7 @@
<form name="form" novalidate class="container">
<div class="col-lg-7 col-lg-offset-3">
<div class="row input-group">
<input type="text" class="form-control" placeholder="Search" data-ng-model="query" data-typeahead="type for type in types" required/>
<input type="text" class="form-control" placeholder="Search" data-ng-model="query" required/>
<span class="input-group-btn">
<button class="btn btn-success" type="submit" data-ng-disabled="form.$invalid" data-ng-click="search(query)">
<i class="glyphicon glyphicon-search white "></i>
......@@ -39,7 +39,7 @@
<div data-ng-include="'/modules/navigation/views/navigation.html'"></div>
<div class="col-lg-9" data-ui-view=""></div>
<div class="col-lg-9" data-ui-view="" style="min-height: 1350px;"></div>
</div>
</div>
</div>
\ No newline at end of file
......@@ -15,28 +15,33 @@
~ See the License for the specific language governing permissions and
~ limitations under the License.
-->
<!--
<h4 ng-show='results.rows.length > 0'>{{results.rows.length}} results matching your search query "{{query}}" were found</h4>
<h4 ng-show='results.rows.length == "0"'>searching .....</h4>
<h4 ng-show='!(results.rows)'>0 results matching your search query "{{query}}" were found</h4>-->
<h4 ng-show="searchMessage">{{searchMessage}}</h4>
<h4>{{resultCount}} results matching your search query "{{query}}" were found</h4>
<ul class="list-unstyled">
<li ng-repeat="result in results.rows" class="searchresults">
<div data-ng-if="typeAvailable()">
<li ng-repeat="result in filteredResults" class="searchresults">
<h4><a data-ui-sref="details({id:result['$id$'].id})">{{result.name}}</a></h4>
<p>{{result.description}}</p>
<span ng-repeat="(key, value) in filterSearchResults(result)" >
<span ng-show="$index < 4"><b>{{key}}: </b>{{value}}{{$index+1 === limit ? '' : ', '}}</span>
</span>
<div collapse="isCollapsed">
<span class="well well-lg"><span ng-repeat="(key, value) in filterSearchResults(result)" >
<span ng-repeat="(key, value) in filterSearchResults(result)" >
<span ng-show="$index > 4"><b>{{key}}: </b>{{value}}{{$last ? '' : ', '}}</span>
</span></span>
</span>
</div>
<a href="javascript: void(0);" class="show-more" ng-click="isCollapsed = !isCollapsed">..show more</a>
<!-- <a href="javascript: void(0);" bn-slide-show class="show-more" ng-click="doToggle(!isCollapsed)">..show more</a>-->
<a href ng-show="isCollapsed && (keyLength > 4)" ng-click="doToggle($event,isCollapsed)">..show more</a>
<a href ng-show="!isCollapsed" ng-click="doToggle($event,isCollapsed)">..show less</a>
<h5>Tags : <a ng-repeat="(key, value) in result['$traits$']" data-ui-sref="search.results({query: key})">{{key}}</a> </h5>
</div>
<div data-ng-if="!typeAvailable()" data-ng-include="'/modules/search/views/types/guid.html'"></div>
<div data-ng-if="!searchTypesAvailable" data-ng-include="'/modules/search/views/types/guid.html'"></div>
</li>
</ul>
<div ng-show='results.rows.length > 0'>
<pagination total-items="totalItems" items-per-page="itemsPerPage" ng-model="currentPage" ng-change="pageChanged()"></pagination>
<p>
</div>
---+ Metadata Discovery
---++ Introduction
---++ Details
---+ Search
Atlas exposes search over the metadata in two ways:
* Search using DSL
* Full-text search
---++ Search DSL Grammar
The DSL exposes an SQL like query language for searching the metadata based on the type system.
The grammar for the DSL is below.
<verbatim>
queryWithPath: query ~ opt(WITHPATH)
query: rep1sep(singleQuery, opt(COMMA))
singleQuery: singleQrySrc ~ opt(loopExpression) ~ opt(selectClause)
singleQrySrc = FROM ~ fromSrc ~ opt(WHERE) ~ opt(expr ^? notIdExpression) |
WHERE ~ (expr ^? notIdExpression) |
expr ^? notIdExpression |
fromSrc ~ opt(WHERE) ~ opt(expr ^? notIdExpression)
fromSrc: identifier ~ AS ~ alias | identifier
loopExpression: LOOP ~ (LPAREN ~> query <~ RPAREN) ~ opt(intConstant <~ TIMES) ~ opt(AS ~> alias)
selectClause: SELECT ~ rep1sep(selectExpression, COMMA)
selectExpression: expr ~ opt(AS ~> alias)
expr: compE ~ opt(rep(exprRight))
exprRight: (AND | OR) ~ compE
compE:
arithE ~ (LT | LTE | EQ | NEQ | GT | GTE) ~ arithE |
arithE ~ (ISA | IS) ~ ident |
arithE ~ HAS ~ ident |
arithE
arithE: multiE ~ opt(rep(arithERight))
arithERight: (PLUS | MINUS) ~ multiE
multiE: atomE ~ opt(rep(multiERight))
multiERight: (STAR | DIV) ~ atomE
atomE: literal | identifier | LPAREN ~> expr <~ RPAREN
identifier: rep1sep(ident, DOT)
alias: ident | stringLit
literal: booleanConstant |
intConstant |
longConstant |
floatConstant |
doubleConstant |
stringLit
</verbatim>
Grammar language:
{noformat}
opt(a) => a is optional
~ => a combinator. 'a ~ b' means a followed by b
rep => zero or more
rep1sep => one or more, separated by second arg.
{noformat}
Language Notes:
* A *SingleQuery* expression can be used to search for entities of a _Trait_ or _Class_.
Entities can be filtered based on a 'Where Clause' and Entity Attributes can be retrieved based on a 'Select Clause'.
* An Entity Graph can be traversed/joined by combining one or more SingleQueries.
* An attempt is made to make the expressions look SQL-like by accepting the keywords "SELECT",
"FROM", and "WHERE"; but these are optional, and users can simply think in terms of Entity Graph Traversals.
* The transitive closure of an Entity relationship can be expressed via the _Loop_ expression. A
_Loop_ expression can be any traversal (recursively a query) that represents a _Path_ that ends in an Entity of the same _Type_ as the starting Entity.
* The _WithPath_ clause can be used with transitive closure queries to retrieve the Path that
connects the two related Entities. (We also provide a higher-level interface for Closure Queries; see the scaladoc for 'org.apache.metadata.query.ClosureQuery'.)
* There are a couple of predicate functions different from SQL:
* _is_ or _isa_ can be used to filter Entities that have a particular Trait.
* _has_ can be used to filter Entities that have a value for a particular Attribute.
---+++ DSL Examples
* from DB
* DB where name="Reporting" select name, owner
* DB has name
* DB is JdbcAccess
* Column where Column isa PII
* Table where name="sales_fact", columns
* Table where name="sales_fact", columns as column select column.name, column.dataType, column.comment
---++ Full-text Search
Atlas also exposes a Lucene-style full-text search capability.
\ No newline at end of file
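For example, both search flavors can be driven through MetadataServiceClient (illustrative queries; client construction as sketched earlier):
<verbatim>
// DSL search
client.searchByDSL("DB where name=\"Reporting\" select name, owner");
// Full-text search over the indexed entity text
client.searchByFullText("sales");
</verbatim>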
......@@ -44,7 +44,9 @@ allows integration with the whole enterprise data ecosystem.
* [[Architecture][High Level Architecture]]
* [[TypeSystem][Type System]]
* [[Repository][Metadata Repository]]
* [[Discovery][Search]]
* [[Search][Search]]
* [[security][security]]
* [[Configuration][Configuration]]
---++ API Documentation
......
......@@ -78,7 +78,7 @@
<slf4j.version>1.7.7</slf4j.version>
<jetty.version>6.1.26</jetty.version>
<jersey.version>1.9</jersey.version>
<tinkerpop.version>2.5.0</tinkerpop.version>
<tinkerpop.version>2.6.0</tinkerpop.version>
<titan.version>0.5.4</titan.version>
<hadoop.version>2.6.0</hadoop.version>
......@@ -101,6 +101,9 @@
<StagingId>apache-staging</StagingId>
<StagingName>Apache Release Distribution Repository</StagingName>
<StagingUrl>https://repository.apache.org/content/groups/staging</StagingUrl>
<!-- skips checkstyle and find bugs -->
<skipCheck>false</skipCheck>
</properties>
<profiles>
......@@ -611,6 +614,12 @@
<version>1.8.5</version>
<scope>provided</scope>
</dependency>
<dependency>
<groupId>org.apache.commons</groupId>
<artifactId>commons-lang3</artifactId>
<version>3.4</version>
</dependency>
</dependencies>
</dependencyManagement>
......@@ -971,6 +980,7 @@
</goals>
<phase>verify</phase>
<configuration>
<skip>${skipCheck}</skip>
<consoleOutput>true</consoleOutput>
<includeTestSourceDirectory>true</includeTestSourceDirectory>
<configLocation>src/build/checkstyle.xml</configLocation>
......@@ -988,6 +998,7 @@
<!--debug>true</debug -->
<xmlOutput>true</xmlOutput>
<failOnError>false</failOnError>
<skip>${skipCheck}</skip>
</configuration>
<executions>
<execution>
......
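The new skipCheck property gates both checkstyle and findbugs, so a local build can bypass them, for example:
<verbatim>
mvn clean install -DskipCheck=true
</verbatim>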
......@@ -22,6 +22,7 @@ import com.thinkaurelius.titan.core.TitanVertex;
import org.apache.hadoop.metadata.MetadataException;
import org.apache.hadoop.metadata.query.Expressions;
import org.apache.hadoop.metadata.query.GraphPersistenceStrategies;
import org.apache.hadoop.metadata.query.GraphPersistenceStrategies$class;
import org.apache.hadoop.metadata.query.TypeUtils;
import org.apache.hadoop.metadata.repository.MetadataRepository;
import org.apache.hadoop.metadata.repository.Constants;
......@@ -71,7 +72,11 @@ public class DefaultGraphPersistenceStrategy implements GraphPersistenceStrategi
@Override
public String fieldNameInVertex(IDataType<?> dataType, AttributeInfo aInfo) {
try {
return metadataRepository.getFieldNameInVertex(dataType, aInfo);
} catch (MetadataException e) {
throw new RuntimeException(e);
}
}
@Override
......@@ -164,33 +169,12 @@ public class DefaultGraphPersistenceStrategy implements GraphPersistenceStrategi
@Override
public String gremlinCompOp(Expressions.ComparisonExpression op) {
switch (op.symbol()) {
case "=":
return "T.eq";
case "!=":
return "T.neq";
case ">":
return "T.gt";
case ">=":
return "T.gte";
case "<":
return "T.lt";
case "<=":
return "T.lte";
default:
throw new RuntimeException(("Comparison operator not supported in Gremlin: " + op));
}
return GraphPersistenceStrategies$class.gremlinCompOp(this, op);
}
@Override
public String loopObjectExpression(IDataType<?> dataType) {
return "{it.object." + typeAttributeName() + " == '" + dataType.getName() + "'}";
return GraphPersistenceStrategies$class.loopObjectExpression(this, dataType);
}
@Override
......@@ -202,4 +186,9 @@ public class DefaultGraphPersistenceStrategy implements GraphPersistenceStrategi
@Override
public String idAttributeName() { return metadataRepository.getIdAttributeName(); }
@Override
public String typeTestExpression(String typeName) {
return GraphPersistenceStrategies$class.typeTestExpression(this, typeName);
}
}
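The delegation works because scalac compiles the GraphPersistenceStrategies trait into an interface plus a GraphPersistenceStrategies$class holder of static method bodies, so a Java implementor can reuse the trait's default logic instead of duplicating it (sketch, mirroring the override above):
<verbatim>
@Override
public String gremlinCompOp(Expressions.ComparisonExpression op) {
    // Delegate to the Scala trait's static implementation class.
    return GraphPersistenceStrategies$class.gremlinCompOp(this, op);
}
</verbatim>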
......@@ -23,6 +23,9 @@ import com.thinkaurelius.titan.core.TitanIndexQuery;
import com.thinkaurelius.titan.core.TitanProperty;
import com.thinkaurelius.titan.core.TitanVertex;
import com.tinkerpop.blueprints.Vertex;
import com.tinkerpop.gremlin.groovy.Gremlin;
import com.tinkerpop.gremlin.java.GremlinPipeline;
import org.apache.hadoop.metadata.MetadataServiceClient;
import org.apache.hadoop.metadata.discovery.DiscoveryException;
import org.apache.hadoop.metadata.discovery.DiscoveryService;
import org.apache.hadoop.metadata.query.Expressions;
......@@ -66,6 +69,8 @@ public class GraphBackedDiscoveryService implements DiscoveryService {
private final TitanGraph titanGraph;
private final DefaultGraphPersistenceStrategy graphPersistenceStrategy;
public final static String SCORE = "score";
@Inject
GraphBackedDiscoveryService(GraphProvider<TitanGraph> graphProvider,
MetadataRepository metadataRepository) throws DiscoveryException {
......@@ -93,8 +98,8 @@ public class GraphBackedDiscoveryService implements DiscoveryService {
if (guid != null) { //Filter non-class entities
try {
row.put("guid", guid);
row.put("typeName", vertex.<String>getProperty(Constants.ENTITY_TYPE_PROPERTY_KEY));
row.put("score", result.getScore());
row.put(MetadataServiceClient.TYPENAME, vertex.<String>getProperty(Constants.ENTITY_TYPE_PROPERTY_KEY));
row.put(SCORE, result.getScore());
} catch (JSONException e) {
LOG.error("Unable to create response", e);
throw new DiscoveryException("Unable to create response");
......
......@@ -23,41 +23,44 @@ public final class Constants {
/**
* Globally Unique identifier property key.
*/
public static final String GUID_PROPERTY_KEY = "guid";
public static final String INTERNAL_PROPERTY_KEY_PREFIX = "__";
public static final String GUID_PROPERTY_KEY = INTERNAL_PROPERTY_KEY_PREFIX + "guid";
public static final String GUID_INDEX = "guid_index";
/**
* Entity type name property key.
*/
public static final String ENTITY_TYPE_PROPERTY_KEY = "typeName";
public static final String ENTITY_TYPE_PROPERTY_KEY = INTERNAL_PROPERTY_KEY_PREFIX + "typeName";
public static final String ENTITY_TYPE_INDEX = "type_index";
/**
* Entity type's super types property key.
*/
public static final String SUPER_TYPES_PROPERTY_KEY = "superTypeNames";
public static final String SUPER_TYPES_PROPERTY_KEY = INTERNAL_PROPERTY_KEY_PREFIX + "superTypeNames";
public static final String SUPER_TYPES_INDEX = "super_types_index";
/**
* Full-text for the entity for enabling full-text search.
*/
//weird issue in TitanDB if __ is added to this property key. Not adding it for now
public static final String ENTITY_TEXT_PROPERTY_KEY = "entityText";
/**
* Properties for type store graph
*/
public static final String TYPE_CATEGORY_PROPERTY_KEY = "type.category";
public static final String VERTEX_TYPE_PROPERTY_KEY = "type";
public static final String TYPENAME_PROPERTY_KEY = "type.name";
public static final String TYPE_CATEGORY_PROPERTY_KEY = INTERNAL_PROPERTY_KEY_PREFIX + "type.category";
public static final String VERTEX_TYPE_PROPERTY_KEY = INTERNAL_PROPERTY_KEY_PREFIX + "type";
public static final String TYPENAME_PROPERTY_KEY = INTERNAL_PROPERTY_KEY_PREFIX + "type.name";
/**
* Trait names property key and index name.
*/
public static final String TRAIT_NAMES_PROPERTY_KEY = "traitNames";
public static final String TRAIT_NAMES_PROPERTY_KEY = INTERNAL_PROPERTY_KEY_PREFIX + "traitNames";
public static final String TRAIT_NAMES_INDEX = "trait_names_index";
public static final String VERSION_PROPERTY_KEY = "version";
public static final String TIMESTAMP_PROPERTY_KEY = "timestamp";
public static final String VERSION_PROPERTY_KEY = INTERNAL_PROPERTY_KEY_PREFIX + "version";
public static final String TIMESTAMP_PROPERTY_KEY = INTERNAL_PROPERTY_KEY_PREFIX + "timestamp";
/**
* search backing index name.
......
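With the prefix in place, the store-internal property keys become (derived from the constants above):
<verbatim>
Constants.GUID_PROPERTY_KEY         // "__guid"
Constants.ENTITY_TYPE_PROPERTY_KEY  // "__typeName"
Constants.SUPER_TYPES_PROPERTY_KEY  // "__superTypeNames"
Constants.TRAIT_NAMES_PROPERTY_KEY  // "__traitNames"
</verbatim>
This is why the Gremlin assertion in the hive hook test matches on '__typeName'.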
......@@ -18,6 +18,7 @@
package org.apache.hadoop.metadata.repository;
import org.apache.hadoop.metadata.MetadataException;
import org.apache.hadoop.metadata.typesystem.IReferenceableInstance;
import org.apache.hadoop.metadata.typesystem.ITypedReferenceableInstance;
import org.apache.hadoop.metadata.typesystem.ITypedStruct;
......@@ -61,7 +62,7 @@ public interface MetadataRepository {
* @param aInfo attribute info
* @return property key used to store a given attribute
*/
String getFieldNameInVertex(IDataType<?> dataType, AttributeInfo aInfo);
String getFieldNameInVertex(IDataType<?> dataType, AttributeInfo aInfo) throws MetadataException;
/**
* Return the edge label for a given attribute in the repository.
......
......@@ -45,6 +45,7 @@ import org.slf4j.LoggerFactory;
import javax.inject.Inject;
import java.math.BigDecimal;
import java.math.BigInteger;
import java.util.Date;
import java.util.Map;
/**
......@@ -199,7 +200,7 @@ public class GraphBackedSearchIndexer implements SearchIndexer {
break;
case ENUM:
createVertexMixedIndex(propertyName, Integer.class);
createVertexMixedIndex(propertyName, String.class);
break;
case ARRAY:
......@@ -249,8 +250,11 @@ public class GraphBackedSearchIndexer implements SearchIndexer {
return Double.class;
} else if (dataType == DataTypes.BIGDECIMAL_TYPE) {
return BigDecimal.class;
} else if (dataType == DataTypes.DATE_TYPE) {
return Date.class;
}
throw new IllegalArgumentException("unknown data type " + dataType);
}
......
......@@ -53,8 +53,8 @@ import java.util.Iterator;
import java.util.List;
public class GraphBackedTypeStore implements ITypeStore {
public static final String VERTEX_TYPE = "typeSystem";
private static final String PROPERTY_PREFIX = "type.";
public static final String VERTEX_TYPE = Constants.INTERNAL_PROPERTY_KEY_PREFIX + "typeSystem";
private static final String PROPERTY_PREFIX = Constants.INTERNAL_PROPERTY_KEY_PREFIX + "type.";
public static final String SUPERTYPE_EDGE_LABEL = PROPERTY_PREFIX + ".supertype";
public static final String SUBTYPE_EDGE_LABEL = PROPERTY_PREFIX + ".subtype";
......@@ -188,7 +188,7 @@ public class GraphBackedTypeStore implements ITypeStore {
break;
default:
throw new IllegalArgumentException("Unhandled type category " + attrDataType.getTypeCategory());
throw new IllegalArgumentException("Attribute cannot reference instances of type : " + attrDataType.getTypeCategory());
}
for (IDataType attrType : attrDataTypes) {
......
......@@ -20,8 +20,9 @@ package org.apache.hadoop.metadata.services;
import com.google.common.base.Preconditions;
import com.google.common.collect.ImmutableList;
import com.google.inject.Injector;
import org.apache.hadoop.metadata.MetadataException;
import org.apache.hadoop.metadata.MetadataServiceClient;
import org.apache.hadoop.metadata.classification.InterfaceAudience;
import org.apache.hadoop.metadata.discovery.SearchIndexer;
import org.apache.hadoop.metadata.listener.EntityChangeListener;
import org.apache.hadoop.metadata.listener.TypesChangeListener;
......@@ -33,14 +34,20 @@ import org.apache.hadoop.metadata.typesystem.Referenceable;
import org.apache.hadoop.metadata.typesystem.Struct;
import org.apache.hadoop.metadata.typesystem.TypesDef;
import org.apache.hadoop.metadata.typesystem.json.InstanceSerialization;
import org.apache.hadoop.metadata.typesystem.json.Serialization$;
import org.apache.hadoop.metadata.typesystem.json.TypesSerialization;
import org.apache.hadoop.metadata.typesystem.types.*;
import org.apache.hadoop.metadata.typesystem.types.AttributeDefinition;
import org.apache.hadoop.metadata.typesystem.types.ClassType;
import org.apache.hadoop.metadata.typesystem.types.DataTypes;
import org.apache.hadoop.metadata.typesystem.types.HierarchicalTypeDefinition;
import org.apache.hadoop.metadata.typesystem.types.IDataType;
import org.apache.hadoop.metadata.typesystem.types.Multiplicity;
import org.apache.hadoop.metadata.typesystem.types.TraitType;
import org.apache.hadoop.metadata.typesystem.types.TypeSystem;
import org.apache.hadoop.metadata.typesystem.types.utils.TypesUtil;
import org.codehaus.jettison.json.JSONException;
import org.codehaus.jettison.json.JSONObject;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import scala.tools.cmd.Meta;
import javax.inject.Inject;
import javax.inject.Singleton;
......@@ -83,12 +90,38 @@ public class DefaultMetadataService implements MetadataService {
try {
TypesDef typesDef = typeStore.restore();
typeSystem.defineTypes(typesDef);
createSuperTypes();
} catch (MetadataException e) {
throw new RuntimeException(e);
}
LOG.info("Restored type system from the store");
}
private static final AttributeDefinition NAME_ATTRIBUTE =
TypesUtil.createRequiredAttrDef("name", DataTypes.STRING_TYPE);
private static final AttributeDefinition DESCRIPTION_ATTRIBUTE =
TypesUtil.createRequiredAttrDef("description", DataTypes.STRING_TYPE);
private static final String[] SUPER_TYPES = {
"DataSet",
"Process",
"Infrastructure",
};
@InterfaceAudience.Private
public void createSuperTypes() throws MetadataException {
if (typeSystem.isRegistered(SUPER_TYPES[0])) {
return; // this is already registered
}
for (String superTypeName : SUPER_TYPES) {
HierarchicalTypeDefinition<ClassType> superTypeDefinition =
TypesUtil.createClassTypeDef(superTypeName,
ImmutableList.<String>of(),
NAME_ATTRIBUTE, DESCRIPTION_ATTRIBUTE);
typeSystem.defineClassType(superTypeDefinition);
}
}
/**
* Creates a new type based on the type system to enable adding
......@@ -107,19 +140,15 @@ public class DefaultMetadataService implements MetadataService {
if(typesDef.isEmpty())
throw new MetadataException("Invalid type definition");
Map<String, IDataType> typesAdded = typeSystem.defineTypes(typesDef);
final Map<String, IDataType> typesAdded = typeSystem.defineTypes(typesDef);
//TODO how do we handle transaction - store failure??
typeStore.store(typeSystem, ImmutableList.copyOf(typesAdded.keySet()));
onTypesAddedToRepo(typesAdded);
JSONObject response = new JSONObject();
for (Map.Entry<String, IDataType> entry : typesAdded.entrySet()) {
response.put(entry.getKey(), entry.getValue().getName());
}
return response;
return new JSONObject() {{
put(MetadataServiceClient.TYPES, typesAdded.keySet());
}};
} catch (JSONException e) {
LOG.error("Unable to create response for types={}", typeDefinition, e);
throw new MetadataException("Unable to create response");
......
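With MetadataServiceClient.TYPES bound to "types", the createType response now carries just the added type names, e.g. (illustrative):
<verbatim>
{"types":["DataSet","Process","Infrastructure"]}
</verbatim>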
......@@ -112,7 +112,18 @@ trait GraphPersistenceStrategies {
}
def loopObjectExpression(dataType: IDataType[_]) = {
s"{it.object.'${typeAttributeName}' == '${dataType.getName}'}"
_typeTestExpression(dataType.getName, "it.object")
}
def typeTestExpression(typeName : String) :String = {
_typeTestExpression(typeName, "it")
}
private def _typeTestExpression(typeName: String, itRef: String): String = {
s"""{(${itRef}.'${typeAttributeName}' == '${typeName}') |
|(${itRef}.'${superTypeAttributeName}' ?
|${itRef}.'${superTypeAttributeName}'.contains('${typeName}') : false)}""".
stripMargin.replace(System.getProperty("line.separator"), "")
}
}
......
......@@ -184,8 +184,10 @@ class GremlinTranslator(expr: Expression,
}
private def genQuery(expr: Expression, inSelect: Boolean): String = expr match {
case ClassExpression(clsName) => s"""has("${gPersistenceBehavior.typeAttributeName}","$clsName")"""
case TraitExpression(clsName) => s"""has("${gPersistenceBehavior.typeAttributeName}","$clsName")"""
case ClassExpression(clsName) =>
s"""filter${gPersistenceBehavior.typeTestExpression(clsName)}"""
case TraitExpression(clsName) =>
s"""filter${gPersistenceBehavior.typeTestExpression(clsName)}"""
case fe@FieldExpression(fieldName, fInfo, child) if fe.dataType.getTypeCategory == TypeCategory.PRIMITIVE => {
val fN = "\"" + gPersistenceBehavior.fieldNameInVertex(fInfo.dataType, fInfo.attrInfo) + "\""
child match {
......
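Net effect on the generated pipeline (illustrative, for a class named Person):
<verbatim>
// before: g.V.has("typeName", "Person")
// after:  g.V.filter{(it.'__typeName' == 'Person') | (it.'__superTypeNames' ? it.'__superTypeNames'.contains('Person') : false)}
</verbatim>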
......@@ -29,7 +29,6 @@ import org.apache.hadoop.metadata.discovery.graph.GraphBackedDiscoveryService;
import org.apache.hadoop.metadata.query.HiveTitanSample;
import org.apache.hadoop.metadata.query.QueryTestsUtils;
import org.apache.hadoop.metadata.repository.graph.GraphBackedMetadataRepository;
import org.apache.hadoop.metadata.repository.graph.GraphBackedSearchIndexer;
import org.apache.hadoop.metadata.repository.graph.GraphHelper;
import org.apache.hadoop.metadata.repository.graph.GraphProvider;
import org.apache.hadoop.metadata.typesystem.ITypedReferenceableInstance;
......
......@@ -57,6 +57,11 @@ import java.util.List;
@Guice(modules = RepositoryMetadataModule.class)
public class HiveLineageServiceTest {
static {
// this would override super types creation if not first thing
TypeSystem.getInstance().reset();
}
@Inject
private DefaultMetadataService metadataService;
......@@ -71,8 +76,6 @@ public class HiveLineageServiceTest {
@BeforeClass
public void setUp() throws Exception {
TypeSystem.getInstance().reset();
setUpTypes();
setupInstances();
......@@ -166,6 +169,23 @@ public class HiveLineageServiceTest {
}
@Test
public void testGetInputsGraph() throws Exception {
JSONObject results = new JSONObject(
hiveLineageService.getInputsGraph("sales_fact_monthly_mv"));
Assert.assertNotNull(results);
System.out.println("inputs graph = " + results);
JSONObject values = results.getJSONObject("values");
Assert.assertNotNull(values);
final JSONObject vertices = values.getJSONObject("vertices");
Assert.assertEquals(vertices.length(), 4);
final JSONObject edges = values.getJSONObject("edges");
Assert.assertEquals(edges.length(), 4);
}
@Test
public void testGetOutputs() throws Exception {
JSONObject results = new JSONObject(hiveLineageService.getOutputs("sales_fact"));
Assert.assertNotNull(results);
......@@ -179,6 +199,22 @@ public class HiveLineageServiceTest {
Assert.assertTrue(paths.length() > 0);
}
@Test
public void testGetOutputsGraph() throws Exception {
JSONObject results = new JSONObject(hiveLineageService.getOutputsGraph("sales_fact"));
Assert.assertNotNull(results);
System.out.println("outputs graph = " + results);
JSONObject values = results.getJSONObject("values");
Assert.assertNotNull(values);
final JSONObject vertices = values.getJSONObject("vertices");
Assert.assertEquals(vertices.length(), 3);
final JSONObject edges = values.getJSONObject("edges");
Assert.assertEquals(edges.length(), 4);
}
@DataProvider(name = "tableNamesProvider")
private Object[][] tableNames() {
return new String[][] {
......@@ -247,9 +283,7 @@ public class HiveLineageServiceTest {
);
HierarchicalTypeDefinition<ClassType> tblClsDef =
TypesUtil.createClassTypeDef(HIVE_TABLE_TYPE, null,
attrDef("name", DataTypes.STRING_TYPE),
attrDef("description", DataTypes.STRING_TYPE),
TypesUtil.createClassTypeDef(HIVE_TABLE_TYPE, ImmutableList.of("DataSet"),
attrDef("owner", DataTypes.STRING_TYPE),
attrDef("createTime", DataTypes.INT_TYPE),
attrDef("lastAccessTime", DataTypes.INT_TYPE),
......@@ -265,8 +299,7 @@ public class HiveLineageServiceTest {
);
HierarchicalTypeDefinition<ClassType> loadProcessClsDef =
TypesUtil.createClassTypeDef(HIVE_PROCESS_TYPE, null,
attrDef("name", DataTypes.STRING_TYPE),
TypesUtil.createClassTypeDef(HIVE_PROCESS_TYPE, ImmutableList.of("Process"),
attrDef("userName", DataTypes.STRING_TYPE),
attrDef("startTime", DataTypes.INT_TYPE),
attrDef("endTime", DataTypes.INT_TYPE),
......@@ -368,7 +401,7 @@ public class HiveLineageServiceTest {
"sales fact daily materialized view",
reportingDB, sd, "Joe BI", "Managed", salesFactColumns, "Metric");
loadProcess("loadSalesDaily", "John ETL",
loadProcess("loadSalesDaily", "hive query for daily summary", "John ETL",
ImmutableList.of(salesFact, timeDim), ImmutableList.of(salesFactDaily),
"create table as select ", "plan", "id", "graph",
"ETL");
......@@ -401,7 +434,7 @@ public class HiveLineageServiceTest {
"sales fact monthly materialized view",
reportingDB, sd, "Jane BI", "Managed", salesFactColumns, "Metric");
loadProcess("loadSalesMonthly", "John ETL",
loadProcess("loadSalesMonthly", "hive query for monthly summary", "John ETL",
ImmutableList.of(salesFactDaily), ImmutableList.of(salesFactMonthly),
"create table as select ", "plan", "id", "graph",
"ETL");
......@@ -463,7 +496,7 @@ public class HiveLineageServiceTest {
return createInstance(referenceable);
}
Id loadProcess(String name, String user,
Id loadProcess(String name, String description, String user,
List<Id> inputTables,
List<Id> outputTables,
String queryText, String queryPlan,
......@@ -471,6 +504,7 @@ public class HiveLineageServiceTest {
String... traitNames) throws Exception {
Referenceable referenceable = new Referenceable(HIVE_PROCESS_TYPE, traitNames);
referenceable.set("name", name);
referenceable.set("description", description);
referenceable.set("user", user);
referenceable.set("startTime", System.currentTimeMillis());
referenceable.set("endTime", System.currentTimeMillis() + 10000);
......
......@@ -49,6 +49,8 @@ public abstract class BaseTest {
public static final String STRUCT_TYPE_1 = "t1";
public static final String STRUCT_TYPE_2 = "t2";
public static final String TEST_DATE = "2014-12-11T02:35:58.440Z";
public static final long TEST_DATE_IN_LONG = 1418265358440L;
protected IRepository repo;
public static Struct createStruct() throws MetadataException {
......
......@@ -23,9 +23,11 @@ import com.thinkaurelius.titan.core.TitanGraph;
import com.tinkerpop.blueprints.Compare;
import com.tinkerpop.blueprints.GraphQuery;
import com.tinkerpop.blueprints.Vertex;
import org.apache.commons.lang.RandomStringUtils;
import org.apache.hadoop.metadata.RepositoryMetadataModule;
import org.apache.hadoop.metadata.TestUtils;
import org.apache.hadoop.metadata.discovery.graph.GraphBackedDiscoveryService;
import org.apache.hadoop.metadata.repository.BaseTest;
import org.apache.hadoop.metadata.repository.Constants;
import org.apache.hadoop.metadata.repository.RepositoryException;
import org.apache.hadoop.metadata.typesystem.ITypedReferenceableInstance;
......@@ -53,11 +55,10 @@ import org.testng.annotations.Test;
import scala.actors.threadpool.Arrays;
import javax.inject.Inject;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.Iterator;
import java.util.List;
import java.util.UUID;
import java.util.*;
import static org.apache.hadoop.metadata.typesystem.types.utils.TypesUtil.createRequiredAttrDef;
import static org.apache.hadoop.metadata.typesystem.types.utils.TypesUtil.createStructTypeDef;
/**
* GraphBackedMetadataRepository test
......@@ -154,6 +155,7 @@ public class GraphBackedMetadataRepositoryTest {
Referenceable databaseInstance = new Referenceable(DATABASE_TYPE);
databaseInstance.set("name", DATABASE_NAME);
databaseInstance.set("description", "foo database");
databaseInstance.set("created", new Date(BaseTest.TEST_DATE_IN_LONG));
databaseInstance.set("namespace", "colo:cluster:hive:db");
databaseInstance.set("cluster", "cluster-1");
......@@ -180,6 +182,7 @@ public class GraphBackedMetadataRepositoryTest {
String guid = getGUID();
ITypedReferenceableInstance table = repositoryService.getEntityDefinition(guid);
Assert.assertEquals(table.getDate("created"), new Date(BaseTest.TEST_DATE_IN_LONG));
System.out.println("*** table = " + table);
}
......@@ -349,6 +352,57 @@ public class GraphBackedMetadataRepositoryTest {
}
}
@Test(dependsOnMethods = "testSubmitEntity")
public void testSearchByDSLWithInheritance() throws Exception {
String dslQuery = "Person where name = 'Jane'";
System.out.println("Executing dslQuery = " + dslQuery);
String jsonResults = discoveryService.searchByDSL(dslQuery);
Assert.assertNotNull(jsonResults);
JSONObject results = new JSONObject(jsonResults);
Assert.assertEquals(results.length(), 3);
System.out.println("results = " + results);
Object query = results.get("query");
Assert.assertNotNull(query);
JSONObject dataType = results.getJSONObject("dataType");
Assert.assertNotNull(dataType);
String typeName = dataType.getString("typeName");
Assert.assertEquals(typeName, "Person");
JSONArray rows = results.getJSONArray("rows");
Assert.assertEquals(rows.length(), 1);
JSONObject row = rows.getJSONObject(0);
Assert.assertEquals(row.getString("$typeName$"), "Manager");
Assert.assertEquals(row.getString("name"), "Jane");
}
@Test(dependsOnMethods = "testCreateEntity")
public void testBug37860() throws Exception {
String dslQuery =
"hive_table as t where name = 'bar' " +
"database where name = 'foo' and description = 'foo database' select t";
System.out.println("Executing dslQuery = " + dslQuery);
String jsonResults = discoveryService.searchByDSL(dslQuery);
Assert.assertNotNull(jsonResults);
JSONObject results = new JSONObject(jsonResults);
Assert.assertEquals(results.length(), 3);
System.out.println("results = " + results);
Object query = results.get("query");
Assert.assertNotNull(query);
JSONObject dataType = results.getJSONObject("dataType");
Assert.assertNotNull(dataType);
JSONArray rows = results.getJSONArray("rows");
Assert.assertEquals(rows.length(), 1);
}
/**
* Full text search requires GraphBackedSearchIndexer, and GraphBackedSearchIndexer can't be enabled in
* GraphBackedDiscoveryServiceTest because of its test data. So, test for full text search is in
......@@ -399,8 +453,10 @@ public class GraphBackedMetadataRepositoryTest {
TypesUtil.createClassTypeDef(DATABASE_TYPE,
ImmutableList.of(SUPER_TYPE_NAME),
TypesUtil.createUniqueRequiredAttrDef("name", DataTypes.STRING_TYPE),
TypesUtil.createOptionalAttrDef("created", DataTypes.DATE_TYPE),
TypesUtil.createRequiredAttrDef("description", DataTypes.STRING_TYPE));
StructTypeDefinition structTypeDefinition =
new StructTypeDefinition("serdeType",
new AttributeDefinition[]{
......@@ -434,6 +490,7 @@ public class GraphBackedMetadataRepositoryTest {
TypesUtil.createUniqueRequiredAttrDef("name", DataTypes.STRING_TYPE),
TypesUtil.createRequiredAttrDef("description", DataTypes.STRING_TYPE),
TypesUtil.createRequiredAttrDef("type", DataTypes.STRING_TYPE),
TypesUtil.createOptionalAttrDef("created", DataTypes.DATE_TYPE),
// enum
new AttributeDefinition("tableType", "tableType",
Multiplicity.REQUIRED, false, null),
......@@ -497,6 +554,7 @@ public class GraphBackedMetadataRepositoryTest {
tableInstance.set("name", TABLE_NAME);
tableInstance.set("description", "bar table");
tableInstance.set("type", "managed");
tableInstance.set("created", new Date(BaseTest.TEST_DATE_IN_LONG));
tableInstance.set("tableType", 1); // enum
// super type
......@@ -561,4 +619,25 @@ public class GraphBackedMetadataRepositoryTest {
ClassType tableType = typeSystem.getDataType(ClassType.class, TABLE_TYPE);
return tableType.convert(tableInstance, Multiplicity.REQUIRED);
}
private String random() {
return RandomStringUtils.random(10);
}
@Test
public void testUTFValues() throws Exception {
Referenceable hrDept = new Referenceable("Department");
Referenceable john = new Referenceable("Person");
john.set("name", random());
john.set("department", hrDept);
hrDept.set("name", random());
hrDept.set("employees", ImmutableList.of(john));
ClassType deptType = typeSystem.getDataType(ClassType.class, "Department");
ITypedReferenceableInstance hrDept2 = deptType.convert(hrDept, Multiplicity.REQUIRED);
guid = repositoryService.createEntity(hrDept2);
Assert.assertNotNull(guid);
}
}
......@@ -195,7 +195,7 @@ public class EnumTest extends BaseTest {
"\tj : \t1\n" +
"\tk : \t1\n" +
"\tenum3 : \tCOMMITTED\n" +
"\tl : \t2014-12-11\n" +
"\tl : \t" + TEST_DATE + "\n" +
"\tm : \t[1, 1]\n" +
"\tn : \t[1.1, 1.1]\n" +
"\to : \t{b=2.0, a=1.0}\n" +
......@@ -227,7 +227,7 @@ public class EnumTest extends BaseTest {
"\tj : \t1\n" +
"\tk : \t1\n" +
"\tenum3 : \tCOMMITTED\n" +
"\tl : \t2014-12-11\n" +
"\tl : \t" + TEST_DATE + "\n" +
"\tm : \t[1, 1]\n" +
"\tn : \t[1.1, 1.1]\n" +
"\to : \t{b=2.0, a=1.0}\n" +
......@@ -264,7 +264,7 @@ public class EnumTest extends BaseTest {
"\tj : \t1\n" +
"\tk : \t1\n" +
"\tenum3 : \tCOMMITTED\n" +
"\tl : \t2014-12-11\n" +
"\tl : \t" + TEST_DATE + "\n" +
"\tm : \t[1, 1]\n" +
"\tn : \t[1.1, 1.1]\n" +
"\to : \t{b=2.0, a=1.0}\n" +
......@@ -304,7 +304,7 @@ public class EnumTest extends BaseTest {
"\tj : \t1\n" +
"\tk : \t1\n" +
"\tenum3 : \tCOMMITTED\n" +
"\tl : \t2014-12-11\n" +
"\tl : \t" + TEST_DATE + "\n" +
"\tm : \t[1, 1]\n" +
"\tn : \t[1.100000000000000088817841970012523233890533447265625, 1" +
".100000000000000088817841970012523233890533447265625]\n" +
......
......@@ -58,7 +58,7 @@ public class StructTest extends BaseTest {
"\ti : \t1.0\n" +
"\tj : \t1\n" +
"\tk : \t1\n" +
"\tl : \t2014-12-11\n" +
"\tl : \t" + TEST_DATE + "\n" +
"\tm : \t[1, 1]\n" +
"\tn : \t[1.1, 1.1]\n" +
"\to : \t{b=2.0, a=1.0}\n" +
......@@ -101,7 +101,7 @@ public class StructTest extends BaseTest {
"\ti : \t1.0\n" +
"\tj : \t1\n" +
"\tk : \t1\n" +
"\tl : \t2014-12-11\n" +
"\tl : \t" + TEST_DATE + "\n" +
"\tm : \t[1, 1]\n" +
"\tn : \t[1.100000000000000088817841970012523233890533447265625, 1" +
".100000000000000088817841970012523233890533447265625]\n" +
......
......@@ -143,4 +143,11 @@ class GremlinTest extends FunSuite with BeforeAndAfterAll with BaseGremlinTest {
val e = p("from blah").right.get
an [ExpressionException] should be thrownBy QueryProcessor.evaluate(e, g)
}
test("Bug37860") {
val p = new QueryParser
val e = p("Table as t where name = 'sales_fact' db where name = 'Sales' and owner = 'John ETL' select t").right.get
val r = QueryProcessor.evaluate(e, g)
validateJson(r)
}
}
......@@ -109,7 +109,7 @@ object QueryTestsUtils extends GraphUtils {
def viewClsDef = new HierarchicalTypeDefinition[ClassType](classOf[ClassType], "View", null,
Array(
attrDef("name", DataTypes.STRING_TYPE),
new AttributeDefinition("inputTables", "Table", Multiplicity.COLLECTION, false, null)
new AttributeDefinition("inputTables", DataTypes.arrayTypeName("Table"), Multiplicity.COLLECTION, false, null)
))
def dimTraitDef = new HierarchicalTypeDefinition[TraitType](classOf[TraitType], "Dimension", null,
......
#!/bin/bash
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License. See accompanying LICENSE file.
#
DIR=$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )
source ${DIR}/metadata-config.sh
${JAVA_BIN} ${JAVA_PROPERTIES} -cp ${METADATACPPATH} org.apache.hadoop.metadata.util.CredentialProviderUtility
#!/bin/bash
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License. See accompanying LICENSE file.
#
# resolve links - $0 may be a softlink
PRG="${0}"
while [ -h "${PRG}" ]; do
ls=`ls -ld "${PRG}"`
link=`expr "$ls" : '.*-> \(.*\)$'`
if expr "$link" : '/.*' > /dev/null; then
PRG="$link"
else
PRG=`dirname "${PRG}"`/"$link"
fi
done
BASEDIR=`dirname ${PRG}`
BASEDIR=`cd ${BASEDIR}/..;pwd`
if [ -z "$METADATA_CONF" ]; then
METADATA_CONF=${BASEDIR}/conf
fi
export METADATA_CONF
if [ -f "${METADATA_CONF}/metadata-env.sh" ]; then
. "${METADATA_CONF}/metadata-env.sh"
fi
if test -z ${JAVA_HOME}
then
JAVA_BIN=`which java`
JAR_BIN=`which jar`
else
JAVA_BIN=${JAVA_HOME}/bin/java
JAR_BIN=${JAVA_HOME}/bin/jar
fi
export JAVA_BIN
if [ ! -e $JAVA_BIN ] || [ ! -e $JAR_BIN ]; then
echo "$JAVA_BIN and/or $JAR_BIN not found on the system. Please make sure java and jar commands are available."
exit 1
fi
# default the heap size to 1GB
DEFAULT_JAVA_HEAP_MAX=-Xmx1024m
METADATA_OPTS="$DEFAULT_JAVA_HEAP_MAX $METADATA_OPTS"
METADATACPPATH="$METADATA_CONF"
METADATA_EXPANDED_WEBAPP_DIR=${METADATA_EXPANDED_WEBAPP_DIR:-${BASEDIR}/server/webapp}
export METADATA_EXPANDED_WEBAPP_DIR
# set the server classpath
if [ ! -d ${METADATA_EXPANDED_WEBAPP_DIR}/metadata/WEB-INF ]; then
mkdir -p ${METADATA_EXPANDED_WEBAPP_DIR}/metadata
cd ${METADATA_EXPANDED_WEBAPP_DIR}/metadata
$JAR_BIN -xf ${BASEDIR}/server/webapp/metadata.war
cd -
fi
METADATACPPATH="${METADATACPPATH}:${METADATA_EXPANDED_WEBAPP_DIR}/metadata/WEB-INF/classes"
METADATACPPATH="${METADATACPPATH}:${METADATA_EXPANDED_WEBAPP_DIR}/metadata/WEB-INF/lib/*:${BASEDIR}/libext/*"
# log and pid dirs for applications
METADATA_LOG_DIR="${METADATA_LOG_DIR:-$BASEDIR/logs}"
export METADATA_LOG_DIR
METADATA_PID_DIR="${METADATA_PID_DIR:-$BASEDIR/logs}"
# create the pid dir if it is not there
[ -w "$METADATA_PID_DIR" ] || mkdir -p "$METADATA_PID_DIR"
export METADATA_PID_DIR
METADATA_PID_FILE=${METADATA_PID_DIR}/metadata.pid
export METADATA_PID_FILE
METADATA_DATA_DIR=${METADATA_DATA_DIR:-${BASEDIR}/data}
METADATA_HOME_DIR="${METADATA_HOME_DIR:-$BASEDIR}"
export METADATA_HOME_DIR
# make sure the process is not running
if [ -f $METADATA_PID_FILE ]; then
if kill -0 `cat $METADATA_PID_FILE` > /dev/null 2>&1; then
echo metadata running as process `cat $METADATA_PID_FILE`. Stop it first.
exit 1
fi
fi
mkdir -p $METADATA_LOG_DIR
pushd ${BASEDIR} > /dev/null
JAVA_PROPERTIES="$METADATA_OPTS $METADATA_PROPERTIES -Dmetadata.log.dir=$METADATA_LOG_DIR -Dmetadata.home=${METADATA_HOME_DIR} -Dmetadata.conf=${METADATA_CONF} -Dmetadata.log.file=application.log"
shift
while [[ ${1} =~ ^\-D ]]; do
JAVA_PROPERTIES="${JAVA_PROPERTIES} ${1}"
shift
done
TIME=`date +%Y%m%d%H%M%S`
#!/bin/bash
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License. See accompanying LICENSE file.
#
DIR=$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )
source ${DIR}/metadata-config.sh
nohup ${JAVA_BIN} ${JAVA_PROPERTIES} -cp ${METADATACPPATH} org.apache.hadoop.metadata.Main -app ${METADATA_EXPANDED_WEBAPP_DIR}/metadata $* > "${METADATA_LOG_DIR}/metadata-server.$TIME.out" 2>&1 &
echo $! > $METADATA_PID_FILE
popd > /dev/null
echo Metadata Server started!!!
#!/bin/bash
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License. See accompanying LICENSE file.
#
# resolve links - $0 may be a softlink
PRG="${0}"
while [ -h "${PRG}" ]; do
ls=`ls -ld "${PRG}"`
link=`expr "$ls" : '.*-> \(.*\)$'`
if expr "$link" : '/.*' > /dev/null; then
PRG="$link"
else
PRG=`dirname "${PRG}"`/"$link"
fi
done
BASEDIR=`dirname ${PRG}`
BASEDIR=`cd ${BASEDIR}/..;pwd`
METADATA_PID_DIR="${METADATA_PID_DIR:-$BASEDIR/logs}"
# create the pid dir if it is not there
[ -w "$METADATA_PID_DIR" ] || mkdir -p "$METADATA_PID_DIR"
export METADATA_PID_DIR
METADATA_PID_FILE=${METADATA_PID_DIR}/metadata.pid
export METADATA_PID_FILE
if [ -f $METADATA_PID_FILE ]
then
kill -15 `cat $METADATA_PID_FILE`
echo Metadata Server stopped
rm -rf $METADATA_PID_FILE
else
echo "pid file $METADATA_PID_FILE not present"
fi
......@@ -109,9 +109,10 @@ def executeEnvSh(confDir):
proc.communicate()
def java(classname, args, classpath, jvm_opts_list):
if os.environ["JAVA_HOME"] is not None and os.environ["JAVA_HOME"]:
prg = os.path.join(os.environ["JAVA_HOME"], "bin", "java")
def java(classname, args, classpath, jvm_opts_list, logdir=None):
java_home = os.environ.get("JAVA_HOME", None)
if java_home:
prg = os.path.join(java_home, "bin", "java")
else:
prg = which("java")
......@@ -121,11 +122,12 @@ def java(classname, args, classpath, jvm_opts_list):
commandline.append(classpath)
commandline.append(classname)
commandline.extend(args)
return runProcess(commandline)
return runProcess(commandline, logdir)
def jar(path):
if os.environ["JAVA_HOME"] is not None and os.environ["JAVA_HOME"]:
prg = os.path.join(os.environ["JAVA_HOME"], "bin", "jar")
java_home = os.environ.get("JAVA_HOME", None)
if java_home:
prg = os.path.join(java_home, "bin", "jar")
else:
prg = which("jar")
......@@ -153,7 +155,7 @@ def which(program):
return None
def runProcess(commandline):
def runProcess(commandline, logdir=None):
"""
Run a process
:param commandline: command line
......@@ -161,7 +163,13 @@ def runProcess(commandline):
"""
global finished
debug ("Executing : %s" % commandline)
return subprocess.Popen(commandline)
timestr = time.strftime("metadata.%Y%m%d-%H%M%S")
stdoutFile = None
stderrFile = None
if logdir:
stdoutFile = open(os.path.join(logdir, timestr + ".out"), "w")
stderrFile = open(os.path.join(logdir, timestr + ".err"), "w")
return subprocess.Popen(commandline, stdout=stdoutFile, stderr=stderrFile)
def print_output(name, src, toStdErr):
"""
......
......@@ -17,6 +17,7 @@
# limitations under the License.
import os
import sys
import traceback
import metadata_config as mc
......@@ -29,8 +30,8 @@ def main():
metadata_home = mc.metadataDir()
confdir = mc.dirMustExist(mc.confDir(metadata_home))
logdir = mc.dirMustExist(mc.logDir(metadata_home))
mc.executeEnvSh(confdir)
logdir = mc.dirMustExist(mc.logDir(metadata_home))
#create sys property for conf dirs
jvm_opts_list = (METADATA_LOG_OPTS % logdir).split()
......@@ -64,7 +65,7 @@ def main():
args = ["-app", os.path.join(web_app_dir, "metadata")]
args.extend(sys.argv[1:])
process = mc.java("org.apache.hadoop.metadata.Main", args, metadata_classpath, jvm_opts_list)
process = mc.java("org.apache.hadoop.metadata.Main", args, metadata_classpath, jvm_opts_list, logdir)
mc.writePid(metadata_pid_file, process)
print "Metadata Server started!!!\n"
......@@ -74,6 +75,7 @@ if __name__ == '__main__':
returncode = main()
except Exception as e:
print "Exception: %s " % str(e)
print traceback.format_exc()
returncode = -1
sys.exit(returncode)
......@@ -18,6 +18,7 @@
import os
from signal import SIGTERM
import sys
import traceback
import metadata_config as mc
......@@ -25,8 +26,8 @@ def main():
metadata_home = mc.metadataDir()
confdir = mc.dirMustExist(mc.confDir(metadata_home))
piddir = mc.dirMustExist(mc.logDir(metadata_home))
mc.executeEnvSh(confdir)
piddir = mc.dirMustExist(mc.logDir(metadata_home))
metadata_pid_file = mc.pidFile(metadata_home)
......@@ -52,6 +53,7 @@ if __name__ == '__main__':
returncode = main()
except Exception as e:
print "Exception: %s " % str(e)
print traceback.format_exc()
returncode = -1
sys.exit(returncode)
#!/bin/bash
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License. See accompanying LICENSE file.
#
# resolve links - $0 may be a softlink
PRG="${0}"
while [ -h "${PRG}" ]; do
ls=`ls -ld "${PRG}"`
link=`expr "$ls" : '.*-> \(.*\)$'`
if expr "$link" : '/.*' > /dev/null; then
PRG="$link"
else
PRG=`dirname "${PRG}"`/"$link"
fi
done
BASEDIR=`dirname ${PRG}`
BASEDIR=`cd ${BASEDIR}/..;pwd`
if [ -z "$METADATA_CONF" ]; then
METADATA_CONF=${BASEDIR}/conf
fi
export METADATA_CONF
if [ -f "${METADATA_CONF}/metadata-env.sh" ]; then
. "${METADATA_CONF}/metadata-env.sh"
fi
if test -z ${JAVA_HOME}
then
JAVA_BIN=`which java`
JAR_BIN=`which jar`
else
JAVA_BIN=${JAVA_HOME}/bin/java
JAR_BIN=${JAVA_HOME}/bin/jar
fi
export JAVA_BIN
if [ ! -e $JAVA_BIN ] || [ ! -e $JAR_BIN ]; then
echo "$JAVA_BIN and/or $JAR_BIN not found on the system. Please make sure java and jar commands are available."
exit 1
fi
# default the heap size to 1GB
DEFAULT_JAVA_HEAP_MAX=-Xmx1024m
METADATA_OPTS="$DEFAULT_JAVA_HEAP_MAX $METADATA_OPTS"
METADATACPPATH="$METADATA_CONF"
METADATA_EXPANDED_WEBAPP_DIR=${METADATA_EXPANDED_WEBAPP_DIR:-${BASEDIR}/server/webapp}
export METADATA_EXPANDED_WEBAPP_DIR
METADATACPPATH="${METADATACPPATH}:${METADATA_EXPANDED_WEBAPP_DIR}/metadata/WEB-INF/classes"
METADATACPPATH="${METADATACPPATH}:${METADATA_EXPANDED_WEBAPP_DIR}/metadata/WEB-INF/lib/*:${BASEDIR}/libext/*"
# log and pid dirs for applications
METADATA_LOG_DIR="${METADATA_LOG_DIR:-$BASEDIR/logs}"
export METADATA_LOG_DIR
METADATA_HOME_DIR="${METADATA_HOME_DIR:-$BASEDIR}"
export METADATA_HOME_DIR
JAVA_PROPERTIES="$METADATA_OPTS $METADATA_PROPERTIES -Dmetadata.log.dir=$METADATA_LOG_DIR -Dmetadata.home=${METADATA_HOME_DIR}"
${JAVA_BIN} ${JAVA_PROPERTIES} -cp ${METADATACPPATH} org.apache.hadoop.metadata.examples.QuickStart
echo Example data added to Metadata Server!!!
......@@ -28,8 +28,8 @@ def main():
metadata_home = mc.metadataDir()
confdir = mc.dirMustExist(mc.confDir(metadata_home))
logdir = mc.dirMustExist(mc.logDir(metadata_home))
mc.executeEnvSh(confdir)
logdir = mc.dirMustExist(mc.logDir(metadata_home))
#create sys property for conf dirs
jvm_opts_list = (METADATA_LOG_OPTS % logdir).split()
......
......@@ -29,14 +29,12 @@ metadata.graph.index.search.elasticsearch.local-mode=true
metadata.graph.index.search.elasticsearch.create.sleep=2000
######### Hive Lineage Configs #########
# This model follows the quick-start guide
metadata.lineage.hive.table.type.name=hive_table
# This model reflects the base super types for Data and Process
metadata.lineage.hive.table.type.name=DataSet
metadata.lineage.hive.table.column.name=columns
metadata.lineage.hive.process.type.name=hive_process
metadata.lineage.hive.process.type.name=Process
metadata.lineage.hive.process.inputs.name=inputTables
metadata.lineage.hive.process.outputs.name=outputTables
#Currently unused
#metadata.lineage.hive.column.type.name=Column
######### Security Properties #########
......
#
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
######### Security Properties #########
# SSL config
metadata.enableTLS=false
truststore.file=/path/to/truststore.jks
cert.stores.credential.provider.path=jceks://file/path/to/credentialstore.jceks
# following only required for 2-way SSL
keystore.file=/path/to/keystore.jks
# Authentication config
# enabled: true or false
metadata.http.authentication.enabled=false
# type: simple or kerberos
metadata.http.authentication.type=simple
######### Security Properties #########
......@@ -50,13 +50,13 @@ class TestMetadata(unittest.TestCase):
'org.apache.hadoop.metadata.Main',
['-app', 'metadata_home/server/webapp/metadata'],
'metadata_home/conf:metadata_home/server/webapp/metadata/WEB-INF/classes:metadata_home/server/webapp/metadata/WEB-INF/lib\\*:metadata_home/libext\\*',
['-Dmetadata.log.dir=metadata_home/logs', '-Dmetadata.log.file=application.log', '-Dmetadata.home=metadata_home', '-Dmetadata.conf=metadata_home/conf', '-Xmx1024m'])
['-Dmetadata.log.dir=metadata_home/logs', '-Dmetadata.log.file=application.log', '-Dmetadata.home=metadata_home', '-Dmetadata.conf=metadata_home/conf', '-Xmx1024m'], 'metadata_home/logs')
else:
java_mock.assert_called_with(
'org.apache.hadoop.metadata.Main',
['-app', 'metadata_home/server/webapp/metadata'],
'metadata_home/conf:metadata_home/server/webapp/metadata/WEB-INF/classes:metadata_home/server/webapp/metadata/WEB-INF/lib/*:metadata_home/libext/*',
['-Dmetadata.log.dir=metadata_home/logs', '-Dmetadata.log.file=application.log', '-Dmetadata.home=metadata_home', '-Dmetadata.conf=metadata_home/conf', '-Xmx1024m'])
['-Dmetadata.log.dir=metadata_home/logs', '-Dmetadata.log.file=application.log', '-Dmetadata.home=metadata_home', '-Dmetadata.conf=metadata_home/conf', '-Xmx1024m'], 'metadata_home/logs')
pass
......
......@@ -120,7 +120,7 @@ class DSLTest {
Assert.assertEquals(s"${i.o.asInstanceOf[java.util.Map[_, _]].keySet}", "[b, a]")
// 5. Serialize mytype instance to Json
Assert.assertEquals(s"${pretty(render(i))}", "{\n \"$typeName$\":\"mytype\",\n \"e\":1," + "\n \"n\":[1,1.100000000000000088817841970012523233890533447265625],\n \"h\":1.0,\n \"b\":true,\n \"k\":1,\n \"j\":1,\n \"d\":2,\n \"m\":[1,1],\n \"g\":1,\n \"a\":1,\n \"i\":1.0,\n \"c\":1,\n \"l\":\"2014-12-03\",\n \"f\":1,\n \"o\":{\n \"b\":2.0,\n \"a\":1.0\n }\n}")
Assert.assertEquals(s"${pretty(render(i))}", "{\n \"$typeName$\":\"mytype\",\n \"e\":1," + "\n \"n\":[1,1.100000000000000088817841970012523233890533447265625],\n \"h\":1.0,\n \"b\":true,\n \"k\":1,\n \"j\":1,\n \"d\":2,\n \"m\":[1,1],\n \"g\":1,\n \"a\":1,\n \"i\":1.0,\n \"c\":1,\n \"l\":\"2014-12-03T08:00:00.000Z\",\n \"f\":1,\n \"o\":{\n \"b\":2.0,\n \"a\":1.0\n }\n}")
}
@Test def test2 {
......
......@@ -124,6 +124,11 @@
<groupId>com.google.inject</groupId>
<artifactId>guice</artifactId>
</dependency>
<dependency>
<groupId>org.apache.commons</groupId>
<artifactId>commons-lang3</artifactId>
</dependency>
</dependencies>
<build>
......
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.metadata;
import java.util.Arrays;
import java.util.Collection;
public class ParamChecker {
/**
* Check that a value is not null. If null, throws an IllegalArgumentException.
*
* @param obj value.
* @param name parameter name for the exception message.
* @return the given value.
*/
public static <T> T notNull(T obj, String name) {
if (obj == null) {
throw new IllegalArgumentException(name + " cannot be null");
}
return obj;
}
/**
* Check that a collection is not null, not empty, and contains no null elements. If it is null or empty, or any
* element is null, throws an IllegalArgumentException.
* @param list the list of T.
* @param name parameter name for the exception message.
*/
public static <T> Collection<T> notNullElements(Collection<T> list, String name) {
notEmpty(list, name);
for (T ele : list) {
notNull(ele, String.format("Collection %s element %s", name, ele));
}
return list;
}
/**
* Check that an array is not null, not empty, and contains no null elements. If it is null or empty, or any
* element is null, throws an IllegalArgumentException.
* @param array the array of T.
* @param name parameter name for the exception message.
*/
public static <T> T[] notNullElements(T[] array, String name) {
notEmpty(Arrays.asList(notNull(array, name)), name); // guard the array itself so a null array also yields IllegalArgumentException
for (T ele : array) {
notNull(ele, String.format("Collection %s element %s", name, ele));
}
return array;
}
/**
* Check that a list is not null and not empty.
* @param list the list of T.
* @param name parameter name for the exception message.
*/
public static <T> Collection<T> notEmpty(Collection<T> list, String name) {
notNull(list, name);
if (list.isEmpty()) {
throw new IllegalArgumentException(String.format("Collection %s is empty", name));
}
return list;
}
/**
* Check that a string is not null and not empty. If null or empty, throws an IllegalArgumentException.
*
* @param value value.
* @param name parameter name for the exception message.
* @return the given value.
*/
public static String notEmpty(String value, String name) {
return notEmpty(value, name, null);
}
/**
* Check that a string is not empty if it is not null.
*
* @param value value.
* @param name parameter name for the exception message.
* @return the given value.
*/
public static String notEmptyIfNotNull(String value, String name) {
return notEmptyIfNotNull(value, name, null);
}
/**
* Check that a string is not empty if it is not null.
*
* @param value value.
* @param name parameter name for the exception message.
* @return the given value.
*/
public static String notEmptyIfNotNull(String value, String name, String info) {
if (value == null) {
return value;
}
if (value.trim().length() == 0) {
throw new IllegalArgumentException(name + " cannot be empty" + (info == null ? "" : ", " + info));
}
return value.trim();
}
/**
* Check that a string is not null and not empty. If null or empty, throws an IllegalArgumentException.
*
* @param value value.
* @param name parameter name for the exception message.
* @param info additional information to be printed with the exception message
* @return the given value.
*/
public static String notEmpty(String value, String name, String info) {
if (value == null) {
throw new IllegalArgumentException(name + " cannot be null" + (info == null ? "" : ", " + info));
}
return notEmptyIfNotNull(value, name, info);
}
/**
* Check that a collection of strings is not null, not empty, and contains no null or empty elements. If any of
* these conditions fails, throws an IllegalArgumentException.
* @param list the list of strings.
* @param name parameter name for the exception message.
*/
public static Collection<String> notEmptyElements(Collection<String> list, String name) {
notEmpty(list, name);
for (String ele : list) {
notEmpty(ele, String.format("list %s element %s", name, ele));
}
return list;
}
}
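A minimal usage sketch (editor's illustration, not part of this commit) of how these validators are meant to be wired into a constructor, mirroring the AttributeDefinition change further down; the Example class itself is hypothetical.
import org.apache.hadoop.metadata.ParamChecker;
public final class Example {
    public final String name;
    public final String typeName;
    public final String reverseAttributeName;
    public Example(String name, String typeName, String reverseAttributeName) {
        // required fields: throw IllegalArgumentException on null or empty values
        this.name = ParamChecker.notEmpty(name, "Attribute name");
        this.typeName = ParamChecker.notEmpty(typeName, "Attribute type");
        // optional field: validated (and trimmed) only when non-null
        this.reverseAttributeName = ParamChecker.notEmptyIfNotNull(reverseAttributeName, "Reverse attribute name");
    }
}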
......@@ -18,7 +18,7 @@
package org.apache.hadoop.metadata.typesystem.types;
import com.google.common.base.Preconditions;
import org.apache.hadoop.metadata.ParamChecker;
public final class AttributeDefinition {
......@@ -44,16 +44,13 @@ public final class AttributeDefinition {
public AttributeDefinition(String name, String dataTypeName,
Multiplicity multiplicity, boolean isComposite, boolean isUnique,
boolean isIndexable, String reverseAttributeName) {
Preconditions.checkNotNull(name);
Preconditions.checkNotNull(dataTypeName);
this.name = name;
this.dataTypeName = dataTypeName;
this.name = ParamChecker.notEmpty(name, "Attribute name");
this.dataTypeName = ParamChecker.notEmpty(dataTypeName, "Attribute type");
this.multiplicity = multiplicity;
this.isComposite = isComposite;
this.isUnique = isUnique;
this.isIndexable = isIndexable;
this.reverseAttributeName = reverseAttributeName;
this.reverseAttributeName = ParamChecker.notEmptyIfNotNull(reverseAttributeName, "Reverse attribute name");
}
@Override
......
......@@ -38,7 +38,6 @@ public class AttributeInfo {
private IDataType dataType;
AttributeInfo(TypeSystem t, AttributeDefinition def, Map<String, IDataType> tempTypes) throws MetadataException {
TypeUtils.validateName(def.name);
this.name = def.name;
this.dataType = (tempTypes != null && tempTypes.containsKey(def.dataTypeName)) ?
tempTypes.get(def.dataTypeName) : t.getDataType(IDataType.class, def.dataTypeName);
......
......@@ -508,7 +508,7 @@ public class DataTypes {
TypeSystem.getInstance().allowNullsInCollections()
? Multiplicity.OPTIONAL : Multiplicity.REQUIRED));
}
return b.build();
return m.isUnique ? b.build().asList() : b.build();
} else {
try {
return ImmutableList.of(elemType.convert(val,
......
......@@ -21,6 +21,7 @@ package org.apache.hadoop.metadata.typesystem.types;
import com.google.common.collect.ImmutableCollection;
import com.google.common.collect.ImmutableMap;
import org.apache.hadoop.metadata.MetadataException;
import scala.math.BigInt;
public class EnumType extends AbstractDataType<EnumValue> {
......@@ -54,7 +55,7 @@ public class EnumType extends AbstractDataType<EnumValue> {
EnumValue e = null;
if (val instanceof EnumValue) {
e = valueMap.get(((EnumValue)val).value);
} else if ( val instanceof Integer) {
} else if ( val instanceof Integer || val instanceof BigInt) {
e = ordinalMap.get(val);
} else if ( val instanceof String) {
e = valueMap.get(val);
......
......@@ -18,6 +18,8 @@
package org.apache.hadoop.metadata.typesystem.types;
import org.apache.hadoop.metadata.ParamChecker;
import java.util.Arrays;
public final class EnumTypeDefinition {
......@@ -26,8 +28,8 @@ public final class EnumTypeDefinition {
public final EnumValue[] enumValues;
public EnumTypeDefinition(String name, EnumValue... enumValues) {
this.name = name;
this.enumValues = enumValues;
this.name = ParamChecker.notEmpty(name, "Enum type name");
this.enumValues = ParamChecker.notNullElements(enumValues, "Enum values");
}
@Override
......
......@@ -18,13 +18,15 @@
package org.apache.hadoop.metadata.typesystem.types;
import org.apache.hadoop.metadata.ParamChecker;
public class EnumValue {
public final String value;
public final int ordinal;
public EnumValue(String value, int ordinal) {
this.value = value;
this.value = ParamChecker.notEmpty(value, "Enum value");
this.ordinal = ordinal;
}
......
......@@ -47,7 +47,7 @@ public class HierarchicalTypeDefinition<T extends HierarchicalType> extends Stru
public HierarchicalTypeDefinition(Class<T> hierarchicalMetaType,
String typeName, ImmutableList<String> superTypes,
AttributeDefinition[] attributeDefinitions) {
super(typeName, attributeDefinitions);
super(typeName, false, attributeDefinitions);
hierarchicalMetaTypeName = hierarchicalMetaType.getName();
this.superTypes = superTypes == null ? ImmutableList.<String>of() : superTypes;
}
......
......@@ -18,6 +18,8 @@
package org.apache.hadoop.metadata.typesystem.types;
import org.apache.hadoop.metadata.ParamChecker;
import java.util.Arrays;
public class StructTypeDefinition {
......@@ -25,12 +27,19 @@ public class StructTypeDefinition {
public final String typeName;
public final AttributeDefinition[] attributeDefinitions;
public StructTypeDefinition(String typeName,
AttributeDefinition[] attributeDefinitions) {
this.typeName = typeName;
protected StructTypeDefinition(String typeName, boolean validate, AttributeDefinition... attributeDefinitions) {
this.typeName = ParamChecker.notEmpty(typeName, "Struct type name");
if (attributeDefinitions != null && attributeDefinitions.length != 0) {
ParamChecker.notNullElements(attributeDefinitions, "Attribute definitions");
}
this.attributeDefinitions = attributeDefinitions;
}
public StructTypeDefinition(String typeName, AttributeDefinition[] attributeDefinitions) {
this.typeName = ParamChecker.notEmpty(typeName, "Struct type name");
this.attributeDefinitions = ParamChecker.notNullElements(attributeDefinitions, "Attribute definitions");
}
@Override
public boolean equals(Object o) {
if (this == o) return true;
......
......@@ -47,7 +47,7 @@ public class TypeSystem {
private static ThreadLocal<SimpleDateFormat> dateFormat = new ThreadLocal() {
@Override
public SimpleDateFormat initialValue() {
SimpleDateFormat dateFormat = new SimpleDateFormat("yyyy-MM-dd");
SimpleDateFormat dateFormat = new SimpleDateFormat("yyyy-MM-dd'T'HH:mm:ss.SSSX");
dateFormat.setTimeZone(TimeZone.getTimeZone("UTC"));
return dateFormat;
}
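A small sketch (editor's illustration, not part of this commit) of why the expected test strings elsewhere in this diff change from the bare date 2014-12-11 to the full TEST_DATE constant: the new pattern preserves time, milliseconds, and the UTC zone designator on a round trip.
import java.text.SimpleDateFormat;
import java.util.Date;
import java.util.TimeZone;
public class DateFormatSketch {    // hypothetical class name, for illustration only
    public static void main(String[] args) throws Exception {
        SimpleDateFormat fmt = new SimpleDateFormat("yyyy-MM-dd'T'HH:mm:ss.SSSX");
        fmt.setTimeZone(TimeZone.getTimeZone("UTC"));
        Date d = fmt.parse("2014-12-11T02:35:58.440Z");   // BaseTest.TEST_DATE
        System.out.println(d.getTime());                  // 1418265358440, i.e. BaseTest.TEST_DATE_IN_LONG
        System.out.println(fmt.format(d));                // 2014-12-11T02:35:58.440Z, no longer just 2014-12-11
    }
}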
......@@ -291,6 +291,7 @@ public class TypeSystem {
throw new MetadataException(
String.format("Redefinition of type %s not supported", eDef.name));
}
EnumType eT = new EnumType(this, eDef.name, eDef.enumValues);
types.put(eDef.name, eT);
typeCategoriesToTypeNamesMap.put(DataTypes.TypeCategory.ENUM, eDef.name);
......@@ -352,7 +353,6 @@ public class TypeSystem {
private void step1() throws MetadataException {
for (StructTypeDefinition sDef : structDefs) {
assert sDef.typeName != null;
TypeUtils.validateName(sDef.typeName);
if (dataType(sDef.typeName) != null) {
throw new MetadataException(
String.format("Cannot redefine type %s", sDef.typeName));
......@@ -365,7 +365,6 @@ public class TypeSystem {
for (HierarchicalTypeDefinition<TraitType> traitDef : traitDefs) {
assert traitDef.typeName != null;
TypeUtils.validateName(traitDef.typeName);
if (types.containsKey(traitDef.typeName)) {
throw new MetadataException(
String.format("Cannot redefine type %s", traitDef.typeName));
......@@ -380,7 +379,6 @@ public class TypeSystem {
for (HierarchicalTypeDefinition<ClassType> classDef : classDefs) {
assert classDef.typeName != null;
TypeUtils.validateName(classDef.typeName);
if (types.containsKey(classDef.typeName)) {
throw new MetadataException(
String.format("Cannot redefine type %s", classDef.typeName));
......@@ -464,6 +462,14 @@ public class TypeSystem {
}
}
if (info.multiplicity.upper > 1 && !(
info.dataType().getTypeCategory() == DataTypes.TypeCategory.MAP ||
info.dataType().getTypeCategory() == DataTypes.TypeCategory.ARRAY)) {
throw new MetadataException(
String.format("A multiplicty of more than one requires a collection type for attribute '%s'",
info.name));
}
return info;
}
......
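A sketch (editor's illustration, not part of this commit) of what the new multiplicity check accepts and rejects; the attribute name and wrapper class are hypothetical.
import org.apache.hadoop.metadata.typesystem.types.AttributeDefinition;
import org.apache.hadoop.metadata.typesystem.types.DataTypes;
import org.apache.hadoop.metadata.typesystem.types.Multiplicity;
public class MultiplicitySketch {
    // accepted: a COLLECTION multiplicity paired with an array type
    static AttributeDefinition ok() {
        return new AttributeDefinition("stringSet",
                DataTypes.arrayTypeName(DataTypes.STRING_TYPE.getName()),
                Multiplicity.COLLECTION, false, null);
    }
    // rejected at defineTypes(): the upper bound is greater than one but the data type is a
    // plain string, so TypeSystem now throws the "requires a collection type" MetadataException
    static AttributeDefinition bad() {
        return new AttributeDefinition("stringSet", DataTypes.STRING_TYPE.getName(),
                Multiplicity.COLLECTION, false, null);
    }
}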
......@@ -50,13 +50,6 @@ public class TypeUtils {
}
}
public static void validateName(String name) throws MetadataException {
if (!NAME_PATTERN.matcher(name).matches()) {
throw new MetadataException(
String.format("Unsupported name for an attribute '%s'", name));
}
}
public static String parseAsArrayType(String typeName) {
Matcher m = ARRAY_TYPE_NAME_PATTERN.matcher(typeName);
return m.matches() ? m.group(1) : null;
......
......@@ -85,7 +85,7 @@
</logger>
<root>
<priority value="debug"/>
<priority value="info"/>
<appender-ref ref="console"/>
</root>
......
......@@ -126,7 +126,7 @@ object TypesSerialization {
private def convertAttributeInfoToAttributeDef(aInfo: AttributeInfo) = {
new AttributeDefinition(aInfo.name, aInfo.dataType().getName, aInfo.multiplicity,
aInfo.isComposite, aInfo.reverseAttributeName)
aInfo.isComposite, aInfo.isUnique, aInfo.isIndexable, aInfo.reverseAttributeName)
}
private def convertEnumTypeToEnumTypeDef(et: EnumType) = {
......
......@@ -37,6 +37,8 @@ public abstract class BaseTest {
public static final String STRUCT_TYPE_1 = "t1";
public static final String STRUCT_TYPE_2 = "t2";
public static final String TEST_DATE = "2014-12-11T02:35:58.440Z";
public static final long TEST_DATE_IN_LONG = 1418265358440L;
public static Struct createStruct() throws MetadataException {
StructType structType = TypeSystem.getInstance().getDataType(
......
......@@ -179,7 +179,7 @@ public class EnumTest extends BaseTest {
"\tj : \t1\n" +
"\tk : \t1\n" +
"\tenum3 : \tCOMMITTED\n" +
"\tl : \t2014-12-11\n" +
"\tl : \t" + TEST_DATE + "\n" +
"\tm : \t[1, 1]\n" +
"\tn : \t[1.1, 1.1]\n" +
"\to : \t{b=2.0, a=1.0}\n" +
......@@ -211,7 +211,7 @@ public class EnumTest extends BaseTest {
"\tj : \t1\n" +
"\tk : \t1\n" +
"\tenum3 : \tCOMMITTED\n" +
"\tl : \t2014-12-11\n" +
"\tl : \t" + TEST_DATE + "\n" +
"\tm : \t[1, 1]\n" +
"\tn : \t[1.1, 1.1]\n" +
"\to : \t{b=2.0, a=1.0}\n" +
......
......@@ -54,7 +54,7 @@ public class StructTest extends BaseTest {
"\ti : \t1.0\n" +
"\tj : \t1\n" +
"\tk : \t1\n" +
"\tl : \t2014-12-11\n" +
"\tl : \t" + TEST_DATE + "\n" +
"\tm : \t[1, 1]\n" +
"\tn : \t[1.1, 1.1]\n" +
"\to : \t{b=2.0, a=1.0}\n" +
......
......@@ -19,6 +19,7 @@
package org.apache.hadoop.metadata.typesystem.types;
import com.google.common.collect.ImmutableList;
import org.apache.commons.lang3.RandomStringUtils;
import org.apache.hadoop.metadata.typesystem.types.utils.TypesUtil;
import org.testng.Assert;
import org.testng.annotations.AfterMethod;
......@@ -29,6 +30,11 @@ import scala.actors.threadpool.Arrays;
import java.util.Collections;
import java.util.List;
import static org.apache.hadoop.metadata.typesystem.types.utils.TypesUtil.createClassTypeDef;
import static org.apache.hadoop.metadata.typesystem.types.utils.TypesUtil.createRequiredAttrDef;
import static org.apache.hadoop.metadata.typesystem.types.utils.TypesUtil.createStructTypeDef;
import static org.apache.hadoop.metadata.typesystem.types.utils.TypesUtil.createTraitTypeDef;
public class TypeSystemTest extends BaseTest {
@BeforeClass
......@@ -100,4 +106,34 @@ public class TypeSystemTest extends BaseTest {
Assert.assertFalse(Collections.disjoint(traitsNames, traits));
}
private String random() {
return RandomStringUtils.random(10);
}
@Test
public void testUTFNames() throws Exception {
TypeSystem ts = getTypeSystem();
String enumType = random();
EnumTypeDefinition orgLevelEnum =
new EnumTypeDefinition(enumType, new EnumValue(random(), 1), new EnumValue(random(), 2));
ts.defineEnumType(orgLevelEnum);
String structName = random();
String attrType = random();
StructTypeDefinition structType = createStructTypeDef(structName,
createRequiredAttrDef(attrType, DataTypes.STRING_TYPE));
String className = random();
HierarchicalTypeDefinition<ClassType> classType =
createClassTypeDef(className, ImmutableList.<String>of(),
createRequiredAttrDef(attrType, DataTypes.STRING_TYPE));
String traitName = random();
HierarchicalTypeDefinition<TraitType> traitType = createTraitTypeDef(traitName,
ImmutableList.<String>of(), createRequiredAttrDef(attrType, DataTypes.INT_TYPE));
ts.defineTypes(ImmutableList.of(structType), ImmutableList.of(traitType), ImmutableList.of(classType));
}
}
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.metadata.typesystem.types;
import com.google.common.collect.ImmutableList;
import org.apache.hadoop.metadata.typesystem.types.utils.TypesUtil;
import org.testng.annotations.DataProvider;
import org.testng.annotations.Test;
public class ValidationTest {
@DataProvider(name = "attributeData")
private Object[][] createAttributeData() {
return new String[][]{
{null, "type"}, {"", "type"}, {"name", null}, {"name", ""}};
}
@Test (dataProvider = "attributeData", expectedExceptions = {IllegalArgumentException.class})
public void testAttributes(String name, String type) {
TypesUtil.createRequiredAttrDef(name, type);
}
@DataProvider(name = "enumValueData")
private Object[][] createEnumValueData() {
return new String[][]{{null}, {""}};
}
@Test (dataProvider = "enumValueData", expectedExceptions = {IllegalArgumentException.class})
public void testEnumValue(String name) {
new EnumValue(name, 1);
}
@DataProvider(name = "enumTypeData")
private Object[][] createEnumTypeData() {
EnumValue value = new EnumValue("name", 1);
return new Object[][]{{null, value}, {"", value}, {"name"}};
}
@Test (dataProvider = "enumTypeData", expectedExceptions = {IllegalArgumentException.class})
public void testEnumType(String name, EnumValue... values) {
new EnumTypeDefinition(name, values);
}
@DataProvider(name = "structTypeData")
private Object[][] createStructTypeData() {
AttributeDefinition value = TypesUtil.createRequiredAttrDef("name", "type");
return new Object[][]{{null, value}, {"", value}, {"name"}};
}
@Test (dataProvider = "structTypeData", expectedExceptions = {IllegalArgumentException.class})
public void testStructType(String name, AttributeDefinition... values) {
new StructTypeDefinition(name, values);
}
@DataProvider(name = "classTypeData")
private Object[][] createClassTypeData() {
return new Object[][]{{null}, {""}};
}
@Test (dataProvider = "classTypeData", expectedExceptions = {IllegalArgumentException.class})
public void testClassType(String name) {
AttributeDefinition value = TypesUtil.createRequiredAttrDef("name", "type");;
TypesUtil.createClassTypeDef(name, ImmutableList.of("super"), value);
}
@Test (dataProvider = "classTypeData", expectedExceptions = {IllegalArgumentException.class})
public void testTraitType(String name) {
AttributeDefinition value = TypesUtil.createRequiredAttrDef("name", "type");;
TypesUtil.createTraitTypeDef(name, ImmutableList.of("super"), value);
}
@Test
public void testValidTypes() {
AttributeDefinition attribute = TypesUtil.createRequiredAttrDef("name", "type");
//class with no attributes
TypesUtil.createClassTypeDef("name", ImmutableList.of("super"));
//class with no super types
TypesUtil.createClassTypeDef("name", ImmutableList.<String>of(), attribute);
//trait with no attributes
TypesUtil.createTraitTypeDef("name", ImmutableList.of("super"));
//trait with no super types
TypesUtil.createTraitTypeDef("name", ImmutableList.<String>of(), attribute);
}
}
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.metadata.typesystem.builders
import org.apache.hadoop.metadata.MetadataException
import org.apache.hadoop.metadata.typesystem.types.{Multiplicity, ClassType, TypeSystem}
import org.scalatest.{BeforeAndAfterAll, FunSuite}
class MultiplicityTest extends FunSuite with BeforeAndAfterAll {
override def beforeAll() = {
TypeSystem.getInstance().reset()
val b = new TypesBuilder
import b._
val tDef = types {
_trait("Dimension") {}
_trait("PII") {}
_trait("Metric") {}
_trait("ETL") {}
_trait("JdbcAccess") {}
_class("DB") {
"name" ~ (string, required, indexed, unique)
"owner" ~ (string)
"createTime" ~ (int)
}
_class("StorageDesc") {
"inputFormat" ~ (string, required)
"outputFormat" ~ (string, required)
}
_class("Column") {
"name" ~ (string, required)
"dataType" ~ (string, required)
"sd" ~ ("StorageDesc", required)
}
_class("Table", List()) {
"name" ~ (string, required, indexed)
"db" ~ ("DB", required)
"sd" ~ ("StorageDesc", required)
}
_class("LoadProcess") {
"name" ~ (string, required)
"inputTables" ~ (array("Table"), collection)
"outputTable" ~ ("Table", required)
}
_class("View") {
"name" ~ (string, required)
"inputTables" ~ (array("Table"), collection)
}
_class("AT") {
"name" ~ (string, required)
"stringSet" ~ (array("string"), multiplicty(0, Int.MaxValue, true))
}
}
TypeSystem.getInstance().defineTypes(tDef)
}
test("test1") {
val b = new InstanceBuilder
import b._
val instances = b create {
val a = instance("AT") { // use instance to create Referenceables. use closure to
// set attributes of instance
'name ~ "A1" // use '~' to set attributes. Use a Symbol (names starting with ') for
'stringSet ~ Seq("a", "a")
}
}
val ts = TypeSystem.getInstance()
import scala.collection.JavaConversions._
val typedInstances = instances.map { i =>
val iTyp = ts.getDataType(classOf[ClassType], i.getTypeName)
iTyp.convert(i, Multiplicity.REQUIRED)
}
typedInstances.foreach { i =>
println(i)
}
}
test("WrongMultiplicity") {
val b = new TypesBuilder
import b._
val tDef = types {
_class("Wrong") {
"name" ~ (string, required)
"stringSet" ~ (string, multiplicty(0, Int.MaxValue, true))
}
}
val me = intercept[MetadataException] {
TypeSystem.getInstance().defineTypes(tDef)
}
assert("A multiplicty of more than one requires a collection type for attribute 'stringSet'" == me.getMessage)
}
}
......@@ -44,18 +44,18 @@ class SerializationTest extends BaseTest {
val s: Struct = BaseTest.createStruct()
val ts: ITypedStruct = structType.convert(s, Multiplicity.REQUIRED)
Assert.assertEquals(ts.toString, "{\n\ta : \t1\n\tb : \ttrue\n\tc : \t1\n\td : \t2\n\te : \t1\n\tf : \t1\n\tg : \t1\n\th : \t1.0\n\ti : \t1.0\n\tj : \t1\n\tk : \t1\n\tl : \t2014-12-11\n\tm : \t[1, 1]\n\tn : \t[1.1, 1.1]\n\to : \t{b=2.0, a=1.0}\n}")
Assert.assertEquals(ts.toString, "{\n\ta : \t1\n\tb : \ttrue\n\tc : \t1\n\td : \t2\n\te : \t1\n\tf : \t1\n\tg : \t1\n\th : \t1.0\n\ti : \t1.0\n\tj : \t1\n\tk : \t1\n\tl : \t" + BaseTest.TEST_DATE + "\n\tm : \t[1, 1]\n\tn : \t[1.1, 1.1]\n\to : \t{b=2.0, a=1.0}\n}")
implicit val formats = org.json4s.native.Serialization.formats(NoTypeHints) + new TypedStructSerializer +
new BigDecimalSerializer + new BigIntegerSerializer
//Json representation
val ser = swrite(ts)
Assert.assertEquals(ser, "{\"$typeName$\":\"t1\",\"e\":1,\"n\":[1.1,1.1],\"h\":1.0,\"b\":true,\"k\":1,\"j\":1,\"d\":2,\"m\":[1,1],\"g\":1,\"a\":1,\"i\":1.0,\"c\":1,\"l\":\"2014-12-11T02:35:58.440Z\",\"f\":1,\"o\":{\"b\":2.0,\"a\":1.0}}")
Assert.assertEquals(ser, "{\"$typeName$\":\"t1\",\"e\":1,\"n\":[1.1,1.1],\"h\":1.0,\"b\":true,\"k\":1,\"j\":1,\"d\":2,\"m\":[1,1],\"g\":1,\"a\":1,\"i\":1.0,\"c\":1,\"l\":\"" + BaseTest.TEST_DATE + "\",\"f\":1,\"o\":{\"b\":2.0,\"a\":1.0}}")
// Typed Struct read back
val ts1 = read[StructInstance](ser)
Assert.assertEquals(ts1.toString, "{\n\ta : \t1\n\tb : \ttrue\n\tc : \t1\n\td : \t2\n\te : \t1\n\tf : \t1\n\tg : \t1\n\th : \t1.0\n\ti : \t1.0\n\tj : \t1\n\tk : \t1\n\tl : \t2014-12-11\n\tm : \t[1, 1]\n\tn : \t[1.100000000000000088817841970012523233890533447265625, 1.100000000000000088817841970012523233890533447265625]\n\to : \t{b=2.0, a=1.0}\n}")
Assert.assertEquals(ts1.toString, "{\n\ta : \t1\n\tb : \ttrue\n\tc : \t1\n\td : \t2\n\te : \t1\n\tf : \t1\n\tg : \t1\n\th : \t1.0\n\ti : \t1.0\n\tj : \t1\n\tk : \t1\n\tl : \t" + BaseTest.TEST_DATE + "\n\tm : \t[1, 1]\n\tn : \t[1.100000000000000088817841970012523233890533447265625, 1.100000000000000088817841970012523233890533447265625]\n\to : \t{b=2.0, a=1.0}\n}")
}
@Test def test2 {
......@@ -70,7 +70,7 @@ class SerializationTest extends BaseTest {
{"$typeName$":"t1","e":1,"n":[1.1,1.1],"h":1.0,"b":true,"k":1,"j":1,"d":2,"m":[1,1],"g":1,"a":1,"i":1.0,
"c":1,"l":"2014-12-03T19:38:55.053Z","f":1,"o":{"b":2.0,"a":1.0}}""")
// Typed Struct read from string
Assert.assertEquals(ts1.toString, "{\n\ta : \t1\n\tb : \ttrue\n\tc : \t1\n\td : \t2\n\te : \t1\n\tf : \t1\n\tg : \t1\n\th : \t1.0\n\ti : \t1.0\n\tj : \t1\n\tk : \t1\n\tl : \t2014-12-03\n\tm : \t[1, 1]\n\tn : \t[1.100000000000000088817841970012523233890533447265625, 1.100000000000000088817841970012523233890533447265625]\n\to : \t{b=2.0, a=1.0}\n}")
Assert.assertEquals(ts1.toString, "{\n\ta : \t1\n\tb : \ttrue\n\tc : \t1\n\td : \t2\n\te : \t1\n\tf : \t1\n\tg : \t1\n\th : \t1.0\n\ti : \t1.0\n\tj : \t1\n\tk : \t1\n\tl : \t2014-12-03T19:38:55.053Z\n\tm : \t[1, 1]\n\tn : \t[1.100000000000000088817841970012523233890533447265625, 1.100000000000000088817841970012523233890533447265625]\n\to : \t{b=2.0, a=1.0}\n}")
}
@Test def testTrait {
......
......@@ -104,6 +104,11 @@
</dependency>
<dependency>
<groupId>org.apache.commons</groupId>
<artifactId>commons-lang3</artifactId>
</dependency>
<dependency>
<groupId>com.tinkerpop.blueprints</groupId>
<artifactId>blueprints-core</artifactId>
</dependency>
......
......@@ -39,6 +39,7 @@ public final class Main {
private static final String APP_PATH = "app";
private static final String APP_PORT = "port";
private static final String METADATA_HOME = "metadata.home";
private static final String METADATA_LOG_DIR = "metadata.log.dir";
/**
* Prevent users from constructing this.
......@@ -87,6 +88,9 @@ public final class Main {
if (System.getProperty(METADATA_HOME) == null) {
System.setProperty(METADATA_HOME, "target");
}
if (System.getProperty(METADATA_LOG_DIR) == null) {
System.setProperty(METADATA_LOG_DIR, "target/logs");
}
}
public static String getProjectVersion(PropertiesConfiguration buildConfiguration) {
......
......@@ -128,9 +128,7 @@ public class QuickStart {
);
HierarchicalTypeDefinition<ClassType> tblClsDef =
TypesUtil.createClassTypeDef(TABLE_TYPE, null,
attrDef("name", DataTypes.STRING_TYPE),
attrDef("description", DataTypes.STRING_TYPE),
TypesUtil.createClassTypeDef(TABLE_TYPE, ImmutableList.of("DataSet"),
new AttributeDefinition("db", DATABASE_TYPE,
Multiplicity.REQUIRED, false, null),
new AttributeDefinition("sd", STORAGE_DESC_TYPE,
......@@ -149,8 +147,7 @@ public class QuickStart {
);
HierarchicalTypeDefinition<ClassType> loadProcessClsDef =
TypesUtil.createClassTypeDef(LOAD_PROCESS_TYPE, null,
attrDef("name", DataTypes.STRING_TYPE),
TypesUtil.createClassTypeDef(LOAD_PROCESS_TYPE, ImmutableList.of("Process"),
attrDef("userName", DataTypes.STRING_TYPE),
attrDef("startTime", DataTypes.INT_TYPE),
attrDef("endTime", DataTypes.INT_TYPE),
......@@ -273,7 +270,7 @@ public class QuickStart {
"sales fact daily materialized view", reportingDB, sd,
"Joe BI", "Managed", salesFactColumns, "Metric");
loadProcess("loadSalesDaily", "John ETL",
loadProcess("loadSalesDaily", "hive query for daily summary", "John ETL",
ImmutableList.of(salesFact, timeDim), ImmutableList.of(salesFactDaily),
"create table as select ", "plan", "id", "graph",
"ETL");
......@@ -288,7 +285,7 @@ public class QuickStart {
"sales fact monthly materialized view",
reportingDB, sd, "Jane BI", "Managed", salesFactColumns, "Metric");
loadProcess("loadSalesMonthly", "John ETL",
loadProcess("loadSalesMonthly", "hive query for monthly summary", "John ETL",
ImmutableList.of(salesFactDaily), ImmutableList.of(salesFactMonthly),
"create table as select ", "plan", "id", "graph",
"ETL");
......@@ -300,7 +297,7 @@ public class QuickStart {
String entityJSON = InstanceSerialization.toJson(referenceable, true);
System.out.println("Submitting new entity= " + entityJSON);
JSONObject jsonObject = metadataServiceClient.createEntity(entityJSON);
String guid = jsonObject.getString(MetadataServiceClient.RESULTS);
String guid = jsonObject.getString(MetadataServiceClient.GUID);
System.out.println("created instance for type " + typeName + ", guid: " + guid);
// return the Id for created instance with guid
......@@ -362,7 +359,7 @@ public class QuickStart {
return createInstance(referenceable);
}
Id loadProcess(String name, String user,
Id loadProcess(String name, String description, String user,
List<Id> inputTables,
List<Id> outputTables,
String queryText, String queryPlan,
......@@ -370,6 +367,7 @@ public class QuickStart {
String... traitNames) throws Exception {
Referenceable referenceable = new Referenceable(LOAD_PROCESS_TYPE, traitNames);
referenceable.set("name", name);
referenceable.set("description", description);
referenceable.set("user", user);
referenceable.set("startTime", System.currentTimeMillis());
referenceable.set("endTime", System.currentTimeMillis() + 10000);
......@@ -465,6 +463,8 @@ public class QuickStart {
*/
"Table where name=\"sales_fact\", columns",
"Table where name=\"sales_fact\", columns as column select column.name, column.dataType, column.comment",
"from DataSet",
"from Process",
};
}
......
......@@ -21,6 +21,7 @@ package org.apache.hadoop.metadata.web.filters;
import com.google.inject.Singleton;
import org.apache.commons.configuration.ConfigurationException;
import org.apache.commons.configuration.PropertiesConfiguration;
import org.apache.hadoop.metadata.security.SecurityProperties;
import org.apache.hadoop.metadata.PropertiesUtil;
import org.apache.hadoop.security.SecurityUtil;
import org.apache.hadoop.security.authentication.server.AuthenticationFilter;
......@@ -44,7 +45,6 @@ import java.util.Properties;
public class MetadataAuthenticationFilter extends AuthenticationFilter {
private static final Logger LOG = LoggerFactory.getLogger(MetadataAuthenticationFilter.class);
static final String PREFIX = "metadata.http.authentication.";
static final String BIND_ADDRESS = "metadata.server.bind.address";
@Override
protected Properties getConfiguration(String configPrefix, FilterConfig filterConfig) throws ServletException {
......@@ -77,7 +77,7 @@ public class MetadataAuthenticationFilter extends AuthenticationFilter {
}
//Resolve _HOST into bind address
String bindAddress = config.getProperty(BIND_ADDRESS);
String bindAddress = configuration.getString(SecurityProperties.BIND_ADDRESS);
if (bindAddress == null) {
LOG.info("No host name configured. Defaulting to local host name.");
try {
......
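The filter now reads the bind-address key from a shared constants class instead of its own BIND_ADDRESS literal. The class itself is not part of this diff; a minimal sketch of what it plausibly holds, reusing the literal removed above:

    package org.apache.hadoop.metadata.security;

    // Sketch only: shared holder for security configuration keys.
    public final class SecurityProperties {
        // same literal the filter previously kept locally
        public static final String BIND_ADDRESS = "metadata.server.bind.address";
        private SecurityProperties() {}
    }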
......@@ -31,6 +31,7 @@ import com.sun.jersey.guice.spi.container.servlet.GuiceContainer;
import org.apache.commons.configuration.ConfigurationException;
import org.apache.commons.configuration.PropertiesConfiguration;
import org.apache.hadoop.metadata.MetadataException;
import org.apache.hadoop.metadata.MetadataServiceClient;
import org.apache.hadoop.metadata.PropertiesUtil;
import org.apache.hadoop.metadata.RepositoryMetadataModule;
import org.apache.hadoop.metadata.repository.typestore.ITypeStore;
......@@ -83,7 +84,7 @@ public class GuiceServletConfig extends GuiceServletContextListener {
Map<String, String> params = new HashMap<>();
params.put(PackagesResourceConfig.PROPERTY_PACKAGES, packages);
serve("/api/metadata/*").with(GuiceContainer.class, params);
serve("/" + MetadataServiceClient.BASE_URI + "*").with(GuiceContainer.class, params);
}
private void configureAuthenticationFilter() throws ConfigurationException {
......
......@@ -21,14 +21,17 @@ import org.apache.commons.configuration.PropertiesConfiguration;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.metadata.MetadataException;
import org.apache.hadoop.metadata.PropertiesUtil;
import org.apache.hadoop.metadata.security.SecurityProperties;
import org.apache.hadoop.security.SecurityUtil;
import org.apache.hadoop.security.UserGroupInformation;
import org.apache.hadoop.util.Shell;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import javax.servlet.ServletException;
import java.io.IOException;
import java.net.InetAddress;
import java.net.UnknownHostException;
/**
* A class capable of performing a simple or Kerberos login.
......@@ -73,17 +76,30 @@ public class LoginProcessor {
if (authenticationMethod == UserGroupInformation.AuthenticationMethod.SIMPLE) {
UserGroupInformation.loginUserFromSubject(null);
} else if (authenticationMethod == UserGroupInformation.AuthenticationMethod.KERBEROS) {
String bindAddress = getHostname(configuration);
UserGroupInformation.loginUserFromKeytab(
getServerPrincipal(configuration.getString(AUTHENTICATION_PRINCIPAL)),
getServerPrincipal(configuration.getString(AUTHENTICATION_PRINCIPAL), bindAddress),
configuration.getString(AUTHENTICATION_KEYTAB));
}
LOG.info("Logged in user {}", UserGroupInformation.getLoginUser());
} catch (IOException e) {
throw new IllegalStateException(String.format("Unable to perform %s login.", authenticationMethod), e);
}
}
private String getHostname(PropertiesConfiguration configuration) {
String bindAddress = configuration.getString(SecurityProperties.BIND_ADDRESS);
if (bindAddress == null) {
LOG.info("No host name configured. Defaulting to local host name.");
try {
bindAddress = InetAddress.getLocalHost().getHostName();
} catch (UnknownHostException e) {
throw new IllegalStateException(e);
}
}
return bindAddress;
}
protected void setupHadoopConfiguration(Configuration hadoopConfig, PropertiesConfiguration configuration) {
String authMethod;
authMethod = configuration != null ? configuration.getString(AUTHENTICATION_METHOD) : null;
......@@ -104,8 +120,8 @@ public class LoginProcessor {
* @return the service principal.
* @throws IOException
*/
private String getServerPrincipal(String principal) throws IOException {
return SecurityUtil.getServerPrincipal(principal, InetAddress.getLocalHost().getHostName());
private String getServerPrincipal(String principal, String host) throws IOException {
return SecurityUtil.getServerPrincipal(principal, host);
}
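Passing the resolved bind address through means a principal configured with the _HOST placeholder expands against the configured host, not unconditionally against InetAddress.getLocalHost(). Standard Hadoop SecurityUtil behavior, host name assumed for illustration:

    // _HOST in the configured principal is replaced with the supplied host name:
    String principal = SecurityUtil.getServerPrincipal("metadata/_HOST@EXAMPLE.COM", "gateway01.example.com");
    // principal == "metadata/gateway01.example.com@EXAMPLE.COM"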
/**
......
......@@ -33,12 +33,12 @@ import javax.inject.Inject;
import javax.inject.Singleton;
import javax.servlet.http.HttpServletRequest;
import javax.ws.rs.*;
import javax.ws.rs.core.Context;
import javax.ws.rs.core.MediaType;
import javax.ws.rs.core.Response;
import javax.ws.rs.core.*;
import java.io.IOException;
import java.net.URI;
import java.util.List;
/**
* Entity management operations exposed as a REST API.
*
......@@ -50,12 +50,13 @@ import java.util.List;
public class EntityResource {
private static final Logger LOG = LoggerFactory.getLogger(EntityResource.class);
private static final String GUID = "GUID";
private static final String TRAIT_NAME = "traitName";
private final MetadataService metadataService;
@Context
UriInfo uriInfo;
/**
* Created by the Guice ServletModule and injected with the
* configured MetadataService.
......@@ -79,11 +80,16 @@ public class EntityResource {
LOG.debug("submitting entity {} ", entity);
final String guid = metadataService.createEntity(entity);
UriBuilder ub = uriInfo.getAbsolutePathBuilder();
URI locationURI = ub.path(guid).build();
JSONObject response = new JSONObject();
response.put(MetadataServiceClient.REQUEST_ID, Servlets.getRequestId());
response.put(MetadataServiceClient.RESULTS, guid);
response.put(MetadataServiceClient.GUID, guid);
response.put(MetadataServiceClient.DEFINITION, entity);
return Response.ok(response).build();
return Response.created(locationURI).entity(response).build();
} catch (MetadataException | IOException | IllegalArgumentException e) {
LOG.error("Unable to persist entity instance", e);
throw new WebApplicationException(
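Entity creation now answers 201 Created with a Location header pointing at the new resource, instead of a plain 200. A hedged Jersey 1.x client sketch (the entities path and WebResource setup are assumptions, mirroring the integration tests later in this commit):

    // Sketch only: create an entity and pick up the Location header.
    ClientResponse clientResponse = service.path("api/metadata/entities")
            .accept(MediaType.APPLICATION_JSON)
            .type(MediaType.APPLICATION_JSON)
            .method(HttpMethod.POST, ClientResponse.class, entityJSON);
    Assert.assertEquals(clientResponse.getStatus(), Response.Status.CREATED.getStatusCode());
    URI location = clientResponse.getLocation(); // .../entities/{guid}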
......@@ -104,38 +110,32 @@ public class EntityResource {
@Path("{guid}")
@Produces(MediaType.APPLICATION_JSON)
public Response getEntityDefinition(@PathParam("guid") String guid) {
Preconditions.checkNotNull(guid, "Entity GUID cannot be null");
try {
LOG.debug("Fetching entity definition for guid={} ", guid);
final String entityDefinition = metadataService.getEntityDefinition(guid);
JSONObject response = new JSONObject();
response.put(MetadataServiceClient.REQUEST_ID, Servlets.getRequestId());
response.put(GUID, guid);
response.put(MetadataServiceClient.GUID, guid);
Response.Status status = Response.Status.NOT_FOUND;
if (entityDefinition != null) {
response.put(MetadataServiceClient.RESULTS, entityDefinition);
response.put(MetadataServiceClient.DEFINITION, entityDefinition);
status = Response.Status.OK;
} else {
response.put(MetadataServiceClient.ERROR, JSONObject.quote(String.format("An entity with GUID={%s} does not exist", guid)));
}
return Response.status(status).entity(response).build();
} catch (MetadataException | IllegalArgumentException e) {
LOG.error("An entity with GUID={} does not exist", guid, e);
throw new WebApplicationException(e, Response
.status(Response.Status.NOT_FOUND)
.entity(e.getMessage())
.type(MediaType.APPLICATION_JSON)
.build());
throw new WebApplicationException(
Servlets.getErrorResponse(e, Response.Status.NOT_FOUND));
} catch (JSONException e) {
LOG.error("Unable to get instance definition for GUID {}", guid, e);
throw new WebApplicationException(e, Response
.status(Response.Status.INTERNAL_SERVER_ERROR)
.entity(e.getMessage())
.type(MediaType.APPLICATION_JSON)
.build());
throw new WebApplicationException(
Servlets.getErrorResponse(e, Response.Status.INTERNAL_SERVER_ERROR));
}
}
......@@ -143,16 +143,13 @@ public class EntityResource {
* Gets the list of entities for a given entity type.
*
* @param entityType name of a type which is unique
* @param offset starting offset for pagination
* @param resultsPerPage number of results for pagination
*/
@GET
@Produces(MediaType.APPLICATION_JSON)
public Response getEntityListByType(@QueryParam("type") String entityType,
@DefaultValue("0") @QueryParam("offset") Integer offset,
@QueryParam("numResults") Integer resultsPerPage) {
Preconditions.checkNotNull(entityType, "Entity type cannot be null");
public Response getEntityListByType(@QueryParam("type") String entityType) {
try {
Preconditions.checkNotNull(entityType, "Entity type cannot be null");
LOG.debug("Fetching entity list for type={} ", entityType);
final List<String> entityList = metadataService.getEntityList(entityType);
......@@ -160,9 +157,13 @@ public class EntityResource {
response.put(MetadataServiceClient.REQUEST_ID, Servlets.getRequestId());
response.put("type", entityType);
response.put(MetadataServiceClient.RESULTS, new JSONArray(entityList));
response.put(MetadataServiceClient.TOTAL_SIZE, entityList.size());
response.put(MetadataServiceClient.COUNT, entityList.size());
return Response.ok(response).build();
} catch (NullPointerException e) {
LOG.error("Entity type cannot be null", e);
throw new WebApplicationException(
Servlets.getErrorResponse(e, Response.Status.BAD_REQUEST));
} catch (MetadataException | IllegalArgumentException e) {
LOG.error("Unable to get entity list for type {}", entityType, e);
throw new WebApplicationException(
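Moving the Preconditions check inside the try block turns a missing ?type query parameter into a structured 400 response instead of an unhandled NullPointerException. A sketch mirroring the updated integration test below (resource path assumed):

    // Sketch only: GET without ?type now yields 400 plus an error payload.
    ClientResponse clientResponse = service.path("api/metadata/entities")
            .accept(MediaType.APPLICATION_JSON)
            .method(HttpMethod.GET, ClientResponse.class);
    Assert.assertEquals(clientResponse.getStatus(), Response.Status.BAD_REQUEST.getStatusCode());
    JSONObject error = new JSONObject(clientResponse.getEntity(String.class));
    Assert.assertNotNull(error.get(MetadataServiceClient.ERROR));
    Assert.assertNotNull(error.get(MetadataServiceClient.STACKTRACE));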
......@@ -215,17 +216,15 @@ public class EntityResource {
@Path("{guid}/traits")
@Produces(MediaType.APPLICATION_JSON)
public Response getTraitNames(@PathParam("guid") String guid) {
Preconditions.checkNotNull(guid, "Entity GUID cannot be null");
try {
LOG.debug("Fetching trait names for entity={}", guid);
final List<String> traitNames = metadataService.getTraitNames(guid);
JSONObject response = new JSONObject();
response.put(MetadataServiceClient.REQUEST_ID, Servlets.getRequestId());
response.put(GUID, guid);
response.put(MetadataServiceClient.GUID, guid);
response.put(MetadataServiceClient.RESULTS, new JSONArray(traitNames));
response.put(MetadataServiceClient.TOTAL_SIZE, traitNames.size());
response.put(MetadataServiceClient.COUNT, traitNames.size());
return Response.ok(response).build();
} catch (MetadataException | IllegalArgumentException e) {
......@@ -250,19 +249,20 @@ public class EntityResource {
@Produces(MediaType.APPLICATION_JSON)
public Response addTrait(@Context HttpServletRequest request,
@PathParam("guid") String guid) {
Preconditions.checkNotNull(guid, "Entity GUID cannot be null");
try {
final String traitDefinition = Servlets.getRequestPayload(request);
LOG.debug("Adding trait={} for entity={} ", traitDefinition, guid);
metadataService.addTrait(guid, traitDefinition);
UriBuilder ub = uriInfo.getAbsolutePathBuilder();
URI locationURI = ub.path(guid).build();
JSONObject response = new JSONObject();
response.put(MetadataServiceClient.REQUEST_ID, Servlets.getRequestId());
response.put(GUID, guid);
response.put("traitInstance", traitDefinition);
response.put(MetadataServiceClient.GUID, guid);
response.put(MetadataServiceClient.DEFINITION, traitDefinition);
return Response.ok(response).build();
return Response.created(locationURI).entity(response).build();
} catch (MetadataException | IOException | IllegalArgumentException e) {
LOG.error("Unable to add trait for entity={}", guid, e);
throw new WebApplicationException(
......@@ -287,16 +287,13 @@ public class EntityResource {
public Response deleteTrait(@Context HttpServletRequest request,
@PathParam("guid") String guid,
@PathParam(TRAIT_NAME) String traitName) {
Preconditions.checkNotNull(guid, "Entity GUID cannot be null");
Preconditions.checkNotNull(traitName, "Trait name cannot be null");
LOG.debug("Deleting trait={} from entity={} ", traitName, guid);
try {
metadataService.deleteTrait(guid, traitName);
JSONObject response = new JSONObject();
response.put(MetadataServiceClient.REQUEST_ID, Servlets.getRequestId());
response.put(GUID, guid);
response.put(MetadataServiceClient.GUID, guid);
response.put(TRAIT_NAME, traitName);
return Response.ok(response).build();
......
......@@ -176,7 +176,7 @@ public class HiveLineageResource {
LOG.info("Fetching lineage outputs graph for tableName={}", tableName);
try {
final String jsonResult = lineageService.getOutputs(tableName);
final String jsonResult = lineageService.getOutputsGraph(tableName);
JSONObject response = new JSONObject();
response.put(MetadataServiceClient.REQUEST_ID, Servlets.getRequestId());
......
......@@ -165,7 +165,7 @@ public class RexsterGraphResource {
JSONObject response = new JSONObject();
response.put(MetadataServiceClient.RESULTS, new JSONObject(vertexProperties));
response.put(MetadataServiceClient.TOTAL_SIZE, vertexProperties.size());
response.put(MetadataServiceClient.COUNT, vertexProperties.size());
return Response.ok(response).build();
} catch (JSONException e) {
throw new WebApplicationException(
......@@ -276,7 +276,7 @@ public class RexsterGraphResource {
if (!countOnly) {
response.put(MetadataServiceClient.RESULTS, elementArray);
}
response.put(MetadataServiceClient.TOTAL_SIZE, counter);
response.put(MetadataServiceClient.COUNT, counter);
return Response.ok(response).build();
}
......@@ -298,7 +298,7 @@ public class RexsterGraphResource {
String message = "Edge with [" + edgeId + "] cannot be found.";
LOG.info(message);
throw new WebApplicationException(Response.status(Response.Status.NOT_FOUND)
.entity(JSONObject.quote(message)).build());
.entity(Servlets.escapeJsonString(message)).build());
}
JSONObject response = new JSONObject();
......@@ -323,7 +323,7 @@ public class RexsterGraphResource {
JSONObject response = new JSONObject();
response.put(MetadataServiceClient.RESULTS, vertexArray);
response.put(MetadataServiceClient.TOTAL_SIZE, counter);
response.put(MetadataServiceClient.COUNT, counter);
return response;
}
......@@ -389,7 +389,7 @@ public class RexsterGraphResource {
countOnly = false;
} else {
throw new WebApplicationException(Response.status(Response.Status.BAD_REQUEST)
.entity(JSONObject.quote(directionSegment + " segment was invalid."))
.entity(Servlets.escapeJsonString(directionSegment + " segment was invalid."))
.build());
}
}
......
......@@ -18,7 +18,7 @@
package org.apache.hadoop.metadata.web.resources;
import com.google.common.base.Preconditions;
import com.sun.jersey.api.client.ClientResponse;
import org.apache.hadoop.metadata.MetadataException;
import org.apache.hadoop.metadata.MetadataServiceClient;
import org.apache.hadoop.metadata.services.MetadataService;
......@@ -33,11 +33,22 @@ import org.slf4j.LoggerFactory;
import javax.inject.Inject;
import javax.inject.Singleton;
import javax.servlet.http.HttpServletRequest;
import javax.ws.rs.*;
import javax.ws.rs.Consumes;
import javax.ws.rs.DefaultValue;
import javax.ws.rs.GET;
import javax.ws.rs.POST;
import javax.ws.rs.Path;
import javax.ws.rs.PathParam;
import javax.ws.rs.Produces;
import javax.ws.rs.QueryParam;
import javax.ws.rs.WebApplicationException;
import javax.ws.rs.core.Context;
import javax.ws.rs.core.MediaType;
import javax.ws.rs.core.Response;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
/**
* This class provides a RESTful API for Types.
......@@ -74,13 +85,22 @@ public class TypesResource {
final String typeDefinition = Servlets.getRequestPayload(request);
LOG.debug("creating type with definition {} ", typeDefinition);
JSONObject typesAdded = metadataService.createType(typeDefinition);
JSONObject typesJson = metadataService.createType(typeDefinition);
final JSONArray typesJsonArray = typesJson.getJSONArray(MetadataServiceClient.TYPES);
List<Map<String, String>> typesAddedList = new ArrayList<>();
for (int i = 0; i < typesJsonArray.length(); i++) {
final String name = typesJsonArray.getString(i);
typesAddedList.add(
new HashMap<String, String>() {{
put(MetadataServiceClient.NAME, name);
}});
}
JSONObject response = new JSONObject();
response.put("types", typesAdded);
response.put(MetadataServiceClient.REQUEST_ID, Servlets.getRequestId());
return Response.ok(response).build();
response.put(MetadataServiceClient.TYPES, typesAddedList);
return Response.status(ClientResponse.Status.CREATED).entity(response).build();
} catch (Exception e) {
LOG.error("Unable to persist types", e);
throw new WebApplicationException(
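The create-types response both switches to 201 Created and restructures the types key from a raw JSONObject into an array of {name: ...} entries. A sketch of reading it back, using the constants from the code above (the response-body variable is assumed):

    // Sketch only: unpack the names from the new CREATED response.
    JSONObject response = new JSONObject(responseAsString);
    JSONArray typeEntries = response.getJSONArray(MetadataServiceClient.TYPES);
    for (int i = 0; i < typeEntries.length(); i++) {
        // each entry is one of the types just registered
        String typeName = typeEntries.getJSONObject(i).getString(MetadataServiceClient.NAME);
    }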
......@@ -103,7 +123,7 @@ public class TypesResource {
JSONObject response = new JSONObject();
response.put("typeName", typeName);
response.put("definition", typeDefinition);
response.put(MetadataServiceClient.DEFINITION, typeDefinition);
response.put(MetadataServiceClient.REQUEST_ID, Servlets.getRequestId());
return Response.ok(response).build();
......@@ -120,13 +140,18 @@ public class TypesResource {
/**
* Gets the list of type names registered in the type system,
* optionally filtered by category.
*
* @param type name of a constant of the enum
* org.apache.hadoop.metadata.typesystem.types.DataTypes.TypeCategory;
* typically one of all, TRAIT, CLASS, ENUM, STRUCT
* @return type names response payload as json
*/
@GET
@Produces(MediaType.APPLICATION_JSON)
public Response getTypesByFilter(@Context HttpServletRequest request,
@DefaultValue(TYPE_ALL) @QueryParam("type") String type) {
try {
List<String> result = null;
List<String> result;
if (TYPE_ALL.equals(type)) {
result = metadataService.getTypeNamesList();
} else {
......@@ -136,11 +161,11 @@ public class TypesResource {
JSONObject response = new JSONObject();
response.put(MetadataServiceClient.RESULTS, new JSONArray(result));
response.put(MetadataServiceClient.TOTAL_SIZE, result.size());
response.put(MetadataServiceClient.COUNT, result.size());
response.put(MetadataServiceClient.REQUEST_ID, Servlets.getRequestId());
return Response.ok(response).build();
} catch(IllegalArgumentException ie) {
} catch (IllegalArgumentException ie) {
LOG.error("Unsupported typeName while retrieving type list {}", type);
throw new WebApplicationException(
Servlets.getErrorResponse("Unsupported type " + type, Response.Status.BAD_REQUEST));
......
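For reference, the filter accepts the TypeCategory names documented in the javadoc above; a hedged Jersey 1.x sketch (types path assumed):

    // Sketch only: no ?type means "all"; otherwise pass a TypeCategory name.
    ClientResponse allTypes = service.path("api/metadata/types")
            .accept(MediaType.APPLICATION_JSON)
            .method(HttpMethod.GET, ClientResponse.class);
    ClientResponse traitsOnly = service.path("api/metadata/types")
            .queryParam("type", "TRAIT")
            .accept(MediaType.APPLICATION_JSON)
            .method(HttpMethod.GET, ClientResponse.class);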
......@@ -18,14 +18,22 @@
package org.apache.hadoop.metadata.web.util;
import com.google.common.base.Preconditions;
import org.apache.commons.io.IOUtils;
import org.apache.commons.lang.StringUtils;
import org.apache.commons.lang3.StringEscapeUtils;
import org.apache.commons.lang3.StringUtils;
import org.apache.hadoop.metadata.MetadataServiceClient;
import org.apache.hadoop.metadata.ParamChecker;
import org.codehaus.jettison.json.JSONException;
import org.codehaus.jettison.json.JSONObject;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import javax.servlet.http.HttpServletRequest;
import javax.ws.rs.core.MediaType;
import javax.ws.rs.core.Response;
import java.io.IOException;
import java.io.PrintWriter;
import java.io.StringWriter;
/**
......@@ -33,6 +41,9 @@ import java.io.StringWriter;
*/
public final class Servlets {
public static final String QUOTE = "\"";
private static final Logger LOG = LoggerFactory.getLogger(Servlets.class);
private Servlets() {
/* singleton */
}
......@@ -93,13 +104,34 @@ public final class Servlets {
}
public static Response getErrorResponse(Throwable e, Response.Status status) {
return getErrorResponse(e.getMessage(), status);
Response response = getErrorResponse(e.getMessage(), status);
JSONObject responseJson = (JSONObject) response.getEntity();
try {
responseJson.put(MetadataServiceClient.STACKTRACE, printStackTrace(e));
} catch (JSONException e1) {
LOG.warn("Could not construct error Json rensponse", e1);
}
return response;
}
private static String printStackTrace(Throwable t) {
StringWriter sw = new StringWriter();
t.printStackTrace(new PrintWriter(sw));
return sw.toString();
}
public static Response getErrorResponse(String message, Response.Status status) {
JSONObject errorJson = new JSONObject();
Object errorEntity = Servlets.escapeJsonString(message);
try {
errorJson.put(MetadataServiceClient.ERROR, errorEntity);
errorEntity = errorJson;
} catch (JSONException jsonE) {
LOG.warn("Could not construct error Json rensponse", jsonE);
}
return Response
.status(status)
.entity(JSONObject.quote(message))
.entity(errorEntity)
.type(MediaType.APPLICATION_JSON)
.build();
}
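Error bodies are now structured JSON rather than a bare quoted string: a message under the ERROR key and, for Throwable-based failures, the full trace under STACKTRACE. A client-side sketch (the literal key strings behind the constants are not shown in this diff):

    // Sketch only: unpack the richer error entity.
    JSONObject error = new JSONObject(errorBodyAsString);
    String message = error.getString(MetadataServiceClient.ERROR);
    String trace = error.getString(MetadataServiceClient.STACKTRACE); // printStackTrace() output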
......@@ -113,4 +145,9 @@ public final class Servlets {
public static String getRequestId() {
return Thread.currentThread().getName();
}
public static String escapeJsonString(String inputStr) {
ParamChecker.notNull(inputStr, "Input String cannot be null");
return StringEscapeUtils.escapeJson(inputStr);
}
}
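Note the behavioral difference from the JSONObject.quote calls this replaces: escapeJson escapes quotes, backslashes and control characters but does not wrap the result in surrounding double quotes. Standard commons-lang3 behavior, for illustration:

    String raw = "line1\n\"quoted\"";
    String safe = StringEscapeUtils.escapeJson(raw);
    // safe is: line1\n\"quoted\"  -- escaped, but without enclosing quotes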
......@@ -29,7 +29,6 @@ metadata.graph.index.search.elasticsearch.local-mode=true
metadata.graph.index.search.elasticsearch.create.sleep=2000
######### Hive Lineage Configs #########
# This model follows the quick-start guide
metadata.lineage.hive.table.type.name=hive_table
metadata.lineage.hive.column.type.name=hive_column
metadata.lineage.hive.table.column.name=columns
......
......@@ -76,7 +76,7 @@ public abstract class BaseResourceIT {
.accept(MediaType.APPLICATION_JSON)
.type(MediaType.APPLICATION_JSON)
.method(HttpMethod.POST, ClientResponse.class, typesAsJSON);
Assert.assertEquals(clientResponse.getStatus(), Response.Status.OK.getStatusCode());
Assert.assertEquals(clientResponse.getStatus(), Response.Status.CREATED.getStatusCode());
String responseAsString = clientResponse.getEntity(String.class);
Assert.assertNotNull(responseAsString);
......@@ -93,7 +93,7 @@ public abstract class BaseResourceIT {
String entityJSON = InstanceSerialization.toJson(referenceable, true);
System.out.println("Submitting new entity= " + entityJSON);
JSONObject jsonObject = serviceClient.createEntity(entityJSON);
String guid = jsonObject.getString(MetadataServiceClient.RESULTS);
String guid = jsonObject.getString(MetadataServiceClient.GUID);
System.out.println("created instance for type " + typeName + ", guid: " + guid);
// return the reference to created instance with guid
......
......@@ -21,6 +21,7 @@ package org.apache.hadoop.metadata.web.resources;
import com.google.common.collect.ImmutableList;
import com.sun.jersey.api.client.ClientResponse;
import com.sun.jersey.api.client.WebResource;
import org.apache.commons.lang.RandomStringUtils;
import org.apache.hadoop.metadata.MetadataServiceClient;
import org.apache.hadoop.metadata.typesystem.Referenceable;
import org.apache.hadoop.metadata.typesystem.Struct;
......@@ -147,7 +148,7 @@ public class EntityJerseyResourceIT extends BaseResourceIT {
JSONObject response = new JSONObject(responseAsString);
Assert.assertNotNull(response.get(MetadataServiceClient.REQUEST_ID));
final String definition = response.getString(MetadataServiceClient.RESULTS);
final String definition = response.getString(MetadataServiceClient.DEFINITION);
Assert.assertNotNull(definition);
LOG.debug("tableInstanceAfterGet = " + definition);
InstanceSerialization.fromJsonReferenceable(definition, true);
......@@ -176,7 +177,7 @@ public class EntityJerseyResourceIT extends BaseResourceIT {
private String getEntityDefinition(ClientResponse clientResponse) throws Exception {
Assert.assertEquals(clientResponse.getStatus(), Response.Status.OK.getStatusCode());
JSONObject response = new JSONObject(clientResponse.getEntity(String.class));
final String definition = response.getString(MetadataServiceClient.RESULTS);
final String definition = response.getString(MetadataServiceClient.DEFINITION);
Assert.assertNotNull(definition);
return definition;
......@@ -196,6 +197,10 @@ public class EntityJerseyResourceIT extends BaseResourceIT {
String responseAsString = clientResponse.getEntity(String.class);
Assert.assertNotNull(responseAsString);
JSONObject response = new JSONObject(responseAsString);
Assert.assertNotNull(response.get(MetadataServiceClient.ERROR));
Assert.assertNotNull(response.get(MetadataServiceClient.STACKTRACE));
}
@Test(dependsOnMethods = "testSubmitEntity")
......@@ -227,11 +232,14 @@ public class EntityJerseyResourceIT extends BaseResourceIT {
.accept(MediaType.APPLICATION_JSON)
.type(MediaType.APPLICATION_JSON)
.method(HttpMethod.GET, ClientResponse.class);
Assert.assertEquals(clientResponse.getStatus(),
Response.Status.BAD_REQUEST.getStatusCode());
Assert.assertEquals(clientResponse.getStatus(), Response.Status.BAD_REQUEST.getStatusCode());
String responseAsString = clientResponse.getEntity(String.class);
Assert.assertNotNull(responseAsString);
JSONObject response = new JSONObject(responseAsString);
Assert.assertNotNull(response.get(MetadataServiceClient.ERROR));
Assert.assertNotNull(response.get(MetadataServiceClient.STACKTRACE));
}
@Test
......@@ -311,15 +319,15 @@ public class EntityJerseyResourceIT extends BaseResourceIT {
.accept(MediaType.APPLICATION_JSON)
.type(MediaType.APPLICATION_JSON)
.method(HttpMethod.POST, ClientResponse.class, traitInstanceAsJSON);
Assert.assertEquals(clientResponse.getStatus(), Response.Status.OK.getStatusCode());
Assert.assertEquals(clientResponse.getStatus(), Response.Status.CREATED.getStatusCode());
String responseAsString = clientResponse.getEntity(String.class);
Assert.assertNotNull(responseAsString);
JSONObject response = new JSONObject(responseAsString);
Assert.assertNotNull(response.get(MetadataServiceClient.REQUEST_ID));
Assert.assertNotNull(response.get("GUID"));
Assert.assertNotNull(response.get("traitInstance"));
Assert.assertNotNull(response.get(MetadataServiceClient.GUID));
Assert.assertNotNull(response.get(MetadataServiceClient.DEFINITION));
}
@Test
......@@ -341,8 +349,7 @@ public class EntityJerseyResourceIT extends BaseResourceIT {
.accept(MediaType.APPLICATION_JSON)
.type(MediaType.APPLICATION_JSON)
.method(HttpMethod.POST, ClientResponse.class, traitInstanceAsJSON);
Assert.assertEquals(clientResponse.getStatus(),
Response.Status.BAD_REQUEST.getStatusCode());
Assert.assertEquals(clientResponse.getStatus(), Response.Status.BAD_REQUEST.getStatusCode());
}
@Test (dependsOnMethods = "testAddTrait")
......@@ -381,8 +388,43 @@ public class EntityJerseyResourceIT extends BaseResourceIT {
.accept(MediaType.APPLICATION_JSON)
.type(MediaType.APPLICATION_JSON)
.method(HttpMethod.DELETE, ClientResponse.class);
Assert.assertEquals(clientResponse.getStatus(),
Response.Status.BAD_REQUEST.getStatusCode());
Assert.assertEquals(clientResponse.getStatus(), Response.Status.BAD_REQUEST.getStatusCode());
String responseAsString = clientResponse.getEntity(String.class);
Assert.assertNotNull(responseAsString);
JSONObject response = new JSONObject(responseAsString);
Assert.assertNotNull(response.get(MetadataServiceClient.ERROR));
Assert.assertEquals(response.getString(MetadataServiceClient.ERROR), "trait=" + traitName + " should be defined in type system before it can be deleted");
Assert.assertNotNull(response.get(MetadataServiceClient.STACKTRACE));
}
private String random() {
return RandomStringUtils.random(10);
}
@Test
public void testUTF8() throws Exception {
String classType = random();
String attrName = random();
String attrValue = random();
HierarchicalTypeDefinition<ClassType> classTypeDefinition =
TypesUtil.createClassTypeDef(classType, ImmutableList.<String>of(),
TypesUtil.createUniqueRequiredAttrDef(attrName, DataTypes.STRING_TYPE));
TypesDef typesDef = TypeUtils.getTypesDef(ImmutableList.<EnumTypeDefinition>of(),
ImmutableList.<StructTypeDefinition>of(), ImmutableList.<HierarchicalTypeDefinition<TraitType>>of(),
ImmutableList.of(classTypeDefinition));
createType(typesDef);
Referenceable instance = new Referenceable(classType);
instance.set(attrName, attrValue);
Id guid = createInstance(instance);
ClientResponse response = getEntityDefinition(guid._getId());
String definition = getEntityDefinition(response);
Referenceable getReferenceable = InstanceSerialization.fromJsonReferenceable(definition, true);
Assert.assertEquals(getReferenceable.get(attrName), attrValue);
}
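RandomStringUtils.random(n) draws from the set of all characters, so the generated type and attribute names routinely contain non-ASCII code points; the test is a genuine UTF-8 round trip through create-type, create-entity and get-definition. For illustration:

    // Values vary per run and are usually non-ASCII:
    String name = RandomStringUtils.random(10); // e.g. a mix of letters, symbols, CJK...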
private void createHiveTypes() throws Exception {
......
......@@ -95,6 +95,40 @@ public class HiveLineageJerseyResourceIT extends BaseResourceIT {
}
@Test
public void testInputsGraph() throws Exception {
WebResource resource = service
.path(BASE_URI)
.path("sales_fact_monthly_mv")
.path("inputs")
.path("graph");
ClientResponse clientResponse = resource
.accept(MediaType.APPLICATION_JSON)
.type(MediaType.APPLICATION_JSON)
.method(HttpMethod.GET, ClientResponse.class);
Assert.assertEquals(clientResponse.getStatus(), Response.Status.OK.getStatusCode());
String responseAsString = clientResponse.getEntity(String.class);
Assert.assertNotNull(responseAsString);
System.out.println("inputs graph = " + responseAsString);
JSONObject response = new JSONObject(responseAsString);
Assert.assertNotNull(response.get(MetadataServiceClient.REQUEST_ID));
JSONObject results = response.getJSONObject(MetadataServiceClient.RESULTS);
Assert.assertNotNull(results);
JSONObject values = results.getJSONObject("values");
Assert.assertNotNull(values);
final JSONObject vertices = values.getJSONObject("vertices");
Assert.assertEquals(vertices.length(), 4);
final JSONObject edges = values.getJSONObject("edges");
Assert.assertEquals(edges.length(), 4);
}
@Test
public void testOutputs() throws Exception {
WebResource resource = service
.path(BASE_URI)
......@@ -126,6 +160,40 @@ public class HiveLineageJerseyResourceIT extends BaseResourceIT {
}
@Test
public void testOutputsGraph() throws Exception {
WebResource resource = service
.path(BASE_URI)
.path("sales_fact")
.path("outputs")
.path("graph");
ClientResponse clientResponse = resource
.accept(MediaType.APPLICATION_JSON)
.type(MediaType.APPLICATION_JSON)
.method(HttpMethod.GET, ClientResponse.class);
Assert.assertEquals(clientResponse.getStatus(), Response.Status.OK.getStatusCode());
String responseAsString = clientResponse.getEntity(String.class);
Assert.assertNotNull(responseAsString);
System.out.println("outputs graph= " + responseAsString);
JSONObject response = new JSONObject(responseAsString);
Assert.assertNotNull(response.get(MetadataServiceClient.REQUEST_ID));
JSONObject results = response.getJSONObject(MetadataServiceClient.RESULTS);
Assert.assertNotNull(results);
JSONObject values = results.getJSONObject("values");
Assert.assertNotNull(values);
final JSONObject vertices = values.getJSONObject("vertices");
Assert.assertEquals(vertices.length(), 3);
final JSONObject edges = values.getJSONObject("edges");
Assert.assertEquals(edges.length(), 4);
}
@Test
public void testSchema() throws Exception {
WebResource resource = service
.path(BASE_URI)
......