Commit e8fca05b by Suma Shivaprasad

Fixed indexing failures for specific types and failures due to missing transaction…

Fixed indexing failures for specific types and failures caused by missing transaction closures. TODO: nested transactions (a write lock on the graph with a nested read transaction in DMS.createType) do not work with bdb
parents 3ecb6d11 3d9b865d
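For context on the transaction-closure part of the fix, the sketch below is illustrative only (not code from this commit; the class name and wiring are assumptions): it shows how a @GraphTransaction-style interceptor can guarantee that every graph operation ends with a commit or rollback, the closure whose absence caused the indexing failures.
import com.thinkaurelius.titan.core.TitanGraph;
import org.aopalliance.intercept.MethodInterceptor;
import org.aopalliance.intercept.MethodInvocation;
// Hypothetical sketch: commit on success, roll back on failure, so no
// graph transaction is ever left open.
public class GraphTransactionInterceptor implements MethodInterceptor {
    private final TitanGraph titanGraph;
    public GraphTransactionInterceptor(TitanGraph titanGraph) {
        this.titanGraph = titanGraph;
    }
    @Override
    public Object invoke(MethodInvocation invocation) throws Throwable {
        try {
            Object response = invocation.proceed();
            titanGraph.commit();   // close the transaction on success
            return response;
        } catch (Throwable t) {
            titanGraph.rollback(); // close the transaction on failure
            throw t;
        }
    }
}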
......@@ -37,6 +37,7 @@ package org.apache.hadoop.metadata.hive.hook;
import com.google.common.util.concurrent.ThreadFactoryBuilder;
import org.apache.commons.lang.StringUtils;
import org.apache.hadoop.hive.conf.HiveConf;
import org.apache.hadoop.hive.ql.QueryPlan;
import org.apache.hadoop.hive.ql.exec.ExplainTask;
......@@ -246,10 +247,9 @@ public class HiveHook implements ExecuteWithHookContext {
Referenceable dbReferenceable = dgiBridge.registerDatabase(oldTable.getDbName());
Referenceable tableReferenceable =
dgiBridge.registerTable(dbReferenceable, oldTable.getDbName(), oldTable.getTableName());
LOG.info("Updating entity name {}.{} to {}",
oldTable.getDbName(), oldTable.getTableName(), newTable.getTableName());
dgiBridge.getMetadataServiceClient().updateEntity(tableReferenceable.getId()._getId(), "name",
newTable.getTableName().toLowerCase());
LOG.info("Updating entity name {}.{} to {}", oldTable.getDbName(), oldTable.getTableName(),
newTable.getTableName());
dgiBridge.updateTable(tableReferenceable, newTable);
}
private void handleCreateTable(HiveMetaStoreBridge dgiBridge, HiveEvent event) throws Exception {
......@@ -271,6 +271,13 @@ public class HiveHook implements ExecuteWithHookContext {
}
}
private String normalize(String str) {
if (StringUtils.isEmpty(str)) {
return null;
}
return str.toLowerCase().trim();
}
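// normalize() lower-cases and trims the query text so that case/whitespace
// variants of the same query resolve to a single process entity, e.g.
// (illustrative values):
//   normalize("SELECT * FROM t ") -> "select * from t"
//   normalize("select * from t")  -> "select * from t"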
private void registerProcess(HiveMetaStoreBridge dgiBridge, HiveEvent event) throws Exception {
Set<ReadEntity> inputs = event.inputs;
Set<WriteEntity> outputs = event.outputs;
......@@ -285,48 +292,53 @@ public class HiveHook implements ExecuteWithHookContext {
}
String queryId = event.queryPlan.getQueryId();
String queryStr = event.queryPlan.getQueryStr();
String queryStr = normalize(event.queryPlan.getQueryStr());
long queryStartTime = event.queryPlan.getQueryStartTime();
LOG.debug("Registering CTAS query: {}", queryStr);
Referenceable processReferenceable = new Referenceable(HiveDataTypes.HIVE_PROCESS.getName());
processReferenceable.set("name", event.operation.getOperationName());
processReferenceable.set("startTime", queryStartTime);
processReferenceable.set("userName", event.user);
List<Referenceable> source = new ArrayList<>();
for (ReadEntity readEntity : inputs) {
if (readEntity.getType() == Entity.Type.TABLE) {
Table table = readEntity.getTable();
String dbName = table.getDbName();
source.add(dgiBridge.registerTable(dbName, table.getTableName()));
}
if (readEntity.getType() == Entity.Type.PARTITION) {
dgiBridge.registerPartition(readEntity.getPartition());
}
}
processReferenceable.set("inputs", source);
List<Referenceable> target = new ArrayList<>();
for (WriteEntity writeEntity : outputs) {
if (writeEntity.getType() == Entity.Type.TABLE || writeEntity.getType() == Entity.Type.PARTITION) {
Table table = writeEntity.getTable();
String dbName = table.getDbName();
target.add(dgiBridge.registerTable(dbName, table.getTableName()));
Referenceable processReferenceable = dgiBridge.getProcessReference(queryStr);
if (processReferenceable == null) {
processReferenceable = new Referenceable(HiveDataTypes.HIVE_PROCESS.getName());
processReferenceable.set("name", event.operation.getOperationName());
processReferenceable.set("startTime", queryStartTime);
processReferenceable.set("userName", event.user);
List<Referenceable> source = new ArrayList<>();
for (ReadEntity readEntity : inputs) {
if (readEntity.getType() == Entity.Type.TABLE) {
Table table = readEntity.getTable();
String dbName = table.getDbName();
source.add(dgiBridge.registerTable(dbName, table.getTableName()));
}
if (readEntity.getType() == Entity.Type.PARTITION) {
dgiBridge.registerPartition(readEntity.getPartition());
}
}
if (writeEntity.getType() == Entity.Type.PARTITION) {
dgiBridge.registerPartition(writeEntity.getPartition());
processReferenceable.set("inputs", source);
List<Referenceable> target = new ArrayList<>();
for (WriteEntity writeEntity : outputs) {
if (writeEntity.getType() == Entity.Type.TABLE || writeEntity.getType() == Entity.Type.PARTITION) {
Table table = writeEntity.getTable();
String dbName = table.getDbName();
target.add(dgiBridge.registerTable(dbName, table.getTableName()));
}
if (writeEntity.getType() == Entity.Type.PARTITION) {
dgiBridge.registerPartition(writeEntity.getPartition());
}
}
processReferenceable.set("outputs", target);
processReferenceable.set("queryText", queryStr);
processReferenceable.set("queryId", queryId);
processReferenceable.set("queryPlan", event.jsonPlan.toString());
processReferenceable.set("endTime", System.currentTimeMillis());
//TODO set
processReferenceable.set("queryGraph", "queryGraph");
dgiBridge.createInstance(processReferenceable);
} else {
LOG.debug("Query {} is already registered", queryStr);
}
processReferenceable.set("outputs", target);
processReferenceable.set("queryText", queryStr);
processReferenceable.set("queryId", queryId);
processReferenceable.set("queryPlan", event.jsonPlan.toString());
processReferenceable.set("endTime", System.currentTimeMillis());
//TODO set
processReferenceable.set("queryGraph", "queryGraph");
dgiBridge.createInstance(processReferenceable);
}
......
......@@ -27,11 +27,9 @@ public enum HiveDataTypes {
HIVE_OBJECT_TYPE,
HIVE_PRINCIPAL_TYPE,
HIVE_RESOURCE_TYPE,
HIVE_FUNCTION_TYPE,
// Structs
HIVE_SERDE,
HIVE_SKEWEDINFO,
HIVE_ORDER,
HIVE_RESOURCEURI,
......@@ -42,7 +40,6 @@ public enum HiveDataTypes {
HIVE_COLUMN,
HIVE_PARTITION,
HIVE_INDEX,
HIVE_FUNCTION,
HIVE_ROLE,
HIVE_TYPE,
HIVE_PROCESS,
......
......@@ -19,6 +19,8 @@
package org.apache.hadoop.metadata.hive.hook;
import org.apache.commons.lang.RandomStringUtils;
import org.apache.commons.lang.StringEscapeUtils;
import org.apache.commons.lang.StringUtils;
import org.apache.hadoop.hive.conf.HiveConf;
import org.apache.hadoop.hive.metastore.TableType;
import org.apache.hadoop.hive.ql.Driver;
......@@ -119,8 +121,8 @@ public class HiveHookIT {
private String createTable(boolean partition) throws Exception {
String tableName = tableName();
runCommand("create table " + tableName + "(id int, name string) comment 'table comment' " + (partition ? " partitioned by(dt string)"
: ""));
runCommand("create table " + tableName + "(id int, name string) comment 'table comment' "
+ (partition ? " partitioned by(dt string)" : ""));
return tableName;
}
......@@ -139,6 +141,9 @@ public class HiveHookIT {
Referenceable tableRef = dgiCLient.getEntity(tableId);
Assert.assertEquals(tableRef.get("tableType"), TableType.MANAGED_TABLE.name());
Assert.assertEquals(tableRef.get(HiveDataModelGenerator.COMMENT), "table comment");
String entityName = HiveMetaStoreBridge.getTableName(CLUSTER_NAME, DEFAULT_DB, tableName);
Assert.assertEquals(tableRef.get(HiveDataModelGenerator.NAME), entityName);
final Id sdId = (Id) tableRef.get("sd");
Referenceable sdRef = dgiCLient.getEntity(sdId.id);
Assert.assertEquals(sdRef.get(HiveDataModelGenerator.STORAGE_IS_STORED_AS_SUB_DIRS),false);
......@@ -222,7 +227,7 @@ public class HiveHookIT {
String tableName = createTable(false);
String filename = "pfile://" + mkdir("export");
String query = "export table " + tableName + " to '" + filename + "'";
String query = "export table " + tableName + " to \"" + filename + "\"";
runCommand(query);
assertProcessIsRegistered(query);
......@@ -239,6 +244,11 @@ public class HiveHookIT {
String query = "select * from " + tableName;
runCommand(query);
assertProcessIsRegistered(query);
//single entity per query
query = "SELECT * from " + tableName.toUpperCase();
runCommand(query);
assertProcessIsRegistered(query);
}
@Test
......@@ -268,8 +278,23 @@ public class HiveHookIT {
}
private void assertProcessIsRegistered(String queryStr) throws Exception {
String dslQuery = String.format("%s where queryText = \"%s\"", HiveDataTypes.HIVE_PROCESS.getName(), queryStr);
assertEntityIsRegistered(dslQuery, true);
// String dslQuery = String.format("%s where queryText = \"%s\"", HiveDataTypes.HIVE_PROCESS.getName(),
// normalize(queryStr));
// assertEntityIsRegistered(dslQuery, true);
//todo replace with DSL
String typeName = HiveDataTypes.HIVE_PROCESS.getName();
String gremlinQuery = String.format("g.V.has('__typeName', '%s').has('%s.queryText', \"%s\").toList()",
typeName, typeName, normalize(queryStr));
JSONObject response = dgiCLient.searchByGremlin(gremlinQuery);
JSONArray results = response.getJSONArray(MetadataServiceClient.RESULTS);
Assert.assertEquals(results.length(), 1);
}
private String normalize(String str) {
if (StringUtils.isEmpty(str)) {
return null;
}
return StringEscapeUtils.escapeJava(str.toLowerCase());
}
private String assertTableIsRegistered(String dbName, String tableName) throws Exception {
......@@ -282,7 +307,7 @@ public class HiveHookIT {
private String assertTableIsRegistered(String dbName, String tableName, boolean registered) throws Exception {
LOG.debug("Searching for table {}.{}", dbName, tableName);
String query = String.format("%s as t where name = '%s', dbName where name = '%s' and clusterName = '%s'"
String query = String.format("%s as t where tableName = '%s', db where name = '%s' and clusterName = '%s'"
+ " select t", HiveDataTypes.HIVE_TABLE.getName(), tableName.toLowerCase(), dbName.toLowerCase(),
CLUSTER_NAME);
return assertEntityIsRegistered(query, registered);
......@@ -299,14 +324,13 @@ public class HiveHookIT {
String typeName = HiveDataTypes.HIVE_PARTITION.getName();
String dbType = HiveDataTypes.HIVE_DB.getName();
String tableType = HiveDataTypes.HIVE_TABLE.getName();
String datasetType = MetadataServiceClient.DATA_SET_SUPER_TYPE;
LOG.debug("Searching for partition of {}.{} with values {}", dbName, tableName, value);
//todo replace with DSL
String gremlinQuery = String.format("g.V.has('__typeName', '%s').has('%s.values', ['%s']).as('p')."
+ "out('__%s.tableName').has('%s.name', '%s').out('__%s.dbName').has('%s.name', '%s')"
+ "out('__%s.table').has('%s.tableName', '%s').out('__%s.db').has('%s.name', '%s')"
+ ".has('%s.clusterName', '%s').back('p').toList()", typeName, typeName, value, typeName,
datasetType, tableName.toLowerCase(), tableType, dbType, dbName.toLowerCase(), dbType, CLUSTER_NAME);
tableType, tableName.toLowerCase(), tableType, dbType, dbName.toLowerCase(), dbType, CLUSTER_NAME);
JSONObject response = dgiCLient.searchByGremlin(gremlinQuery);
JSONArray results = response.getJSONArray(MetadataServiceClient.RESULTS);
Assert.assertEquals(results.length(), 1);
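// For illustration, with value ['2015-01-01'], table 't', db 'default', and the
// type names assumed to resolve to 'hive_partition', 'hive_table' and 'hive_db',
// the format above expands to a traversal like:
//   g.V.has('__typeName', 'hive_partition').has('hive_partition.values', ['2015-01-01']).as('p').
//     out('__hive_partition.table').has('hive_table.tableName', 't').
//     out('__hive_table.db').has('hive_db.name', 'default').
//     has('hive_db.clusterName', '<cluster>').back('p').toList()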
......@@ -327,4 +351,29 @@ public class HiveHookIT {
return null;
}
}
@Test
public void testLineage() throws Exception {
String table1 = createTable(false);
String db2 = createDatabase();
String table2 = tableName();
String query = String.format("create table %s.%s as select * from %s", db2, table2, table1);
runCommand(query);
String table1Id = assertTableIsRegistered(DEFAULT_DB, table1);
String table2Id = assertTableIsRegistered(db2, table2);
String datasetName = HiveMetaStoreBridge.getTableName(CLUSTER_NAME, db2, table2);
JSONObject response = dgiCLient.getInputGraph(datasetName);
JSONObject vertices = response.getJSONObject("values").getJSONObject("vertices");
Assert.assertTrue(vertices.has(table1Id));
Assert.assertTrue(vertices.has(table2Id));
datasetName = HiveMetaStoreBridge.getTableName(CLUSTER_NAME, DEFAULT_DB, table1);
response = dgiCLient.getOutputGraph(datasetName);
vertices = response.getJSONObject("values").getJSONObject("vertices");
Assert.assertTrue(vertices.has(table1Id));
Assert.assertTrue(vertices.has(table2Id));
}
}
......@@ -64,6 +64,7 @@ public class MetadataServiceClient {
public static final String URI_ENTITIES = "entities";
public static final String URI_TRAITS = "traits";
public static final String URI_SEARCH = "discovery/search";
public static final String URI_LINEAGE = "lineage/hive/table";
public static final String QUERY = "query";
public static final String QUERY_TYPE = "queryType";
......@@ -75,6 +76,8 @@ public class MetadataServiceClient {
public static final String DATA_SET_SUPER_TYPE = "DataSet";
public static final String PROCESS_SUPER_TYPE = "Process";
public static final String JSON_MEDIA_TYPE = MediaType.APPLICATION_JSON + "; charset=UTF-8";
private WebResource service;
public MetadataServiceClient(String baseUrl) {
......@@ -82,7 +85,7 @@ public class MetadataServiceClient {
PropertiesConfiguration clientConfig = null;
try {
clientConfig = getClientProperties();
if (clientConfig.getBoolean(TLS_ENABLED) || clientConfig.getString("metadata.http.authentication.type") != null) {
if (clientConfig.getBoolean(TLS_ENABLED, false)) {
// create an SSL properties configuration if one doesn't exist. SSLFactory expects a file, so forced to create a
// configuration object, persist it, then subsequently pass in an empty configuration to SSLFactory
SecureClientUtils.persistSSLClientConfiguration(clientConfig);
......@@ -126,7 +129,12 @@ public class MetadataServiceClient {
SEARCH(BASE_URI + URI_SEARCH, HttpMethod.GET),
SEARCH_DSL(BASE_URI + URI_SEARCH + "/dsl", HttpMethod.GET),
SEARCH_GREMLIN(BASE_URI + URI_SEARCH + "/gremlin", HttpMethod.GET),
SEARCH_FULL_TEXT(BASE_URI + URI_SEARCH + "/fulltext", HttpMethod.GET);
SEARCH_FULL_TEXT(BASE_URI + URI_SEARCH + "/fulltext", HttpMethod.GET),
//Lineage operations
LINEAGE_INPUTS_GRAPH(BASE_URI + URI_LINEAGE, HttpMethod.GET),
LINEAGE_OUTPUTS_GRAPH(BASE_URI + URI_LINEAGE, HttpMethod.GET),
LINEAGE_SCHEMA(BASE_URI + URI_LINEAGE, HttpMethod.GET);
private final String method;
private final String path;
......@@ -145,6 +153,16 @@ public class MetadataServiceClient {
}
}
/**
* Register the given type (meta model)
* @param typeAsJson type definition as JSON
* @return result json object
* @throws MetadataServiceException
*/
public JSONObject createType(String typeAsJson) throws MetadataServiceException {
return callAPI(API.CREATE_TYPE, typeAsJson);
}
public List<String> listTypes() throws MetadataServiceException {
try {
final JSONObject jsonObject = callAPI(API.LIST_TYPES, null);
......@@ -176,16 +194,6 @@ public class MetadataServiceClient {
}
/**
* Register the given type (meta model)
* @param typeAsJson type definition as JSON
* @return result json object
* @throws MetadataServiceException
*/
public JSONObject createType(String typeAsJson) throws MetadataServiceException {
return callAPI(API.CREATE_TYPE, typeAsJson);
}
/**
* Create the given entity
* @param entityAsJson entity(type instance) as json
* @return result json object
......@@ -213,9 +221,9 @@ public class MetadataServiceClient {
/**
* Updates property for the entity corresponding to guid
* @param guid
* @param property
* @param value
* @param guid guid
* @param property property key
* @param value property value
*/
public JSONObject updateEntity(String guid, String property, String value) throws MetadataServiceException {
WebResource resource = getResource(API.UPDATE_ENTITY, guid);
......@@ -255,6 +263,7 @@ public class MetadataServiceClient {
* @throws MetadataServiceException
*/
public JSONArray searchByDSL(String query) throws MetadataServiceException {
LOG.debug("DSL query: {}", query);
WebResource resource = getResource(API.SEARCH_DSL);
resource = resource.queryParam(QUERY, query);
JSONObject result = callAPIWithResource(API.SEARCH_DSL, resource);
......@@ -272,6 +281,7 @@ public class MetadataServiceClient {
* @throws MetadataServiceException
*/
public JSONObject searchByGremlin(String gremlinQuery) throws MetadataServiceException {
LOG.debug("Gremlin query: " + gremlinQuery);
WebResource resource = getResource(API.SEARCH_GREMLIN);
resource = resource.queryParam(QUERY, gremlinQuery);
return callAPIWithResource(API.SEARCH_GREMLIN, resource);
......@@ -289,6 +299,24 @@ public class MetadataServiceClient {
return callAPIWithResource(API.SEARCH_FULL_TEXT, resource);
}
public JSONObject getInputGraph(String datasetName) throws MetadataServiceException {
JSONObject response = callAPI(API.LINEAGE_INPUTS_GRAPH, null, datasetName, "/inputs/graph");
try {
return response.getJSONObject(MetadataServiceClient.RESULTS);
} catch (JSONException e) {
throw new MetadataServiceException(e);
}
}
public JSONObject getOutputGraph(String datasetName) throws MetadataServiceException {
JSONObject response = callAPI(API.LINEAGE_OUTPUTS_GRAPH, null, datasetName, "/outputs/graph");
try {
return response.getJSONObject(MetadataServiceClient.RESULTS);
} catch (JSONException e) {
throw new MetadataServiceException(e);
}
}
public String getRequestId(JSONObject json) throws MetadataServiceException {
try {
return json.getString(REQUEST_ID);
......@@ -314,11 +342,11 @@ public class MetadataServiceClient {
private JSONObject callAPIWithResource(API api, WebResource resource, Object requestObject)
throws MetadataServiceException {
ClientResponse clientResponse = resource
.accept(MediaType.APPLICATION_JSON)
.type(MediaType.APPLICATION_JSON)
.accept(JSON_MEDIA_TYPE)
.type(JSON_MEDIA_TYPE)
.method(api.getMethod(), ClientResponse.class, requestObject);
Response.Status expectedStatus = (api.getMethod() == HttpMethod.POST)
Response.Status expectedStatus = HttpMethod.POST.equals(api.getMethod())
? Response.Status.CREATED : Response.Status.OK;
if (clientResponse.getStatus() == expectedStatus.getStatusCode()) {
String responseAsString = clientResponse.getEntity(String.class);
......
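A hedged usage sketch of the client additions above (the base URL, dataset name and Gremlin string are placeholders; error handling is elided, and the dataset name format is assumed to match HiveMetaStoreBridge.getTableName):
import org.apache.hadoop.metadata.MetadataServiceClient;
import org.codehaus.jettison.json.JSONObject;
public class ClientUsageSketch {
    public static void main(String[] args) throws Exception {
        // base URL is a placeholder; point it at a running metadata server
        MetadataServiceClient client = new MetadataServiceClient("http://localhost:21000");
        // lineage graphs for a dataset, keyed by its qualified table name
        JSONObject inputs = client.getInputGraph("cluster.default.sales_fact");
        JSONObject outputs = client.getOutputGraph("cluster.default.sales_fact");
        // Gremlin search against process entities
        JSONObject processes = client.searchByGremlin(
                "g.V.has('__typeName', 'hive_process').toList()");
        System.out.println(inputs);
        System.out.println(outputs);
        System.out.println(processes);
    }
}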
......@@ -60,3 +60,24 @@ The properties for configuring the SPNEGO support are:
For a more detailed discussion of the HTTP authentication mechanism refer to [[http://hadoop.apache.org/docs/stable/hadoop-auth/Configuration.html][Hadoop Auth, Java HTTP SPNEGO 2.6.0 - Server Side Configuration]]. The prefix referenced in that document is "metadata.http.authentication" in the case of the DGI authentication implementation.
---+++ Client security configuration
When using Atlas client code to communicate with an Atlas server configured for SSL transport and/or Kerberos authentication, the client must supply a configuration file containing the security properties needed to communicate with, and authenticate to, the server.
Create a client.properties file with the appropriate settings (see below) and place it on the client's classpath or in the directory specified by the "metadata.conf" system property.
The client properties for SSL communication are:
* <code>metadata.enableTLS</code> (false|true) [default: false] - enable/disable the SSL client communication infrastructure.
* <code>keystore.file</code> - the path to the keystore file leveraged by the client. This file is only required if 2-Way SSL is enabled at the server and contains the client certificate.
* <code>truststore.file</code> - the path to the truststore file. This file contains the certificates of trusted entities (e.g. the certificates for the server or a shared certification authority). This file is required for both one-way and two-way SSL.
* <code>cert.stores.credential.provider.path</code> - the path to the Credential Provider store file. The passwords for the keystore, truststore, and client certificate are maintained in this secure file.
The property required for authenticating to the server (if authentication is enabled):
* <code>metadata.http.authentication.type</code> (simple|kerberos) [default: simple] - the authentication type
If the authentication type specified is 'kerberos', the Kerberos ticket cache is used to authenticate to the server; the client must therefore obtain a ticket from the KDC beforehand, e.g. via 'kinit' or a similar mechanism.
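A minimal client.properties sketch for a client talking to a server with one-way SSL and Kerberos authentication enabled (all paths are placeholders; two-way SSL would additionally require <code>keystore.file</code>):

metadata.enableTLS=true
truststore.file=/path/to/truststore.jks
cert.stores.credential.provider.path=jceks://file/path/to/credentialstore.jceks
metadata.http.authentication.type=kerberos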
......@@ -30,6 +30,7 @@ import org.apache.hadoop.metadata.query.Expressions;
import org.apache.hadoop.metadata.query.GremlinQueryResult;
import org.apache.hadoop.metadata.query.HiveLineageQuery;
import org.apache.hadoop.metadata.query.HiveWhereUsedQuery;
import org.apache.hadoop.metadata.repository.EntityNotFoundException;
import org.apache.hadoop.metadata.repository.MetadataRepository;
import org.apache.hadoop.metadata.repository.graph.GraphProvider;
import org.slf4j.Logger;
......@@ -75,10 +76,10 @@ public class HiveLineageService implements LineageService {
HIVE_TABLE_SCHEMA_QUERY = conf.getString(
"metadata.lineage.hive.table.schema.query",
"hive_table where name=\"?\", columns");
"hive_table where name=\"%s\", columns");
HIVE_TABLE_EXISTS_QUERY = conf.getString(
"metadata.lineage.hive.table.exists.query",
"from hive_table where name=\"?\"");
"from hive_table where name=\"%s\"");
} catch (MetadataException e) {
throw new RuntimeException(e);
}
......@@ -106,7 +107,7 @@ public class HiveLineageService implements LineageService {
*/
@Override
@GraphTransaction
public String getOutputs(String tableName) throws DiscoveryException {
public String getOutputs(String tableName) throws MetadataException {
LOG.info("Fetching lineage outputs for tableName={}", tableName);
ParamChecker.notEmpty(tableName, "table name cannot be null");
validateTableExists(tableName);
......@@ -134,7 +135,7 @@ public class HiveLineageService implements LineageService {
*/
@Override
@GraphTransaction
public String getOutputsGraph(String tableName) throws DiscoveryException {
public String getOutputsGraph(String tableName) throws MetadataException {
LOG.info("Fetching lineage outputs graph for tableName={}", tableName);
ParamChecker.notEmpty(tableName, "table name cannot be null");
validateTableExists(tableName);
......@@ -155,7 +156,7 @@ public class HiveLineageService implements LineageService {
*/
@Override
@GraphTransaction
public String getInputs(String tableName) throws DiscoveryException {
public String getInputs(String tableName) throws MetadataException {
LOG.info("Fetching lineage inputs for tableName={}", tableName);
ParamChecker.notEmpty(tableName, "table name cannot be null");
validateTableExists(tableName);
......@@ -183,7 +184,7 @@ public class HiveLineageService implements LineageService {
*/
@Override
@GraphTransaction
public String getInputsGraph(String tableName) throws DiscoveryException {
public String getInputsGraph(String tableName) throws MetadataException {
LOG.info("Fetching lineage inputs graph for tableName={}", tableName);
ParamChecker.notEmpty(tableName, "table name cannot be null");
validateTableExists(tableName);
......@@ -204,12 +205,12 @@ public class HiveLineageService implements LineageService {
*/
@Override
@GraphTransaction
public String getSchema(String tableName) throws DiscoveryException {
public String getSchema(String tableName) throws MetadataException {
LOG.info("Fetching schema for tableName={}", tableName);
ParamChecker.notEmpty(tableName, "table name cannot be null");
validateTableExists(tableName);
String schemaQuery = HIVE_TABLE_SCHEMA_QUERY.replace("?", tableName);
final String schemaQuery = String.format(HIVE_TABLE_SCHEMA_QUERY, tableName);
return discoveryService.searchByDSL(schemaQuery);
}
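// Note: with the "%s" placeholder the query template is expanded via
// String.format rather than String.replace; an illustrative expansion:
//   String.format("hive_table where name=\"%s\", columns", "sales_fact")
//   -> hive_table where name="sales_fact", columns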
......@@ -218,11 +219,11 @@ public class HiveLineageService implements LineageService {
*
* @param tableName table name
*/
private void validateTableExists(String tableName) throws DiscoveryException {
String tableExistsQuery = HIVE_TABLE_EXISTS_QUERY.replace("?", tableName);
private void validateTableExists(String tableName) throws MetadataException {
final String tableExistsQuery = String.format(HIVE_TABLE_EXISTS_QUERY, tableName);
GremlinQueryResult queryResult = discoveryService.evaluate(tableExistsQuery);
if (!(queryResult.rows().length() > 0)) {
throw new IllegalArgumentException(tableName + " does not exist");
throw new EntityNotFoundException(tableName + " does not exist");
}
}
}
......@@ -18,6 +18,8 @@
package org.apache.hadoop.metadata.discovery;
import org.apache.hadoop.metadata.MetadataException;
/**
* Lineage service interface.
*/
......@@ -29,7 +31,7 @@ public interface LineageService {
* @param tableName tableName
* @return Outputs as JSON
*/
String getOutputs(String tableName) throws DiscoveryException;
String getOutputs(String tableName) throws MetadataException;
/**
* Return the lineage outputs graph for the given tableName.
......@@ -37,7 +39,7 @@ public interface LineageService {
* @param tableName tableName
* @return Outputs Graph as JSON
*/
String getOutputsGraph(String tableName) throws DiscoveryException;
String getOutputsGraph(String tableName) throws MetadataException;
/**
* Return the lineage inputs for the given tableName.
......@@ -45,7 +47,7 @@ public interface LineageService {
* @param tableName tableName
* @return Inputs as JSON
*/
String getInputs(String tableName) throws DiscoveryException;
String getInputs(String tableName) throws MetadataException;
/**
* Return the lineage inputs graph for the given tableName.
......@@ -53,7 +55,7 @@ public interface LineageService {
* @param tableName tableName
* @return Inputs Graph as JSON
*/
String getInputsGraph(String tableName) throws DiscoveryException;
String getInputsGraph(String tableName) throws MetadataException;
/**
* Return the schema for the given tableName.
......@@ -61,5 +63,5 @@ public interface LineageService {
* @param tableName tableName
* @return Schema as JSON
*/
String getSchema(String tableName) throws DiscoveryException;
String getSchema(String tableName) throws MetadataException;
}
......@@ -133,12 +133,12 @@ public class GraphBackedDiscoveryService implements DiscoveryService {
if (either.isRight()) {
Expressions.Expression expression = either.right().get();
return evaluate(expression);
} else {
throw new DiscoveryException("Invalid expression : " + dslQuery + ". " + either.left());
}
} catch (Exception e) { // unable to catch ExpressionException
throw new DiscoveryException("Invalid expression : " + dslQuery, e);
}
throw new DiscoveryException("Invalid expression : " + dslQuery);
}
public GremlinQueryResult evaluate(Expressions.Expression expression) {
......
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.metadata.repository;
import org.apache.hadoop.metadata.MetadataException;
/**
* A simple wrapper for 404.
*/
public class EntityNotFoundException extends RepositoryException {
public EntityNotFoundException() {
}
public EntityNotFoundException(String message) {
super(message);
}
public EntityNotFoundException(String message, Throwable cause) {
super(message, cause);
}
public EntityNotFoundException(Throwable cause) {
super(cause);
}
public EntityNotFoundException(String message, Throwable cause, boolean enableSuppression,
boolean writableStackTrace) {
super(message, cause, enableSuppression, writableStackTrace);
}
}
......@@ -130,7 +130,7 @@ public interface MetadataRepository {
* @return a list of trait names for the given entity guid
* @throws RepositoryException
*/
List<String> getTraitNames(String guid) throws RepositoryException;
List<String> getTraitNames(String guid) throws MetadataException;
/**
* Adds a new trait to an existing entity represented by a guid.
......
......@@ -30,6 +30,7 @@ import org.apache.commons.lang.StringUtils;
import org.apache.hadoop.metadata.GraphTransaction;
import org.apache.hadoop.metadata.MetadataException;
import org.apache.hadoop.metadata.repository.Constants;
import org.apache.hadoop.metadata.repository.EntityNotFoundException;
import org.apache.hadoop.metadata.repository.MetadataRepository;
import org.apache.hadoop.metadata.repository.RepositoryException;
import org.apache.hadoop.metadata.typesystem.IReferenceableInstance;
......@@ -89,7 +90,8 @@ public class GraphBackedMetadataRepository implements MetadataRepository {
private final TitanGraph titanGraph;
@Inject
public GraphBackedMetadataRepository(GraphProvider<TitanGraph> graphProvider) throws MetadataException {
public GraphBackedMetadataRepository(
GraphProvider<TitanGraph> graphProvider) throws MetadataException {
this.typeSystem = TypeSystem.getInstance();
this.titanGraph = graphProvider.get();
......@@ -124,7 +126,8 @@ public class GraphBackedMetadataRepository implements MetadataRepository {
}
@Override
public String getFieldNameInVertex(IDataType<?> dataType, AttributeInfo aInfo) throws MetadataException {
public String getFieldNameInVertex(IDataType<?> dataType,
AttributeInfo aInfo) throws MetadataException {
return getQualifiedName(dataType, aInfo.name);
}
......@@ -137,7 +140,8 @@ public class GraphBackedMetadataRepository implements MetadataRepository {
return EDGE_LABEL_PREFIX + typeName + "." + attrName;
}
public String getEdgeLabel(ITypedInstance typedInstance, AttributeInfo aInfo) throws MetadataException {
public String getEdgeLabel(ITypedInstance typedInstance,
AttributeInfo aInfo) throws MetadataException {
IDataType dataType = typeSystem.getDataType(IDataType.class, typedInstance.getTypeName());
return getEdgeLabel(dataType, aInfo);
}
......@@ -158,9 +162,9 @@ public class GraphBackedMetadataRepository implements MetadataRepository {
public ITypedReferenceableInstance getEntityDefinition(String guid) throws RepositoryException {
LOG.info("Retrieving entity with guid={}", guid);
try {
Vertex instanceVertex = getVertexForGUID(guid);
Vertex instanceVertex = getVertexForGUID(guid);
try {
LOG.debug("Found a vertex {} for guid {}", instanceVertex, guid);
return graphToInstanceMapper.mapGraphToTypedInstance(guid, instanceVertex);
......@@ -169,11 +173,11 @@ public class GraphBackedMetadataRepository implements MetadataRepository {
}
}
private Vertex getVertexForGUID(String guid) throws RepositoryException {
private Vertex getVertexForGUID(String guid) throws EntityNotFoundException {
Vertex instanceVertex = GraphHelper.findVertexByGUID(titanGraph, guid);
if (instanceVertex == null) {
LOG.debug("Could not find a vertex for guid={}", guid);
throw new RepositoryException(
throw new EntityNotFoundException(
"Could not find an entity in the repository for guid: " + guid);
}
......@@ -209,7 +213,7 @@ public class GraphBackedMetadataRepository implements MetadataRepository {
*/
@Override
@GraphTransaction
public List<String> getTraitNames(String guid) throws RepositoryException {
public List<String> getTraitNames(String guid) throws MetadataException {
LOG.info("Retrieving trait names for entity={}", guid);
Vertex instanceVertex = getVertexForGUID(guid);
return getTraitNames(instanceVertex);
......@@ -253,7 +257,7 @@ public class GraphBackedMetadataRepository implements MetadataRepository {
((TitanVertex) instanceVertex)
.addProperty(Constants.TRAIT_NAMES_PROPERTY_KEY, traitName);
} catch (MetadataException e) {
} catch (Exception e) {
throw new RepositoryException(e);
}
}
......@@ -316,21 +320,20 @@ public class GraphBackedMetadataRepository implements MetadataRepository {
@Override
@GraphTransaction
public void updateEntity(String guid, String property, String value) throws RepositoryException {
public void updateEntity(String guid,
String property, String value) throws RepositoryException {
LOG.info("Adding property {} for entity guid {}", property, guid);
try {
Vertex instanceVertex = GraphHelper.findVertexByGUID(titanGraph, guid);
if (instanceVertex == null) {
throw new RepositoryException("Could not find a vertex for guid " + guid);
}
Vertex instanceVertex = getVertexForGUID(guid);
LOG.debug("Found a vertex {} for guid {}", instanceVertex, guid);
String typeName = instanceVertex.getProperty(Constants.ENTITY_TYPE_PROPERTY_KEY);
ClassType type = typeSystem.getDataType(ClassType.class, typeName);
AttributeInfo attributeInfo = type.fieldMapping.fields.get(property);
if (attributeInfo == null) {
throw new MetadataException("Invalid property " + property + " for entity " + typeName);
throw new MetadataException("Invalid property "
+ property + " for entity " + typeName);
}
DataTypes.TypeCategory attrTypeCategory = attributeInfo.dataType().getTypeCategory();
......@@ -344,8 +347,9 @@ public class GraphBackedMetadataRepository implements MetadataRepository {
throw new RepositoryException("Update of " + attrTypeCategory + " is not supported");
}
instanceToGraphMapper.mapAttributesToVertex(getIdFromVertex(typeName, instanceVertex), instance,
instanceVertex, new HashMap<Id, Vertex>(), attributeInfo, attributeInfo.dataType());
instanceToGraphMapper.mapAttributesToVertex(getIdFromVertex(typeName, instanceVertex),
instance, instanceVertex, new HashMap<Id, Vertex>(),
attributeInfo, attributeInfo.dataType());
} catch (Exception e) {
throw new RepositoryException(e);
}
......
......@@ -282,12 +282,14 @@ public class GraphBackedTypeStore implements ITypeStore {
private AttributeDefinition[] getAttributes(Vertex vertex, String typeName) throws MetadataException {
List<AttributeDefinition> attributes = new ArrayList<>();
List<String> attrNames = vertex.getProperty(getPropertyKey(typeName));
for (String attrName : attrNames) {
try {
String propertyKey = getPropertyKey(typeName, attrName);
attributes.add(AttributeInfo.fromJson((String) vertex.getProperty(propertyKey)));
} catch (JSONException e) {
throw new MetadataException(e);
if (attrNames != null) {
for (String attrName : attrNames) {
try {
String propertyKey = getPropertyKey(typeName, attrName);
attributes.add(AttributeInfo.fromJson((String) vertex.getProperty(propertyKey)));
} catch (JSONException e) {
throw new MetadataException(e);
}
}
}
return attributes.toArray(new AttributeDefinition[attributes.size()]);
......
......@@ -29,7 +29,7 @@ public interface ITypeStore {
* @param typeSystem type system to persist
* @throws StorageException
*/
public void store(TypeSystem typeSystem) throws MetadataException;
void store(TypeSystem typeSystem) throws MetadataException;
/**
* Persist the given type in the type system - insert or update
......@@ -37,12 +37,12 @@ public interface ITypeStore {
* @param types types to persist
* @throws StorageException
*/
public void store(TypeSystem typeSystem, ImmutableList<String> types) throws MetadataException;
void store(TypeSystem typeSystem, ImmutableList<String> types) throws MetadataException;
/**
* Restore all type definitions
* @return List of persisted type definitions
* @throws org.apache.hadoop.metadata.MetadataException
*/
public TypesDef restore() throws MetadataException;
TypesDef restore() throws MetadataException;
}
......@@ -20,19 +20,14 @@ package org.apache.hadoop.metadata.services;
import com.google.common.base.Preconditions;
import com.google.common.collect.ImmutableList;
import com.google.inject.Guice;
import com.google.inject.Injector;
import com.google.inject.Provider;
import org.apache.hadoop.metadata.GraphTransaction;
import org.apache.hadoop.metadata.MetadataException;
import org.apache.hadoop.metadata.MetadataServiceClient;
import org.apache.hadoop.metadata.ParamChecker;
import org.apache.hadoop.metadata.RepositoryMetadataModule;
import org.apache.hadoop.metadata.classification.InterfaceAudience;
import org.apache.hadoop.metadata.discovery.SearchIndexer;
import org.apache.hadoop.metadata.listener.EntityChangeListener;
import org.apache.hadoop.metadata.repository.IndexCreationException;
import org.apache.hadoop.metadata.repository.IndexException;
import org.apache.hadoop.metadata.repository.MetadataRepository;
import org.apache.hadoop.metadata.repository.typestore.ITypeStore;
import org.apache.hadoop.metadata.typesystem.ITypedReferenceableInstance;
......@@ -45,11 +40,14 @@ import org.apache.hadoop.metadata.typesystem.json.TypesSerialization;
import org.apache.hadoop.metadata.typesystem.types.AttributeDefinition;
import org.apache.hadoop.metadata.typesystem.types.ClassType;
import org.apache.hadoop.metadata.typesystem.types.DataTypes;
import org.apache.hadoop.metadata.typesystem.types.EnumTypeDefinition;
import org.apache.hadoop.metadata.typesystem.types.HierarchicalTypeDefinition;
import org.apache.hadoop.metadata.typesystem.types.IDataType;
import org.apache.hadoop.metadata.typesystem.types.Multiplicity;
import org.apache.hadoop.metadata.typesystem.types.StructTypeDefinition;
import org.apache.hadoop.metadata.typesystem.types.TraitType;
import org.apache.hadoop.metadata.typesystem.types.TypeSystem;
import org.apache.hadoop.metadata.typesystem.types.TypeUtils;
import org.apache.hadoop.metadata.typesystem.types.utils.TypesUtil;
import org.codehaus.jettison.json.JSONException;
import org.codehaus.jettison.json.JSONObject;
......@@ -58,8 +56,7 @@ import org.slf4j.LoggerFactory;
import javax.inject.Inject;
import javax.inject.Singleton;
import java.io.IOException;
import java.text.ParseException;
import java.util.HashMap;
import java.util.LinkedHashSet;
import java.util.List;
import java.util.Map;
......@@ -97,11 +94,12 @@ public class DefaultMetadataService implements MetadataService {
private void restoreTypeSystem() {
LOG.info("Restoring type system from the store");
try {
createSuperTypes();
TypesDef typesDef = typeStore.restore();
typeSystem.defineTypes(typesDef);
// restore types before creating super types
createSuperTypes();
} catch (MetadataException e) {
throw new RuntimeException(e);
}
......@@ -114,35 +112,33 @@ public class DefaultMetadataService implements MetadataService {
TypesUtil.createOptionalAttrDef("description", DataTypes.STRING_TYPE);
@InterfaceAudience.Private
public void createSuperTypes() throws MetadataException {
private void createSuperTypes() throws MetadataException {
if (typeSystem.isRegistered(MetadataServiceClient.DATA_SET_SUPER_TYPE)) {
return; // this is already registered
}
HierarchicalTypeDefinition<ClassType> superTypeDefinition =
HierarchicalTypeDefinition<ClassType> infraType =
TypesUtil.createClassTypeDef(MetadataServiceClient.INFRASTRUCTURE_SUPER_TYPE,
ImmutableList.<String>of(),
NAME_ATTRIBUTE, DESCRIPTION_ATTRIBUTE);
typeSystem.defineClassType(superTypeDefinition);
ImmutableList.<String>of(), NAME_ATTRIBUTE, DESCRIPTION_ATTRIBUTE);
superTypeDefinition =
TypesUtil.createClassTypeDef(MetadataServiceClient.DATA_SET_SUPER_TYPE,
ImmutableList.<String>of(),
HierarchicalTypeDefinition<ClassType> datasetType = TypesUtil
.createClassTypeDef(MetadataServiceClient.DATA_SET_SUPER_TYPE, ImmutableList.<String>of(),
NAME_ATTRIBUTE, DESCRIPTION_ATTRIBUTE);
typeSystem.defineClassType(superTypeDefinition);
superTypeDefinition =
TypesUtil.createClassTypeDef(MetadataServiceClient.PROCESS_SUPER_TYPE,
ImmutableList.<String>of(),
NAME_ATTRIBUTE, DESCRIPTION_ATTRIBUTE,
new AttributeDefinition("inputs",
HierarchicalTypeDefinition<ClassType> processType = TypesUtil
.createClassTypeDef(MetadataServiceClient.PROCESS_SUPER_TYPE, ImmutableList.<String>of(),
NAME_ATTRIBUTE, DESCRIPTION_ATTRIBUTE, new AttributeDefinition("inputs",
DataTypes.arrayTypeName(MetadataServiceClient.DATA_SET_SUPER_TYPE),
new Multiplicity(0, Integer.MAX_VALUE, false), false, null),
Multiplicity.OPTIONAL, false, null),
new AttributeDefinition("outputs",
DataTypes.arrayTypeName(MetadataServiceClient.DATA_SET_SUPER_TYPE),
new Multiplicity(0, Integer.MAX_VALUE, false), false, null)
);
typeSystem.defineClassType(superTypeDefinition);
Multiplicity.OPTIONAL, false, null));
TypesDef typesDef = TypeUtils
.getTypesDef(ImmutableList.<EnumTypeDefinition>of(), ImmutableList.<StructTypeDefinition>of(),
ImmutableList.<HierarchicalTypeDefinition<TraitType>>of(),
ImmutableList.of(infraType, datasetType, processType));
createType(TypesSerialization.toJson(typesDef));
}
/**
......@@ -333,6 +329,9 @@ public class DefaultMetadataService implements MetadataService {
// ensure trait type is already registered with the TS
Preconditions.checkArgument(typeSystem.isRegistered(traitName),
"trait=%s should be defined in type system before it can be added", traitName);
// ensure trait is not already defined
Preconditions.checkArgument(!getTraitNames(guid).contains(traitName),
"trait=%s is already defined for entity=%s", traitName, guid);
repository.addTrait(guid, traitInstance);
......
......@@ -20,18 +20,18 @@ package org.apache.hadoop.metadata.query
import com.thinkaurelius.titan.core.TitanGraph
import org.apache.hadoop.metadata.query.Expressions._
import org.slf4j.{LoggerFactory, Logger}
object QueryProcessor {
val LOG : Logger = LoggerFactory.getLogger("org.apache.hadoop.metadata.query.QueryProcessor")
def evaluate(e: Expression, g: TitanGraph, gP : GraphPersistenceStrategies = GraphPersistenceStrategy1):
GremlinQueryResult = {
val e1 = validate(e)
val q = new GremlinTranslator(e1, gP).translate()
// println("---------------------")
// println("Query: " + e1)
// println("Expression Tree:\n" + e1.treeString)
// println("Gremlin Query: " + q.queryStr)
// println("---------------------")
LOG.debug("Query: " + e1)
LOG.debug("Expression Tree:\n" + e1.treeString)
LOG.debug("Gremlin Query: " + q.queryStr)
new GremlinEvaluator(q, gP, g).evaluate()
}
......
......@@ -22,6 +22,7 @@ import com.google.common.base.Preconditions;
import com.google.common.collect.ImmutableList;
import org.apache.hadoop.metadata.RepositoryMetadataModule;
import org.apache.hadoop.metadata.discovery.graph.GraphBackedDiscoveryService;
import org.apache.hadoop.metadata.repository.EntityNotFoundException;
import org.apache.hadoop.metadata.services.DefaultMetadataService;
import org.apache.hadoop.metadata.typesystem.Referenceable;
import org.apache.hadoop.metadata.typesystem.TypesDef;
......@@ -174,7 +175,7 @@ public class HiveLineageServiceTest {
Assert.fail();
}
@Test (expectedExceptions = IllegalArgumentException.class)
@Test (expectedExceptions = EntityNotFoundException.class)
public void testGetInputsBadTableName() throws Exception {
hiveLineageService.getInputs("blah");
Assert.fail();
......@@ -223,7 +224,7 @@ public class HiveLineageServiceTest {
Assert.fail();
}
@Test (expectedExceptions = IllegalArgumentException.class)
@Test (expectedExceptions = EntityNotFoundException.class)
public void testGetOutputsBadTableName() throws Exception {
hiveLineageService.getOutputs("blah");
Assert.fail();
......@@ -285,7 +286,7 @@ public class HiveLineageServiceTest {
Assert.fail();
}
@Test (expectedExceptions = IllegalArgumentException.class)
@Test (expectedExceptions = EntityNotFoundException.class)
public void testGetSchemaBadTableName() throws Exception {
hiveLineageService.getSchema("blah");
Assert.fail();
......
......@@ -29,6 +29,7 @@ import org.apache.hadoop.metadata.TestUtils;
import org.apache.hadoop.metadata.discovery.graph.GraphBackedDiscoveryService;
import org.apache.hadoop.metadata.repository.BaseTest;
import org.apache.hadoop.metadata.repository.Constants;
import org.apache.hadoop.metadata.repository.EntityNotFoundException;
import org.apache.hadoop.metadata.repository.RepositoryException;
import org.apache.hadoop.metadata.typesystem.IStruct;
import org.apache.hadoop.metadata.typesystem.ITypedReferenceableInstance;
......@@ -227,7 +228,7 @@ public class GraphBackedMetadataRepositoryTest {
Assert.assertEquals(traitNames.size(), 0);
}
@Test (expectedExceptions = RepositoryException.class)
@Test (expectedExceptions = EntityNotFoundException.class)
public void testGetTraitNamesForBadEntity() throws Exception {
repositoryService.getTraitNames(UUID.randomUUID().toString());
Assert.fail();
......
......@@ -98,13 +98,17 @@ public class GraphBackedTypeStoreTest {
List<StructTypeDefinition> structTypes = types.structTypesAsJavaList();
Assert.assertEquals(1, structTypes.size());
boolean clsTypeFound = false;
List<HierarchicalTypeDefinition<ClassType>> classTypes = types.classTypesAsJavaList();
Assert.assertEquals(3, classTypes.size());
for (HierarchicalTypeDefinition<ClassType> classType : classTypes) {
ClassType expectedType = ts.getDataType(ClassType.class, classType.typeName);
Assert.assertEquals(expectedType.immediateAttrs.size(), classType.attributeDefinitions.length);
Assert.assertEquals(expectedType.superTypes.size(), classType.superTypes.size());
if (classType.typeName.equals("Manager")) {
ClassType expectedType = ts.getDataType(ClassType.class, classType.typeName);
Assert.assertEquals(expectedType.immediateAttrs.size(), classType.attributeDefinitions.length);
Assert.assertEquals(expectedType.superTypes.size(), classType.superTypes.size());
clsTypeFound = true;
}
}
Assert.assertTrue("Manager type not restored", clsTypeFound);
//validate trait
List<HierarchicalTypeDefinition<TraitType>> traitTypes = types.traitTypesAsJavaList();
......
......@@ -22,6 +22,7 @@ import com.google.common.collect.ImmutableCollection;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableMap;
import com.google.common.collect.ImmutableSet;
import org.apache.commons.lang3.StringUtils;
import org.apache.hadoop.metadata.MetadataException;
import org.apache.hadoop.metadata.typesystem.IReferenceableInstance;
import org.apache.hadoop.metadata.typesystem.persistence.Id;
......@@ -455,7 +456,7 @@ public class DataTypes {
@Override
public String convert(Object val, Multiplicity m) throws MetadataException {
if (val != null) {
if (StringUtils.isNotBlank((CharSequence) val)) {
return val.toString();
}
return convertNull(m);
......
......@@ -70,7 +70,7 @@ public abstract class HierarchicalType<ST extends HierarchicalType, T> extends A
this.fieldMapping = null;
this.numFields = numFields;
this.superTypes = superTypes;
this.immediateAttrs = null;
this.immediateAttrs = ImmutableList.of();
this.attributeNameToType = null;
}
......@@ -86,7 +86,7 @@ public abstract class HierarchicalType<ST extends HierarchicalType, T> extends A
this.attributeNameToType = p.right;
this.numFields = this.fieldMapping.fields.size();
this.superTypes = superTypes == null ? ImmutableList.<String>of() : superTypes;
this.immediateAttrs = ImmutableList.<AttributeInfo>copyOf(fields);
this.immediateAttrs = ImmutableList.copyOf(fields);
}
@Override
......@@ -184,7 +184,7 @@ public abstract class HierarchicalType<ST extends HierarchicalType, T> extends A
(ST) typeSystem.getDataType(superTypeClass, currentPath.typeName);
ImmutableList<AttributeInfo> superTypeFields = superType == this ?
ImmutableList.<AttributeInfo>copyOf(fields) : superType.immediateAttrs;
ImmutableList.copyOf(fields) : superType.immediateAttrs;
Set<String> immediateFields = new HashSet<String>();
......
......@@ -76,7 +76,7 @@ public class AdminResource {
*/
@GET
@Path("version")
@Produces(MediaType.APPLICATION_JSON)
@Produces(Servlets.JSON_MEDIA_TYPE)
public Response getVersion() {
if (version == null) {
try {
......
......@@ -21,6 +21,8 @@ package org.apache.hadoop.metadata.web.resources;
import com.google.common.base.Preconditions;
import org.apache.hadoop.metadata.MetadataException;
import org.apache.hadoop.metadata.MetadataServiceClient;
import org.apache.hadoop.metadata.ParamChecker;
import org.apache.hadoop.metadata.repository.EntityNotFoundException;
import org.apache.hadoop.metadata.services.MetadataService;
import org.apache.hadoop.metadata.typesystem.types.ValueConversionException;
import org.apache.hadoop.metadata.web.util.Servlets;
......@@ -43,7 +45,6 @@ import javax.ws.rs.Produces;
import javax.ws.rs.QueryParam;
import javax.ws.rs.WebApplicationException;
import javax.ws.rs.core.Context;
import javax.ws.rs.core.MediaType;
import javax.ws.rs.core.Response;
import javax.ws.rs.core.UriBuilder;
import javax.ws.rs.core.UriInfo;
......@@ -85,8 +86,8 @@ public class EntityResource {
* Submits an entity definition (instance) corresponding to a given type.
*/
@POST
@Consumes(MediaType.APPLICATION_JSON)
@Produces(MediaType.APPLICATION_JSON)
@Consumes(Servlets.JSON_MEDIA_TYPE)
@Produces(Servlets.JSON_MEDIA_TYPE)
public Response submit(@Context HttpServletRequest request) {
try {
final String entity = Servlets.getRequestPayload(request);
......@@ -127,10 +128,11 @@ public class EntityResource {
*/
@GET
@Path("{guid}")
@Produces(MediaType.APPLICATION_JSON)
@Produces(Servlets.JSON_MEDIA_TYPE)
public Response getEntityDefinition(@PathParam("guid") String guid) {
try {
LOG.debug("Fetching entity definition for guid={} ", guid);
ParamChecker.notEmpty(guid, "guid cannot be null");
final String entityDefinition = metadataService.getEntityDefinition(guid);
JSONObject response = new JSONObject();
......@@ -142,15 +144,20 @@ public class EntityResource {
response.put(MetadataServiceClient.DEFINITION, entityDefinition);
status = Response.Status.OK;
} else {
response.put(MetadataServiceClient.ERROR, Servlets.escapeJsonString(String.format("An entity with GUID={%s} does not exist", guid)));
response.put(MetadataServiceClient.ERROR, Servlets.escapeJsonString(
String.format("An entity with GUID={%s} does not exist", guid)));
}
return Response.status(status).entity(response).build();
} catch (MetadataException | IllegalArgumentException e) {
} catch (EntityNotFoundException e) {
LOG.error("An entity with GUID={} does not exist", guid, e);
throw new WebApplicationException(
Servlets.getErrorResponse(e, Response.Status.NOT_FOUND));
} catch (MetadataException | IllegalArgumentException e) {
LOG.error("Bad GUID={}", guid, e);
throw new WebApplicationException(
Servlets.getErrorResponse(e, Response.Status.BAD_REQUEST));
} catch (Throwable e) {
LOG.error("Unable to get instance definition for GUID {}", guid, e);
throw new WebApplicationException(
......@@ -164,7 +171,7 @@ public class EntityResource {
* @param entityType name of a type which is unique
*/
@GET
@Produces(MediaType.APPLICATION_JSON)
@Produces(Servlets.JSON_MEDIA_TYPE)
public Response getEntityListByType(@QueryParam("type") String entityType) {
try {
Preconditions.checkNotNull(entityType, "Entity type cannot be null");
......@@ -203,17 +210,25 @@ public class EntityResource {
*/
@PUT
@Path("{guid}")
@Produces(MediaType.APPLICATION_JSON)
@Consumes(Servlets.JSON_MEDIA_TYPE)
@Produces(Servlets.JSON_MEDIA_TYPE)
public Response update(@PathParam("guid") String guid,
@QueryParam("property") String property,
@QueryParam("value") String value) {
try {
Preconditions.checkNotNull(property, "Entity property cannot be null");
Preconditions.checkNotNull(value, "Entity value cannot be null");
metadataService.updateEntity(guid, property, value);
JSONObject response = new JSONObject();
response.put(MetadataServiceClient.REQUEST_ID, Thread.currentThread().getName());
return Response.ok(response).build();
} catch (MetadataException e) {
} catch (EntityNotFoundException e) {
LOG.error("An entity with GUID={} does not exist", guid, e);
throw new WebApplicationException(
Servlets.getErrorResponse(e, Response.Status.NOT_FOUND));
} catch (MetadataException | IllegalArgumentException e) {
LOG.error("Unable to add property {} to entity id {}", property, guid, e);
throw new WebApplicationException(
Servlets.getErrorResponse(e, Response.Status.BAD_REQUEST));
......@@ -233,7 +248,7 @@ public class EntityResource {
*/
@GET
@Path("{guid}/traits")
@Produces(MediaType.APPLICATION_JSON)
@Produces(Servlets.JSON_MEDIA_TYPE)
public Response getTraitNames(@PathParam("guid") String guid) {
try {
LOG.debug("Fetching trait names for entity={}", guid);
......@@ -246,6 +261,10 @@ public class EntityResource {
response.put(MetadataServiceClient.COUNT, traitNames.size());
return Response.ok(response).build();
} catch (EntityNotFoundException e) {
LOG.error("An entity with GUID={} does not exist", guid, e);
throw new WebApplicationException(
Servlets.getErrorResponse(e, Response.Status.NOT_FOUND));
} catch (MetadataException | IllegalArgumentException e) {
LOG.error("Unable to get trait names for entity {}", guid, e);
throw new WebApplicationException(
......@@ -264,8 +283,8 @@ public class EntityResource {
*/
@POST
@Path("{guid}/traits")
@Consumes(MediaType.APPLICATION_JSON)
@Produces(MediaType.APPLICATION_JSON)
@Consumes(Servlets.JSON_MEDIA_TYPE)
@Produces(Servlets.JSON_MEDIA_TYPE)
public Response addTrait(@Context HttpServletRequest request,
@PathParam("guid") String guid) {
try {
......@@ -281,6 +300,10 @@ public class EntityResource {
response.put(MetadataServiceClient.GUID, guid);
return Response.created(locationURI).entity(response).build();
} catch (EntityNotFoundException e) {
LOG.error("An entity with GUID={} does not exist", guid, e);
throw new WebApplicationException(
Servlets.getErrorResponse(e, Response.Status.NOT_FOUND));
} catch (MetadataException | IOException | IllegalArgumentException e) {
LOG.error("Unable to add trait for entity={}", guid, e);
throw new WebApplicationException(
......@@ -300,8 +323,8 @@ public class EntityResource {
*/
@DELETE
@Path("{guid}/traits/{traitName}")
@Consumes(MediaType.APPLICATION_JSON)
@Produces(MediaType.APPLICATION_JSON)
@Consumes(Servlets.JSON_MEDIA_TYPE)
@Produces(Servlets.JSON_MEDIA_TYPE)
public Response deleteTrait(@Context HttpServletRequest request,
@PathParam("guid") String guid,
@PathParam(TRAIT_NAME) String traitName) {
......@@ -315,6 +338,10 @@ public class EntityResource {
response.put(TRAIT_NAME, traitName);
return Response.ok(response).build();
} catch (EntityNotFoundException e) {
LOG.error("An entity with GUID={} does not exist", guid, e);
throw new WebApplicationException(
Servlets.getErrorResponse(e, Response.Status.NOT_FOUND));
} catch (MetadataException | IllegalArgumentException e) {
LOG.error("Unable to delete trait name={} for entity={}", traitName, guid, e);
throw new WebApplicationException(
......
......@@ -22,6 +22,7 @@ import org.apache.hadoop.metadata.MetadataServiceClient;
import org.apache.hadoop.metadata.ParamChecker;
import org.apache.hadoop.metadata.discovery.DiscoveryException;
import org.apache.hadoop.metadata.discovery.LineageService;
import org.apache.hadoop.metadata.repository.EntityNotFoundException;
import org.apache.hadoop.metadata.web.util.Servlets;
import org.codehaus.jettison.json.JSONObject;
import org.slf4j.Logger;
......@@ -32,7 +33,6 @@ import javax.inject.Singleton;
import javax.servlet.http.HttpServletRequest;
import javax.ws.rs.*;
import javax.ws.rs.core.Context;
import javax.ws.rs.core.MediaType;
import javax.ws.rs.core.Response;
/**
......@@ -64,8 +64,8 @@ public class HiveLineageResource {
*/
@GET
@Path("table/{tableName}/inputs/graph")
@Consumes(MediaType.APPLICATION_JSON)
@Produces(MediaType.APPLICATION_JSON)
@Consumes(Servlets.JSON_MEDIA_TYPE)
@Produces(Servlets.JSON_MEDIA_TYPE)
public Response inputsGraph(@Context HttpServletRequest request,
@PathParam("tableName") String tableName) {
LOG.info("Fetching lineage inputs graph for tableName={}", tableName);
......@@ -80,6 +80,10 @@ public class HiveLineageResource {
response.put(MetadataServiceClient.RESULTS, new JSONObject(jsonResult));
return Response.ok(response).build();
} catch (EntityNotFoundException e) {
LOG.error("table entity not found for {}", tableName, e);
throw new WebApplicationException(
Servlets.getErrorResponse(e, Response.Status.NOT_FOUND));
} catch (DiscoveryException | IllegalArgumentException e) {
LOG.error("Unable to get lineage inputs graph for table {}", tableName, e);
throw new WebApplicationException(
......@@ -98,8 +102,8 @@ public class HiveLineageResource {
*/
@GET
@Path("table/{tableName}/outputs/graph")
@Consumes(MediaType.APPLICATION_JSON)
@Produces(MediaType.APPLICATION_JSON)
@Consumes(Servlets.JSON_MEDIA_TYPE)
@Produces(Servlets.JSON_MEDIA_TYPE)
public Response outputsGraph(@Context HttpServletRequest request,
@PathParam("tableName") String tableName) {
LOG.info("Fetching lineage outputs graph for tableName={}", tableName);
......@@ -114,6 +118,10 @@ public class HiveLineageResource {
response.put(MetadataServiceClient.RESULTS, new JSONObject(jsonResult));
return Response.ok(response).build();
} catch (EntityNotFoundException e) {
LOG.error("table entity not found for {}", tableName, e);
throw new WebApplicationException(
Servlets.getErrorResponse(e, Response.Status.NOT_FOUND));
} catch (DiscoveryException | IllegalArgumentException e) {
LOG.error("Unable to get lineage outputs graph for table {}", tableName, e);
throw new WebApplicationException(
......@@ -132,8 +140,8 @@ public class HiveLineageResource {
*/
@GET
@Path("table/{tableName}/schema")
@Consumes(MediaType.APPLICATION_JSON)
@Produces(MediaType.APPLICATION_JSON)
@Consumes(Servlets.JSON_MEDIA_TYPE)
@Produces(Servlets.JSON_MEDIA_TYPE)
public Response schema(@Context HttpServletRequest request,
@PathParam("tableName") String tableName) {
LOG.info("Fetching schema for tableName={}", tableName);
......@@ -148,6 +156,10 @@ public class HiveLineageResource {
response.put(MetadataServiceClient.RESULTS, new JSONObject(jsonResult));
return Response.ok(response).build();
} catch (EntityNotFoundException e) {
LOG.error("table entity not found for {}", tableName, e);
throw new WebApplicationException(
Servlets.getErrorResponse(e, Response.Status.NOT_FOUND));
} catch (DiscoveryException | IllegalArgumentException e) {
LOG.error("Unable to get schema for table {}", tableName, e);
throw new WebApplicationException(
......
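All three lineage endpoints above gain the same catch clause: EntityNotFoundException is handled ahead of the generic DiscoveryException handler, so an unknown table now maps to 404 instead of a blanket 400. A minimal sketch of the pattern, assuming the service method signature and the BAD_REQUEST status in the generic handler (both cut off by the hunk boundaries here):

import org.apache.hadoop.metadata.discovery.DiscoveryException;
import org.apache.hadoop.metadata.repository.EntityNotFoundException;
import org.apache.hadoop.metadata.web.util.Servlets;

import javax.ws.rs.GET;
import javax.ws.rs.Path;
import javax.ws.rs.PathParam;
import javax.ws.rs.Produces;
import javax.ws.rs.WebApplicationException;
import javax.ws.rs.core.Response;

@Path("lineage/hive/sketch")
public class LineageErrorMappingSketch {

    // stand-in for the project's LineageService; the throws clause is an assumption
    interface LineageLookup {
        String inputsGraph(String tableName) throws DiscoveryException, EntityNotFoundException;
    }

    private final LineageLookup lineage;

    public LineageErrorMappingSketch(LineageLookup lineage) {
        this.lineage = lineage;
    }

    @GET
    @Path("table/{tableName}/inputs/graph")
    @Produces(Servlets.JSON_MEDIA_TYPE)
    public Response inputsGraph(@PathParam("tableName") String tableName) {
        try {
            return Response.ok(lineage.inputsGraph(tableName)).build();
        } catch (EntityNotFoundException e) {
            // new in this change: unknown table -> 404 NOT_FOUND
            throw new WebApplicationException(
                    Servlets.getErrorResponse(e, Response.Status.NOT_FOUND));
        } catch (DiscoveryException | IllegalArgumentException e) {
            // pre-existing behavior: invalid query or argument -> 400 (assumed BAD_REQUEST)
            throw new WebApplicationException(
                    Servlets.getErrorResponse(e, Response.Status.BAD_REQUEST));
        }
    }
}

The ordering matters: EntityNotFoundException must be caught before the broader handler, otherwise the missing-table case would still surface as 400, which is exactly what the integration-test expectation change further down (BAD_REQUEST to NOT_FOUND) verifies.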
......@@ -32,12 +32,12 @@ import org.slf4j.LoggerFactory;
import javax.inject.Inject;
import javax.inject.Singleton;
import javax.ws.rs.Consumes;
import javax.ws.rs.GET;
import javax.ws.rs.Path;
import javax.ws.rs.Produces;
import javax.ws.rs.QueryParam;
import javax.ws.rs.WebApplicationException;
import javax.ws.rs.core.MediaType;
import javax.ws.rs.core.Response;
import java.util.List;
import java.util.Map;
......@@ -75,7 +75,8 @@ public class MetadataDiscoveryResource {
*/
@GET
@Path("search")
@Produces(MediaType.APPLICATION_JSON)
@Consumes(Servlets.JSON_MEDIA_TYPE)
@Produces(Servlets.JSON_MEDIA_TYPE)
public Response search(@QueryParam("query") String query) {
JSONObject response;
try { // try as DSL first; on failure, fall back to full-text search
......@@ -125,7 +126,8 @@ public class MetadataDiscoveryResource {
*/
@GET
@Path("search/dsl")
@Produces(MediaType.APPLICATION_JSON)
@Consumes(Servlets.JSON_MEDIA_TYPE)
@Produces(Servlets.JSON_MEDIA_TYPE)
public Response searchUsingQueryDSL(@QueryParam("query") String dslQuery) {
try {
ParamChecker.notEmpty(dslQuery, "dslQuery cannot be null or empty");
......@@ -156,7 +158,8 @@ public class MetadataDiscoveryResource {
*/
@GET
@Path("search/gremlin")
@Produces(MediaType.APPLICATION_JSON)
@Consumes(Servlets.JSON_MEDIA_TYPE)
@Produces(Servlets.JSON_MEDIA_TYPE)
public Response searchUsingGremlinQuery(@QueryParam("query") String gremlinQuery) {
try {
ParamChecker.notEmpty(gremlinQuery, "gremlinQuery cannot be null or empty");
......@@ -196,7 +199,8 @@ public class MetadataDiscoveryResource {
*/
@GET
@Path("search/fulltext")
@Produces(MediaType.APPLICATION_JSON)
@Consumes(Servlets.JSON_MEDIA_TYPE)
@Produces(Servlets.JSON_MEDIA_TYPE)
public Response searchUsingFullText(@QueryParam("query") String query) {
try {
ParamChecker.notEmpty(query, "query cannot be null or empty");
......
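For context on the fallback comment in search() above: the endpoint first runs the free-form query as DSL and, if that fails, retries the same string as a full-text search. A sketch of the control flow, with the DiscoveryService method names (searchByDSL, searchByFullText) taken from the project's discovery API as assumptions:

import org.apache.hadoop.metadata.discovery.DiscoveryException;
import org.apache.hadoop.metadata.discovery.DiscoveryService;

public class SearchFallbackSketch {
    private final DiscoveryService discoveryService;

    public SearchFallbackSketch(DiscoveryService discoveryService) {
        this.discoveryService = discoveryService;
    }

    public String search(String query) throws DiscoveryException {
        try {
            // first interpretation: the query string is a DSL query
            return discoveryService.searchByDSL(query);
        } catch (Throwable throwable) {
            // DSL parse or execution failed: retry the same string as full text
            return discoveryService.searchByFullText(query);
        }
    }
}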
......@@ -46,7 +46,6 @@ import javax.ws.rs.PathParam;
import javax.ws.rs.Produces;
import javax.ws.rs.QueryParam;
import javax.ws.rs.WebApplicationException;
import javax.ws.rs.core.MediaType;
import javax.ws.rs.core.Response;
import java.util.HashMap;
import java.util.Map;
......@@ -115,7 +114,7 @@ public class RexsterGraphResource {
*/
@GET
@Path("/vertices/{id}")
@Produces({MediaType.APPLICATION_JSON})
@Produces({Servlets.JSON_MEDIA_TYPE})
public Response getVertex(@PathParam("id") final String vertexId) {
LOG.info("Get vertex for vertexId= {}", vertexId);
validateInputs("Invalid argument: vertex id passed is null or empty.", vertexId);
......@@ -152,7 +151,7 @@ public class RexsterGraphResource {
*/
@GET
@Path("/vertices/properties/{id}")
@Produces({MediaType.APPLICATION_JSON})
@Produces({Servlets.JSON_MEDIA_TYPE})
public Response getVertexProperties(@PathParam("id") final String vertexId,
@DefaultValue("false") @QueryParam("relationships")
final String relationships) {
......@@ -192,7 +191,7 @@ public class RexsterGraphResource {
*/
@GET
@Path("/vertices")
@Produces({MediaType.APPLICATION_JSON})
@Produces({Servlets.JSON_MEDIA_TYPE})
public Response getVertices(@QueryParam("key") final String key,
@QueryParam("value") final String value) {
LOG.info("Get vertices for property key= {}, value= {}", key, value);
......@@ -216,7 +215,7 @@ public class RexsterGraphResource {
*/
@GET
@Path("vertices/{id}/{direction}")
@Produces({MediaType.APPLICATION_JSON})
@Produces({Servlets.JSON_MEDIA_TYPE})
public Response getVertexEdges(@PathParam("id") String vertexId,
@PathParam("direction") String direction) {
LOG.info("Get vertex edges for vertexId= {}, direction= {}", vertexId, direction);
......@@ -288,7 +287,7 @@ public class RexsterGraphResource {
*/
@GET
@Path("/edges/{id}")
@Produces({MediaType.APPLICATION_JSON})
@Produces({Servlets.JSON_MEDIA_TYPE})
public Response getEdge(@PathParam("id") final String edgeId) {
LOG.info("Get vertex for edgeId= {}", edgeId);
validateInputs("Invalid argument: edge id passed is null or empty.", edgeId);
......
......@@ -43,9 +43,9 @@ import javax.ws.rs.Produces;
import javax.ws.rs.QueryParam;
import javax.ws.rs.WebApplicationException;
import javax.ws.rs.core.Context;
import javax.ws.rs.core.MediaType;
import javax.ws.rs.core.Response;
import java.util.ArrayList;
import java.util.Collection;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
......@@ -78,8 +78,8 @@ public class TypesResource {
* domain. Could represent things like Hive Database, Hive Table, etc.
*/
@POST
@Consumes(MediaType.APPLICATION_JSON)
@Produces(MediaType.APPLICATION_JSON)
@Consumes(Servlets.JSON_MEDIA_TYPE)
@Produces(Servlets.JSON_MEDIA_TYPE)
public Response submit(@Context HttpServletRequest request) {
try {
final String typeDefinition = Servlets.getRequestPayload(request);
......@@ -88,18 +88,17 @@ public class TypesResource {
JSONObject typesJson = metadataService.createType(typeDefinition);
final JSONArray typesJsonArray = typesJson.getJSONArray(MetadataServiceClient.TYPES);
List<Map<String, String>> typesAddedList = new ArrayList<>();
JSONArray typesResponse = new JSONArray();
for (int i = 0; i < typesJsonArray.length(); i++) {
final String name = typesJsonArray.getString(i);
typesAddedList.add(
new HashMap<String, String>() {{
put(MetadataServiceClient.NAME, name);
}});
typesResponse.put(new JSONObject() {{
put(MetadataServiceClient.NAME, name);
}});
}
JSONObject response = new JSONObject();
response.put(MetadataServiceClient.REQUEST_ID, Servlets.getRequestId());
response.put(MetadataServiceClient.TYPES, typesAddedList);
response.put(MetadataServiceClient.TYPES, typesResponse);
return Response.status(ClientResponse.Status.CREATED).entity(response).build();
} catch (MetadataException | IllegalArgumentException e) {
LOG.error("Unable to persist types", e);
......@@ -119,7 +118,7 @@ public class TypesResource {
*/
@GET
@Path("{typeName}")
@Produces(MediaType.APPLICATION_JSON)
@Produces(Servlets.JSON_MEDIA_TYPE)
public Response getDefinition(@Context HttpServletRequest request,
@PathParam("typeName") String typeName) {
try {
......@@ -155,7 +154,7 @@ public class TypesResource {
* @return entity names response payload as json
*/
@GET
@Produces(MediaType.APPLICATION_JSON)
@Produces(Servlets.JSON_MEDIA_TYPE)
public Response getTypesByFilter(@Context HttpServletRequest request,
@DefaultValue(TYPE_ALL) @QueryParam("type") String type) {
try {
......
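The submit() change above swaps the List<Map<String, String>> payload for an explicit JSONArray of JSONObjects, so the TYPES field serializes as a proper JSON array of {"name": ...} entries rather than relying on Jettison's map handling. A self-contained sketch of the response construction, with literal keys standing in for the MetadataServiceClient constants:

import org.codehaus.jettison.json.JSONArray;
import org.codehaus.jettison.json.JSONException;
import org.codehaus.jettison.json.JSONObject;

public final class TypesResponseSketch {
    private TypesResponseSketch() {}

    // typesJsonArray: the type names returned by metadataService.createType()
    public static JSONObject buildResponse(JSONArray typesJsonArray, String requestId)
            throws JSONException {
        JSONArray typesResponse = new JSONArray();
        for (int i = 0; i < typesJsonArray.length(); i++) {
            JSONObject entry = new JSONObject();
            entry.put("name", typesJsonArray.getString(i)); // MetadataServiceClient.NAME
            typesResponse.put(entry);
        }

        JSONObject response = new JSONObject();
        response.put("requestId", requestId);   // MetadataServiceClient.REQUEST_ID
        response.put("types", typesResponse);   // MetadataServiceClient.TYPES
        return response;
    }

    public static void main(String[] args) throws JSONException {
        JSONArray created = new JSONArray().put("hive_table");
        // prints {"requestId":"rq-1","types":[{"name":"hive_table"}]}
        System.out.println(buildResponse(created, "rq-1"));
    }
}

This is also what the strengthened TypesJerseyResourceIT assertions below lock in: the test now reads TYPES back as a JSONArray and checks both its length and the exact name that was submitted.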
......@@ -18,7 +18,6 @@
package org.apache.hadoop.metadata.web.util;
import com.google.common.base.Preconditions;
import org.apache.commons.io.IOUtils;
import org.apache.commons.lang3.StringEscapeUtils;
import org.apache.commons.lang3.StringUtils;
......@@ -41,13 +40,13 @@ import java.io.StringWriter;
*/
public final class Servlets {
public static final String QUOTE = "\"";
private static final Logger LOG = LoggerFactory.getLogger(Servlets.class);
private Servlets() {
/* singleton */
}
public static final String JSON_MEDIA_TYPE = MediaType.APPLICATION_JSON + "; charset=UTF-8";
/**
* Returns the user of the given request.
*
......@@ -132,7 +131,7 @@ public final class Servlets {
return Response
.status(status)
.entity(errorEntity)
.type(MediaType.APPLICATION_JSON)
.type(JSON_MEDIA_TYPE)
.build();
}
......
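The new Servlets.JSON_MEDIA_TYPE constant is what every @Consumes/@Produces swap in this commit points at: MediaType.APPLICATION_JSON with an explicit "; charset=UTF-8" parameter, presumably to pin UTF-8 for responses carrying non-ASCII type or entity names instead of leaving the charset to container defaults. A minimal standalone check (the enclosing class is illustrative):

import javax.ws.rs.core.MediaType;

public final class MediaTypeSketch {
    // mirrors the constant added to Servlets; concatenating String constants
    // keeps the result a compile-time constant, usable inside annotations
    // such as @Produces(Servlets.JSON_MEDIA_TYPE)
    public static final String JSON_MEDIA_TYPE =
            MediaType.APPLICATION_JSON + "; charset=UTF-8";

    public static void main(String[] args) {
        System.out.println(JSON_MEDIA_TYPE); // application/json; charset=UTF-8
    }
}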
......@@ -21,13 +21,13 @@ package org.apache.hadoop.metadata.web.resources;
import com.sun.jersey.api.client.ClientResponse;
import com.sun.jersey.api.client.WebResource;
import org.apache.commons.configuration.PropertiesConfiguration;
import org.apache.hadoop.metadata.web.util.Servlets;
import org.codehaus.jettison.json.JSONObject;
import org.testng.Assert;
import org.testng.annotations.BeforeClass;
import org.testng.annotations.Test;
import javax.ws.rs.HttpMethod;
import javax.ws.rs.core.MediaType;
import javax.ws.rs.core.Response;
/**
......@@ -46,8 +46,8 @@ public class AdminJerseyResourceIT extends BaseResourceIT {
.path("api/metadata/admin/version");
ClientResponse clientResponse = resource
.accept(MediaType.APPLICATION_JSON)
.type(MediaType.APPLICATION_JSON)
.accept(Servlets.JSON_MEDIA_TYPE)
.type(Servlets.JSON_MEDIA_TYPE)
.method(HttpMethod.GET, ClientResponse.class);
Assert.assertEquals(clientResponse.getStatus(), Response.Status.OK.getStatusCode());
......
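Every integration test in the commit follows the same substitution shown above, so the client negotiates the charset-qualified media type the resources now produce. A runnable sketch of the Jersey 1.x client pattern, with the base URL as an assumption for illustration:

import com.sun.jersey.api.client.Client;
import com.sun.jersey.api.client.ClientResponse;
import com.sun.jersey.api.client.WebResource;
import org.apache.hadoop.metadata.web.util.Servlets;

import javax.ws.rs.HttpMethod;

public class JsonMediaTypeClientSketch {
    public static void main(String[] args) {
        WebResource resource = Client.create()
                .resource("http://localhost:21000/")    // assumed service URL
                .path("api/metadata/admin/version");

        ClientResponse clientResponse = resource
                .accept(Servlets.JSON_MEDIA_TYPE)        // was MediaType.APPLICATION_JSON
                .type(Servlets.JSON_MEDIA_TYPE)
                .method(HttpMethod.GET, ClientResponse.class);

        System.out.println("status=" + clientResponse.getStatus());
        System.out.println(clientResponse.getEntity(String.class));
    }
}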
......@@ -30,12 +30,12 @@ import org.apache.hadoop.metadata.typesystem.json.TypesSerialization;
import org.apache.hadoop.metadata.typesystem.persistence.Id;
import org.apache.hadoop.metadata.typesystem.types.ClassType;
import org.apache.hadoop.metadata.typesystem.types.HierarchicalTypeDefinition;
import org.apache.hadoop.metadata.web.util.Servlets;
import org.codehaus.jettison.json.JSONObject;
import org.testng.Assert;
import org.testng.annotations.BeforeClass;
import javax.ws.rs.HttpMethod;
import javax.ws.rs.core.MediaType;
import javax.ws.rs.core.Response;
import javax.ws.rs.core.UriBuilder;
......@@ -73,8 +73,8 @@ public abstract class BaseResourceIT {
.path("api/metadata/types");
ClientResponse clientResponse = resource
.accept(MediaType.APPLICATION_JSON)
.type(MediaType.APPLICATION_JSON)
.accept(Servlets.JSON_MEDIA_TYPE)
.type(Servlets.JSON_MEDIA_TYPE)
.method(HttpMethod.POST, ClientResponse.class, typesAsJSON);
Assert.assertEquals(clientResponse.getStatus(), Response.Status.CREATED.getStatusCode());
......
......@@ -37,6 +37,7 @@ import org.apache.hadoop.metadata.typesystem.types.StructTypeDefinition;
import org.apache.hadoop.metadata.typesystem.types.TraitType;
import org.apache.hadoop.metadata.typesystem.types.TypeUtils;
import org.apache.hadoop.metadata.typesystem.types.utils.TypesUtil;
import org.apache.hadoop.metadata.web.util.Servlets;
import org.codehaus.jettison.json.JSONArray;
import org.codehaus.jettison.json.JSONObject;
import org.testng.Assert;
......@@ -44,7 +45,6 @@ import org.testng.annotations.BeforeClass;
import org.testng.annotations.Test;
import javax.ws.rs.HttpMethod;
import javax.ws.rs.core.MediaType;
import javax.ws.rs.core.Response;
import java.util.List;
......@@ -72,8 +72,8 @@ public class HiveLineageJerseyResourceIT extends BaseResourceIT {
.path("graph");
ClientResponse clientResponse = resource
.accept(MediaType.APPLICATION_JSON)
.type(MediaType.APPLICATION_JSON)
.accept(Servlets.JSON_MEDIA_TYPE)
.type(Servlets.JSON_MEDIA_TYPE)
.method(HttpMethod.GET, ClientResponse.class);
Assert.assertEquals(clientResponse.getStatus(), Response.Status.OK.getStatusCode());
......@@ -106,8 +106,8 @@ public class HiveLineageJerseyResourceIT extends BaseResourceIT {
.path("graph");
ClientResponse clientResponse = resource
.accept(MediaType.APPLICATION_JSON)
.type(MediaType.APPLICATION_JSON)
.accept(Servlets.JSON_MEDIA_TYPE)
.type(Servlets.JSON_MEDIA_TYPE)
.method(HttpMethod.GET, ClientResponse.class);
Assert.assertEquals(clientResponse.getStatus(), Response.Status.OK.getStatusCode());
......@@ -139,8 +139,8 @@ public class HiveLineageJerseyResourceIT extends BaseResourceIT {
.path("schema");
ClientResponse clientResponse = resource
.accept(MediaType.APPLICATION_JSON)
.type(MediaType.APPLICATION_JSON)
.accept(Servlets.JSON_MEDIA_TYPE)
.type(Servlets.JSON_MEDIA_TYPE)
.method(HttpMethod.GET, ClientResponse.class);
Assert.assertEquals(clientResponse.getStatus(), Response.Status.OK.getStatusCode());
......@@ -174,8 +174,8 @@ public class HiveLineageJerseyResourceIT extends BaseResourceIT {
.path("schema");
ClientResponse clientResponse = resource
.accept(MediaType.APPLICATION_JSON)
.type(MediaType.APPLICATION_JSON)
.accept(Servlets.JSON_MEDIA_TYPE)
.type(Servlets.JSON_MEDIA_TYPE)
.method(HttpMethod.GET, ClientResponse.class);
Assert.assertEquals(clientResponse.getStatus(),
Response.Status.NOT_FOUND.getStatusCode());
......@@ -189,11 +189,11 @@ public class HiveLineageJerseyResourceIT extends BaseResourceIT {
.path("schema");
ClientResponse clientResponse = resource
.accept(MediaType.APPLICATION_JSON)
.type(MediaType.APPLICATION_JSON)
.accept(Servlets.JSON_MEDIA_TYPE)
.type(Servlets.JSON_MEDIA_TYPE)
.method(HttpMethod.GET, ClientResponse.class);
Assert.assertEquals(clientResponse.getStatus(),
Response.Status.BAD_REQUEST.getStatusCode());
Response.Status.NOT_FOUND.getStatusCode());
}
private void setUpTypes() throws Exception {
......
......@@ -34,6 +34,7 @@ import org.apache.hadoop.metadata.typesystem.types.StructTypeDefinition;
import org.apache.hadoop.metadata.typesystem.types.TraitType;
import org.apache.hadoop.metadata.typesystem.types.TypeUtils;
import org.apache.hadoop.metadata.typesystem.types.utils.TypesUtil;
import org.apache.hadoop.metadata.web.util.Servlets;
import org.codehaus.jettison.json.JSONArray;
import org.codehaus.jettison.json.JSONObject;
import org.testng.Assert;
......@@ -41,7 +42,6 @@ import org.testng.annotations.BeforeClass;
import org.testng.annotations.Test;
import javax.ws.rs.HttpMethod;
import javax.ws.rs.core.MediaType;
import javax.ws.rs.core.Response;
import java.util.List;
......@@ -66,8 +66,8 @@ public class MetadataDiscoveryJerseyResourceIT extends BaseResourceIT {
.queryParam("query", dslQuery);
ClientResponse clientResponse = resource
.accept(MediaType.APPLICATION_JSON)
.type(MediaType.APPLICATION_JSON)
.accept(Servlets.JSON_MEDIA_TYPE)
.type(Servlets.JSON_MEDIA_TYPE)
.method(HttpMethod.GET, ClientResponse.class);
Assert.assertEquals(clientResponse.getStatus(), Response.Status.OK.getStatusCode());
......@@ -99,8 +99,8 @@ public class MetadataDiscoveryJerseyResourceIT extends BaseResourceIT {
.queryParam("query", dslQuery);
ClientResponse clientResponse = resource
.accept(MediaType.APPLICATION_JSON)
.type(MediaType.APPLICATION_JSON)
.accept(Servlets.JSON_MEDIA_TYPE)
.type(Servlets.JSON_MEDIA_TYPE)
.method(HttpMethod.GET, ClientResponse.class);
Assert.assertEquals(clientResponse.getStatus(),
Response.Status.BAD_REQUEST.getStatusCode());
......@@ -114,8 +114,8 @@ public class MetadataDiscoveryJerseyResourceIT extends BaseResourceIT {
.queryParam("query", query);
ClientResponse clientResponse = resource
.accept(MediaType.APPLICATION_JSON)
.type(MediaType.APPLICATION_JSON)
.accept(Servlets.JSON_MEDIA_TYPE)
.type(Servlets.JSON_MEDIA_TYPE)
.method(HttpMethod.GET, ClientResponse.class);
Assert.assertEquals(clientResponse.getStatus(), Response.Status.OK.getStatusCode());
......@@ -137,8 +137,8 @@ public class MetadataDiscoveryJerseyResourceIT extends BaseResourceIT {
.queryParam("query", query);
ClientResponse clientResponse = resource
.accept(MediaType.APPLICATION_JSON)
.type(MediaType.APPLICATION_JSON)
.accept(Servlets.JSON_MEDIA_TYPE)
.type(Servlets.JSON_MEDIA_TYPE)
.method(HttpMethod.GET, ClientResponse.class);
Assert.assertEquals(clientResponse.getStatus(), Response.Status.OK.getStatusCode());
......
......@@ -20,12 +20,12 @@ package org.apache.hadoop.metadata.web.resources;
import com.sun.jersey.api.client.ClientResponse;
import com.sun.jersey.api.client.WebResource;
import org.apache.hadoop.metadata.web.util.Servlets;
import org.testng.Assert;
import org.testng.annotations.BeforeClass;
import org.testng.annotations.Test;
import javax.ws.rs.HttpMethod;
import javax.ws.rs.core.MediaType;
import javax.ws.rs.core.Response;
/**
......@@ -49,8 +49,8 @@ public class RexsterGraphJerseyResourceIT extends BaseResourceIT {
.path("0");
ClientResponse clientResponse = resource
.accept(MediaType.APPLICATION_JSON)
.type(MediaType.APPLICATION_JSON)
.accept(Servlets.JSON_MEDIA_TYPE)
.type(Servlets.JSON_MEDIA_TYPE)
.method(HttpMethod.GET, ClientResponse.class);
Assert.assertEquals(clientResponse.getStatus(), Response.Status.OK.getStatusCode());
String response = clientResponse.getEntity(String.class);
......@@ -62,8 +62,8 @@ public class RexsterGraphJerseyResourceIT extends BaseResourceIT {
.path("api/metadata/graph/vertices/blah");
ClientResponse clientResponse = resource
.accept(MediaType.APPLICATION_JSON)
.type(MediaType.APPLICATION_JSON)
.accept(Servlets.JSON_MEDIA_TYPE)
.type(Servlets.JSON_MEDIA_TYPE)
.method(HttpMethod.GET, ClientResponse.class);
Assert.assertEquals(clientResponse.getStatus(), Response.Status.NOT_FOUND.getStatusCode());
}
......
......@@ -32,6 +32,7 @@ import org.apache.hadoop.metadata.typesystem.types.HierarchicalTypeDefinition;
import org.apache.hadoop.metadata.typesystem.types.Multiplicity;
import org.apache.hadoop.metadata.typesystem.types.TraitType;
import org.apache.hadoop.metadata.typesystem.types.utils.TypesUtil;
import org.apache.hadoop.metadata.web.util.Servlets;
import org.codehaus.jettison.json.JSONArray;
import org.codehaus.jettison.json.JSONObject;
import org.testng.Assert;
......@@ -40,7 +41,6 @@ import org.testng.annotations.BeforeClass;
import org.testng.annotations.Test;
import javax.ws.rs.HttpMethod;
import javax.ws.rs.core.MediaType;
import javax.ws.rs.core.Response;
import java.util.ArrayList;
import java.util.List;
......@@ -74,8 +74,8 @@ public class TypesJerseyResourceIT extends BaseResourceIT {
.path("api/metadata/types");
ClientResponse clientResponse = resource
.accept(MediaType.APPLICATION_JSON)
.type(MediaType.APPLICATION_JSON)
.accept(Servlets.JSON_MEDIA_TYPE)
.type(Servlets.JSON_MEDIA_TYPE)
.method(HttpMethod.POST, ClientResponse.class, typesAsJSON);
Assert.assertEquals(clientResponse.getStatus(), Response.Status.CREATED.getStatusCode());
......@@ -83,7 +83,9 @@ public class TypesJerseyResourceIT extends BaseResourceIT {
Assert.assertNotNull(responseAsString);
JSONObject response = new JSONObject(responseAsString);
Assert.assertNotNull(response.get(MetadataServiceClient.TYPES));
JSONArray typesAdded = response.getJSONArray(MetadataServiceClient.TYPES);
Assert.assertEquals(typesAdded.length(), 1);
Assert.assertEquals(typesAdded.getJSONObject(0).getString("name"), typeDefinition.typeName);
Assert.assertNotNull(response.get(MetadataServiceClient.REQUEST_ID));
}
}
......@@ -98,8 +100,8 @@ public class TypesJerseyResourceIT extends BaseResourceIT {
.path(typeDefinition.typeName);
ClientResponse clientResponse = resource
.accept(MediaType.APPLICATION_JSON)
.type(MediaType.APPLICATION_JSON)
.accept(Servlets.JSON_MEDIA_TYPE)
.type(Servlets.JSON_MEDIA_TYPE)
.method(HttpMethod.GET, ClientResponse.class);
Assert.assertEquals(clientResponse.getStatus(), Response.Status.OK.getStatusCode());
......@@ -130,8 +132,8 @@ public class TypesJerseyResourceIT extends BaseResourceIT {
.path("blah");
ClientResponse clientResponse = resource
.accept(MediaType.APPLICATION_JSON)
.type(MediaType.APPLICATION_JSON)
.accept(Servlets.JSON_MEDIA_TYPE)
.type(Servlets.JSON_MEDIA_TYPE)
.method(HttpMethod.GET, ClientResponse.class);
Assert.assertEquals(clientResponse.getStatus(), Response.Status.NOT_FOUND.getStatusCode());
}
......@@ -142,8 +144,8 @@ public class TypesJerseyResourceIT extends BaseResourceIT {
.path("api/metadata/types");
ClientResponse clientResponse = resource
.accept(MediaType.APPLICATION_JSON)
.type(MediaType.APPLICATION_JSON)
.accept(Servlets.JSON_MEDIA_TYPE)
.type(Servlets.JSON_MEDIA_TYPE)
.method(HttpMethod.GET, ClientResponse.class);
Assert.assertEquals(clientResponse.getStatus(), Response.Status.OK.getStatusCode());
......@@ -166,8 +168,8 @@ public class TypesJerseyResourceIT extends BaseResourceIT {
ClientResponse clientResponse = resource
.queryParam("type", DataTypes.TypeCategory.TRAIT.name())
.accept(MediaType.APPLICATION_JSON)
.type(MediaType.APPLICATION_JSON)
.accept(Servlets.JSON_MEDIA_TYPE)
.type(Servlets.JSON_MEDIA_TYPE)
.method(HttpMethod.GET, ClientResponse.class);
Assert.assertEquals(clientResponse.getStatus(), Response.Status.OK.getStatusCode());
......