Commit 7cf0db5b by Shwetha GS

Merge branch 'master' into dal

parents c4a7d6f7 d86caa89
@@ -25,6 +25,7 @@ import org.apache.hadoop.hive.ql.session.SessionState;
 import org.apache.hadoop.metadata.MetadataServiceClient;
 import org.apache.hadoop.metadata.hive.bridge.HiveMetaStoreBridge;
 import org.apache.hadoop.metadata.hive.model.HiveDataTypes;
+import org.apache.hadoop.metadata.typesystem.Referenceable;
 import org.codehaus.jettison.json.JSONArray;
 import org.codehaus.jettison.json.JSONObject;
 import org.testng.Assert;
@@ -32,6 +33,7 @@ import org.testng.annotations.BeforeClass;
 import org.testng.annotations.Test;

 import java.io.File;
+import java.util.Map;

 public class HiveHookIT {
     private static final String DGI_URL = "http://localhost:21000/";
@@ -76,8 +78,13 @@ public class HiveHookIT {
     @Test
     public void testCreateDatabase() throws Exception {
         String dbName = "db" + random();
-        runCommand("create database " + dbName);
-        assertDatabaseIsRegistered(dbName);
+        runCommand("create database " + dbName + " WITH DBPROPERTIES ('p1'='v1', 'p2'='v2')");
+        String dbId = assertDatabaseIsRegistered(dbName);
+        Referenceable definition = dgiCLient.getEntity(dbId);
+        Map params = (Map) definition.get("parameters");
+        Assert.assertNotNull(params);
+        Assert.assertEquals(params.size(), 2);
+        Assert.assertEquals(params.get("p1"), "v1");

         //There should be just one entity per dbname
         runCommand("drop database " + dbName);
@@ -213,10 +220,10 @@ public class HiveHookIT {
         assertEntityIsRegistered(query);
     }

-    private void assertDatabaseIsRegistered(String dbName) throws Exception {
+    private String assertDatabaseIsRegistered(String dbName) throws Exception {
         String query = String.format("%s where name = '%s' and clusterName = '%s'", HiveDataTypes.HIVE_DB.getName(),
                 dbName, CLUSTER_NAME);
-        assertEntityIsRegistered(query);
+        return assertEntityIsRegistered(query);
     }

     private void assertPartitionIsRegistered(String dbName, String tableName, String value) throws Exception {
@@ -233,8 +240,9 @@ public class HiveHookIT {
         Assert.assertEquals(results.length(), 1);
     }

-    private void assertEntityIsRegistered(String dslQuery) throws Exception{
+    private String assertEntityIsRegistered(String dslQuery) throws Exception{
         JSONArray results = dgiCLient.searchByDSL(dslQuery);
         Assert.assertEquals(results.length(), 1);
+        return results.getJSONObject(0).getJSONObject("$id$").getString("id");
     }
 }
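Note on the test change: assertEntityIsRegistered now unpacks the entity GUID from the DSL search result, which is what lets testCreateDatabase fetch the full entity and verify its DBPROPERTIES. A minimal sketch of that lookup, assuming it runs inside HiveHookIT where dgiCLient is in scope (the query text is illustrative):

    // Each row of a DSL result carries its entity reference under "$id$";
    // the "id" field inside it is the entity GUID.
    JSONArray results = dgiCLient.searchByDSL("hive_db where name = 'mydb'"); // hypothetical query
    String guid = results.getJSONObject(0).getJSONObject("$id$").getString("id");
    Referenceable entity = dgiCLient.getEntity(guid); // then inspect entity.get("parameters")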
@@ -51,7 +51,7 @@ public class MetadataServiceClient {
     public static final String NAME = "name";
     public static final String GUID = "GUID";
     public static final String TYPENAME = "typeName";
+    public static final String TYPE = "type";

     public static final String DEFINITION = "definition";
     public static final String ERROR = "error";
...
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.metadata;

import java.lang.annotation.ElementType;
import java.lang.annotation.Retention;
import java.lang.annotation.RetentionPolicy;
import java.lang.annotation.Target;

@Retention(RetentionPolicy.RUNTIME)
@Target(ElementType.METHOD)
public @interface GraphTransaction {}
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.metadata;

import com.google.inject.Inject;
import com.thinkaurelius.titan.core.TitanGraph;
import org.aopalliance.intercept.MethodInterceptor;
import org.aopalliance.intercept.MethodInvocation;
import org.apache.hadoop.metadata.repository.graph.GraphProvider;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

public class GraphTransactionInterceptor implements MethodInterceptor {
    private static final Logger LOG = LoggerFactory.getLogger(GraphTransactionInterceptor.class);
    private TitanGraph titanGraph;

    @Inject
    GraphProvider<TitanGraph> graphProvider;

    public Object invoke(MethodInvocation invocation) throws Throwable {
        if (titanGraph == null) {
            titanGraph = graphProvider.get();
        }

        try {
            LOG.debug("graph rollback to cleanup previous state");
            titanGraph.rollback(); //cleanup previous state
            Object response = invocation.proceed();
            titanGraph.commit();
            LOG.debug("graph commit");
            return response;
        } catch (Throwable t) {
            titanGraph.rollback();
            LOG.debug("graph rollback");
            throw t;
        }
    }
}
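The two new files above work as a pair: @GraphTransaction marks a method, and the interceptor wraps every annotated call in rollback-before / commit-on-success / rollback-and-rethrow-on-failure semantics, so individual repository methods no longer manage the Titan transaction themselves. A sketch of a service written against this contract (the class and method are hypothetical, not part of this commit):

    import com.thinkaurelius.titan.core.TitanGraph;
    import com.tinkerpop.blueprints.Vertex;
    import javax.inject.Inject;

    public class ExampleVertexService {

        @Inject
        private TitanGraph titanGraph;

        @GraphTransaction // commit happens after a normal return; any Throwable triggers rollback
        public Vertex createNamedVertex(String name) {
            Vertex vertex = titanGraph.addVertex(null);
            vertex.setProperty("name", name);
            return vertex; // no explicit titanGraph.commit() needed here
        }
    }

Since the interceptor is installed through Guice (see the module change below), this only takes effect on instances the injector constructs, and only for non-private, non-final methods.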
@@ -19,8 +19,10 @@
 package org.apache.hadoop.metadata;

 import com.google.inject.Scopes;
+import com.google.inject.matcher.Matchers;
 import com.google.inject.throwingproviders.ThrowingProviderBinder;
 import com.thinkaurelius.titan.core.TitanGraph;
+import org.aopalliance.intercept.MethodInterceptor;
 import org.apache.hadoop.metadata.discovery.DiscoveryService;
 import org.apache.hadoop.metadata.discovery.HiveLineageService;
 import org.apache.hadoop.metadata.discovery.LineageService;
@@ -40,31 +42,7 @@ import org.apache.hadoop.metadata.services.MetadataService;
  * Guice module for Repository module.
  */
 public class RepositoryMetadataModule extends com.google.inject.AbstractModule {

-    // Graph Service implementation class
-    // private Class<? extends GraphService> graphServiceClass;
-
-    // MetadataRepositoryService implementation class
-    private Class<? extends MetadataRepository> metadataRepoClass;
-    private Class<? extends ITypeStore> typeStore;
-    private Class<? extends MetadataService> metadataService;
-    private Class<? extends DiscoveryService> discoveryService;
-    private Class<? extends SearchIndexer> searchIndexer;
-    private Class<? extends LineageService> lineageService;
-
-    public RepositoryMetadataModule() {
-        // GraphServiceConfigurator gsp = new GraphServiceConfigurator();
-        // get the impl classes for the repo and the graph service
-        // this.graphServiceClass = gsp.getImplClass();
-
-        this.metadataRepoClass = GraphBackedMetadataRepository.class;
-        this.typeStore = GraphBackedTypeStore.class;
-        this.metadataService = DefaultMetadataService.class;
-        this.discoveryService = GraphBackedDiscoveryService.class;
-        this.searchIndexer = GraphBackedSearchIndexer.class;
-        this.lineageService = HiveLineageService.class;
-    }
-
+    @Override
     protected void configure() {
         // special wiring for Titan Graph
         ThrowingProviderBinder.create(binder())
@@ -75,22 +53,26 @@ public class RepositoryMetadataModule extends com.google.inject.AbstractModule {
         // allow for dynamic binding of the metadata repo & graph service
         // bind the MetadataRepositoryService interface to an implementation
-        bind(MetadataRepository.class).to(metadataRepoClass);
+        bind(MetadataRepository.class).to(GraphBackedMetadataRepository.class);

         // bind the ITypeStore interface to an implementation
-        bind(ITypeStore.class).to(typeStore);
+        bind(ITypeStore.class).to(GraphBackedTypeStore.class);

         // bind the GraphService interface to an implementation
         // bind(GraphService.class).to(graphServiceClass);

         // bind the MetadataService interface to an implementation
-        bind(MetadataService.class).to(metadataService);
+        bind(MetadataService.class).to(DefaultMetadataService.class);

         // bind the DiscoveryService interface to an implementation
-        bind(DiscoveryService.class).to(discoveryService);
+        bind(DiscoveryService.class).to(GraphBackedDiscoveryService.class);

-        bind(SearchIndexer.class).to(searchIndexer);
-        bind(LineageService.class).to(lineageService);
+        bind(SearchIndexer.class).to(GraphBackedSearchIndexer.class);
+        bind(LineageService.class).to(HiveLineageService.class);
+
+        MethodInterceptor interceptor = new GraphTransactionInterceptor();
+        requestInjection(interceptor);
+        bindInterceptor(Matchers.any(), Matchers.annotatedWith(GraphTransaction.class), interceptor);
     }
 }
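The requestInjection call is what populates the interceptor's GraphProvider field even though the interceptor itself is constructed with new. A minimal bootstrap sketch under the usual Guice assumptions (the driver class is hypothetical):

    import com.google.inject.Guice;
    import com.google.inject.Injector;
    import org.apache.hadoop.metadata.repository.MetadataRepository;

    public class RepositoryBootstrapExample {
        public static void main(String[] args) {
            Injector injector = Guice.createInjector(new RepositoryMetadataModule());
            // Instances handed out by the injector are proxied, so their
            // @GraphTransaction methods run inside GraphTransactionInterceptor.
            MetadataRepository repository = injector.getInstance(MetadataRepository.class);
            System.out.println("repository implementation: " + repository.getClass());
        }
    }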
@@ -20,6 +20,7 @@ package org.apache.hadoop.metadata.discovery;

 import com.thinkaurelius.titan.core.TitanGraph;
 import org.apache.commons.configuration.PropertiesConfiguration;
+import org.apache.hadoop.metadata.GraphTransaction;
 import org.apache.hadoop.metadata.MetadataException;
 import org.apache.hadoop.metadata.PropertiesUtil;
 import org.apache.hadoop.metadata.discovery.graph.DefaultGraphPersistenceStrategy;
@@ -96,6 +97,7 @@ public class HiveLineageService implements LineageService {
      * @return Lineage Outputs as JSON
      */
     @Override
+    @GraphTransaction
     public String getOutputs(String tableName) throws DiscoveryException {
         LOG.info("Fetching lineage outputs for tableName={}", tableName);
@@ -121,6 +123,7 @@ public class HiveLineageService implements LineageService {
      * @return Outputs Graph as JSON
      */
     @Override
+    @GraphTransaction
     public String getOutputsGraph(String tableName) throws DiscoveryException {
         LOG.info("Fetching lineage outputs graph for tableName={}", tableName);
@@ -139,6 +142,7 @@ public class HiveLineageService implements LineageService {
      * @return Lineage Inputs as JSON
      */
     @Override
+    @GraphTransaction
     public String getInputs(String tableName) throws DiscoveryException {
         LOG.info("Fetching lineage inputs for tableName={}", tableName);
@@ -164,6 +168,7 @@ public class HiveLineageService implements LineageService {
      * @return Inputs Graph as JSON
      */
     @Override
+    @GraphTransaction
     public String getInputsGraph(String tableName) throws DiscoveryException {
         LOG.info("Fetching lineage inputs graph for tableName={}", tableName);
@@ -182,6 +187,7 @@ public class HiveLineageService implements LineageService {
      * @return Schema as JSON
      */
     @Override
+    @GraphTransaction
     public String getSchema(String tableName) throws DiscoveryException {
         // todo - validate if indeed this is a table type and exists
         String schemaQuery = HIVE_TABLE_TYPE_NAME
...
@@ -25,6 +25,7 @@ import com.thinkaurelius.titan.core.TitanVertex;
 import com.tinkerpop.blueprints.Vertex;
 import com.tinkerpop.gremlin.groovy.Gremlin;
 import com.tinkerpop.gremlin.java.GremlinPipeline;
+import org.apache.hadoop.metadata.GraphTransaction;
 import org.apache.hadoop.metadata.MetadataServiceClient;
 import org.apache.hadoop.metadata.discovery.DiscoveryException;
 import org.apache.hadoop.metadata.discovery.DiscoveryService;
@@ -82,6 +83,7 @@ public class GraphBackedDiscoveryService implements DiscoveryService {
     //http://www.elastic.co/guide/en/elasticsearch/reference/current/query-dsl-query-string-query
     // .html#query-string-syntax for query syntax
     @Override
+    @GraphTransaction
     public String searchByFullText(String query) throws DiscoveryException {
         String graphQuery = String.format("v.%s:(%s)", Constants.ENTITY_TEXT_PROPERTY_KEY, query);
         LOG.debug("Full text query: {}", graphQuery);
@@ -118,6 +120,7 @@ public class GraphBackedDiscoveryService implements DiscoveryService {
      * @return JSON representing the type and results.
      */
     @Override
+    @GraphTransaction
     public String searchByDSL(String dslQuery) throws DiscoveryException {
         LOG.info("Executing dsl query={}", dslQuery);
         try {
@@ -155,6 +158,7 @@ public class GraphBackedDiscoveryService implements DiscoveryService {
      * @throws org.apache.hadoop.metadata.discovery.DiscoveryException
      */
     @Override
+    @GraphTransaction
     public List<Map<String, String>> searchByGremlin(String gremlinQuery)
             throws DiscoveryException {
         LOG.info("Executing gremlin query={}", gremlinQuery);
...
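Annotating the read paths is deliberate: Titan opens a transaction implicitly on first access, so bracketing queries with the interceptor's rollback/commit keeps long-lived server threads from holding a stale transaction across requests. A client-side sketch exercising the DSL path, assuming a server at the URL HiveHookIT uses (exception handling elided; the MetadataServiceClient constructor signature is assumed from the test's usage, and the query text is illustrative):

    MetadataServiceClient client = new MetadataServiceClient("http://localhost:21000/");
    JSONArray results = client.searchByDSL("hive_table where name = 'sales_fact'"); // hypothetical table
    for (int i = 0; i < results.length(); i++) {
        System.out.println(results.getJSONObject(i)); // one JSON row per matching entity
    }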
@@ -27,6 +27,7 @@ import com.tinkerpop.blueprints.Edge;
 import com.tinkerpop.blueprints.GraphQuery;
 import com.tinkerpop.blueprints.Vertex;
 import org.apache.commons.lang.StringUtils;
+import org.apache.hadoop.metadata.GraphTransaction;
 import org.apache.hadoop.metadata.MetadataException;
 import org.apache.hadoop.metadata.repository.Constants;
 import org.apache.hadoop.metadata.repository.MetadataRepository;
@@ -137,27 +138,23 @@ public class GraphBackedMetadataRepository implements MetadataRepository {
     }

     @Override
+    @GraphTransaction
     public String createEntity(IReferenceableInstance typedInstance) throws RepositoryException {
         LOG.info("adding entity={}", typedInstance);

         try {
-            titanGraph.rollback();
             final String guid = instanceToGraphMapper.mapTypedInstanceToGraph(typedInstance);
-            titanGraph.commit();  // commit if there are no errors
             return guid;
         } catch (MetadataException e) {
-            titanGraph.rollback();
             throw new RepositoryException(e);
         }
     }

     @Override
+    @GraphTransaction
     public ITypedReferenceableInstance getEntityDefinition(String guid) throws RepositoryException {
         LOG.info("Retrieving entity with guid={}", guid);

         try {
-            titanGraph.rollback();  // clean up before starting a query
             Vertex instanceVertex = getVertexForGUID(guid);

             LOG.debug("Found a vertex {} for guid {}", instanceVertex, guid);
@@ -206,9 +203,9 @@ public class GraphBackedMetadataRepository implements MetadataRepository {
      * @throws RepositoryException
      */
     @Override
+    @GraphTransaction
     public List<String> getTraitNames(String guid) throws RepositoryException {
         LOG.info("Retrieving trait names for entity={}", guid);
-        titanGraph.rollback();  // clean up before starting a query
         Vertex instanceVertex = getVertexForGUID(guid);
         return getTraitNames(instanceVertex);
     }
@@ -231,6 +228,7 @@ public class GraphBackedMetadataRepository implements MetadataRepository {
      * @throws RepositoryException
      */
     @Override
+    @GraphTransaction
     public void addTrait(String guid,
                          ITypedStruct traitInstance) throws RepositoryException {
         Preconditions.checkNotNull(traitInstance, "Trait instance cannot be null");
@@ -238,22 +236,18 @@ public class GraphBackedMetadataRepository implements MetadataRepository {
         LOG.info("Adding a new trait={} for entity={}", traitName, guid);

         try {
-            titanGraph.rollback();  // clean up before starting a query
             Vertex instanceVertex = getVertexForGUID(guid);

             // add the trait instance as a new vertex
             final String typeName = getTypeName(instanceVertex);
-            instanceToGraphMapper.mapTraitInstanceToVertex(traitInstance,
-                    getIdFromVertex(typeName, instanceVertex),
+            instanceToGraphMapper.mapTraitInstanceToVertex(traitInstance, getIdFromVertex(typeName, instanceVertex),
                     typeName, instanceVertex, Collections.<Id, Vertex>emptyMap());

             // update the traits in entity once adding trait instance is successful
             ((TitanVertex) instanceVertex)
                     .addProperty(Constants.TRAIT_NAMES_PROPERTY_KEY, traitName);
-
-            titanGraph.commit();  // commit if there are no errors
         } catch (MetadataException e) {
-            titanGraph.rollback();
             throw new RepositoryException(e);
         }
     }
@@ -266,11 +260,11 @@ public class GraphBackedMetadataRepository implements MetadataRepository {
      * @throws RepositoryException
      */
     @Override
+    @GraphTransaction
     public void deleteTrait(String guid, String traitNameToBeDeleted)
             throws RepositoryException {
         LOG.info("Deleting trait={} from entity={}", traitNameToBeDeleted, guid);
         try {
-            titanGraph.rollback();  // clean up before starting a query
             Vertex instanceVertex = getVertexForGUID(guid);

             List<String> traitNames = getTraitNames(instanceVertex);
@@ -297,11 +291,8 @@ public class GraphBackedMetadataRepository implements MetadataRepository {
                     traitNames.remove(traitNameToBeDeleted);
                     updateTraits(instanceVertex, traitNames);
                 }
-
-                titanGraph.commit();  // commit if there are no errors
             }
         } catch (Exception e) {
-            titanGraph.rollback();
             throw new RepositoryException(e);
         }
     }
@@ -318,11 +309,11 @@ public class GraphBackedMetadataRepository implements MetadataRepository {
     }

     @Override
+    @GraphTransaction
     public void updateEntity(String guid, String property, String value) throws RepositoryException {
         LOG.info("Adding property {} for entity guid {}", property, guid);

         try {
-            titanGraph.rollback();  // clean up before starting a query
             Vertex instanceVertex = GraphHelper.findVertexByGUID(titanGraph, guid);
             if (instanceVertex == null) {
                 throw new RepositoryException("Could not find a vertex for guid " + guid);
@@ -349,11 +340,8 @@ public class GraphBackedMetadataRepository implements MetadataRepository {
             instanceToGraphMapper.mapAttributesToVertex(getIdFromVertex(typeName, instanceVertex), instance,
                     instanceVertex, new HashMap<Id, Vertex>(), attributeInfo, attributeInfo.dataType());
-
-            titanGraph.commit();
         } catch (Exception e) {
             throw new RepositoryException(e);
-        } finally {
-            titanGraph.rollback();
         }
     }
@@ -773,8 +761,9 @@ public class GraphBackedMetadataRepository implements MetadataRepository {
             IDataType elementType = ((DataTypes.MapType) attributeInfo.dataType()).getValueType();
             for (Map.Entry entry : collection.entrySet()) {
                 String myPropertyName = propertyName + "." + entry.getKey().toString();
-                mapCollectionEntryToVertex(id, instanceVertex, attributeInfo,
-                        idToVertexMap, elementType, entry.getValue(), myPropertyName);
+                String value = mapCollectionEntryToVertex(id, instanceVertex, attributeInfo,
+                        idToVertexMap, elementType, entry.getValue(), myPropertyName);
+                instanceVertex.setProperty(myPropertyName, value);
             }

             // for dereference on way out
@@ -980,7 +969,7 @@ public class GraphBackedMetadataRepository implements MetadataRepository {
         public void mapVertexToAttribute(Vertex instanceVertex, ITypedInstance typedInstance,
                                          AttributeInfo attributeInfo) throws MetadataException {
-            LOG.debug("mapping attributeInfo {}", attributeInfo.name);
+            LOG.debug("Mapping attributeInfo {}", attributeInfo.name);
             final IDataType dataType = attributeInfo.dataType();
             final String vertexPropertyName = getQualifiedName(typedInstance, attributeInfo);
@@ -1112,7 +1101,7 @@ public class GraphBackedMetadataRepository implements MetadataRepository {
         @SuppressWarnings("unchecked")
         private void mapVertexToMapInstance(Vertex instanceVertex, ITypedInstance typedInstance,
                                             AttributeInfo attributeInfo,
-                                            String propertyName) throws MetadataException {
+                                            final String propertyName) throws MetadataException {
             LOG.debug("mapping vertex {} to array {}", instanceVertex, attributeInfo.name);
             List<String> keys = instanceVertex.getProperty(propertyName);
             if (keys == null || keys.size() == 0) {
@@ -1124,21 +1113,15 @@ public class GraphBackedMetadataRepository implements MetadataRepository {
             HashMap values = new HashMap();
             for (String key : keys) {
+                String keyPropertyName = propertyName + "." + key;
+                Object keyValue = instanceVertex.getProperty(keyPropertyName);
                 values.put(key, mapVertexToCollectionEntry(instanceVertex, attributeInfo,
-                        valueType, propertyName, propertyName));
+                        valueType, keyValue, propertyName));
             }

             typedInstance.set(attributeInfo.name, values);
         }

-        private String extractKey(String propertyNameWithSuffix, IDataType keyType) {
-            return propertyNameWithSuffix.substring(
-                    propertyNameWithSuffix.lastIndexOf(".") + 1,
-                    keyType.getTypeCategory() == DataTypes.TypeCategory.PRIMITIVE
-                            ? propertyNameWithSuffix.length()
-                            : propertyNameWithSuffix.lastIndexOf(":"));
-        }
-
         private ITypedStruct getStructInstanceFromVertex(Vertex instanceVertex,
                                                          IDataType elemType,
                                                          String attributeName, String relationshipLabel,
...
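The map-attribute fix above is easier to follow with the storage layout spelled out: the vertex keeps the list of keys under the map attribute's qualified property name, and each entry's value under propertyName + "." + key. That is why mapCollectionEntryToVertex's return value must now be written back per entry, and why the suffix-parsing extractKey helper could be deleted. Illustrative layout for a map attribute parameters = {p1: v1, p2: v2} (property names are indicative, not captured from a running instance):

    // hive_db.parameters      -> ["p1", "p2"]   (key list read by mapVertexToMapInstance)
    // hive_db.parameters.p1   -> "v1"           (fetched via getProperty(propertyName + "." + key))
    // hive_db.parameters.p2   -> "v2"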
@@ -18,6 +18,7 @@
 package org.apache.hadoop.metadata.repository.graph;

+import com.google.inject.Provides;
 import com.thinkaurelius.titan.core.TitanFactory;
 import com.thinkaurelius.titan.core.TitanGraph;
 import org.apache.commons.configuration.Configuration;
@@ -64,6 +65,7 @@ public class TitanGraphProvider implements GraphProvider<TitanGraph> {
     @Override
     @Singleton
+    @Provides
     public TitanGraph get() {
         Configuration config;
         try {
...
@@ -25,6 +25,7 @@ import com.tinkerpop.blueprints.Direction;
 import com.tinkerpop.blueprints.Edge;
 import com.tinkerpop.blueprints.Vertex;
 import org.apache.commons.lang.StringUtils;
+import org.apache.hadoop.metadata.GraphTransaction;
 import org.apache.hadoop.metadata.MetadataException;
 import org.apache.hadoop.metadata.repository.Constants;
 import org.apache.hadoop.metadata.repository.graph.GraphProvider;
@@ -76,40 +77,35 @@ public class GraphBackedTypeStore implements ITypeStore {
     }

     @Override
+    @GraphTransaction
     public void store(TypeSystem typeSystem, ImmutableList<String> typeNames) throws MetadataException {
-        try {
-            ImmutableList<String> coreTypes = typeSystem.getCoreTypes();
-            titanGraph.rollback();  //Cleanup previous state
-            for (String typeName : typeNames) {
-                if (!coreTypes.contains(typeName)) {
-                    IDataType dataType = typeSystem.getDataType(IDataType.class, typeName);
-                    LOG.debug("Processing {}.{} in type store", dataType.getTypeCategory(), dataType.getName());
-                    switch (dataType.getTypeCategory()) {
-                        case ENUM:
-                            storeInGraph((EnumType)dataType);
-                            break;
-
-                        case STRUCT:
-                            StructType structType = (StructType) dataType;
-                            storeInGraph(typeSystem, dataType.getTypeCategory(), dataType.getName(),
-                                    ImmutableList.copyOf(structType.infoToNameMap.keySet()), ImmutableList.<String>of());
-                            break;
-
-                        case TRAIT:
-                        case CLASS:
-                            HierarchicalType type = (HierarchicalType) dataType;
-                            storeInGraph(typeSystem, dataType.getTypeCategory(), dataType.getName(),
-                                    type.immediateAttrs, type.superTypes);
-                            break;
-
-                        default: //Ignore primitive/collection types as they are covered under references
-                            break;
-                    }
-                }
-            }
-            titanGraph.commit();
-        } finally {
-            titanGraph.rollback();
+        ImmutableList<String> coreTypes = typeSystem.getCoreTypes();
+        for (String typeName : typeNames) {
+            if (!coreTypes.contains(typeName)) {
+                IDataType dataType = typeSystem.getDataType(IDataType.class, typeName);
+                LOG.debug("Processing {}.{} in type store", dataType.getTypeCategory(), dataType.getName());
+                switch (dataType.getTypeCategory()) {
+                    case ENUM:
+                        storeInGraph((EnumType)dataType);
+                        break;

+                    case STRUCT:
+                        StructType structType = (StructType) dataType;
+                        storeInGraph(typeSystem, dataType.getTypeCategory(), dataType.getName(),
+                                ImmutableList.copyOf(structType.infoToNameMap.keySet()), ImmutableList.<String>of());
+                        break;

+                    case TRAIT:
+                    case CLASS:
+                        HierarchicalType type = (HierarchicalType) dataType;
+                        storeInGraph(typeSystem, dataType.getTypeCategory(), dataType.getName(),
+                                type.immediateAttrs, type.superTypes);
+                        break;

+                    default: //Ignore primitive/collection types as they are covered under references
+                        break;
+                }
             }
         }
     }
@@ -206,54 +202,49 @@ public class GraphBackedTypeStore implements ITypeStore {
     }

     @Override
+    @GraphTransaction
     public TypesDef restore() throws MetadataException {
-        try {
-            titanGraph.rollback();  //Cleanup previous state
-            //Get all vertices for type system
-            Iterator vertices =
-                    titanGraph.query().has(Constants.VERTEX_TYPE_PROPERTY_KEY, VERTEX_TYPE).vertices().iterator();
-
-            ImmutableList.Builder<EnumTypeDefinition> enums = ImmutableList.builder();
-            ImmutableList.Builder<StructTypeDefinition> structs = ImmutableList.builder();
-            ImmutableList.Builder<HierarchicalTypeDefinition<ClassType>> classTypes = ImmutableList.builder();
-            ImmutableList.Builder<HierarchicalTypeDefinition<TraitType>> traits = ImmutableList.builder();
-
-            while (vertices.hasNext()) {
-                Vertex vertex = (Vertex) vertices.next();
-                DataTypes.TypeCategory typeCategory = vertex.getProperty(Constants.TYPE_CATEGORY_PROPERTY_KEY);
-                String typeName = vertex.getProperty(Constants.TYPENAME_PROPERTY_KEY);
-                LOG.info("Restoring type {}.{}", typeCategory, typeName);
-                switch (typeCategory) {
-                    case ENUM:
-                        enums.add(getEnumType(vertex));
-                        break;
-
-                    case STRUCT:
-                        AttributeDefinition[] attributes = getAttributes(vertex);
-                        structs.add(new StructTypeDefinition(typeName, attributes));
-                        break;
-
-                    case CLASS:
-                        ImmutableList<String> superTypes = getSuperTypes(vertex);
-                        attributes = getAttributes(vertex);
-                        classTypes.add(new HierarchicalTypeDefinition(ClassType.class, typeName, superTypes, attributes));
-                        break;
-
-                    case TRAIT:
-                        superTypes = getSuperTypes(vertex);
-                        attributes = getAttributes(vertex);
-                        traits.add(new HierarchicalTypeDefinition(TraitType.class, typeName, superTypes, attributes));
-                        break;
-                }
-            }
-            titanGraph.commit();
-            return TypeUtils.getTypesDef(enums.build(), structs.build(), traits.build(), classTypes.build());
-        } finally {
-            titanGraph.rollback();
+        //Get all vertices for type system
+        Iterator vertices =
+                titanGraph.query().has(Constants.VERTEX_TYPE_PROPERTY_KEY, VERTEX_TYPE).vertices().iterator();
+
+        ImmutableList.Builder<EnumTypeDefinition> enums = ImmutableList.builder();
+        ImmutableList.Builder<StructTypeDefinition> structs = ImmutableList.builder();
+        ImmutableList.Builder<HierarchicalTypeDefinition<ClassType>> classTypes = ImmutableList.builder();
+        ImmutableList.Builder<HierarchicalTypeDefinition<TraitType>> traits = ImmutableList.builder();
+
+        while (vertices.hasNext()) {
+            Vertex vertex = (Vertex) vertices.next();
+            DataTypes.TypeCategory typeCategory = vertex.getProperty(Constants.TYPE_CATEGORY_PROPERTY_KEY);
+            String typeName = vertex.getProperty(Constants.TYPENAME_PROPERTY_KEY);
+            LOG.info("Restoring type {}.{}", typeCategory, typeName);
+            switch (typeCategory) {
+                case ENUM:
+                    enums.add(getEnumType(vertex));
+                    break;

+                case STRUCT:
+                    AttributeDefinition[] attributes = getAttributes(vertex);
+                    structs.add(new StructTypeDefinition(typeName, attributes));
+                    break;

+                case CLASS:
+                    ImmutableList<String> superTypes = getSuperTypes(vertex);
+                    attributes = getAttributes(vertex);
+                    classTypes.add(new HierarchicalTypeDefinition(ClassType.class, typeName, superTypes, attributes));
+                    break;

+                case TRAIT:
+                    superTypes = getSuperTypes(vertex);
+                    attributes = getAttributes(vertex);
+                    traits.add(new HierarchicalTypeDefinition(TraitType.class, typeName, superTypes, attributes));
+                    break;

+                default:
+                    throw new IllegalArgumentException("Unhandled type category " + typeCategory);
+            }
+        }
+        return TypeUtils.getTypesDef(enums.build(), structs.build(), traits.build(), classTypes.build());
     }

     private EnumTypeDefinition getEnumType(Vertex vertex) {
...
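With both store() and restore() annotated, a type round-trip is transactional end to end: store() persists each non-core type as vertices and the interceptor commits once at the end, while restore() rebuilds a TypesDef from the type-system vertices in a single read transaction and now fails fast on an unknown category. A usage sketch, assuming typeStore and typeSystem are wired up as in the module above (the type names are made up):

    // Persist a couple of registered types; the interceptor commits on return.
    typeStore.store(typeSystem, ImmutableList.of("ExampleTrait", "ExampleClass"));

    // Read everything back; throws IllegalArgumentException on an unhandled category.
    TypesDef typesDef = typeStore.restore();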
@@ -22,6 +22,7 @@ import com.google.common.base.Preconditions;
 import com.google.common.collect.ImmutableList;
 import org.apache.hadoop.metadata.MetadataException;
 import org.apache.hadoop.metadata.MetadataServiceClient;
+import org.apache.hadoop.metadata.ParamChecker;
 import org.apache.hadoop.metadata.classification.InterfaceAudience;
 import org.apache.hadoop.metadata.discovery.SearchIndexer;
 import org.apache.hadoop.metadata.listener.EntityChangeListener;
@@ -51,10 +52,13 @@ import org.slf4j.LoggerFactory;

 import javax.inject.Inject;
 import javax.inject.Singleton;
-import java.util.LinkedHashSet;
-import java.util.List;
-import java.util.Map;
-import java.util.Set;
+import java.io.IOException;
+import java.io.InputStream;
+import java.io.InputStreamReader;
+import java.net.URL;
+import java.text.ParseException;
+import java.text.SimpleDateFormat;
+import java.util.*;

 /**
  * Simple wrapper over TypeSystem and MetadataRepository services with hooks
@@ -133,8 +137,7 @@ public class DefaultMetadataService implements MetadataService {
     @Override
     public JSONObject createType(String typeDefinition) throws MetadataException {
         try {
-            Preconditions.checkNotNull(typeDefinition, "type definition cannot be null");
-            Preconditions.checkArgument(!typeDefinition.equals(""), "type definition cannot be an empty string");
+            ParamChecker.notEmpty(typeDefinition, "type definition cannot be empty");
             TypesDef typesDef = TypesSerialization.fromJson(typeDefinition);
             if(typesDef.isEmpty())
@@ -195,8 +198,8 @@ public class DefaultMetadataService implements MetadataService {
      */
     @Override
     public String createEntity(String entityInstanceDefinition) throws MetadataException {
-        Preconditions.checkNotNull(entityInstanceDefinition,
-                "entity instance definition cannot be null");
+        ParamChecker.notEmpty(entityInstanceDefinition,
+                "Entity instance definition cannot be empty");

         ITypedReferenceableInstance entityTypedInstance =
                 deserializeClassInstance(entityInstanceDefinition);
@@ -210,17 +213,14 @@ public class DefaultMetadataService implements MetadataService {
     private ITypedReferenceableInstance deserializeClassInstance(
             String entityInstanceDefinition) throws MetadataException {
-        try {
-            final Referenceable entityInstance = InstanceSerialization.fromJsonReferenceable(
-                    entityInstanceDefinition, true);
-            final String entityTypeName = entityInstance.getTypeName();
-            Preconditions.checkNotNull(entityTypeName, "entity type cannot be null");
-
-            ClassType entityType = typeSystem.getDataType(ClassType.class, entityTypeName);
-            return entityType.convert(entityInstance, Multiplicity.REQUIRED);
-        } catch (Exception e) {
-            throw new MetadataException("Error deserializing class instance", e);
-        }
+        final Referenceable entityInstance = InstanceSerialization.fromJsonReferenceable(
+                entityInstanceDefinition, true);
+        final String entityTypeName = entityInstance.getTypeName();
+        ParamChecker.notEmpty(entityTypeName, "Entity type cannot be null");
+
+        ClassType entityType = typeSystem.getDataType(ClassType.class, entityTypeName);
+        return entityType.convert(entityInstance, Multiplicity.REQUIRED);
     }

     /**
@@ -231,7 +231,7 @@ public class DefaultMetadataService implements MetadataService {
      */
     @Override
     public String getEntityDefinition(String guid) throws MetadataException {
-        Preconditions.checkNotNull(guid, "guid cannot be null");
+        ParamChecker.notEmpty(guid, "guid cannot be null");

         final ITypedReferenceableInstance instance = repository.getEntityDefinition(guid);
         return InstanceSerialization.toJson(instance, true);
@@ -252,15 +252,15 @@ public class DefaultMetadataService implements MetadataService {
     @Override
     public void updateEntity(String guid, String property, String value) throws MetadataException {
-        Preconditions.checkNotNull(guid, "guid cannot be null");
-        Preconditions.checkNotNull(property, "property cannot be null");
-        Preconditions.checkNotNull(value, "property value cannot be null");
+        ParamChecker.notEmpty(guid, "guid cannot be null");
+        ParamChecker.notEmpty(property, "property cannot be null");
+        ParamChecker.notEmpty(value, "property value cannot be null");

         repository.updateEntity(guid, property, value);
     }

     private void validateTypeExists(String entityType) throws MetadataException {
-        Preconditions.checkNotNull(entityType, "entity type cannot be null");
+        ParamChecker.notEmpty(entityType, "entity type cannot be null");

         // verify if the type exists
         if (!typeSystem.isRegistered(entityType)) {
@@ -277,7 +277,7 @@ public class DefaultMetadataService implements MetadataService {
      */
     @Override
     public List<String> getTraitNames(String guid) throws MetadataException {
-        Preconditions.checkNotNull(guid, "entity GUID cannot be null");
+        ParamChecker.notEmpty(guid, "entity GUID cannot be null");
         return repository.getTraitNames(guid);
     }
@@ -291,8 +291,8 @@ public class DefaultMetadataService implements MetadataService {
     @Override
     public void addTrait(String guid,
                          String traitInstanceDefinition) throws MetadataException {
-        Preconditions.checkNotNull(guid, "entity GUID cannot be null");
-        Preconditions.checkNotNull(traitInstanceDefinition, "Trait instance cannot be null");
+        ParamChecker.notEmpty(guid, "entity GUID cannot be null");
+        ParamChecker.notEmpty(traitInstanceDefinition, "Trait instance cannot be null");

         ITypedStruct traitInstance = deserializeTraitInstance(traitInstanceDefinition);
         final String traitName = traitInstance.getTypeName();
@@ -313,7 +313,7 @@ public class DefaultMetadataService implements MetadataService {
         Struct traitInstance = InstanceSerialization.fromJsonStruct(
                 traitInstanceDefinition, true);
         final String entityTypeName = traitInstance.getTypeName();
-        Preconditions.checkNotNull(entityTypeName, "entity type cannot be null");
+        ParamChecker.notEmpty(entityTypeName, "entity type cannot be null");

         TraitType traitType = typeSystem.getDataType(TraitType.class, entityTypeName);
         return traitType.convert(
@@ -333,8 +333,8 @@ public class DefaultMetadataService implements MetadataService {
     @Override
     public void deleteTrait(String guid,
                             String traitNameToBeDeleted) throws MetadataException {
-        Preconditions.checkNotNull(guid, "entity GUID cannot be null");
-        Preconditions.checkNotNull(traitNameToBeDeleted, "Trait name cannot be null");
+        ParamChecker.notEmpty(guid, "entity GUID cannot be null");
+        ParamChecker.notEmpty(traitNameToBeDeleted, "Trait name cannot be null");

         // ensure trait type is already registered with the TS
         Preconditions.checkArgument(typeSystem.isRegistered(traitNameToBeDeleted),
...
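The move from Preconditions.checkNotNull to ParamChecker.notEmpty tightens every one of these contracts: empty strings are rejected along with nulls, and the failure surfaces as an IllegalArgumentException that the web layer already maps to 400 Bad Request. The behaviour being relied on, sketched from how the service calls it (ParamChecker's implementation is not part of this diff, so its exact internals are assumed):

    String guid = null;
    ParamChecker.notEmpty(guid, "guid cannot be null");     // assumed to throw IllegalArgumentException
    ParamChecker.notEmpty("", "guid cannot be null");       // assumed to throw as well: empty now fails
    ParamChecker.notEmpty("g-1234", "guid cannot be null"); // passes, value is non-empty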
@@ -27,7 +27,6 @@ metadata.graph.index.search.backend=elasticsearch
 metadata.graph.index.search.directory=./target/data/es
 metadata.graph.index.search.elasticsearch.client-only=false
 metadata.graph.index.search.elasticsearch.local-mode=true
-metadata.graph.index.search.elasticsearch.create.sleep=1000

 ######### Hive Lineage Configs #########
...
...@@ -22,6 +22,7 @@ import com.google.common.base.Preconditions; ...@@ -22,6 +22,7 @@ import com.google.common.base.Preconditions;
import org.apache.hadoop.metadata.MetadataException; import org.apache.hadoop.metadata.MetadataException;
import org.apache.hadoop.metadata.MetadataServiceClient; import org.apache.hadoop.metadata.MetadataServiceClient;
import org.apache.hadoop.metadata.services.MetadataService; import org.apache.hadoop.metadata.services.MetadataService;
import org.apache.hadoop.metadata.typesystem.types.ValueConversionException;
import org.apache.hadoop.metadata.web.util.Servlets; import org.apache.hadoop.metadata.web.util.Servlets;
import org.codehaus.jettison.json.JSONArray; import org.codehaus.jettison.json.JSONArray;
import org.codehaus.jettison.json.JSONException; import org.codehaus.jettison.json.JSONException;
...@@ -90,11 +91,16 @@ public class EntityResource { ...@@ -90,11 +91,16 @@ public class EntityResource {
response.put(MetadataServiceClient.DEFINITION, entity); response.put(MetadataServiceClient.DEFINITION, entity);
return Response.created(locationURI).entity(response).build(); return Response.created(locationURI).entity(response).build();
} catch (MetadataException | IOException | IllegalArgumentException e) {
} catch(ValueConversionException ve) {
LOG.error("Unable to persist entity instance due to a desrialization error ", ve);
throw new WebApplicationException(
Servlets.getErrorResponse(ve.getCause(), Response.Status.BAD_REQUEST));
} catch (MetadataException | IllegalArgumentException e) {
LOG.error("Unable to persist entity instance", e); LOG.error("Unable to persist entity instance", e);
throw new WebApplicationException( throw new WebApplicationException(
Servlets.getErrorResponse(e, Response.Status.BAD_REQUEST)); Servlets.getErrorResponse(e, Response.Status.BAD_REQUEST));
} catch (JSONException e) { } catch (Throwable e) {
LOG.error("Unable to persist entity instance", e); LOG.error("Unable to persist entity instance", e);
throw new WebApplicationException( throw new WebApplicationException(
Servlets.getErrorResponse(e, Response.Status.INTERNAL_SERVER_ERROR)); Servlets.getErrorResponse(e, Response.Status.INTERNAL_SERVER_ERROR));
...@@ -123,7 +129,7 @@ public class EntityResource { ...@@ -123,7 +129,7 @@ public class EntityResource {
response.put(MetadataServiceClient.DEFINITION, entityDefinition); response.put(MetadataServiceClient.DEFINITION, entityDefinition);
status = Response.Status.OK; status = Response.Status.OK;
} else { } else {
response.put(MetadataServiceClient.ERROR, JSONObject.quote(String.format("An entity with GUID={%s} does not exist", guid))); response.put(MetadataServiceClient.ERROR, Servlets.escapeJsonString(String.format("An entity with GUID={%s} does not exist", guid)));
} }
return Response.status(status).entity(response).build(); return Response.status(status).entity(response).build();
...@@ -132,7 +138,7 @@ public class EntityResource { ...@@ -132,7 +138,7 @@ public class EntityResource {
LOG.error("An entity with GUID={} does not exist", guid, e); LOG.error("An entity with GUID={} does not exist", guid, e);
throw new WebApplicationException( throw new WebApplicationException(
Servlets.getErrorResponse(e, Response.Status.NOT_FOUND)); Servlets.getErrorResponse(e, Response.Status.NOT_FOUND));
} catch (JSONException e) { } catch (Throwable e) {
LOG.error("Unable to get instance definition for GUID {}", guid, e); LOG.error("Unable to get instance definition for GUID {}", guid, e);
throw new WebApplicationException( throw new WebApplicationException(
Servlets.getErrorResponse(e, Response.Status.INTERNAL_SERVER_ERROR)); Servlets.getErrorResponse(e, Response.Status.INTERNAL_SERVER_ERROR));
...@@ -155,7 +161,7 @@ public class EntityResource { ...@@ -155,7 +161,7 @@ public class EntityResource {
JSONObject response = new JSONObject(); JSONObject response = new JSONObject();
response.put(MetadataServiceClient.REQUEST_ID, Servlets.getRequestId()); response.put(MetadataServiceClient.REQUEST_ID, Servlets.getRequestId());
response.put("type", entityType); response.put(MetadataServiceClient.TYPENAME, entityType);
response.put(MetadataServiceClient.RESULTS, new JSONArray(entityList)); response.put(MetadataServiceClient.RESULTS, new JSONArray(entityList));
response.put(MetadataServiceClient.COUNT, entityList.size()); response.put(MetadataServiceClient.COUNT, entityList.size());
...@@ -168,7 +174,7 @@ public class EntityResource { ...@@ -168,7 +174,7 @@ public class EntityResource {
LOG.error("Unable to get entity list for type {}", entityType, e); LOG.error("Unable to get entity list for type {}", entityType, e);
throw new WebApplicationException( throw new WebApplicationException(
Servlets.getErrorResponse(e, Response.Status.BAD_REQUEST)); Servlets.getErrorResponse(e, Response.Status.BAD_REQUEST));
} catch (JSONException e) { } catch (Throwable e) {
LOG.error("Unable to get entity list for type {}", entityType, e); LOG.error("Unable to get entity list for type {}", entityType, e);
throw new WebApplicationException( throw new WebApplicationException(
Servlets.getErrorResponse(e, Response.Status.INTERNAL_SERVER_ERROR)); Servlets.getErrorResponse(e, Response.Status.INTERNAL_SERVER_ERROR));
...@@ -192,13 +198,13 @@ public class EntityResource { ...@@ -192,13 +198,13 @@ public class EntityResource {
metadataService.updateEntity(guid, property, value); metadataService.updateEntity(guid, property, value);
JSONObject response = new JSONObject(); JSONObject response = new JSONObject();
response.put("requestId", Thread.currentThread().getName()); response.put(MetadataServiceClient.REQUEST_ID, Thread.currentThread().getName());
return Response.ok(response).build(); return Response.ok(response).build();
} catch (MetadataException e) { } catch (MetadataException e) {
LOG.error("Unable to add property {} to entity id {}", property, guid, e); LOG.error("Unable to add property {} to entity id {}", property, guid, e);
throw new WebApplicationException( throw new WebApplicationException(
Servlets.getErrorResponse(e, Response.Status.BAD_REQUEST)); Servlets.getErrorResponse(e, Response.Status.BAD_REQUEST));
} catch (JSONException e) { } catch (Throwable e) {
LOG.error("Unable to add property {} to entity id {}", property, guid, e); LOG.error("Unable to add property {} to entity id {}", property, guid, e);
throw new WebApplicationException( throw new WebApplicationException(
Servlets.getErrorResponse(e, Response.Status.INTERNAL_SERVER_ERROR)); Servlets.getErrorResponse(e, Response.Status.INTERNAL_SERVER_ERROR));
...@@ -231,7 +237,7 @@ public class EntityResource { ...@@ -231,7 +237,7 @@ public class EntityResource {
LOG.error("Unable to get trait names for entity {}", guid, e); LOG.error("Unable to get trait names for entity {}", guid, e);
throw new WebApplicationException( throw new WebApplicationException(
Servlets.getErrorResponse(e, Response.Status.BAD_REQUEST)); Servlets.getErrorResponse(e, Response.Status.BAD_REQUEST));
} catch (JSONException e) { } catch (Throwable e) {
LOG.error("Unable to get trait names for entity {}", guid, e); LOG.error("Unable to get trait names for entity {}", guid, e);
throw new WebApplicationException( throw new WebApplicationException(
Servlets.getErrorResponse(e, Response.Status.INTERNAL_SERVER_ERROR)); Servlets.getErrorResponse(e, Response.Status.INTERNAL_SERVER_ERROR));
...@@ -267,7 +273,7 @@ public class EntityResource { ...@@ -267,7 +273,7 @@ public class EntityResource {
LOG.error("Unable to add trait for entity={}", guid, e); LOG.error("Unable to add trait for entity={}", guid, e);
throw new WebApplicationException( throw new WebApplicationException(
Servlets.getErrorResponse(e, Response.Status.BAD_REQUEST)); Servlets.getErrorResponse(e, Response.Status.BAD_REQUEST));
} catch (JSONException e) { } catch (Throwable e) {
LOG.error("Unable to add trait for entity={}", guid, e); LOG.error("Unable to add trait for entity={}", guid, e);
throw new WebApplicationException( throw new WebApplicationException(
Servlets.getErrorResponse(e, Response.Status.INTERNAL_SERVER_ERROR)); Servlets.getErrorResponse(e, Response.Status.INTERNAL_SERVER_ERROR));
...@@ -301,7 +307,7 @@ public class EntityResource { ...@@ -301,7 +307,7 @@ public class EntityResource {
LOG.error("Unable to delete trait name={} for entity={}", traitName, guid, e); LOG.error("Unable to delete trait name={} for entity={}", traitName, guid, e);
throw new WebApplicationException( throw new WebApplicationException(
Servlets.getErrorResponse(e, Response.Status.BAD_REQUEST)); Servlets.getErrorResponse(e, Response.Status.BAD_REQUEST));
} catch (JSONException e) { } catch (Throwable e) {
LOG.error("Unable to delete trait name={} for entity={}", traitName, guid, e); LOG.error("Unable to delete trait name={} for entity={}", traitName, guid, e);
throw new WebApplicationException( throw new WebApplicationException(
Servlets.getErrorResponse(e, Response.Status.INTERNAL_SERVER_ERROR)); Servlets.getErrorResponse(e, Response.Status.INTERNAL_SERVER_ERROR));
......
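The EntityResource hunks above replace the narrow JSONException handlers with a trailing catch of Throwable, so expected failures map to 400 while anything unexpected maps to 500, still wrapped in a structured error body. A minimal sketch of that convention, assuming a JAX-RS resource and the Servlets.getErrorResponse helper seen in the diff (the resource class and doWork() below are hypothetical):

    import javax.ws.rs.GET;
    import javax.ws.rs.Produces;
    import javax.ws.rs.WebApplicationException;
    import javax.ws.rs.core.MediaType;
    import javax.ws.rs.core.Response;
    import org.apache.hadoop.metadata.MetadataException;
    import org.apache.hadoop.metadata.web.util.Servlets;

    public class ExampleResource {
        @GET
        @Produces(MediaType.APPLICATION_JSON)
        public Response example() {
            try {
                return Response.ok(doWork()).build();
            } catch (MetadataException | IllegalArgumentException e) {
                // expected failures (bad input, known service errors) -> 400
                throw new WebApplicationException(
                        Servlets.getErrorResponse(e, Response.Status.BAD_REQUEST));
            } catch (Throwable e) {
                // anything unexpected -> 500, still a JSON error payload
                throw new WebApplicationException(
                        Servlets.getErrorResponse(e, Response.Status.INTERNAL_SERVER_ERROR));
            }
        }

        private String doWork() { return "{}"; }   // hypothetical payload
    }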
...@@ -18,12 +18,11 @@ ...@@ -18,12 +18,11 @@
package org.apache.hadoop.metadata.web.resources; package org.apache.hadoop.metadata.web.resources;
import com.google.common.base.Preconditions;
import org.apache.hadoop.metadata.MetadataServiceClient; import org.apache.hadoop.metadata.MetadataServiceClient;
import org.apache.hadoop.metadata.ParamChecker;
import org.apache.hadoop.metadata.discovery.DiscoveryException; import org.apache.hadoop.metadata.discovery.DiscoveryException;
import org.apache.hadoop.metadata.discovery.LineageService; import org.apache.hadoop.metadata.discovery.LineageService;
import org.apache.hadoop.metadata.web.util.Servlets; import org.apache.hadoop.metadata.web.util.Servlets;
import org.codehaus.jettison.json.JSONException;
import org.codehaus.jettison.json.JSONObject; import org.codehaus.jettison.json.JSONObject;
import org.slf4j.Logger; import org.slf4j.Logger;
import org.slf4j.LoggerFactory; import org.slf4j.LoggerFactory;
...@@ -69,10 +68,11 @@ public class HiveLineageResource { ...@@ -69,10 +68,11 @@ public class HiveLineageResource {
@Produces(MediaType.APPLICATION_JSON) @Produces(MediaType.APPLICATION_JSON)
public Response inputs(@Context HttpServletRequest request, public Response inputs(@Context HttpServletRequest request,
@PathParam("tableName") String tableName) { @PathParam("tableName") String tableName) {
Preconditions.checkNotNull(tableName, "table name cannot be null");
LOG.info("Fetching lineage inputs for tableName={}", tableName); LOG.info("Fetching lineage inputs for tableName={}", tableName);
try { try {
ParamChecker.notEmpty(tableName, "table name cannot be null");
final String jsonResult = lineageService.getInputs(tableName); final String jsonResult = lineageService.getInputs(tableName);
JSONObject response = new JSONObject(); JSONObject response = new JSONObject();
...@@ -81,11 +81,11 @@ public class HiveLineageResource { ...@@ -81,11 +81,11 @@ public class HiveLineageResource {
response.put(MetadataServiceClient.RESULTS, new JSONObject(jsonResult)); response.put(MetadataServiceClient.RESULTS, new JSONObject(jsonResult));
return Response.ok(response).build(); return Response.ok(response).build();
} catch (DiscoveryException e) { } catch (DiscoveryException | IllegalArgumentException e) {
LOG.error("Unable to get lineage inputs for table {}", tableName, e); LOG.error("Unable to get lineage inputs for table {}", tableName, e);
throw new WebApplicationException( throw new WebApplicationException(
Servlets.getErrorResponse(e, Response.Status.BAD_REQUEST)); Servlets.getErrorResponse(e, Response.Status.BAD_REQUEST));
} catch (JSONException e) { } catch (Throwable e) {
LOG.error("Unable to get lineage inputs for table {}", tableName, e); LOG.error("Unable to get lineage inputs for table {}", tableName, e);
throw new WebApplicationException( throw new WebApplicationException(
Servlets.getErrorResponse(e, Response.Status.INTERNAL_SERVER_ERROR)); Servlets.getErrorResponse(e, Response.Status.INTERNAL_SERVER_ERROR));
...@@ -103,10 +103,10 @@ public class HiveLineageResource { ...@@ -103,10 +103,10 @@ public class HiveLineageResource {
@Produces(MediaType.APPLICATION_JSON) @Produces(MediaType.APPLICATION_JSON)
public Response inputsGraph(@Context HttpServletRequest request, public Response inputsGraph(@Context HttpServletRequest request,
@PathParam("tableName") String tableName) { @PathParam("tableName") String tableName) {
Preconditions.checkNotNull(tableName, "table name cannot be null");
LOG.info("Fetching lineage inputs graph for tableName={}", tableName); LOG.info("Fetching lineage inputs graph for tableName={}", tableName);
try { try {
ParamChecker.notEmpty(tableName, "table name cannot be null");
final String jsonResult = lineageService.getInputsGraph(tableName); final String jsonResult = lineageService.getInputsGraph(tableName);
JSONObject response = new JSONObject(); JSONObject response = new JSONObject();
...@@ -115,11 +115,11 @@ public class HiveLineageResource { ...@@ -115,11 +115,11 @@ public class HiveLineageResource {
response.put(MetadataServiceClient.RESULTS, new JSONObject(jsonResult)); response.put(MetadataServiceClient.RESULTS, new JSONObject(jsonResult));
return Response.ok(response).build(); return Response.ok(response).build();
} catch (DiscoveryException e) { } catch (DiscoveryException | IllegalArgumentException e) {
LOG.error("Unable to get lineage inputs graph for table {}", tableName, e); LOG.error("Unable to get lineage inputs graph for table {}", tableName, e);
throw new WebApplicationException( throw new WebApplicationException(
Servlets.getErrorResponse(e, Response.Status.BAD_REQUEST)); Servlets.getErrorResponse(e, Response.Status.BAD_REQUEST));
} catch (JSONException e) { } catch (Throwable e) {
LOG.error("Unable to get lineage inputs graph for table {}", tableName, e); LOG.error("Unable to get lineage inputs graph for table {}", tableName, e);
throw new WebApplicationException( throw new WebApplicationException(
Servlets.getErrorResponse(e, Response.Status.INTERNAL_SERVER_ERROR)); Servlets.getErrorResponse(e, Response.Status.INTERNAL_SERVER_ERROR));
...@@ -138,10 +138,10 @@ public class HiveLineageResource { ...@@ -138,10 +138,10 @@ public class HiveLineageResource {
public Response outputs(@Context HttpServletRequest request, public Response outputs(@Context HttpServletRequest request,
@PathParam("tableName") String tableName) { @PathParam("tableName") String tableName) {
Preconditions.checkNotNull(tableName, "table name cannot be null");
LOG.info("Fetching lineage outputs for tableName={}", tableName); LOG.info("Fetching lineage outputs for tableName={}", tableName);
try { try {
ParamChecker.notEmpty(tableName, "table name cannot be null");
final String jsonResult = lineageService.getOutputs(tableName); final String jsonResult = lineageService.getOutputs(tableName);
JSONObject response = new JSONObject(); JSONObject response = new JSONObject();
...@@ -150,11 +150,11 @@ public class HiveLineageResource { ...@@ -150,11 +150,11 @@ public class HiveLineageResource {
response.put(MetadataServiceClient.RESULTS, new JSONObject(jsonResult)); response.put(MetadataServiceClient.RESULTS, new JSONObject(jsonResult));
return Response.ok(response).build(); return Response.ok(response).build();
} catch (DiscoveryException e) { } catch (DiscoveryException | IllegalArgumentException e) {
LOG.error("Unable to get lineage outputs for table {}", tableName, e); LOG.error("Unable to get lineage outputs for table {}", tableName, e);
throw new WebApplicationException( throw new WebApplicationException(
Servlets.getErrorResponse(e, Response.Status.BAD_REQUEST)); Servlets.getErrorResponse(e, Response.Status.BAD_REQUEST));
} catch (JSONException e) { } catch (Throwable e) {
LOG.error("Unable to get lineage outputs for table {}", tableName, e); LOG.error("Unable to get lineage outputs for table {}", tableName, e);
throw new WebApplicationException( throw new WebApplicationException(
Servlets.getErrorResponse(e, Response.Status.INTERNAL_SERVER_ERROR)); Servlets.getErrorResponse(e, Response.Status.INTERNAL_SERVER_ERROR));
...@@ -172,10 +172,11 @@ public class HiveLineageResource { ...@@ -172,10 +172,11 @@ public class HiveLineageResource {
@Produces(MediaType.APPLICATION_JSON) @Produces(MediaType.APPLICATION_JSON)
public Response outputsGraph(@Context HttpServletRequest request, public Response outputsGraph(@Context HttpServletRequest request,
@PathParam("tableName") String tableName) { @PathParam("tableName") String tableName) {
Preconditions.checkNotNull(tableName, "table name cannot be null");
LOG.info("Fetching lineage outputs graph for tableName={}", tableName); LOG.info("Fetching lineage outputs graph for tableName={}", tableName);
try { try {
ParamChecker.notEmpty(tableName, "table name cannot be null");
final String jsonResult = lineageService.getOutputsGraph(tableName); final String jsonResult = lineageService.getOutputsGraph(tableName);
JSONObject response = new JSONObject(); JSONObject response = new JSONObject();
...@@ -184,11 +185,11 @@ public class HiveLineageResource { ...@@ -184,11 +185,11 @@ public class HiveLineageResource {
response.put(MetadataServiceClient.RESULTS, new JSONObject(jsonResult)); response.put(MetadataServiceClient.RESULTS, new JSONObject(jsonResult));
return Response.ok(response).build(); return Response.ok(response).build();
} catch (DiscoveryException e) { } catch (DiscoveryException | IllegalArgumentException e) {
LOG.error("Unable to get lineage outputs graph for table {}", tableName, e); LOG.error("Unable to get lineage outputs graph for table {}", tableName, e);
throw new WebApplicationException( throw new WebApplicationException(
Servlets.getErrorResponse(e, Response.Status.BAD_REQUEST)); Servlets.getErrorResponse(e, Response.Status.BAD_REQUEST));
} catch (JSONException e) { } catch (Throwable e) {
LOG.error("Unable to get lineage outputs graph for table {}", tableName, e); LOG.error("Unable to get lineage outputs graph for table {}", tableName, e);
throw new WebApplicationException( throw new WebApplicationException(
Servlets.getErrorResponse(e, Response.Status.INTERNAL_SERVER_ERROR)); Servlets.getErrorResponse(e, Response.Status.INTERNAL_SERVER_ERROR));
...@@ -207,10 +208,11 @@ public class HiveLineageResource { ...@@ -207,10 +208,11 @@ public class HiveLineageResource {
public Response schema(@Context HttpServletRequest request, public Response schema(@Context HttpServletRequest request,
@PathParam("tableName") String tableName) { @PathParam("tableName") String tableName) {
Preconditions.checkNotNull(tableName, "table name cannot be null");
LOG.info("Fetching schema for tableName={}", tableName); LOG.info("Fetching schema for tableName={}", tableName);
try { try {
ParamChecker.notEmpty(tableName, "table name cannot be null");
final String jsonResult = lineageService.getSchema(tableName); final String jsonResult = lineageService.getSchema(tableName);
JSONObject response = new JSONObject(); JSONObject response = new JSONObject();
...@@ -219,11 +221,11 @@ public class HiveLineageResource { ...@@ -219,11 +221,11 @@ public class HiveLineageResource {
response.put(MetadataServiceClient.RESULTS, new JSONObject(jsonResult)); response.put(MetadataServiceClient.RESULTS, new JSONObject(jsonResult));
return Response.ok(response).build(); return Response.ok(response).build();
} catch (DiscoveryException e) { } catch (DiscoveryException | IllegalArgumentException e) {
LOG.error("Unable to get schema for table {}", tableName, e); LOG.error("Unable to get schema for table {}", tableName, e);
throw new WebApplicationException( throw new WebApplicationException(
Servlets.getErrorResponse(e, Response.Status.BAD_REQUEST)); Servlets.getErrorResponse(e, Response.Status.BAD_REQUEST));
} catch (JSONException e) { } catch (Throwable e) {
LOG.error("Unable to get schema for table {}", tableName, e); LOG.error("Unable to get schema for table {}", tableName, e);
throw new WebApplicationException( throw new WebApplicationException(
Servlets.getErrorResponse(e, Response.Status.INTERNAL_SERVER_ERROR)); Servlets.getErrorResponse(e, Response.Status.INTERNAL_SERVER_ERROR));
......
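Across HiveLineageResource, the Guava Preconditions.checkNotNull outside the try block is replaced by ParamChecker.notEmpty inside it, so an empty table name surfaces as an IllegalArgumentException that the DiscoveryException | IllegalArgumentException multi-catch converts into a structured 400, rather than an unmapped NullPointerException escaping the resource. The real ParamChecker lives in org.apache.hadoop.metadata; the body below is a plausible sketch inferred only from its usage in these hunks:

    public final class ParamChecker {
        private ParamChecker() {}

        // Throws IllegalArgumentException on null/blank input, which the
        // resources' multi-catch maps to HTTP 400 with a JSON error body.
        public static String notEmpty(String value, String message) {
            if (value == null || value.trim().isEmpty()) {
                throw new IllegalArgumentException(message);
            }
            return value;
        }
    }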
...@@ -20,6 +20,7 @@ package org.apache.hadoop.metadata.web.resources; ...@@ -20,6 +20,7 @@ package org.apache.hadoop.metadata.web.resources;
import com.google.common.base.Preconditions; import com.google.common.base.Preconditions;
import org.apache.hadoop.metadata.MetadataServiceClient; import org.apache.hadoop.metadata.MetadataServiceClient;
import org.apache.hadoop.metadata.ParamChecker;
import org.apache.hadoop.metadata.discovery.DiscoveryException; import org.apache.hadoop.metadata.discovery.DiscoveryException;
import org.apache.hadoop.metadata.discovery.DiscoveryService; import org.apache.hadoop.metadata.discovery.DiscoveryService;
import org.apache.hadoop.metadata.web.util.Servlets; import org.apache.hadoop.metadata.web.util.Servlets;
...@@ -76,20 +77,22 @@ public class MetadataDiscoveryResource { ...@@ -76,20 +77,22 @@ public class MetadataDiscoveryResource {
@Path("search") @Path("search")
@Produces(MediaType.APPLICATION_JSON) @Produces(MediaType.APPLICATION_JSON)
public Response search(@QueryParam("query") String query) { public Response search(@QueryParam("query") String query) {
Preconditions.checkNotNull(query, "query cannot be null");
if (query.startsWith("g.")) { // raw gremlin query
return searchUsingGremlinQuery(query);
}
JSONObject response; JSONObject response;
try { // fall back to dsl try { // fall back to dsl
ParamChecker.notEmpty(query, "query cannot be null");
if (query.startsWith("g.")) { // raw gremlin query
return searchUsingGremlinQuery(query);
}
final String jsonResultStr = discoveryService.searchByDSL(query); final String jsonResultStr = discoveryService.searchByDSL(query);
response = new DSLJSONResponseBuilder().results(jsonResultStr) response = new DSLJSONResponseBuilder().results(jsonResultStr)
.query(query) .query(query)
.build(); .build();
} catch (IllegalArgumentException e) {
LOG.error("Unable to get entity list for empty query", e);
throw new WebApplicationException(Servlets.getErrorResponse(e, Response.Status.BAD_REQUEST));
} catch (Throwable throwable) { } catch (Throwable throwable) {
LOG.error("Unable to get entity list for query {} using dsl", query, throwable); LOG.error("Unable to get entity list for query {} using dsl", query, throwable);
...@@ -99,10 +102,10 @@ public class MetadataDiscoveryResource { ...@@ -99,10 +102,10 @@ public class MetadataDiscoveryResource {
.query(query) .query(query)
.build(); .build();
} catch (DiscoveryException e) { } catch (DiscoveryException | IllegalArgumentException e) {
LOG.error("Unable to get entity list for query {}", query, e); LOG.error("Unable to get entity list for query {}", query, e);
throw new WebApplicationException(Servlets.getErrorResponse(e, Response.Status.BAD_REQUEST)); throw new WebApplicationException(Servlets.getErrorResponse(e, Response.Status.BAD_REQUEST));
} catch (JSONException e) { } catch (Throwable e) {
LOG.error("Unable to get entity list for query {}", query, e); LOG.error("Unable to get entity list for query {}", query, e);
throw new WebApplicationException( throw new WebApplicationException(
Servlets.getErrorResponse(e, Response.Status.INTERNAL_SERVER_ERROR)); Servlets.getErrorResponse(e, Response.Status.INTERNAL_SERVER_ERROR));
...@@ -124,9 +127,8 @@ public class MetadataDiscoveryResource { ...@@ -124,9 +127,8 @@ public class MetadataDiscoveryResource {
@Path("search/dsl") @Path("search/dsl")
@Produces(MediaType.APPLICATION_JSON) @Produces(MediaType.APPLICATION_JSON)
public Response searchUsingQueryDSL(@QueryParam("query") String dslQuery) { public Response searchUsingQueryDSL(@QueryParam("query") String dslQuery) {
Preconditions.checkNotNull(dslQuery, "dslQuery cannot be null");
try { try {
ParamChecker.notEmpty(dslQuery, "dslQuery cannot be null");
final String jsonResultStr = discoveryService.searchByDSL(dslQuery); final String jsonResultStr = discoveryService.searchByDSL(dslQuery);
JSONObject response = new DSLJSONResponseBuilder().results(jsonResultStr) JSONObject response = new DSLJSONResponseBuilder().results(jsonResultStr)
...@@ -135,11 +137,11 @@ public class MetadataDiscoveryResource { ...@@ -135,11 +137,11 @@ public class MetadataDiscoveryResource {
return Response.ok(response) return Response.ok(response)
.build(); .build();
} catch (DiscoveryException e) { } catch (DiscoveryException | IllegalArgumentException e) {
LOG.error("Unable to get entity list for dslQuery {}", dslQuery, e); LOG.error("Unable to get entity list for dslQuery {}", dslQuery, e);
throw new WebApplicationException( throw new WebApplicationException(
Servlets.getErrorResponse(e, Response.Status.BAD_REQUEST)); Servlets.getErrorResponse(e, Response.Status.BAD_REQUEST));
} catch (JSONException e) { } catch (Throwable e) {
LOG.error("Unable to get entity list for dslQuery {}", dslQuery, e); LOG.error("Unable to get entity list for dslQuery {}", dslQuery, e);
throw new WebApplicationException( throw new WebApplicationException(
Servlets.getErrorResponse(e, Response.Status.INTERNAL_SERVER_ERROR)); Servlets.getErrorResponse(e, Response.Status.INTERNAL_SERVER_ERROR));
...@@ -156,9 +158,8 @@ public class MetadataDiscoveryResource { ...@@ -156,9 +158,8 @@ public class MetadataDiscoveryResource {
@Path("search/gremlin") @Path("search/gremlin")
@Produces(MediaType.APPLICATION_JSON) @Produces(MediaType.APPLICATION_JSON)
public Response searchUsingGremlinQuery(@QueryParam("query") String gremlinQuery) { public Response searchUsingGremlinQuery(@QueryParam("query") String gremlinQuery) {
Preconditions.checkNotNull(gremlinQuery, "gremlinQuery cannot be null");
try { try {
ParamChecker.notEmpty(gremlinQuery, "gremlinQuery cannot be null or empty");
final List<Map<String, String>> results = discoveryService final List<Map<String, String>> results = discoveryService
.searchByGremlin(gremlinQuery); .searchByGremlin(gremlinQuery);
...@@ -176,11 +177,11 @@ public class MetadataDiscoveryResource { ...@@ -176,11 +177,11 @@ public class MetadataDiscoveryResource {
return Response.ok(response) return Response.ok(response)
.build(); .build();
} catch (DiscoveryException e) { } catch (DiscoveryException | IllegalArgumentException e) {
LOG.error("Unable to get entity list for gremlinQuery {}", gremlinQuery, e); LOG.error("Unable to get entity list for gremlinQuery {}", gremlinQuery, e);
throw new WebApplicationException( throw new WebApplicationException(
Servlets.getErrorResponse(e, Response.Status.BAD_REQUEST)); Servlets.getErrorResponse(e, Response.Status.BAD_REQUEST));
} catch (JSONException e) { } catch (Throwable e) {
LOG.error("Unable to get entity list for gremlinQuery {}", gremlinQuery, e); LOG.error("Unable to get entity list for gremlinQuery {}", gremlinQuery, e);
throw new WebApplicationException( throw new WebApplicationException(
Servlets.getErrorResponse(e, Response.Status.INTERNAL_SERVER_ERROR)); Servlets.getErrorResponse(e, Response.Status.INTERNAL_SERVER_ERROR));
...@@ -197,9 +198,8 @@ public class MetadataDiscoveryResource { ...@@ -197,9 +198,8 @@ public class MetadataDiscoveryResource {
@Path("search/fulltext") @Path("search/fulltext")
@Produces(MediaType.APPLICATION_JSON) @Produces(MediaType.APPLICATION_JSON)
public Response searchUsingFullText(@QueryParam("query") String query) { public Response searchUsingFullText(@QueryParam("query") String query) {
Preconditions.checkNotNull(query, "query cannot be null");
try { try {
ParamChecker.notEmpty(query, "query cannot be null or empty");
final String jsonResultStr = discoveryService.searchByFullText(query); final String jsonResultStr = discoveryService.searchByFullText(query);
JSONArray rowsJsonArr = new JSONArray(jsonResultStr); JSONArray rowsJsonArr = new JSONArray(jsonResultStr);
...@@ -208,10 +208,10 @@ public class MetadataDiscoveryResource { ...@@ -208,10 +208,10 @@ public class MetadataDiscoveryResource {
.build(); .build();
return Response.ok(response) return Response.ok(response)
.build(); .build();
} catch (DiscoveryException e) { } catch (DiscoveryException | IllegalArgumentException e) {
LOG.error("Unable to get entity list for query {}", query, e); LOG.error("Unable to get entity list for query {}", query, e);
throw new WebApplicationException(Servlets.getErrorResponse(e, Response.Status.BAD_REQUEST)); throw new WebApplicationException(Servlets.getErrorResponse(e, Response.Status.BAD_REQUEST));
} catch (JSONException e) { } catch (Throwable e) {
LOG.error("Unable to get entity list for query {}", query, e); LOG.error("Unable to get entity list for query {}", query, e);
throw new WebApplicationException(Servlets.getErrorResponse(e, Response.Status.INTERNAL_SERVER_ERROR)); throw new WebApplicationException(Servlets.getErrorResponse(e, Response.Status.INTERNAL_SERVER_ERROR));
} }
......
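The search() change above moves validation and the gremlin-prefix dispatch inside the try, keeping the existing fall-back from DSL to full-text search. A condensed sketch of the resulting control flow (names match the diff; response building is simplified, and the real fallback inlines the full-text call with its own catch clauses rather than delegating as shown here):

    public Response search(@QueryParam("query") String query) {
        try {
            ParamChecker.notEmpty(query, "query cannot be null");
            if (query.startsWith("g.")) {              // raw gremlin query
                return searchUsingGremlinQuery(query);
            }
            String jsonResultStr = discoveryService.searchByDSL(query);
            JSONObject response = new DSLJSONResponseBuilder()
                    .results(jsonResultStr).query(query).build();
            return Response.ok(response).build();
        } catch (IllegalArgumentException e) {
            // empty query: fail fast with 400 instead of attempting full text
            throw new WebApplicationException(
                    Servlets.getErrorResponse(e, Response.Status.BAD_REQUEST));
        } catch (Throwable throwable) {
            // DSL parse/search failed: fall back to full-text search
            return searchUsingFullText(query);
        }
    }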
...@@ -83,7 +83,7 @@ public class TypesResource { ...@@ -83,7 +83,7 @@ public class TypesResource {
public Response submit(@Context HttpServletRequest request) { public Response submit(@Context HttpServletRequest request) {
try { try {
final String typeDefinition = Servlets.getRequestPayload(request); final String typeDefinition = Servlets.getRequestPayload(request);
LOG.debug("creating type with definition {} ", typeDefinition); LOG.debug("Creating type with definition {} ", typeDefinition);
JSONObject typesJson = metadataService.createType(typeDefinition); JSONObject typesJson = metadataService.createType(typeDefinition);
final JSONArray typesJsonArray = typesJson.getJSONArray(MetadataServiceClient.TYPES); final JSONArray typesJsonArray = typesJson.getJSONArray(MetadataServiceClient.TYPES);
...@@ -101,10 +101,14 @@ public class TypesResource { ...@@ -101,10 +101,14 @@ public class TypesResource {
response.put(MetadataServiceClient.REQUEST_ID, Servlets.getRequestId()); response.put(MetadataServiceClient.REQUEST_ID, Servlets.getRequestId());
response.put(MetadataServiceClient.TYPES, typesAddedList); response.put(MetadataServiceClient.TYPES, typesAddedList);
return Response.status(ClientResponse.Status.CREATED).entity(response).build(); return Response.status(ClientResponse.Status.CREATED).entity(response).build();
} catch (Exception e) { } catch (MetadataException | IllegalArgumentException e) {
LOG.error("Unable to persist types", e); LOG.error("Unable to persist types", e);
throw new WebApplicationException( throw new WebApplicationException(
Servlets.getErrorResponse(e, Response.Status.BAD_REQUEST)); Servlets.getErrorResponse(e, Response.Status.BAD_REQUEST));
} catch (Throwable e) {
LOG.error("Unable to persist types", e);
throw new WebApplicationException(
Servlets.getErrorResponse(e, Response.Status.INTERNAL_SERVER_ERROR));
} }
} }
...@@ -122,7 +126,7 @@ public class TypesResource { ...@@ -122,7 +126,7 @@ public class TypesResource {
final String typeDefinition = metadataService.getTypeDefinition(typeName); final String typeDefinition = metadataService.getTypeDefinition(typeName);
JSONObject response = new JSONObject(); JSONObject response = new JSONObject();
response.put("typeName", typeName); response.put(MetadataServiceClient.TYPENAME, typeName);
response.put(MetadataServiceClient.DEFINITION, typeDefinition); response.put(MetadataServiceClient.DEFINITION, typeDefinition);
response.put(MetadataServiceClient.REQUEST_ID, Servlets.getRequestId()); response.put(MetadataServiceClient.REQUEST_ID, Servlets.getRequestId());
...@@ -131,10 +135,14 @@ public class TypesResource { ...@@ -131,10 +135,14 @@ public class TypesResource {
LOG.error("Unable to get type definition for type {}", typeName, e); LOG.error("Unable to get type definition for type {}", typeName, e);
throw new WebApplicationException( throw new WebApplicationException(
Servlets.getErrorResponse(e, Response.Status.NOT_FOUND)); Servlets.getErrorResponse(e, Response.Status.NOT_FOUND));
} catch (JSONException e) { } catch (JSONException | IllegalArgumentException e) {
LOG.error("Unable to get type definition for type {}", typeName, e); LOG.error("Unable to get type definition for type {}", typeName, e);
throw new WebApplicationException( throw new WebApplicationException(
Servlets.getErrorResponse(e, Response.Status.BAD_REQUEST)); Servlets.getErrorResponse(e, Response.Status.BAD_REQUEST));
} catch (Throwable e) {
LOG.error("Unable to get type definition for type {}", typeName, e);
throw new WebApplicationException(
Servlets.getErrorResponse(e, Response.Status.INTERNAL_SERVER_ERROR));
} }
} }
...@@ -165,14 +173,14 @@ public class TypesResource { ...@@ -165,14 +173,14 @@ public class TypesResource {
response.put(MetadataServiceClient.REQUEST_ID, Servlets.getRequestId()); response.put(MetadataServiceClient.REQUEST_ID, Servlets.getRequestId());
return Response.ok(response).build(); return Response.ok(response).build();
} catch (IllegalArgumentException ie) { } catch (IllegalArgumentException | MetadataException ie) {
LOG.error("Unsupported typeName while retrieving type list {}", type); LOG.error("Unsupported typeName while retrieving type list {}", type);
throw new WebApplicationException( throw new WebApplicationException(
Servlets.getErrorResponse("Unsupported type " + type, Response.Status.BAD_REQUEST)); Servlets.getErrorResponse("Unsupported type " + type, Response.Status.BAD_REQUEST));
} catch (Exception e) { } catch (Throwable e) {
LOG.error("Unable to get types list", e); LOG.error("Unable to get types list", e);
throw new WebApplicationException( throw new WebApplicationException(
Servlets.getErrorResponse(e, Response.Status.BAD_REQUEST)); Servlets.getErrorResponse(e, Response.Status.INTERNAL_SERVER_ERROR));
} }
} }
} }
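TypesResource also swaps the remaining string literals for MetadataServiceClient constants, so the server's response keys and the client's lookups cannot drift apart. A sketch of the constants involved; REQUEST_ID and TYPENAME values are confirmed by the literals they replace in these hunks, the rest are assumptions:

    public class MetadataServiceClient {
        public static final String REQUEST_ID = "requestId";   // replaces "requestId"
        public static final String TYPENAME   = "typeName";    // replaces "typeName"
        // other keys referenced in the diff; exact values assumed
        public static final String DEFINITION = "definition";
        public static final String RESULTS    = "results";
        public static final String COUNT      = "count";
        public static final String TYPES      = "types";
    }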
...@@ -6,9 +6,9 @@ ...@@ -6,9 +6,9 @@
* to you under the Apache License, Version 2.0 (the * to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance * "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at * with the License. You may obtain a copy of the License at
* * <p/>
* http://www.apache.org/licenses/LICENSE-2.0 * http://www.apache.org/licenses/LICENSE-2.0
* * <p/>
* Unless required by applicable law or agreed to in writing, software * Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, * distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
...@@ -23,6 +23,7 @@ import com.sun.jersey.api.client.ClientResponse; ...@@ -23,6 +23,7 @@ import com.sun.jersey.api.client.ClientResponse;
import com.sun.jersey.api.client.WebResource; import com.sun.jersey.api.client.WebResource;
import org.apache.commons.lang.RandomStringUtils; import org.apache.commons.lang.RandomStringUtils;
import org.apache.hadoop.metadata.MetadataServiceClient; import org.apache.hadoop.metadata.MetadataServiceClient;
import org.apache.hadoop.metadata.MetadataServiceException;
import org.apache.hadoop.metadata.typesystem.Referenceable; import org.apache.hadoop.metadata.typesystem.Referenceable;
import org.apache.hadoop.metadata.typesystem.Struct; import org.apache.hadoop.metadata.typesystem.Struct;
import org.apache.hadoop.metadata.typesystem.TypesDef; import org.apache.hadoop.metadata.typesystem.TypesDef;
...@@ -31,16 +32,7 @@ import org.apache.hadoop.metadata.typesystem.json.InstanceSerialization$; ...@@ -31,16 +32,7 @@ import org.apache.hadoop.metadata.typesystem.json.InstanceSerialization$;
import org.apache.hadoop.metadata.typesystem.json.TypesSerialization; import org.apache.hadoop.metadata.typesystem.json.TypesSerialization;
import org.apache.hadoop.metadata.typesystem.json.TypesSerialization$; import org.apache.hadoop.metadata.typesystem.json.TypesSerialization$;
import org.apache.hadoop.metadata.typesystem.persistence.Id; import org.apache.hadoop.metadata.typesystem.persistence.Id;
import org.apache.hadoop.metadata.typesystem.types.AttributeDefinition; import org.apache.hadoop.metadata.typesystem.types.*;
import org.apache.hadoop.metadata.typesystem.types.ClassType;
import org.apache.hadoop.metadata.typesystem.types.DataTypes;
import org.apache.hadoop.metadata.typesystem.types.EnumTypeDefinition;
import org.apache.hadoop.metadata.typesystem.types.EnumValue;
import org.apache.hadoop.metadata.typesystem.types.HierarchicalTypeDefinition;
import org.apache.hadoop.metadata.typesystem.types.Multiplicity;
import org.apache.hadoop.metadata.typesystem.types.StructTypeDefinition;
import org.apache.hadoop.metadata.typesystem.types.TraitType;
import org.apache.hadoop.metadata.typesystem.types.TypeUtils;
import org.apache.hadoop.metadata.typesystem.types.utils.TypesUtil; import org.apache.hadoop.metadata.typesystem.types.utils.TypesUtil;
import org.codehaus.jettison.json.JSONArray; import org.codehaus.jettison.json.JSONArray;
import org.codehaus.jettison.json.JSONObject; import org.codehaus.jettison.json.JSONObject;
...@@ -93,7 +85,46 @@ public class EntityJerseyResourceIT extends BaseResourceIT { ...@@ -93,7 +85,46 @@ public class EntityJerseyResourceIT extends BaseResourceIT {
} }
} }
@Test (dependsOnMethods = "testSubmitEntity") @Test
public void testSubmitEntityWithBadDateFormat() throws Exception {
try {
Referenceable databaseInstance = new Referenceable(DATABASE_TYPE);
databaseInstance.set("name", DATABASE_NAME);
databaseInstance.set("description", "foo database");
Referenceable tableInstance = new Referenceable(TABLE_TYPE,
"classification", "pii", "phi", "pci", "sox", "sec", "finance");
tableInstance.set("name", TABLE_NAME);
tableInstance.set("description", "bar table");
tableInstance.set("date", "2014-07-11");
tableInstance.set("type", "managed");
tableInstance.set("level", 2);
tableInstance.set("tableType", 1); // enum
tableInstance.set("database", databaseInstance);
tableInstance.set("compressed", false);
Struct traitInstance = (Struct) tableInstance.getTrait("classification");
traitInstance.set("tag", "foundation_etl");
Struct serde1Instance = new Struct("serdeType");
serde1Instance.set("name", "serde1");
serde1Instance.set("serde", "serde1");
tableInstance.set("serde1", serde1Instance);
Struct serde2Instance = new Struct("serdeType");
serde2Instance.set("name", "serde2");
serde2Instance.set("serde", "serde2");
tableInstance.set("serde2", serde2Instance);
tableId = createInstance(tableInstance);
Assert.fail("Was expecting an exception here ");
} catch (MetadataServiceException e) {
Assert.assertTrue(e.getMessage().contains("\"error\":\"Cannot convert value '2014-07-11' to datatype date\""));
}
}
@Test(dependsOnMethods = "testSubmitEntity")
public void testAddProperty() throws Exception { public void testAddProperty() throws Exception {
final String guid = tableId._getId(); final String guid = tableId._getId();
//add property //add property
...@@ -120,7 +151,7 @@ public class EntityJerseyResourceIT extends BaseResourceIT { ...@@ -120,7 +151,7 @@ public class EntityJerseyResourceIT extends BaseResourceIT {
tableInstance.set("level", 4); tableInstance.set("level", 4);
} }
@Test (dependsOnMethods = "testSubmitEntity") @Test(dependsOnMethods = "testSubmitEntity")
public void testAddReferenceProperty() throws Exception { public void testAddReferenceProperty() throws Exception {
//Create new db instance //Create new db instance
Referenceable databaseInstance = new Referenceable(DATABASE_TYPE); Referenceable databaseInstance = new Referenceable(DATABASE_TYPE);
...@@ -242,6 +273,7 @@ public class EntityJerseyResourceIT extends BaseResourceIT { ...@@ -242,6 +273,7 @@ public class EntityJerseyResourceIT extends BaseResourceIT {
Assert.assertNotNull(response.get(MetadataServiceClient.STACKTRACE)); Assert.assertNotNull(response.get(MetadataServiceClient.STACKTRACE));
} }
@Test @Test
public void testGetEntityListForNoInstances() throws Exception { public void testGetEntityListForNoInstances() throws Exception {
addNewType(); addNewType();
...@@ -275,7 +307,7 @@ public class EntityJerseyResourceIT extends BaseResourceIT { ...@@ -275,7 +307,7 @@ public class EntityJerseyResourceIT extends BaseResourceIT {
createType(typesAsJSON); createType(typesAsJSON);
} }
@Test (dependsOnMethods = "testSubmitEntity") @Test(dependsOnMethods = "testSubmitEntity")
public void testGetTraitNames() throws Exception { public void testGetTraitNames() throws Exception {
final String guid = tableId._getId(); final String guid = tableId._getId();
ClientResponse clientResponse = service ClientResponse clientResponse = service
...@@ -298,7 +330,7 @@ public class EntityJerseyResourceIT extends BaseResourceIT { ...@@ -298,7 +330,7 @@ public class EntityJerseyResourceIT extends BaseResourceIT {
Assert.assertEquals(list.length(), 7); Assert.assertEquals(list.length(), 7);
} }
@Test (dependsOnMethods = "testGetTraitNames") @Test(dependsOnMethods = "testGetTraitNames")
public void testAddTrait() throws Exception { public void testAddTrait() throws Exception {
final String traitName = "PII_Trait"; final String traitName = "PII_Trait";
HierarchicalTypeDefinition<TraitType> piiTrait = HierarchicalTypeDefinition<TraitType> piiTrait =
...@@ -352,7 +384,7 @@ public class EntityJerseyResourceIT extends BaseResourceIT { ...@@ -352,7 +384,7 @@ public class EntityJerseyResourceIT extends BaseResourceIT {
Assert.assertEquals(clientResponse.getStatus(), Response.Status.BAD_REQUEST.getStatusCode()); Assert.assertEquals(clientResponse.getStatus(), Response.Status.BAD_REQUEST.getStatusCode());
} }
@Test (dependsOnMethods = "testAddTrait") @Test(dependsOnMethods = "testAddTrait")
public void testDeleteTrait() throws Exception { public void testDeleteTrait() throws Exception {
final String traitName = "PII_Trait"; final String traitName = "PII_Trait";
final String guid = tableId._getId(); final String guid = tableId._getId();
...@@ -454,6 +486,7 @@ public class EntityJerseyResourceIT extends BaseResourceIT { ...@@ -454,6 +486,7 @@ public class EntityJerseyResourceIT extends BaseResourceIT {
TypesUtil.createUniqueRequiredAttrDef("name", DataTypes.STRING_TYPE), TypesUtil.createUniqueRequiredAttrDef("name", DataTypes.STRING_TYPE),
TypesUtil.createOptionalAttrDef("description", DataTypes.STRING_TYPE), TypesUtil.createOptionalAttrDef("description", DataTypes.STRING_TYPE),
TypesUtil.createRequiredAttrDef("type", DataTypes.STRING_TYPE), TypesUtil.createRequiredAttrDef("type", DataTypes.STRING_TYPE),
TypesUtil.createRequiredAttrDef("date", DataTypes.DATE_TYPE),
TypesUtil.createRequiredAttrDef("level", DataTypes.INT_TYPE), TypesUtil.createRequiredAttrDef("level", DataTypes.INT_TYPE),
new AttributeDefinition("tableType", "tableType", new AttributeDefinition("tableType", "tableType",
Multiplicity.REQUIRED, false, null), Multiplicity.REQUIRED, false, null),
...@@ -501,6 +534,7 @@ public class EntityJerseyResourceIT extends BaseResourceIT { ...@@ -501,6 +534,7 @@ public class EntityJerseyResourceIT extends BaseResourceIT {
"classification", "pii", "phi", "pci", "sox", "sec", "finance"); "classification", "pii", "phi", "pci", "sox", "sec", "finance");
tableInstance.set("name", TABLE_NAME); tableInstance.set("name", TABLE_NAME);
tableInstance.set("description", "bar table"); tableInstance.set("description", "bar table");
tableInstance.set("date", "2014-07-11T08:00:00.000Z");
tableInstance.set("type", "managed"); tableInstance.set("type", "managed");
tableInstance.set("level", 2); tableInstance.set("level", 2);
tableInstance.set("tableType", 1); // enum tableInstance.set("tableType", 1); // enum
......
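The new testSubmitEntityWithBadDateFormat pins down the contract introduced by the added DataTypes.DATE_TYPE attribute: a bare day such as "2014-07-11" is rejected with "Cannot convert value '2014-07-11' to datatype date", while the full ISO-8601 instant "2014-07-11T08:00:00.000Z" used in the fixture is accepted. A small sketch producing the accepted shape; the formatter pattern is an assumption matching that literal:

    import java.text.SimpleDateFormat;
    import java.util.Date;
    import java.util.TimeZone;

    public class DateAttributeFormat {
        public static void main(String[] args) {
            SimpleDateFormat iso = new SimpleDateFormat("yyyy-MM-dd'T'HH:mm:ss.SSS'Z'");
            iso.setTimeZone(TimeZone.getTimeZone("UTC"));
            System.out.println(iso.format(new Date()));  // e.g. 2014-07-11T08:00:00.000Z
            // "2014-07-11" (date only) is the shape the test expects to fail
        }
    }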