Commit 7cf0db5b by Shwetha GS

Merge branch 'master' into dal

parents c4a7d6f7 d86caa89
......@@ -25,6 +25,7 @@ import org.apache.hadoop.hive.ql.session.SessionState;
import org.apache.hadoop.metadata.MetadataServiceClient;
import org.apache.hadoop.metadata.hive.bridge.HiveMetaStoreBridge;
import org.apache.hadoop.metadata.hive.model.HiveDataTypes;
import org.apache.hadoop.metadata.typesystem.Referenceable;
import org.codehaus.jettison.json.JSONArray;
import org.codehaus.jettison.json.JSONObject;
import org.testng.Assert;
......@@ -32,6 +33,7 @@ import org.testng.annotations.BeforeClass;
import org.testng.annotations.Test;
import java.io.File;
import java.util.Map;
public class HiveHookIT {
private static final String DGI_URL = "http://localhost:21000/";
......@@ -76,8 +78,13 @@ public class HiveHookIT {
@Test
public void testCreateDatabase() throws Exception {
String dbName = "db" + random();
runCommand("create database " + dbName);
assertDatabaseIsRegistered(dbName);
runCommand("create database " + dbName + " WITH DBPROPERTIES ('p1'='v1', 'p2'='v2')");
String dbId = assertDatabaseIsRegistered(dbName);
Referenceable definition = dgiCLient.getEntity(dbId);
Map params = (Map) definition.get("parameters");
Assert.assertNotNull(params);
Assert.assertEquals(params.size(), 2);
Assert.assertEquals(params.get("p1"), "v1");
//There should be just one entity per dbname
runCommand("drop database " + dbName);
......@@ -213,10 +220,10 @@ public class HiveHookIT {
assertEntityIsRegistered(query);
}
private void assertDatabaseIsRegistered(String dbName) throws Exception {
private String assertDatabaseIsRegistered(String dbName) throws Exception {
String query = String.format("%s where name = '%s' and clusterName = '%s'", HiveDataTypes.HIVE_DB.getName(),
dbName, CLUSTER_NAME);
assertEntityIsRegistered(query);
return assertEntityIsRegistered(query);
}
private void assertPartitionIsRegistered(String dbName, String tableName, String value) throws Exception {
......@@ -233,8 +240,9 @@ public class HiveHookIT {
Assert.assertEquals(results.length(), 1);
}
private void assertEntityIsRegistered(String dslQuery) throws Exception{
private String assertEntityIsRegistered(String dslQuery) throws Exception{
JSONArray results = dgiCLient.searchByDSL(dslQuery);
Assert.assertEquals(results.length(), 1);
return results.getJSONObject(0).getJSONObject("$id$").getString("id");
}
}
......@@ -51,7 +51,7 @@ public class MetadataServiceClient {
public static final String NAME = "name";
public static final String GUID = "GUID";
public static final String TYPENAME = "typeName";
public static final String TYPE = "type";
public static final String DEFINITION = "definition";
public static final String ERROR = "error";
......
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.metadata;
import java.lang.annotation.ElementType;
import java.lang.annotation.Retention;
import java.lang.annotation.RetentionPolicy;
import java.lang.annotation.Target;
@Retention(RetentionPolicy.RUNTIME) @Target(ElementType.METHOD)
public @interface GraphTransaction {}
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.metadata;
import com.google.inject.Inject;
import com.thinkaurelius.titan.core.TitanGraph;
import org.aopalliance.intercept.MethodInterceptor;
import org.aopalliance.intercept.MethodInvocation;
import org.apache.hadoop.metadata.repository.graph.GraphProvider;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
public class GraphTransactionInterceptor implements MethodInterceptor {
private static final Logger LOG = LoggerFactory.getLogger(GraphTransactionInterceptor.class);
private TitanGraph titanGraph;
@Inject
GraphProvider<TitanGraph> graphProvider;
public Object invoke(MethodInvocation invocation) throws Throwable {
if (titanGraph == null) {
titanGraph = graphProvider.get();
}
try {
LOG.debug("graph rollback to cleanup previous state");
titanGraph.rollback(); //cleanup previous state
Object response = invocation.proceed();
titanGraph.commit();
LOG.debug("graph commit");
return response;
} catch (Throwable t){
titanGraph.rollback();
LOG.debug("graph rollback");
throw t;
}
}
}
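
Editorial note: the interceptor above centralizes the commit/rollback bookkeeping that was previously duplicated in every repository method (see the removed `titanGraph.commit()`/`titanGraph.rollback()` calls further down in this diff). A minimal usage sketch, assuming the Guice wiring from `RepositoryMetadataModule` below; `ExampleRepository` and `createExample` are hypothetical names, not part of this commit:

```java
package org.apache.hadoop.metadata;

// Hypothetical example: any method on a Guice-managed object annotated with
// @GraphTransaction is wrapped by GraphTransactionInterceptor, which rolls
// back stale state, proceeds with the call, and commits on success (or rolls
// back and rethrows on any Throwable).
public class ExampleRepository {

    @GraphTransaction
    public String createExample() {
        // Graph mutations here run inside one managed transaction; the method
        // body no longer needs explicit titanGraph.commit()/rollback() calls.
        return "done";
    }
}
```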
......@@ -19,8 +19,10 @@
package org.apache.hadoop.metadata;
import com.google.inject.Scopes;
import com.google.inject.matcher.Matchers;
import com.google.inject.throwingproviders.ThrowingProviderBinder;
import com.thinkaurelius.titan.core.TitanGraph;
import org.aopalliance.intercept.MethodInterceptor;
import org.apache.hadoop.metadata.discovery.DiscoveryService;
import org.apache.hadoop.metadata.discovery.HiveLineageService;
import org.apache.hadoop.metadata.discovery.LineageService;
......@@ -40,31 +42,7 @@ import org.apache.hadoop.metadata.services.MetadataService;
* Guice module for Repository module.
*/
public class RepositoryMetadataModule extends com.google.inject.AbstractModule {
// Graph Service implementation class
// private Class<? extends GraphService> graphServiceClass;
// MetadataRepositoryService implementation class
private Class<? extends MetadataRepository> metadataRepoClass;
private Class<? extends ITypeStore> typeStore;
private Class<? extends MetadataService> metadataService;
private Class<? extends DiscoveryService> discoveryService;
private Class<? extends SearchIndexer> searchIndexer;
private Class<? extends LineageService> lineageService;
public RepositoryMetadataModule() {
// GraphServiceConfigurator gsp = new GraphServiceConfigurator();
// get the impl classes for the repo and the graph service
// this.graphServiceClass = gsp.getImplClass();
this.metadataRepoClass = GraphBackedMetadataRepository.class;
this.typeStore = GraphBackedTypeStore.class;
this.metadataService = DefaultMetadataService.class;
this.discoveryService = GraphBackedDiscoveryService.class;
this.searchIndexer = GraphBackedSearchIndexer.class;
this.lineageService = HiveLineageService.class;
}
@Override
protected void configure() {
// special wiring for Titan Graph
ThrowingProviderBinder.create(binder())
......@@ -75,22 +53,26 @@ public class RepositoryMetadataModule extends com.google.inject.AbstractModule {
// allow for dynamic binding of the metadata repo & graph service
// bind the MetadataRepositoryService interface to an implementation
bind(MetadataRepository.class).to(metadataRepoClass);
bind(MetadataRepository.class).to(GraphBackedMetadataRepository.class);
// bind the ITypeStore interface to an implementation
bind(ITypeStore.class).to(typeStore);
bind(ITypeStore.class).to(GraphBackedTypeStore.class);
// bind the GraphService interface to an implementation
// bind(GraphService.class).to(graphServiceClass);
// bind the MetadataService interface to an implementation
bind(MetadataService.class).to(metadataService);
bind(MetadataService.class).to(DefaultMetadataService.class);
// bind the DiscoveryService interface to an implementation
bind(DiscoveryService.class).to(discoveryService);
bind(DiscoveryService.class).to(GraphBackedDiscoveryService.class);
bind(SearchIndexer.class).to(GraphBackedSearchIndexer.class);
bind(SearchIndexer.class).to(searchIndexer);
bind(LineageService.class).to(HiveLineageService.class);
bind(LineageService.class).to(lineageService);
MethodInterceptor interceptor = new GraphTransactionInterceptor();
requestInjection(interceptor);
bindInterceptor(Matchers.any(), Matchers.annotatedWith(GraphTransaction.class), interceptor);
}
}
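
Editorial note: with the bindings now hard-wired instead of held in constructor fields, bootstrapping is a plain Guice injector call. A sketch of how a caller might obtain a fully wired service; `BootstrapSketch` is a hypothetical class, not part of this commit:

```java
import com.google.inject.Guice;
import com.google.inject.Injector;
import org.apache.hadoop.metadata.RepositoryMetadataModule;
import org.apache.hadoop.metadata.services.MetadataService;

public class BootstrapSketch {
    public static void main(String[] args) {
        // configure() binds each interface to its single implementation and
        // registers GraphTransactionInterceptor for @GraphTransaction methods.
        Injector injector = Guice.createInjector(new RepositoryMetadataModule());
        MetadataService service = injector.getInstance(MetadataService.class);
    }
}
```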
......@@ -20,6 +20,7 @@ package org.apache.hadoop.metadata.discovery;
import com.thinkaurelius.titan.core.TitanGraph;
import org.apache.commons.configuration.PropertiesConfiguration;
import org.apache.hadoop.metadata.GraphTransaction;
import org.apache.hadoop.metadata.MetadataException;
import org.apache.hadoop.metadata.PropertiesUtil;
import org.apache.hadoop.metadata.discovery.graph.DefaultGraphPersistenceStrategy;
......@@ -96,6 +97,7 @@ public class HiveLineageService implements LineageService {
* @return Lineage Outputs as JSON
*/
@Override
@GraphTransaction
public String getOutputs(String tableName) throws DiscoveryException {
LOG.info("Fetching lineage outputs for tableName={}", tableName);
......@@ -121,6 +123,7 @@ public class HiveLineageService implements LineageService {
* @return Outputs Graph as JSON
*/
@Override
@GraphTransaction
public String getOutputsGraph(String tableName) throws DiscoveryException {
LOG.info("Fetching lineage outputs graph for tableName={}", tableName);
......@@ -139,6 +142,7 @@ public class HiveLineageService implements LineageService {
* @return Lineage Inputs as JSON
*/
@Override
@GraphTransaction
public String getInputs(String tableName) throws DiscoveryException {
LOG.info("Fetching lineage inputs for tableName={}", tableName);
......@@ -164,6 +168,7 @@ public class HiveLineageService implements LineageService {
* @return Inputs Graph as JSON
*/
@Override
@GraphTransaction
public String getInputsGraph(String tableName) throws DiscoveryException {
LOG.info("Fetching lineage inputs graph for tableName={}", tableName);
......@@ -182,6 +187,7 @@ public class HiveLineageService implements LineageService {
* @return Schema as JSON
*/
@Override
@GraphTransaction
public String getSchema(String tableName) throws DiscoveryException {
// todo - validate if indeed this is a table type and exists
String schemaQuery = HIVE_TABLE_TYPE_NAME
......
......@@ -25,6 +25,7 @@ import com.thinkaurelius.titan.core.TitanVertex;
import com.tinkerpop.blueprints.Vertex;
import com.tinkerpop.gremlin.groovy.Gremlin;
import com.tinkerpop.gremlin.java.GremlinPipeline;
import org.apache.hadoop.metadata.GraphTransaction;
import org.apache.hadoop.metadata.MetadataServiceClient;
import org.apache.hadoop.metadata.discovery.DiscoveryException;
import org.apache.hadoop.metadata.discovery.DiscoveryService;
......@@ -82,6 +83,7 @@ public class GraphBackedDiscoveryService implements DiscoveryService {
//http://www.elastic.co/guide/en/elasticsearch/reference/current/query-dsl-query-string-query
// .html#query-string-syntax for query syntax
@Override
@GraphTransaction
public String searchByFullText(String query) throws DiscoveryException {
String graphQuery = String.format("v.%s:(%s)", Constants.ENTITY_TEXT_PROPERTY_KEY, query);
LOG.debug("Full text query: {}", graphQuery);
......@@ -118,6 +120,7 @@ public class GraphBackedDiscoveryService implements DiscoveryService {
* @return JSON representing the type and results.
*/
@Override
@GraphTransaction
public String searchByDSL(String dslQuery) throws DiscoveryException {
LOG.info("Executing dsl query={}", dslQuery);
try {
......@@ -155,6 +158,7 @@ public class GraphBackedDiscoveryService implements DiscoveryService {
* @throws org.apache.hadoop.metadata.discovery.DiscoveryException
*/
@Override
@GraphTransaction
public List<Map<String, String>> searchByGremlin(String gremlinQuery)
throws DiscoveryException {
LOG.info("Executing gremlin query={}", gremlinQuery);
......
......@@ -27,6 +27,7 @@ import com.tinkerpop.blueprints.Edge;
import com.tinkerpop.blueprints.GraphQuery;
import com.tinkerpop.blueprints.Vertex;
import org.apache.commons.lang.StringUtils;
import org.apache.hadoop.metadata.GraphTransaction;
import org.apache.hadoop.metadata.MetadataException;
import org.apache.hadoop.metadata.repository.Constants;
import org.apache.hadoop.metadata.repository.MetadataRepository;
......@@ -137,27 +138,23 @@ public class GraphBackedMetadataRepository implements MetadataRepository {
}
@Override
@GraphTransaction
public String createEntity(IReferenceableInstance typedInstance) throws RepositoryException {
LOG.info("adding entity={}", typedInstance);
try {
titanGraph.rollback();
final String guid = instanceToGraphMapper.mapTypedInstanceToGraph(typedInstance);
titanGraph.commit(); // commit if there are no errors
return guid;
} catch (MetadataException e) {
titanGraph.rollback();
throw new RepositoryException(e);
}
}
@Override
@GraphTransaction
public ITypedReferenceableInstance getEntityDefinition(String guid) throws RepositoryException {
LOG.info("Retrieving entity with guid={}", guid);
try {
titanGraph.rollback(); // clean up before starting a query
Vertex instanceVertex = getVertexForGUID(guid);
LOG.debug("Found a vertex {} for guid {}", instanceVertex, guid);
......@@ -206,9 +203,9 @@ public class GraphBackedMetadataRepository implements MetadataRepository {
* @throws RepositoryException
*/
@Override
@GraphTransaction
public List<String> getTraitNames(String guid) throws RepositoryException {
LOG.info("Retrieving trait names for entity={}", guid);
titanGraph.rollback(); // clean up before starting a query
Vertex instanceVertex = getVertexForGUID(guid);
return getTraitNames(instanceVertex);
}
......@@ -231,6 +228,7 @@ public class GraphBackedMetadataRepository implements MetadataRepository {
* @throws RepositoryException
*/
@Override
@GraphTransaction
public void addTrait(String guid,
ITypedStruct traitInstance) throws RepositoryException {
Preconditions.checkNotNull(traitInstance, "Trait instance cannot be null");
......@@ -238,22 +236,18 @@ public class GraphBackedMetadataRepository implements MetadataRepository {
LOG.info("Adding a new trait={} for entity={}", traitName, guid);
try {
titanGraph.rollback(); // clean up before starting a query
Vertex instanceVertex = getVertexForGUID(guid);
// add the trait instance as a new vertex
final String typeName = getTypeName(instanceVertex);
instanceToGraphMapper.mapTraitInstanceToVertex(traitInstance,
getIdFromVertex(typeName, instanceVertex),
instanceToGraphMapper.mapTraitInstanceToVertex(traitInstance, getIdFromVertex(typeName, instanceVertex),
typeName, instanceVertex, Collections.<Id, Vertex>emptyMap());
// update the traits in entity once adding trait instance is successful
((TitanVertex) instanceVertex)
.addProperty(Constants.TRAIT_NAMES_PROPERTY_KEY, traitName);
titanGraph.commit(); // commit if there are no errors
} catch (MetadataException e) {
titanGraph.rollback();
throw new RepositoryException(e);
}
}
......@@ -266,11 +260,11 @@ public class GraphBackedMetadataRepository implements MetadataRepository {
* @throws RepositoryException
*/
@Override
@GraphTransaction
public void deleteTrait(String guid, String traitNameToBeDeleted)
throws RepositoryException {
LOG.info("Deleting trait={} from entity={}", traitNameToBeDeleted, guid);
try {
titanGraph.rollback(); // clean up before starting a query
Vertex instanceVertex = getVertexForGUID(guid);
List<String> traitNames = getTraitNames(instanceVertex);
......@@ -297,11 +291,8 @@ public class GraphBackedMetadataRepository implements MetadataRepository {
traitNames.remove(traitNameToBeDeleted);
updateTraits(instanceVertex, traitNames);
}
titanGraph.commit(); // commit if there are no errors
}
} catch (Exception e) {
titanGraph.rollback();
throw new RepositoryException(e);
}
}
......@@ -318,11 +309,11 @@ public class GraphBackedMetadataRepository implements MetadataRepository {
}
@Override
@GraphTransaction
public void updateEntity(String guid, String property, String value) throws RepositoryException {
LOG.info("Adding property {} for entity guid {}", property, guid);
try {
titanGraph.rollback(); // clean up before starting a query
Vertex instanceVertex = GraphHelper.findVertexByGUID(titanGraph, guid);
if (instanceVertex == null) {
throw new RepositoryException("Could not find a vertex for guid " + guid);
......@@ -349,11 +340,8 @@ public class GraphBackedMetadataRepository implements MetadataRepository {
instanceToGraphMapper.mapAttributesToVertex(getIdFromVertex(typeName, instanceVertex), instance,
instanceVertex, new HashMap<Id, Vertex>(), attributeInfo, attributeInfo.dataType());
titanGraph.commit();
} catch (Exception e) {
throw new RepositoryException(e);
} finally {
titanGraph.rollback();
}
}
......@@ -773,8 +761,9 @@ public class GraphBackedMetadataRepository implements MetadataRepository {
IDataType elementType = ((DataTypes.MapType) attributeInfo.dataType()).getValueType();
for (Map.Entry entry : collection.entrySet()) {
String myPropertyName = propertyName + "." + entry.getKey().toString();
mapCollectionEntryToVertex(id, instanceVertex, attributeInfo,
String value = mapCollectionEntryToVertex(id, instanceVertex, attributeInfo,
idToVertexMap, elementType, entry.getValue(), myPropertyName);
instanceVertex.setProperty(myPropertyName, value);
}
// for dereference on way out
......@@ -980,7 +969,7 @@ public class GraphBackedMetadataRepository implements MetadataRepository {
public void mapVertexToAttribute(Vertex instanceVertex, ITypedInstance typedInstance,
AttributeInfo attributeInfo) throws MetadataException {
LOG.debug("mapping attributeInfo {}", attributeInfo.name);
LOG.debug("Mapping attributeInfo {}", attributeInfo.name);
final IDataType dataType = attributeInfo.dataType();
final String vertexPropertyName = getQualifiedName(typedInstance, attributeInfo);
......@@ -1112,7 +1101,7 @@ public class GraphBackedMetadataRepository implements MetadataRepository {
@SuppressWarnings("unchecked")
private void mapVertexToMapInstance(Vertex instanceVertex, ITypedInstance typedInstance,
AttributeInfo attributeInfo,
String propertyName) throws MetadataException {
final String propertyName) throws MetadataException {
LOG.debug("mapping vertex {} to array {}", instanceVertex, attributeInfo.name);
List<String> keys = instanceVertex.getProperty(propertyName);
if (keys == null || keys.size() == 0) {
......@@ -1124,21 +1113,15 @@ public class GraphBackedMetadataRepository implements MetadataRepository {
HashMap values = new HashMap();
for (String key : keys) {
String keyPropertyName = propertyName + "." + key;
Object keyValue = instanceVertex.getProperty(keyPropertyName);
values.put(key, mapVertexToCollectionEntry(instanceVertex, attributeInfo,
valueType, propertyName, propertyName));
valueType, keyValue, propertyName));
}
typedInstance.set(attributeInfo.name, values);
}
private String extractKey(String propertyNameWithSuffix, IDataType keyType) {
return propertyNameWithSuffix.substring(
propertyNameWithSuffix.lastIndexOf(".") + 1,
keyType.getTypeCategory() == DataTypes.TypeCategory.PRIMITIVE
? propertyNameWithSuffix.length()
: propertyNameWithSuffix.lastIndexOf(":"));
}
private ITypedStruct getStructInstanceFromVertex(Vertex instanceVertex,
IDataType elemType,
String attributeName, String relationshipLabel,
......
......@@ -18,6 +18,7 @@
package org.apache.hadoop.metadata.repository.graph;
import com.google.inject.Provides;
import com.thinkaurelius.titan.core.TitanFactory;
import com.thinkaurelius.titan.core.TitanGraph;
import org.apache.commons.configuration.Configuration;
......@@ -64,6 +65,7 @@ public class TitanGraphProvider implements GraphProvider<TitanGraph> {
@Override
@Singleton
@Provides
public TitanGraph get() {
Configuration config;
try {
......
......@@ -25,6 +25,7 @@ import com.tinkerpop.blueprints.Direction;
import com.tinkerpop.blueprints.Edge;
import com.tinkerpop.blueprints.Vertex;
import org.apache.commons.lang.StringUtils;
import org.apache.hadoop.metadata.GraphTransaction;
import org.apache.hadoop.metadata.MetadataException;
import org.apache.hadoop.metadata.repository.Constants;
import org.apache.hadoop.metadata.repository.graph.GraphProvider;
......@@ -76,10 +77,9 @@ public class GraphBackedTypeStore implements ITypeStore {
}
@Override
@GraphTransaction
public void store(TypeSystem typeSystem, ImmutableList<String> typeNames) throws MetadataException {
try {
ImmutableList<String> coreTypes = typeSystem.getCoreTypes();
titanGraph.rollback(); //Cleanup previous state
for (String typeName : typeNames) {
if (!coreTypes.contains(typeName)) {
IDataType dataType = typeSystem.getDataType(IDataType.class, typeName);
......@@ -107,10 +107,6 @@ public class GraphBackedTypeStore implements ITypeStore {
}
}
}
titanGraph.commit();
} finally {
titanGraph.rollback();
}
}
private void storeInGraph(EnumType dataType) {
......@@ -206,9 +202,8 @@ public class GraphBackedTypeStore implements ITypeStore {
}
@Override
@GraphTransaction
public TypesDef restore() throws MetadataException {
try {
titanGraph.rollback(); //Cleanup previous state
//Get all vertices for type system
Iterator vertices =
titanGraph.query().has(Constants.VERTEX_TYPE_PROPERTY_KEY, VERTEX_TYPE).vertices().iterator();
......@@ -249,11 +244,7 @@ public class GraphBackedTypeStore implements ITypeStore {
throw new IllegalArgumentException("Unhandled type category " + typeCategory);
}
}
titanGraph.commit();
return TypeUtils.getTypesDef(enums.build(), structs.build(), traits.build(), classTypes.build());
} finally {
titanGraph.rollback();
}
}
private EnumTypeDefinition getEnumType(Vertex vertex) {
......
......@@ -22,6 +22,7 @@ import com.google.common.base.Preconditions;
import com.google.common.collect.ImmutableList;
import org.apache.hadoop.metadata.MetadataException;
import org.apache.hadoop.metadata.MetadataServiceClient;
import org.apache.hadoop.metadata.ParamChecker;
import org.apache.hadoop.metadata.classification.InterfaceAudience;
import org.apache.hadoop.metadata.discovery.SearchIndexer;
import org.apache.hadoop.metadata.listener.EntityChangeListener;
......@@ -51,10 +52,13 @@ import org.slf4j.LoggerFactory;
import javax.inject.Inject;
import javax.inject.Singleton;
import java.util.LinkedHashSet;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.io.IOException;
import java.io.InputStream;
import java.io.InputStreamReader;
import java.net.URL;
import java.text.ParseException;
import java.text.SimpleDateFormat;
import java.util.*;
/**
* Simple wrapper over TypeSystem and MetadataRepository services with hooks
......@@ -133,8 +137,7 @@ public class DefaultMetadataService implements MetadataService {
@Override
public JSONObject createType(String typeDefinition) throws MetadataException {
try {
Preconditions.checkNotNull(typeDefinition, "type definition cannot be null");
Preconditions.checkArgument(!typeDefinition.equals(""), "type definition cannot be an empty string");
ParamChecker.notEmpty(typeDefinition, "type definition cannot be empty");
TypesDef typesDef = TypesSerialization.fromJson(typeDefinition);
if(typesDef.isEmpty())
......@@ -195,8 +198,8 @@ public class DefaultMetadataService implements MetadataService {
*/
@Override
public String createEntity(String entityInstanceDefinition) throws MetadataException {
Preconditions.checkNotNull(entityInstanceDefinition,
"entity instance definition cannot be null");
ParamChecker.notEmpty(entityInstanceDefinition,
"Entity instance definition cannot be empty");
ITypedReferenceableInstance entityTypedInstance =
deserializeClassInstance(entityInstanceDefinition);
......@@ -210,17 +213,14 @@ public class DefaultMetadataService implements MetadataService {
private ITypedReferenceableInstance deserializeClassInstance(
String entityInstanceDefinition) throws MetadataException {
try {
final Referenceable entityInstance = InstanceSerialization.fromJsonReferenceable(
entityInstanceDefinition, true);
final String entityTypeName = entityInstance.getTypeName();
Preconditions.checkNotNull(entityTypeName, "entity type cannot be null");
ParamChecker.notEmpty(entityTypeName, "Entity type cannot be null");
ClassType entityType = typeSystem.getDataType(ClassType.class, entityTypeName);
return entityType.convert(entityInstance, Multiplicity.REQUIRED);
} catch (Exception e) {
throw new MetadataException("Error deserializing class instance", e);
}
}
/**
......@@ -231,7 +231,7 @@ public class DefaultMetadataService implements MetadataService {
*/
@Override
public String getEntityDefinition(String guid) throws MetadataException {
Preconditions.checkNotNull(guid, "guid cannot be null");
ParamChecker.notEmpty(guid, "guid cannot be null");
final ITypedReferenceableInstance instance = repository.getEntityDefinition(guid);
return InstanceSerialization.toJson(instance, true);
......@@ -252,15 +252,15 @@ public class DefaultMetadataService implements MetadataService {
@Override
public void updateEntity(String guid, String property, String value) throws MetadataException {
Preconditions.checkNotNull(guid, "guid cannot be null");
Preconditions.checkNotNull(property, "property cannot be null");
Preconditions.checkNotNull(value, "property value cannot be null");
ParamChecker.notEmpty(guid, "guid cannot be null");
ParamChecker.notEmpty(property, "property cannot be null");
ParamChecker.notEmpty(value, "property value cannot be null");
repository.updateEntity(guid, property, value);
}
private void validateTypeExists(String entityType) throws MetadataException {
Preconditions.checkNotNull(entityType, "entity type cannot be null");
ParamChecker.notEmpty(entityType, "entity type cannot be null");
// verify if the type exists
if (!typeSystem.isRegistered(entityType)) {
......@@ -277,7 +277,7 @@ public class DefaultMetadataService implements MetadataService {
*/
@Override
public List<String> getTraitNames(String guid) throws MetadataException {
Preconditions.checkNotNull(guid, "entity GUID cannot be null");
ParamChecker.notEmpty(guid, "entity GUID cannot be null");
return repository.getTraitNames(guid);
}
......@@ -291,8 +291,8 @@ public class DefaultMetadataService implements MetadataService {
@Override
public void addTrait(String guid,
String traitInstanceDefinition) throws MetadataException {
Preconditions.checkNotNull(guid, "entity GUID cannot be null");
Preconditions.checkNotNull(traitInstanceDefinition, "Trait instance cannot be null");
ParamChecker.notEmpty(guid, "entity GUID cannot be null");
ParamChecker.notEmpty(traitInstanceDefinition, "Trait instance cannot be null");
ITypedStruct traitInstance = deserializeTraitInstance(traitInstanceDefinition);
final String traitName = traitInstance.getTypeName();
......@@ -313,7 +313,7 @@ public class DefaultMetadataService implements MetadataService {
Struct traitInstance = InstanceSerialization.fromJsonStruct(
traitInstanceDefinition, true);
final String entityTypeName = traitInstance.getTypeName();
Preconditions.checkNotNull(entityTypeName, "entity type cannot be null");
ParamChecker.notEmpty(entityTypeName, "entity type cannot be null");
TraitType traitType = typeSystem.getDataType(TraitType.class, entityTypeName);
return traitType.convert(
......@@ -333,8 +333,8 @@ public class DefaultMetadataService implements MetadataService {
@Override
public void deleteTrait(String guid,
String traitNameToBeDeleted) throws MetadataException {
Preconditions.checkNotNull(guid, "entity GUID cannot be null");
Preconditions.checkNotNull(traitNameToBeDeleted, "Trait name cannot be null");
ParamChecker.notEmpty(guid, "entity GUID cannot be null");
ParamChecker.notEmpty(traitNameToBeDeleted, "Trait name cannot be null");
// ensure trait type is already registered with the TS
Preconditions.checkArgument(typeSystem.isRegistered(traitNameToBeDeleted),
......
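Editorial note: this file repeatedly swaps `Preconditions.checkNotNull` for `ParamChecker.notEmpty`, tightening validation from null-only to null-or-empty. The implementation of `ParamChecker` is not shown in this diff; a minimal sketch consistent with the call sites (and with the web layer mapping `IllegalArgumentException` to HTTP 400 below) might look like:

```java
package org.apache.hadoop.metadata;

// Hedged sketch only: the real ParamChecker is not part of this diff. The
// call sites above imply a static notEmpty(value, message) that rejects both
// null and empty strings with an unchecked exception.
public final class ParamChecker {

    private ParamChecker() {}

    public static String notEmpty(String value, String message) {
        if (value == null || value.trim().isEmpty()) {
            throw new IllegalArgumentException(message);
        }
        return value;
    }
}
```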
......@@ -27,7 +27,6 @@ metadata.graph.index.search.backend=elasticsearch
metadata.graph.index.search.directory=./target/data/es
metadata.graph.index.search.elasticsearch.client-only=false
metadata.graph.index.search.elasticsearch.local-mode=true
metadata.graph.index.search.elasticsearch.create.sleep=1000
######### Hive Lineage Configs #########
......
......@@ -22,6 +22,7 @@ import com.google.common.base.Preconditions;
import org.apache.hadoop.metadata.MetadataException;
import org.apache.hadoop.metadata.MetadataServiceClient;
import org.apache.hadoop.metadata.services.MetadataService;
import org.apache.hadoop.metadata.typesystem.types.ValueConversionException;
import org.apache.hadoop.metadata.web.util.Servlets;
import org.codehaus.jettison.json.JSONArray;
import org.codehaus.jettison.json.JSONException;
......@@ -90,11 +91,16 @@ public class EntityResource {
response.put(MetadataServiceClient.DEFINITION, entity);
return Response.created(locationURI).entity(response).build();
} catch (MetadataException | IOException | IllegalArgumentException e) {
} catch(ValueConversionException ve) {
LOG.error("Unable to persist entity instance due to a desrialization error ", ve);
throw new WebApplicationException(
Servlets.getErrorResponse(ve.getCause(), Response.Status.BAD_REQUEST));
} catch (MetadataException | IllegalArgumentException e) {
LOG.error("Unable to persist entity instance", e);
throw new WebApplicationException(
Servlets.getErrorResponse(e, Response.Status.BAD_REQUEST));
} catch (JSONException e) {
} catch (Throwable e) {
LOG.error("Unable to persist entity instance", e);
throw new WebApplicationException(
Servlets.getErrorResponse(e, Response.Status.INTERNAL_SERVER_ERROR));
......@@ -123,7 +129,7 @@ public class EntityResource {
response.put(MetadataServiceClient.DEFINITION, entityDefinition);
status = Response.Status.OK;
} else {
response.put(MetadataServiceClient.ERROR, JSONObject.quote(String.format("An entity with GUID={%s} does not exist", guid)));
response.put(MetadataServiceClient.ERROR, Servlets.escapeJsonString(String.format("An entity with GUID={%s} does not exist", guid)));
}
return Response.status(status).entity(response).build();
......@@ -132,7 +138,7 @@ public class EntityResource {
LOG.error("An entity with GUID={} does not exist", guid, e);
throw new WebApplicationException(
Servlets.getErrorResponse(e, Response.Status.NOT_FOUND));
} catch (JSONException e) {
} catch (Throwable e) {
LOG.error("Unable to get instance definition for GUID {}", guid, e);
throw new WebApplicationException(
Servlets.getErrorResponse(e, Response.Status.INTERNAL_SERVER_ERROR));
......@@ -155,7 +161,7 @@ public class EntityResource {
JSONObject response = new JSONObject();
response.put(MetadataServiceClient.REQUEST_ID, Servlets.getRequestId());
response.put("type", entityType);
response.put(MetadataServiceClient.TYPENAME, entityType);
response.put(MetadataServiceClient.RESULTS, new JSONArray(entityList));
response.put(MetadataServiceClient.COUNT, entityList.size());
......@@ -168,7 +174,7 @@ public class EntityResource {
LOG.error("Unable to get entity list for type {}", entityType, e);
throw new WebApplicationException(
Servlets.getErrorResponse(e, Response.Status.BAD_REQUEST));
} catch (JSONException e) {
} catch (Throwable e) {
LOG.error("Unable to get entity list for type {}", entityType, e);
throw new WebApplicationException(
Servlets.getErrorResponse(e, Response.Status.INTERNAL_SERVER_ERROR));
......@@ -192,13 +198,13 @@ public class EntityResource {
metadataService.updateEntity(guid, property, value);
JSONObject response = new JSONObject();
response.put("requestId", Thread.currentThread().getName());
response.put(MetadataServiceClient.REQUEST_ID, Thread.currentThread().getName());
return Response.ok(response).build();
} catch (MetadataException e) {
LOG.error("Unable to add property {} to entity id {}", property, guid, e);
throw new WebApplicationException(
Servlets.getErrorResponse(e, Response.Status.BAD_REQUEST));
} catch (JSONException e) {
} catch (Throwable e) {
LOG.error("Unable to add property {} to entity id {}", property, guid, e);
throw new WebApplicationException(
Servlets.getErrorResponse(e, Response.Status.INTERNAL_SERVER_ERROR));
......@@ -231,7 +237,7 @@ public class EntityResource {
LOG.error("Unable to get trait names for entity {}", guid, e);
throw new WebApplicationException(
Servlets.getErrorResponse(e, Response.Status.BAD_REQUEST));
} catch (JSONException e) {
} catch (Throwable e) {
LOG.error("Unable to get trait names for entity {}", guid, e);
throw new WebApplicationException(
Servlets.getErrorResponse(e, Response.Status.INTERNAL_SERVER_ERROR));
......@@ -267,7 +273,7 @@ public class EntityResource {
LOG.error("Unable to add trait for entity={}", guid, e);
throw new WebApplicationException(
Servlets.getErrorResponse(e, Response.Status.BAD_REQUEST));
} catch (JSONException e) {
} catch (Throwable e) {
LOG.error("Unable to add trait for entity={}", guid, e);
throw new WebApplicationException(
Servlets.getErrorResponse(e, Response.Status.INTERNAL_SERVER_ERROR));
......@@ -301,7 +307,7 @@ public class EntityResource {
LOG.error("Unable to delete trait name={} for entity={}", traitName, guid, e);
throw new WebApplicationException(
Servlets.getErrorResponse(e, Response.Status.BAD_REQUEST));
} catch (JSONException e) {
} catch (Throwable e) {
LOG.error("Unable to delete trait name={} for entity={}", traitName, guid, e);
throw new WebApplicationException(
Servlets.getErrorResponse(e, Response.Status.INTERNAL_SERVER_ERROR));
......
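Editorial note: across `EntityResource` above and the resources that follow, the recurring change replaces `catch (JSONException e)` with typed catches plus a final `catch (Throwable e)`, so expected client-side failures map to 400 and anything unexpected maps to 500. A hypothetical helper (not part of this commit) illustrating that convention, reusing the commit's own `Servlets.getErrorResponse`:

```java
import java.util.concurrent.Callable;

import javax.ws.rs.WebApplicationException;
import javax.ws.rs.core.Response;

import org.apache.hadoop.metadata.MetadataException;
import org.apache.hadoop.metadata.web.util.Servlets;

// Hypothetical sketch: known client-side failures -> BAD_REQUEST, any other
// Throwable -> INTERNAL_SERVER_ERROR, matching the catch blocks in this diff.
public class ErrorMappingSketch {

    static Response run(Callable<Response> action) {
        try {
            return action.call();
        } catch (MetadataException | IllegalArgumentException e) {
            throw new WebApplicationException(
                    Servlets.getErrorResponse(e, Response.Status.BAD_REQUEST));
        } catch (Throwable t) {
            throw new WebApplicationException(
                    Servlets.getErrorResponse(t, Response.Status.INTERNAL_SERVER_ERROR));
        }
    }
}
```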
......@@ -18,12 +18,11 @@
package org.apache.hadoop.metadata.web.resources;
import com.google.common.base.Preconditions;
import org.apache.hadoop.metadata.MetadataServiceClient;
import org.apache.hadoop.metadata.ParamChecker;
import org.apache.hadoop.metadata.discovery.DiscoveryException;
import org.apache.hadoop.metadata.discovery.LineageService;
import org.apache.hadoop.metadata.web.util.Servlets;
import org.codehaus.jettison.json.JSONException;
import org.codehaus.jettison.json.JSONObject;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
......@@ -69,10 +68,11 @@ public class HiveLineageResource {
@Produces(MediaType.APPLICATION_JSON)
public Response inputs(@Context HttpServletRequest request,
@PathParam("tableName") String tableName) {
Preconditions.checkNotNull(tableName, "table name cannot be null");
LOG.info("Fetching lineage inputs for tableName={}", tableName);
try {
ParamChecker.notEmpty(tableName, "table name cannot be null");
final String jsonResult = lineageService.getInputs(tableName);
JSONObject response = new JSONObject();
......@@ -81,11 +81,11 @@ public class HiveLineageResource {
response.put(MetadataServiceClient.RESULTS, new JSONObject(jsonResult));
return Response.ok(response).build();
} catch (DiscoveryException e) {
} catch (DiscoveryException | IllegalArgumentException e) {
LOG.error("Unable to get lineage inputs for table {}", tableName, e);
throw new WebApplicationException(
Servlets.getErrorResponse(e, Response.Status.BAD_REQUEST));
} catch (JSONException e) {
} catch (Throwable e) {
LOG.error("Unable to get lineage inputs for table {}", tableName, e);
throw new WebApplicationException(
Servlets.getErrorResponse(e, Response.Status.INTERNAL_SERVER_ERROR));
......@@ -103,10 +103,10 @@ public class HiveLineageResource {
@Produces(MediaType.APPLICATION_JSON)
public Response inputsGraph(@Context HttpServletRequest request,
@PathParam("tableName") String tableName) {
Preconditions.checkNotNull(tableName, "table name cannot be null");
LOG.info("Fetching lineage inputs graph for tableName={}", tableName);
try {
ParamChecker.notEmpty(tableName, "table name cannot be null");
final String jsonResult = lineageService.getInputsGraph(tableName);
JSONObject response = new JSONObject();
......@@ -115,11 +115,11 @@ public class HiveLineageResource {
response.put(MetadataServiceClient.RESULTS, new JSONObject(jsonResult));
return Response.ok(response).build();
} catch (DiscoveryException e) {
} catch (DiscoveryException | IllegalArgumentException e) {
LOG.error("Unable to get lineage inputs graph for table {}", tableName, e);
throw new WebApplicationException(
Servlets.getErrorResponse(e, Response.Status.BAD_REQUEST));
} catch (JSONException e) {
} catch (Throwable e) {
LOG.error("Unable to get lineage inputs graph for table {}", tableName, e);
throw new WebApplicationException(
Servlets.getErrorResponse(e, Response.Status.INTERNAL_SERVER_ERROR));
......@@ -138,10 +138,10 @@ public class HiveLineageResource {
public Response outputs(@Context HttpServletRequest request,
@PathParam("tableName") String tableName) {
Preconditions.checkNotNull(tableName, "table name cannot be null");
LOG.info("Fetching lineage outputs for tableName={}", tableName);
try {
ParamChecker.notEmpty(tableName, "table name cannot be null");
final String jsonResult = lineageService.getOutputs(tableName);
JSONObject response = new JSONObject();
......@@ -150,11 +150,11 @@ public class HiveLineageResource {
response.put(MetadataServiceClient.RESULTS, new JSONObject(jsonResult));
return Response.ok(response).build();
} catch (DiscoveryException e) {
} catch (DiscoveryException | IllegalArgumentException e) {
LOG.error("Unable to get lineage outputs for table {}", tableName, e);
throw new WebApplicationException(
Servlets.getErrorResponse(e, Response.Status.BAD_REQUEST));
} catch (JSONException e) {
} catch (Throwable e) {
LOG.error("Unable to get lineage outputs for table {}", tableName, e);
throw new WebApplicationException(
Servlets.getErrorResponse(e, Response.Status.INTERNAL_SERVER_ERROR));
......@@ -172,10 +172,11 @@ public class HiveLineageResource {
@Produces(MediaType.APPLICATION_JSON)
public Response outputsGraph(@Context HttpServletRequest request,
@PathParam("tableName") String tableName) {
Preconditions.checkNotNull(tableName, "table name cannot be null");
LOG.info("Fetching lineage outputs graph for tableName={}", tableName);
try {
ParamChecker.notEmpty(tableName, "table name cannot be null");
final String jsonResult = lineageService.getOutputsGraph(tableName);
JSONObject response = new JSONObject();
......@@ -184,11 +185,11 @@ public class HiveLineageResource {
response.put(MetadataServiceClient.RESULTS, new JSONObject(jsonResult));
return Response.ok(response).build();
} catch (DiscoveryException e) {
} catch (DiscoveryException | IllegalArgumentException e) {
LOG.error("Unable to get lineage outputs graph for table {}", tableName, e);
throw new WebApplicationException(
Servlets.getErrorResponse(e, Response.Status.BAD_REQUEST));
} catch (JSONException e) {
} catch (Throwable e) {
LOG.error("Unable to get lineage outputs graph for table {}", tableName, e);
throw new WebApplicationException(
Servlets.getErrorResponse(e, Response.Status.INTERNAL_SERVER_ERROR));
......@@ -207,10 +208,11 @@ public class HiveLineageResource {
public Response schema(@Context HttpServletRequest request,
@PathParam("tableName") String tableName) {
Preconditions.checkNotNull(tableName, "table name cannot be null");
LOG.info("Fetching schema for tableName={}", tableName);
try {
ParamChecker.notEmpty(tableName, "table name cannot be null");
final String jsonResult = lineageService.getSchema(tableName);
JSONObject response = new JSONObject();
......@@ -219,11 +221,11 @@ public class HiveLineageResource {
response.put(MetadataServiceClient.RESULTS, new JSONObject(jsonResult));
return Response.ok(response).build();
} catch (DiscoveryException e) {
} catch (DiscoveryException | IllegalArgumentException e) {
LOG.error("Unable to get schema for table {}", tableName, e);
throw new WebApplicationException(
Servlets.getErrorResponse(e, Response.Status.BAD_REQUEST));
} catch (JSONException e) {
} catch (Throwable e) {
LOG.error("Unable to get schema for table {}", tableName, e);
throw new WebApplicationException(
Servlets.getErrorResponse(e, Response.Status.INTERNAL_SERVER_ERROR));
......
......@@ -20,6 +20,7 @@ package org.apache.hadoop.metadata.web.resources;
import com.google.common.base.Preconditions;
import org.apache.hadoop.metadata.MetadataServiceClient;
import org.apache.hadoop.metadata.ParamChecker;
import org.apache.hadoop.metadata.discovery.DiscoveryException;
import org.apache.hadoop.metadata.discovery.DiscoveryService;
import org.apache.hadoop.metadata.web.util.Servlets;
......@@ -76,20 +77,22 @@ public class MetadataDiscoveryResource {
@Path("search")
@Produces(MediaType.APPLICATION_JSON)
public Response search(@QueryParam("query") String query) {
Preconditions.checkNotNull(query, "query cannot be null");
JSONObject response;
try { // fall back to dsl
ParamChecker.notEmpty(query, "query cannot be null");
if (query.startsWith("g.")) { // raw gremlin query
return searchUsingGremlinQuery(query);
}
JSONObject response;
try { // fall back to dsl
final String jsonResultStr = discoveryService.searchByDSL(query);
response = new DSLJSONResponseBuilder().results(jsonResultStr)
.query(query)
.build();
} catch (IllegalArgumentException e) {
LOG.error("Unable to get entity list for empty query", e);
throw new WebApplicationException(Servlets.getErrorResponse(e, Response.Status.BAD_REQUEST));
} catch (Throwable throwable) {
LOG.error("Unable to get entity list for query {} using dsl", query, throwable);
......@@ -99,10 +102,10 @@ public class MetadataDiscoveryResource {
.query(query)
.build();
} catch (DiscoveryException e) {
} catch (DiscoveryException | IllegalArgumentException e) {
LOG.error("Unable to get entity list for query {}", query, e);
throw new WebApplicationException(Servlets.getErrorResponse(e, Response.Status.BAD_REQUEST));
} catch (JSONException e) {
} catch(Throwable e) {
LOG.error("Unable to get entity list for query {}", query, e);
throw new WebApplicationException(
Servlets.getErrorResponse(e, Response.Status.INTERNAL_SERVER_ERROR));
......@@ -124,9 +127,8 @@ public class MetadataDiscoveryResource {
@Path("search/dsl")
@Produces(MediaType.APPLICATION_JSON)
public Response searchUsingQueryDSL(@QueryParam("query") String dslQuery) {
Preconditions.checkNotNull(dslQuery, "dslQuery cannot be null");
try {
ParamChecker.notEmpty(dslQuery, "dslQuery cannot be null");
final String jsonResultStr = discoveryService.searchByDSL(dslQuery);
JSONObject response = new DSLJSONResponseBuilder().results(jsonResultStr)
......@@ -135,11 +137,11 @@ public class MetadataDiscoveryResource {
return Response.ok(response)
.build();
} catch (DiscoveryException e) {
} catch (DiscoveryException | IllegalArgumentException e) {
LOG.error("Unable to get entity list for dslQuery {}", dslQuery, e);
throw new WebApplicationException(
Servlets.getErrorResponse(e, Response.Status.BAD_REQUEST));
} catch (JSONException e) {
} catch(Throwable e) {
LOG.error("Unable to get entity list for dslQuery {}", dslQuery, e);
throw new WebApplicationException(
Servlets.getErrorResponse(e, Response.Status.INTERNAL_SERVER_ERROR));
......@@ -156,9 +158,8 @@ public class MetadataDiscoveryResource {
@Path("search/gremlin")
@Produces(MediaType.APPLICATION_JSON)
public Response searchUsingGremlinQuery(@QueryParam("query") String gremlinQuery) {
Preconditions.checkNotNull(gremlinQuery, "gremlinQuery cannot be null");
try {
ParamChecker.notEmpty(gremlinQuery, "gremlinQuery cannot be null or empty");
final List<Map<String, String>> results = discoveryService
.searchByGremlin(gremlinQuery);
......@@ -176,11 +177,11 @@ public class MetadataDiscoveryResource {
return Response.ok(response)
.build();
} catch (DiscoveryException e) {
} catch (DiscoveryException | IllegalArgumentException e) {
LOG.error("Unable to get entity list for gremlinQuery {}", gremlinQuery, e);
throw new WebApplicationException(
Servlets.getErrorResponse(e, Response.Status.BAD_REQUEST));
} catch (JSONException e) {
} catch(Throwable e) {
LOG.error("Unable to get entity list for gremlinQuery {}", gremlinQuery, e);
throw new WebApplicationException(
Servlets.getErrorResponse(e, Response.Status.INTERNAL_SERVER_ERROR));
......@@ -197,9 +198,8 @@ public class MetadataDiscoveryResource {
@Path("search/fulltext")
@Produces(MediaType.APPLICATION_JSON)
public Response searchUsingFullText(@QueryParam("query") String query) {
Preconditions.checkNotNull(query, "query cannot be null");
try {
ParamChecker.notEmpty(query, "query cannot be null or empty");
final String jsonResultStr = discoveryService.searchByFullText(query);
JSONArray rowsJsonArr = new JSONArray(jsonResultStr);
......@@ -208,10 +208,10 @@ public class MetadataDiscoveryResource {
.build();
return Response.ok(response)
.build();
} catch (DiscoveryException e) {
} catch (DiscoveryException | IllegalArgumentException e) {
LOG.error("Unable to get entity list for query {}", query, e);
throw new WebApplicationException(Servlets.getErrorResponse(e, Response.Status.BAD_REQUEST));
} catch (JSONException e) {
} catch(Throwable e) {
LOG.error("Unable to get entity list for query {}", query, e);
throw new WebApplicationException(Servlets.getErrorResponse(e, Response.Status.INTERNAL_SERVER_ERROR));
}
......
......@@ -83,7 +83,7 @@ public class TypesResource {
public Response submit(@Context HttpServletRequest request) {
try {
final String typeDefinition = Servlets.getRequestPayload(request);
LOG.debug("creating type with definition {} ", typeDefinition);
LOG.debug("Creating type with definition {} ", typeDefinition);
JSONObject typesJson = metadataService.createType(typeDefinition);
final JSONArray typesJsonArray = typesJson.getJSONArray(MetadataServiceClient.TYPES);
......@@ -101,10 +101,14 @@ public class TypesResource {
response.put(MetadataServiceClient.REQUEST_ID, Servlets.getRequestId());
response.put(MetadataServiceClient.TYPES, typesAddedList);
return Response.status(ClientResponse.Status.CREATED).entity(response).build();
} catch (Exception e) {
} catch (MetadataException | IllegalArgumentException e) {
LOG.error("Unable to persist types", e);
throw new WebApplicationException(
Servlets.getErrorResponse(e, Response.Status.BAD_REQUEST));
} catch (Throwable e) {
LOG.error("Unable to persist types", e);
throw new WebApplicationException(
Servlets.getErrorResponse(e, Response.Status.INTERNAL_SERVER_ERROR));
}
}
......@@ -122,7 +126,7 @@ public class TypesResource {
final String typeDefinition = metadataService.getTypeDefinition(typeName);
JSONObject response = new JSONObject();
response.put("typeName", typeName);
response.put(MetadataServiceClient.TYPENAME, typeName);
response.put(MetadataServiceClient.DEFINITION, typeDefinition);
response.put(MetadataServiceClient.REQUEST_ID, Servlets.getRequestId());
......@@ -131,10 +135,14 @@ public class TypesResource {
LOG.error("Unable to get type definition for type {}", typeName, e);
throw new WebApplicationException(
Servlets.getErrorResponse(e, Response.Status.NOT_FOUND));
} catch (JSONException e) {
} catch (JSONException | IllegalArgumentException e) {
LOG.error("Unable to get type definition for type {}", typeName, e);
throw new WebApplicationException(
Servlets.getErrorResponse(e, Response.Status.BAD_REQUEST));
} catch (Throwable e) {
LOG.error("Unable to get type definition for type {}", typeName, e);
throw new WebApplicationException(
Servlets.getErrorResponse(e, Response.Status.INTERNAL_SERVER_ERROR));
}
}
......@@ -165,14 +173,14 @@ public class TypesResource {
response.put(MetadataServiceClient.REQUEST_ID, Servlets.getRequestId());
return Response.ok(response).build();
} catch (IllegalArgumentException ie) {
} catch (IllegalArgumentException | MetadataException ie) {
LOG.error("Unsupported typeName while retrieving type list {}", type);
throw new WebApplicationException(
Servlets.getErrorResponse("Unsupported type " + type, Response.Status.BAD_REQUEST));
} catch (Exception e) {
} catch (Throwable e) {
LOG.error("Unable to get types list", e);
throw new WebApplicationException(
Servlets.getErrorResponse(e, Response.Status.BAD_REQUEST));
Servlets.getErrorResponse(e, Response.Status.INTERNAL_SERVER_ERROR));
}
}
}
......@@ -6,9 +6,9 @@
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* <p/>
* http://www.apache.org/licenses/LICENSE-2.0
*
* <p/>
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
......@@ -23,6 +23,7 @@ import com.sun.jersey.api.client.ClientResponse;
import com.sun.jersey.api.client.WebResource;
import org.apache.commons.lang.RandomStringUtils;
import org.apache.hadoop.metadata.MetadataServiceClient;
import org.apache.hadoop.metadata.MetadataServiceException;
import org.apache.hadoop.metadata.typesystem.Referenceable;
import org.apache.hadoop.metadata.typesystem.Struct;
import org.apache.hadoop.metadata.typesystem.TypesDef;
......@@ -31,16 +32,7 @@ import org.apache.hadoop.metadata.typesystem.json.InstanceSerialization$;
import org.apache.hadoop.metadata.typesystem.json.TypesSerialization;
import org.apache.hadoop.metadata.typesystem.json.TypesSerialization$;
import org.apache.hadoop.metadata.typesystem.persistence.Id;
import org.apache.hadoop.metadata.typesystem.types.AttributeDefinition;
import org.apache.hadoop.metadata.typesystem.types.ClassType;
import org.apache.hadoop.metadata.typesystem.types.DataTypes;
import org.apache.hadoop.metadata.typesystem.types.EnumTypeDefinition;
import org.apache.hadoop.metadata.typesystem.types.EnumValue;
import org.apache.hadoop.metadata.typesystem.types.HierarchicalTypeDefinition;
import org.apache.hadoop.metadata.typesystem.types.Multiplicity;
import org.apache.hadoop.metadata.typesystem.types.StructTypeDefinition;
import org.apache.hadoop.metadata.typesystem.types.TraitType;
import org.apache.hadoop.metadata.typesystem.types.TypeUtils;
import org.apache.hadoop.metadata.typesystem.types.*;
import org.apache.hadoop.metadata.typesystem.types.utils.TypesUtil;
import org.codehaus.jettison.json.JSONArray;
import org.codehaus.jettison.json.JSONObject;
......@@ -93,7 +85,46 @@ public class EntityJerseyResourceIT extends BaseResourceIT {
}
}
@Test (dependsOnMethods = "testSubmitEntity")
@Test
public void testSubmitEntityWithBadDateFormat() throws Exception {
try {
Referenceable databaseInstance = new Referenceable(DATABASE_TYPE);
databaseInstance.set("name", DATABASE_NAME);
databaseInstance.set("description", "foo database");
Referenceable tableInstance = new Referenceable(TABLE_TYPE,
"classification", "pii", "phi", "pci", "sox", "sec", "finance");
tableInstance.set("name", TABLE_NAME);
tableInstance.set("description", "bar table");
tableInstance.set("date", "2014-07-11");
tableInstance.set("type", "managed");
tableInstance.set("level", 2);
tableInstance.set("tableType", 1); // enum
tableInstance.set("database", databaseInstance);
tableInstance.set("compressed", false);
Struct traitInstance = (Struct) tableInstance.getTrait("classification");
traitInstance.set("tag", "foundation_etl");
Struct serde1Instance = new Struct("serdeType");
serde1Instance.set("name", "serde1");
serde1Instance.set("serde", "serde1");
tableInstance.set("serde1", serde1Instance);
Struct serde2Instance = new Struct("serdeType");
serde2Instance.set("name", "serde2");
serde2Instance.set("serde", "serde2");
tableInstance.set("serde2", serde2Instance);
tableId = createInstance(tableInstance);
Assert.fail("Was expecting an exception here ");
} catch (MetadataServiceException e) {
Assert.assertTrue(e.getMessage().contains("\"error\":\"Cannot convert value '2014-07-11' to datatype date\""));
}
}
@Test(dependsOnMethods = "testSubmitEntity")
public void testAddProperty() throws Exception {
final String guid = tableId._getId();
//add property
......@@ -120,7 +151,7 @@ public class EntityJerseyResourceIT extends BaseResourceIT {
tableInstance.set("level", 4);
}
@Test (dependsOnMethods = "testSubmitEntity")
@Test(dependsOnMethods = "testSubmitEntity")
public void testAddReferenceProperty() throws Exception {
//Create new db instance
Referenceable databaseInstance = new Referenceable(DATABASE_TYPE);
......@@ -242,6 +273,7 @@ public class EntityJerseyResourceIT extends BaseResourceIT {
Assert.assertNotNull(response.get(MetadataServiceClient.STACKTRACE));
}
@Test
public void testGetEntityListForNoInstances() throws Exception {
addNewType();
......@@ -275,7 +307,7 @@ public class EntityJerseyResourceIT extends BaseResourceIT {
createType(typesAsJSON);
}
@Test (dependsOnMethods = "testSubmitEntity")
@Test(dependsOnMethods = "testSubmitEntity")
public void testGetTraitNames() throws Exception {
final String guid = tableId._getId();
ClientResponse clientResponse = service
......@@ -298,7 +330,7 @@ public class EntityJerseyResourceIT extends BaseResourceIT {
Assert.assertEquals(list.length(), 7);
}
@Test (dependsOnMethods = "testGetTraitNames")
@Test(dependsOnMethods = "testGetTraitNames")
public void testAddTrait() throws Exception {
final String traitName = "PII_Trait";
HierarchicalTypeDefinition<TraitType> piiTrait =
......@@ -352,7 +384,7 @@ public class EntityJerseyResourceIT extends BaseResourceIT {
Assert.assertEquals(clientResponse.getStatus(), Response.Status.BAD_REQUEST.getStatusCode());
}
@Test (dependsOnMethods = "testAddTrait")
@Test(dependsOnMethods = "testAddTrait")
public void testDeleteTrait() throws Exception {
final String traitName = "PII_Trait";
final String guid = tableId._getId();
......@@ -454,6 +486,7 @@ public class EntityJerseyResourceIT extends BaseResourceIT {
TypesUtil.createUniqueRequiredAttrDef("name", DataTypes.STRING_TYPE),
TypesUtil.createOptionalAttrDef("description", DataTypes.STRING_TYPE),
TypesUtil.createRequiredAttrDef("type", DataTypes.STRING_TYPE),
TypesUtil.createRequiredAttrDef("date", DataTypes.DATE_TYPE),
TypesUtil.createRequiredAttrDef("level", DataTypes.INT_TYPE),
new AttributeDefinition("tableType", "tableType",
Multiplicity.REQUIRED, false, null),
......@@ -501,6 +534,7 @@ public class EntityJerseyResourceIT extends BaseResourceIT {
"classification", "pii", "phi", "pci", "sox", "sec", "finance");
tableInstance.set("name", TABLE_NAME);
tableInstance.set("description", "bar table");
tableInstance.set("date", "2014-07-11T08:00:00.000Z");
tableInstance.set("type", "managed");
tableInstance.set("level", 2);
tableInstance.set("tableType", 1); // enum
......