Commit 25a68075 by Shwetha G S

Merge pull request #115 from sumashivaprasad/BUG_37105

Entity creation should fail with a meaningful error
parents 4e1e84b1 deaf3164
@@ -51,7 +51,7 @@ public class MetadataServiceClient {
     public static final String NAME = "name";
     public static final String GUID = "GUID";
     public static final String TYPENAME = "typeName";
+    public static final String TYPE = "type";
     public static final String DEFINITION = "definition";
     public static final String ERROR = "error";
...
@@ -968,7 +968,7 @@ public class GraphBackedMetadataRepository implements MetadataRepository {
     public void mapVertexToAttribute(Vertex instanceVertex, ITypedInstance typedInstance,
                                      AttributeInfo attributeInfo) throws MetadataException {
-        LOG.debug("mapping attributeInfo {}", attributeInfo.name);
+        LOG.debug("Mapping attributeInfo {}", attributeInfo.name);
         final IDataType dataType = attributeInfo.dataType();
         final String vertexPropertyName = getQualifiedName(typedInstance, attributeInfo);
...
@@ -22,6 +22,7 @@ import com.google.common.base.Preconditions;
 import org.apache.hadoop.metadata.MetadataException;
 import org.apache.hadoop.metadata.MetadataServiceClient;
 import org.apache.hadoop.metadata.services.MetadataService;
+import org.apache.hadoop.metadata.typesystem.types.ValueConversionException;
 import org.apache.hadoop.metadata.web.util.Servlets;
 import org.codehaus.jettison.json.JSONArray;
 import org.codehaus.jettison.json.JSONException;
@@ -90,6 +91,11 @@ public class EntityResource {
             response.put(MetadataServiceClient.DEFINITION, entity);
             return Response.created(locationURI).entity(response).build();
+        } catch (ValueConversionException ve) {
+            LOG.error("Unable to persist entity instance due to a deserialization error", ve);
+            throw new WebApplicationException(
+                    Servlets.getErrorResponse(ve.getCause(), Response.Status.BAD_REQUEST));
         } catch (MetadataException | IllegalArgumentException e) {
             LOG.error("Unable to persist entity instance", e);
             throw new WebApplicationException(
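Note: the new catch clause runs before the generic MetadataException handler and passes ve.getCause() to Servlets.getErrorResponse with Status.BAD_REQUEST, so the 400 response carries the underlying conversion message rather than a generic failure. From the client's point of view the behaviour is roughly the following (a sketch based on the integration test added further down in this diff; createInstance and MetadataServiceException are the helpers that test uses):

    try {
        // payload whose "date" attribute is the short form "2014-07-11"
        tableId = createInstance(tableInstance);
        Assert.fail("Expected entity creation to be rejected");
    } catch (MetadataServiceException e) {
        // the error body now names the offending value and the target datatype
        Assert.assertTrue(e.getMessage().contains(
                "Cannot convert value '2014-07-11' to datatype date"));
    }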
@@ -123,7 +129,7 @@ public class EntityResource {
             response.put(MetadataServiceClient.DEFINITION, entityDefinition);
             status = Response.Status.OK;
         } else {
-            response.put(MetadataServiceClient.ERROR, JSONObject.quote(String.format("An entity with GUID={%s} does not exist", guid)));
+            response.put(MetadataServiceClient.ERROR, Servlets.escapeJsonString(String.format("An entity with GUID={%s} does not exist", guid)));
         }
         return Response.status(status).entity(response).build();
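JSONObject.quote escapes the message but also wraps it in double quotes, so once the enclosing JSONObject is serialized the value ends up quoted twice; Servlets.escapeJsonString presumably escapes without adding the surrounding quotes. A minimal sketch of such a helper, assuming commons-lang is available (the actual Servlets.escapeJsonString implementation may differ):

    import org.apache.commons.lang.StringEscapeUtils;

    public static String escapeJsonString(String inputString) {
        // escape quotes, backslashes and control characters, but do not add
        // the surrounding double quotes that JSONObject.quote would add
        return StringEscapeUtils.escapeJava(inputString);
    }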
@@ -155,7 +161,7 @@ public class EntityResource {
         JSONObject response = new JSONObject();
         response.put(MetadataServiceClient.REQUEST_ID, Servlets.getRequestId());
-        response.put("type", entityType);
+        response.put(MetadataServiceClient.TYPENAME, entityType);
         response.put(MetadataServiceClient.RESULTS, new JSONArray(entityList));
         response.put(MetadataServiceClient.COUNT, entityList.size());
@@ -192,7 +198,7 @@ public class EntityResource {
             metadataService.updateEntity(guid, property, value);
             JSONObject response = new JSONObject();
-            response.put("requestId", Thread.currentThread().getName());
+            response.put(MetadataServiceClient.REQUEST_ID, Thread.currentThread().getName());
             return Response.ok(response).build();
         } catch (MetadataException e) {
             LOG.error("Unable to add property {} to entity id {}", property, guid, e);
...
@@ -83,7 +83,7 @@ public class TypesResource {
     public Response submit(@Context HttpServletRequest request) {
         try {
             final String typeDefinition = Servlets.getRequestPayload(request);
-            LOG.debug("creating type with definition {} ", typeDefinition);
+            LOG.debug("Creating type with definition {} ", typeDefinition);
             JSONObject typesJson = metadataService.createType(typeDefinition);
             final JSONArray typesJsonArray = typesJson.getJSONArray(MetadataServiceClient.TYPES);
...
@@ -6,9 +6,9 @@
  * to you under the Apache License, Version 2.0 (the
  * "License"); you may not use this file except in compliance
  * with the License. You may obtain a copy of the License at
- *
+ * <p/>
  * http://www.apache.org/licenses/LICENSE-2.0
- *
+ * <p/>
  * Unless required by applicable law or agreed to in writing, software
  * distributed under the License is distributed on an "AS IS" BASIS,
  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
@@ -23,6 +23,7 @@ import com.sun.jersey.api.client.ClientResponse;
 import com.sun.jersey.api.client.WebResource;
 import org.apache.commons.lang.RandomStringUtils;
 import org.apache.hadoop.metadata.MetadataServiceClient;
+import org.apache.hadoop.metadata.MetadataServiceException;
 import org.apache.hadoop.metadata.typesystem.Referenceable;
 import org.apache.hadoop.metadata.typesystem.Struct;
 import org.apache.hadoop.metadata.typesystem.TypesDef;
@@ -31,16 +32,7 @@ import org.apache.hadoop.metadata.typesystem.json.InstanceSerialization$;
 import org.apache.hadoop.metadata.typesystem.json.TypesSerialization;
 import org.apache.hadoop.metadata.typesystem.json.TypesSerialization$;
 import org.apache.hadoop.metadata.typesystem.persistence.Id;
-import org.apache.hadoop.metadata.typesystem.types.AttributeDefinition;
-import org.apache.hadoop.metadata.typesystem.types.ClassType;
-import org.apache.hadoop.metadata.typesystem.types.DataTypes;
-import org.apache.hadoop.metadata.typesystem.types.EnumTypeDefinition;
-import org.apache.hadoop.metadata.typesystem.types.EnumValue;
-import org.apache.hadoop.metadata.typesystem.types.HierarchicalTypeDefinition;
-import org.apache.hadoop.metadata.typesystem.types.Multiplicity;
-import org.apache.hadoop.metadata.typesystem.types.StructTypeDefinition;
-import org.apache.hadoop.metadata.typesystem.types.TraitType;
-import org.apache.hadoop.metadata.typesystem.types.TypeUtils;
+import org.apache.hadoop.metadata.typesystem.types.*;
 import org.apache.hadoop.metadata.typesystem.types.utils.TypesUtil;
 import org.codehaus.jettison.json.JSONArray;
 import org.codehaus.jettison.json.JSONObject;
@@ -93,7 +85,46 @@ public class EntityJerseyResourceIT extends BaseResourceIT {
         }
     }

-    @Test (dependsOnMethods = "testSubmitEntity")
+    @Test
+    public void testSubmitEntityWithBadDateFormat() throws Exception {
+        try {
+            Referenceable databaseInstance = new Referenceable(DATABASE_TYPE);
+            databaseInstance.set("name", DATABASE_NAME);
+            databaseInstance.set("description", "foo database");
+
+            Referenceable tableInstance = new Referenceable(TABLE_TYPE,
+                    "classification", "pii", "phi", "pci", "sox", "sec", "finance");
+            tableInstance.set("name", TABLE_NAME);
+            tableInstance.set("description", "bar table");
+            tableInstance.set("date", "2014-07-11");
+            tableInstance.set("type", "managed");
+            tableInstance.set("level", 2);
+            tableInstance.set("tableType", 1); // enum
+            tableInstance.set("database", databaseInstance);
+            tableInstance.set("compressed", false);
+
+            Struct traitInstance = (Struct) tableInstance.getTrait("classification");
+            traitInstance.set("tag", "foundation_etl");
+
+            Struct serde1Instance = new Struct("serdeType");
+            serde1Instance.set("name", "serde1");
+            serde1Instance.set("serde", "serde1");
+            tableInstance.set("serde1", serde1Instance);
+
+            Struct serde2Instance = new Struct("serdeType");
+            serde2Instance.set("name", "serde2");
+            serde2Instance.set("serde", "serde2");
+            tableInstance.set("serde2", serde2Instance);
+
+            tableId = createInstance(tableInstance);
+            Assert.fail("Was expecting an exception here ");
+        } catch (MetadataServiceException e) {
+            Assert.assertTrue(e.getMessage().contains("\"error\":\"Cannot convert value '2014-07-11' to datatype date\""));
+        }
+    }
+
+    @Test(dependsOnMethods = "testSubmitEntity")
     public void testAddProperty() throws Exception {
         final String guid = tableId._getId();
         //add property
@@ -120,7 +151,7 @@ public class EntityJerseyResourceIT extends BaseResourceIT {
         tableInstance.set("level", 4);
     }

-    @Test (dependsOnMethods = "testSubmitEntity")
+    @Test(dependsOnMethods = "testSubmitEntity")
     public void testAddReferenceProperty() throws Exception {
         //Create new db instance
         Referenceable databaseInstance = new Referenceable(DATABASE_TYPE);
@@ -242,6 +273,7 @@ public class EntityJerseyResourceIT extends BaseResourceIT {
         Assert.assertNotNull(response.get(MetadataServiceClient.STACKTRACE));
     }

     @Test
     public void testGetEntityListForNoInstances() throws Exception {
         addNewType();
@@ -275,7 +307,7 @@ public class EntityJerseyResourceIT extends BaseResourceIT {
         createType(typesAsJSON);
     }

-    @Test (dependsOnMethods = "testSubmitEntity")
+    @Test(dependsOnMethods = "testSubmitEntity")
     public void testGetTraitNames() throws Exception {
         final String guid = tableId._getId();
         ClientResponse clientResponse = service
@@ -298,7 +330,7 @@ public class EntityJerseyResourceIT extends BaseResourceIT {
         Assert.assertEquals(list.length(), 7);
     }

-    @Test (dependsOnMethods = "testGetTraitNames")
+    @Test(dependsOnMethods = "testGetTraitNames")
     public void testAddTrait() throws Exception {
         final String traitName = "PII_Trait";
         HierarchicalTypeDefinition<TraitType> piiTrait =
@@ -352,7 +384,7 @@ public class EntityJerseyResourceIT extends BaseResourceIT {
         Assert.assertEquals(clientResponse.getStatus(), Response.Status.BAD_REQUEST.getStatusCode());
     }

-    @Test (dependsOnMethods = "testAddTrait")
+    @Test(dependsOnMethods = "testAddTrait")
     public void testDeleteTrait() throws Exception {
         final String traitName = "PII_Trait";
         final String guid = tableId._getId();
@@ -454,6 +486,7 @@ public class EntityJerseyResourceIT extends BaseResourceIT {
                 TypesUtil.createUniqueRequiredAttrDef("name", DataTypes.STRING_TYPE),
                 TypesUtil.createOptionalAttrDef("description", DataTypes.STRING_TYPE),
                 TypesUtil.createRequiredAttrDef("type", DataTypes.STRING_TYPE),
+                TypesUtil.createRequiredAttrDef("date", DataTypes.DATE_TYPE),
                 TypesUtil.createRequiredAttrDef("level", DataTypes.INT_TYPE),
                 new AttributeDefinition("tableType", "tableType",
                         Multiplicity.REQUIRED, false, null),
@@ -501,6 +534,7 @@ public class EntityJerseyResourceIT extends BaseResourceIT {
                 "classification", "pii", "phi", "pci", "sox", "sec", "finance");
         tableInstance.set("name", TABLE_NAME);
         tableInstance.set("description", "bar table");
+        tableInstance.set("date", "2014-07-11T08:00:00.000Z");
         tableInstance.set("type", "managed");
         tableInstance.set("level", 2);
         tableInstance.set("tableType", 1); // enum
...
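Taken together, the two fixture changes in EntityJerseyResourceIT pin down the expected wire format for the new "date" attribute: the existing positive-path fixture now sends a full ISO 8601 timestamp, while the new negative test sends the short form and expects it to be rejected:

    tableInstance.set("date", "2014-07-11T08:00:00.000Z"); // accepted by DataTypes.DATE_TYPE
    tableInstance.set("date", "2014-07-11");                // rejected: "Cannot convert value '2014-07-11' to datatype date"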