Commit d41e549f by Hemanth Yamijala

ATLAS-793 Business Catalog Delete (jspeidel via yhemanth)

parent b8a5d4d6
......@@ -20,6 +20,7 @@ package org.apache.atlas.catalog;
import org.apache.atlas.catalog.definition.ResourceDefinition;
import org.apache.atlas.catalog.exception.ResourceAlreadyExistsException;
import org.apache.atlas.catalog.exception.ResourceNotFoundException;
import java.util.Map;
......@@ -54,6 +55,16 @@ public interface AtlasTypeSystem {
throws ResourceAlreadyExistsException;
/**
* Delete an entity from the Atlas type system.
*
* @param definition definition of the resource being deleted
* @param request user request
*
* @throws ResourceNotFoundException if the resource to delete doesn't exist
*/
void deleteEntity(ResourceDefinition definition, Request request) throws ResourceNotFoundException;
/**
* Create a trait instance in the Atlas Type System.
*
* @param resourceDefinition resource definition for trait type being created
......@@ -76,4 +87,14 @@ public interface AtlasTypeSystem {
*/
void createTraitInstance(String guid, String typeName, Map<String, Object> properties)
throws ResourceAlreadyExistsException;
/**
* Delete a tag instance.
*
* @param guid associated entity guid
* @param traitName name of the trait to delete
*
* @throws ResourceNotFoundException if the specified trait doesn't exist for the specified entity
*/
void deleteTag(String guid, String traitName) throws ResourceNotFoundException;
}
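The new deleteEntity/deleteTag methods are intended to be driven by resource providers. As a hypothetical illustration (not part of this commit; the "id" and "name" request properties mirror the EntityTagResourceProvider change further down), a provider that supports delete would extract the identifying properties from the Request and delegate to the type system:
// Hypothetical provider-side fragment: remove a trait instance identified by the
// tagged entity's guid ("id") and the trait name ("name") carried in the request.
@Override
public void deleteResourceById(Request request) throws ResourceNotFoundException, InvalidPayloadException {
    typeSystem.deleteTag(request.<String>getProperty("id"), request.<String>getProperty("name"));
}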
......@@ -59,4 +59,26 @@ public abstract class BaseRequest implements Request {
public Collection<String> getAdditionalSelectProperties() {
return additionalSelectProperties;
}
@Override
public boolean equals(Object o) {
if (this == o) return true;
if (o == null || getClass() != o.getClass()) return false;
BaseRequest that = (BaseRequest) o;
return properties.equals(that.properties) &&
additionalSelectProperties.equals(that.additionalSelectProperties) &&
(queryString == null ? that.queryString == null : queryString.equals(that.queryString));
}
@Override
public int hashCode() {
int result = properties.hashCode();
result = 31 * result + (queryString != null ? queryString.hashCode() : 0);
result = 31 * result + additionalSelectProperties.hashCode();
return result;
}
}
......@@ -18,11 +18,10 @@
package org.apache.atlas.catalog;
import org.apache.atlas.catalog.definition.ResourceDefinition;
import org.apache.atlas.catalog.exception.InvalidPayloadException;
import org.apache.atlas.catalog.exception.ResourceNotFoundException;
import org.apache.atlas.catalog.query.QueryFactory;
import java.util.*;
/**
* Base class for resource providers.
*/
......@@ -37,4 +36,9 @@ public abstract class BaseResourceProvider implements ResourceProvider {
protected void setQueryFactory(QueryFactory factory) {
queryFactory = factory;
}
@Override
public void deleteResourceById(Request request) throws ResourceNotFoundException, InvalidPayloadException {
throw new InvalidPayloadException("Delete is not supported for this resource type");
}
}
......@@ -18,15 +18,20 @@
package org.apache.atlas.catalog;
import com.thinkaurelius.titan.core.TitanGraph;
import org.apache.atlas.AtlasException;
import org.apache.atlas.catalog.definition.ResourceDefinition;
import org.apache.atlas.catalog.exception.CatalogRuntimeException;
import org.apache.atlas.catalog.exception.ResourceAlreadyExistsException;
import org.apache.atlas.catalog.exception.ResourceNotFoundException;
import org.apache.atlas.repository.graph.TitanGraphProvider;
import org.apache.atlas.services.MetadataService;
import org.apache.atlas.typesystem.ITypedReferenceableInstance;
import org.apache.atlas.typesystem.Referenceable;
import org.apache.atlas.typesystem.Struct;
import org.apache.atlas.typesystem.exception.EntityExistsException;
import org.apache.atlas.typesystem.exception.EntityNotFoundException;
import org.apache.atlas.typesystem.exception.TraitNotFoundException;
import org.apache.atlas.typesystem.exception.TypeExistsException;
import org.apache.atlas.typesystem.json.TypesSerialization;
import org.apache.atlas.typesystem.types.*;
......@@ -71,6 +76,24 @@ public class DefaultTypeSystem implements AtlasTypeSystem {
}
@Override
public void deleteEntity(ResourceDefinition definition, Request request) throws ResourceNotFoundException {
String typeName = definition.getTypeName();
String cleanIdPropName = definition.getIdPropertyName();
String idValue = request.getProperty(cleanIdPropName);
try {
// transaction handled by atlas repository
metadataService.deleteEntityByUniqueAttribute(typeName, cleanIdPropName, idValue);
} catch (EntityNotFoundException e) {
throw new ResourceNotFoundException(String.format("The specified entity doesn't exist: type=%s, %s=%s",
typeName, cleanIdPropName, idValue));
} catch (AtlasException e) {
throw new CatalogRuntimeException(String.format(
"An unexpected error occurred while attempting to delete entity: type=%s, %s=%s : %s",
typeName, cleanIdPropName, idValue, e), e);
}
}
@Override
public void createClassType(ResourceDefinition resourceDefinition, String name, String description)
throws ResourceAlreadyExistsException {
......@@ -84,6 +107,7 @@ public class DefaultTypeSystem implements AtlasTypeSystem {
createType(resourceDefinition.getPropertyDefinitions(), TraitType.class, name, description, true);
}
@Override
public void createTraitInstance(String guid, String typeName, Map<String, Object> properties)
throws ResourceAlreadyExistsException {
......@@ -108,6 +132,19 @@ public class DefaultTypeSystem implements AtlasTypeSystem {
}
}
@Override
public void deleteTag(String guid, String traitName) throws ResourceNotFoundException {
try {
metadataService.deleteTrait(guid, traitName);
} catch (TraitNotFoundException e) {
throw new ResourceNotFoundException(String.format(
"The trait '%s' doesn't exist for entity '%s'", traitName, guid));
} catch (AtlasException e) {
throw new CatalogRuntimeException(String.format(
"Unable to delete tag '%s' from entity '%s'", traitName, guid), e);
}
}
private <T extends HierarchicalType> void createType(Collection<AttributeDefinition> attributes,
Class<T> type,
String name,
......
......@@ -96,6 +96,11 @@ public class EntityTagResourceProvider extends BaseResourceProvider implements R
return relativeUrls;
}
@Override
public void deleteResourceById(Request request) throws ResourceNotFoundException, InvalidPayloadException {
typeSystem.deleteTag(request.<String>getProperty("id"), request.<String>getProperty("name"));
}
private Result getTermQueryResult(String termName) throws ResourceNotFoundException {
Request tagRequest = new InstanceRequest(
Collections.<String, Object>singletonMap("termPath", new TermPath(termName)));
......
......@@ -60,6 +60,16 @@ public interface ResourceProvider {
void createResource(Request request)
throws InvalidPayloadException, ResourceAlreadyExistsException, ResourceNotFoundException;
/**
* Delete a single resource.
*
* @param request request instance containing the id of the resource to delete.
*
* @throws ResourceNotFoundException if the resource doesn't exist
* @throws InvalidPayloadException if the request is invalid
*/
void deleteResourceById(Request request) throws ResourceNotFoundException, InvalidPayloadException;
//todo: define the behavior for partial success
/**
* Create multiple resources.
......
......@@ -29,9 +29,12 @@ import java.util.*;
* Provider for taxonomy resources.
*/
public class TaxonomyResourceProvider extends BaseResourceProvider implements ResourceProvider {
private final TermResourceProvider termResourceProvider;
private static final ResourceDefinition resourceDefinition = new TaxonomyResourceDefinition();
public TaxonomyResourceProvider(AtlasTypeSystem typeSystem) {
super(typeSystem);
termResourceProvider = new TermResourceProvider(typeSystem);
}
@Override
......@@ -68,6 +71,17 @@ public class TaxonomyResourceProvider extends BaseResourceProvider implements Re
throw new UnsupportedOperationException("Creating multiple Taxonomies in a request is not currently supported");
}
@Override
public void deleteResourceById(Request request) throws ResourceNotFoundException, InvalidPayloadException {
request.addAdditionalSelectProperties(Collections.singleton("id"));
// will result in expected ResourceNotFoundException if taxonomy doesn't exist
Result taxonomyResult = getResourceById(request);
String taxonomyId = String.valueOf(taxonomyResult.getPropertyMaps().iterator().next().get("id"));
getTermResourceProvider().deleteChildren(taxonomyId, new TermPath(request.<String>getProperty("name")));
typeSystem.deleteEntity(resourceDefinition, request);
}
private void ensureTaxonomyDoesntExist(Request request) throws ResourceAlreadyExistsException {
try {
getResourceById(request);
......@@ -77,4 +91,8 @@ public class TaxonomyResourceProvider extends BaseResourceProvider implements Re
// expected case
}
}
protected TermResourceProvider getTermResourceProvider() {
return termResourceProvider;
}
}
\ No newline at end of file
......@@ -30,7 +30,9 @@ import java.util.*;
*/
public class TermResourceProvider extends BaseResourceProvider implements ResourceProvider {
private final static ResourceDefinition resourceDefinition = new TermResourceDefinition();
private TaxonomyResourceProvider taxonomyResourceProvider;
private ResourceProvider taxonomyResourceProvider;
private ResourceProvider entityResourceProvider;
private ResourceProvider entityTagResourceProvider;
public TermResourceProvider(AtlasTypeSystem typeSystem) {
super(typeSystem);
......@@ -99,6 +101,83 @@ public class TermResourceProvider extends BaseResourceProvider implements Resour
throw new UnsupportedOperationException("Creating multiple Terms in a request is not currently supported");
}
@Override
public void deleteResourceById(Request request) throws ResourceNotFoundException, InvalidPayloadException {
// will result in expected ResourceNotFoundException if term doesn't exist
getResourceById(request);
TermPath termPath = (TermPath) request.getProperties().get("termPath");
String taxonomyId = getTaxonomyId(termPath);
deleteChildren(taxonomyId, termPath);
deleteTerm(taxonomyId, termPath);
}
protected void deleteChildren(String taxonomyId, TermPath termPath)
throws ResourceNotFoundException, InvalidPayloadException {
TermPath collectionTermPath = new TermPath(termPath.getFullyQualifiedName() + ".");
Request queryRequest = new CollectionRequest(Collections.<String, Object>singletonMap("termPath",
collectionTermPath), null);
AtlasQuery collectionQuery;
try {
collectionQuery = queryFactory.createTermQuery(queryRequest);
} catch (InvalidQueryException e) {
throw new CatalogRuntimeException("Failed to compile internal predicate: " + e, e);
}
Collection<Map<String, Object>> children = collectionQuery.execute();
for (Map<String, Object> childMap : children) {
deleteTerm(taxonomyId, new TermPath(String.valueOf(childMap.get("name"))));
}
}
private void deleteTerm(String taxonomyId, TermPath termPath)
throws ResourceNotFoundException, InvalidPayloadException {
String fullyQualifiedName = termPath.getFullyQualifiedName();
deleteEntityTagsForTerm(fullyQualifiedName);
// delete term instance associated with the taxonomy
typeSystem.deleteTag(taxonomyId, fullyQualifiedName);
//todo: Currently no way to delete type via MetadataService or MetadataRepository
}
private void deleteEntityTagsForTerm(String fullyQualifiedName) throws ResourceNotFoundException {
String entityQueryStr = String.format("tags/name:%s", fullyQualifiedName);
Request entityRequest = new CollectionRequest(Collections.<String, Object>emptyMap(), entityQueryStr);
Result entityResult;
try {
entityResult = getEntityResourceProvider().getResources(entityRequest);
} catch (InvalidQueryException e) {
throw new CatalogRuntimeException(String.format(
"Failed to compile internal predicate for query '%s': %s", entityQueryStr, e), e);
}
for (Map<String, Object> entityResultMap : entityResult.getPropertyMaps()) {
Map<String, Object> tagRequestProperties = new HashMap<>();
tagRequestProperties.put("id", String.valueOf(entityResultMap.get("id")));
tagRequestProperties.put("name", fullyQualifiedName);
try {
getEntityTagResourceProvider().deleteResourceById(new InstanceRequest(tagRequestProperties));
} catch (InvalidPayloadException e) {
throw new CatalogRuntimeException(
"An internal error occurred while trying to delete an entity tag: " + e, e);
}
}
}
private String getTaxonomyId(TermPath termPath) throws ResourceNotFoundException {
Request taxonomyRequest = new InstanceRequest(Collections.<String, Object>singletonMap(
"name", termPath.getTaxonomyName()));
taxonomyRequest.addAdditionalSelectProperties(Collections.singleton("id"));
// will result in proper ResourceNotFoundException if taxonomy doesn't exist
Result taxonomyResult = getTaxonomyResourceProvider().getResourceById(taxonomyRequest);
Map<String, Object> taxonomyResultMap = taxonomyResult.getPropertyMaps().iterator().next();
return String.valueOf(taxonomyResultMap.get("id"));
}
//todo: add generic support for pre-query modification of expected value
//todo: similar path parsing code is used in several places in this class
private String doQueryStringConversions(TermPath termPath, String queryStr) throws InvalidQueryException {
......@@ -118,6 +197,20 @@ public class TermResourceProvider extends BaseResourceProvider implements Resour
}
return taxonomyResourceProvider;
}
protected synchronized ResourceProvider getEntityResourceProvider() {
if (entityResourceProvider == null) {
entityResourceProvider = new EntityResourceProvider(typeSystem);
}
return entityResourceProvider;
}
protected synchronized ResourceProvider getEntityTagResourceProvider() {
if (entityTagResourceProvider == null) {
entityTagResourceProvider = new EntityTagResourceProvider(typeSystem);
}
return entityTagResourceProvider;
}
}
......
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
* <p/>
* http://www.apache.org/licenses/LICENSE-2.0
* <p/>
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.atlas.catalog.projection;
import com.tinkerpop.blueprints.Vertex;
import org.apache.atlas.repository.Constants;
import org.apache.atlas.typesystem.persistence.Id;
/**
* Provides functionality common across implementations.
*/
public abstract class BaseRelation implements Relation {
protected boolean isDeleted(Vertex v) {
return ! Id.EntityState.ACTIVE.name().equals(
v.<String>getProperty(Constants.STATE_PROPERTY_KEY));
}
}
......@@ -34,7 +34,7 @@ import java.util.Map;
/**
* Represents a generic relation
*/
public class GenericRelation implements Relation {
public class GenericRelation extends BaseRelation {
private final ResourceDefinition resourceDefinition;
public GenericRelation(ResourceDefinition resourceDefinition) {
......@@ -52,14 +52,16 @@ public class GenericRelation implements Relation {
String edgePrefix = String.format("%s%s.", Constants.INTERNAL_PROPERTY_KEY_PREFIX, vertexType);
if (edgeLabel.startsWith(edgePrefix)) {
Vertex adjacentVertex = e.getVertex(Direction.IN);
VertexWrapper relationVertex = new VertexWrapper(adjacentVertex, resourceDefinition);
String relationName = edgeLabel.substring(edgePrefix.length());
Collection<VertexWrapper> vertices = vertexMap.get(relationName);
if (vertices == null) {
vertices = new ArrayList<>();
vertexMap.put(relationName, vertices);
if (! isDeleted(adjacentVertex)) {
VertexWrapper relationVertex = new VertexWrapper(adjacentVertex, resourceDefinition);
String relationName = edgeLabel.substring(edgePrefix.length());
Collection<VertexWrapper> vertices = vertexMap.get(relationName);
if (vertices == null) {
vertices = new ArrayList<>();
vertexMap.put(relationName, vertices);
}
vertices.add(relationVertex);
}
vertices.add(relationVertex);
}
}
for (Map.Entry<String, Collection<VertexWrapper>> entry : vertexMap.entrySet()) {
......
......@@ -37,7 +37,7 @@ import java.util.Collections;
/**
* Relation for adjacent Tag vertices.
*/
public class TagRelation implements Relation {
public class TagRelation extends BaseRelation {
private static ResourceDefinition resourceDefinition = new EntityTagResourceDefinition();
@Override
public Collection<RelationSet> traverse(VertexWrapper vWrapper) {
......@@ -46,7 +46,7 @@ public class TagRelation implements Relation {
for (Edge e : v.getEdges(Direction.OUT)) {
if (e.getLabel().startsWith(v.<String>getProperty(Constants.ENTITY_TYPE_PROPERTY_KEY))) {
VertexWrapper trait = new TermVertexWrapper(e.getVertex(Direction.IN));
if (trait.getPropertyKeys().contains("available_as_tag")) {
if (trait.getPropertyKeys().contains("available_as_tag") && ! isDeleted(trait.getVertex())) {
vertices.add(trait);
}
}
......@@ -60,8 +60,12 @@ public class TagRelation implements Relation {
@Override
public Boolean compute(Edge edge) {
String name = edge.getVertex(Direction.OUT).getProperty(Constants.ENTITY_TYPE_PROPERTY_KEY);
VertexWrapper v = new TermVertexWrapper(edge.getVertex(Direction.IN));
return edge.getLabel().startsWith(name) && v.getPropertyKeys().contains("available_as_tag");
if (edge.getLabel().startsWith(name)) {
VertexWrapper v = new TermVertexWrapper(edge.getVertex(Direction.IN));
return v.getPropertyKeys().contains("available_as_tag") && ! isDeleted(v.getVertex());
} else {
return false;
}
}
});
}
......
......@@ -38,7 +38,7 @@ import java.util.Collections;
* Trait specific relation.
*/
//todo: combine with TagRelation
public class TraitRelation implements Relation {
public class TraitRelation extends BaseRelation {
//todo: for now using entity tag resource definition
private static ResourceDefinition resourceDefinition = new EntityTagResourceDefinition();
......@@ -49,7 +49,7 @@ public class TraitRelation implements Relation {
for (Edge e : v.getEdges(Direction.OUT)) {
if (e.getLabel().startsWith(v.<String>getProperty(Constants.ENTITY_TYPE_PROPERTY_KEY))) {
VertexWrapper trait = new TermVertexWrapper(e.getVertex(Direction.IN));
if (! trait.getPropertyKeys().contains("available_as_tag")) {
if (! trait.getPropertyKeys().contains("available_as_tag") && ! isDeleted(trait.getVertex())) {
vertices.add(trait);
}
}
......@@ -62,9 +62,13 @@ public class TraitRelation implements Relation {
return new FilterFunctionPipe<>(new PipeFunction<Edge, Boolean>() {
@Override
public Boolean compute(Edge edge) {
String type = edge.getVertex(Direction.OUT).getProperty(Constants.ENTITY_TYPE_PROPERTY_KEY);
VertexWrapper v = new TermVertexWrapper(edge.getVertex(Direction.IN));
return edge.getLabel().startsWith(type) && ! v.getPropertyKeys().contains("available_as_tag");
String name = edge.getVertex(Direction.OUT).getProperty(Constants.ENTITY_TYPE_PROPERTY_KEY);
if (edge.getLabel().startsWith(name)) {
VertexWrapper v = new TermVertexWrapper(edge.getVertex(Direction.IN));
return ! v.getPropertyKeys().contains("available_as_tag") && ! isDeleted(v.getVertex());
} else {
return false;
}
}
});
}
......
......@@ -19,6 +19,7 @@
package org.apache.atlas.catalog.query;
import com.tinkerpop.gremlin.java.GremlinPipeline;
import com.tinkerpop.pipes.Pipe;
import org.apache.atlas.catalog.Request;
import org.apache.atlas.catalog.definition.ResourceDefinition;
import org.apache.atlas.repository.Constants;
......@@ -31,10 +32,10 @@ public class AtlasEntityQuery extends BaseQuery {
super(queryExpression, resourceDefinition, request);
}
protected GremlinPipeline getInitialPipeline() {
protected Pipe getQueryPipe() {
//todo: the property 'entityText' isn't currently indexed
//todo: we could use Constants.ENTITY_TYPE_PROPERTY_KEY initially but trait instances also contain this property
return new GremlinPipeline(getGraph()).V().has(Constants.ENTITY_TEXT_PROPERTY_KEY).
return new GremlinPipeline().has(Constants.ENTITY_TEXT_PROPERTY_KEY).
hasNot(Constants.ENTITY_TYPE_PROPERTY_KEY, "Taxonomy");
}
}
......@@ -21,6 +21,7 @@ package org.apache.atlas.catalog.query;
import com.tinkerpop.blueprints.Direction;
import com.tinkerpop.blueprints.Edge;
import com.tinkerpop.gremlin.java.GremlinPipeline;
import com.tinkerpop.pipes.Pipe;
import com.tinkerpop.pipes.PipeFunction;
import com.tinkerpop.pipes.filter.FilterFunctionPipe;
import org.apache.atlas.catalog.Request;
......@@ -45,8 +46,8 @@ public class AtlasEntityTagQuery extends BaseQuery {
}
@Override
protected GremlinPipeline getInitialPipeline() {
GremlinPipeline p = new GremlinPipeline(getGraph()).V().has(Constants.GUID_PROPERTY_KEY, guid).outE();
protected Pipe getQueryPipe() {
GremlinPipeline p = new GremlinPipeline().has(Constants.GUID_PROPERTY_KEY, guid).outE();
//todo: this is basically the same pipeline used in TagRelation.asPipe()
p.add(new FilterFunctionPipe<>(new PipeFunction<Edge, Boolean>() {
@Override
......
......@@ -19,6 +19,7 @@
package org.apache.atlas.catalog.query;
import com.tinkerpop.gremlin.java.GremlinPipeline;
import com.tinkerpop.pipes.Pipe;
import org.apache.atlas.catalog.Request;
import org.apache.atlas.catalog.definition.ResourceDefinition;
......@@ -31,7 +32,7 @@ public class AtlasTaxonomyQuery extends BaseQuery {
}
@Override
protected GremlinPipeline getInitialPipeline() {
return new GremlinPipeline(getGraph()).V().has("__typeName", "Taxonomy");
protected Pipe getQueryPipe() {
return new GremlinPipeline().has("__typeName", "Taxonomy");
}
}
......@@ -20,6 +20,7 @@ package org.apache.atlas.catalog.query;
import com.thinkaurelius.titan.core.attribute.Text;
import com.tinkerpop.gremlin.java.GremlinPipeline;
import com.tinkerpop.pipes.Pipe;
import org.apache.atlas.catalog.Request;
import org.apache.atlas.catalog.TermPath;
import org.apache.atlas.catalog.definition.ResourceDefinition;
......@@ -37,8 +38,8 @@ public class AtlasTermQuery extends BaseQuery {
}
@Override
protected GremlinPipeline getInitialPipeline() {
return new GremlinPipeline(getGraph()).V().has("Taxonomy.name", termPath.getTaxonomyName()).out().
protected Pipe getQueryPipe() {
return new GremlinPipeline().has("Taxonomy.name", termPath.getTaxonomyName()).out().
has(Constants.ENTITY_TYPE_PROPERTY_KEY, Text.PREFIX, termPath.getFullyQualifiedName());
}
}
......@@ -19,16 +19,20 @@
package org.apache.atlas.catalog.query;
import com.thinkaurelius.titan.core.TitanGraph;
import com.tinkerpop.blueprints.Compare;
import com.tinkerpop.blueprints.Vertex;
import com.tinkerpop.gremlin.java.GremlinPipeline;
import com.tinkerpop.pipes.Pipe;
import com.tinkerpop.pipes.filter.PropertyFilterPipe;
import org.apache.atlas.catalog.Request;
import org.apache.atlas.catalog.VertexWrapper;
import org.apache.atlas.catalog.definition.ResourceDefinition;
import org.apache.atlas.catalog.exception.ResourceNotFoundException;
import org.apache.atlas.catalog.projection.Projection;
import org.apache.atlas.catalog.projection.ProjectionResult;
import org.apache.atlas.repository.Constants;
import org.apache.atlas.repository.graph.TitanGraphProvider;
import org.apache.atlas.typesystem.persistence.Id;
import java.util.ArrayList;
import java.util.Collection;
......@@ -59,13 +63,13 @@ public abstract class BaseQuery implements AtlasQuery {
}
private List<Vertex> executeQuery() {
GremlinPipeline pipeline = getInitialPipeline().as("root");
GremlinPipeline pipeline = buildPipeline().as("root");
Pipe adapterPipe = queryExpression.asPipe();
Pipe expressionPipe = queryExpression.asPipe();
try {
// AlwaysQuery returns null for pipe
List<Vertex> vertices = adapterPipe == null ? pipeline.toList() :
pipeline.add(adapterPipe).back("root").toList();
List<Vertex> vertices = expressionPipe == null ? pipeline.toList() :
pipeline.add(expressionPipe).back("root").toList();
// Even non-mutating queries can result in objects being created in
// the graph such as new fields or property keys. So, it is important
......@@ -79,9 +83,28 @@ public abstract class BaseQuery implements AtlasQuery {
}
}
protected abstract GremlinPipeline getInitialPipeline();
protected GremlinPipeline buildPipeline() {
GremlinPipeline pipeline = getRootVertexPipeline();
Pipe queryPipe = getQueryPipe();
if (queryPipe != null) {
pipeline.add(queryPipe);
}
//todo: may be more efficient to move the notDeleted pipe after the expression pipe
pipeline.add(getNotDeletedPipe());
return pipeline;
}
protected abstract Pipe getQueryPipe();
protected GremlinPipeline getRootVertexPipeline() {
return new GremlinPipeline(getGraph().getVertices());
}
protected Pipe getNotDeletedPipe() {
return new PropertyFilterPipe(Constants.STATE_PROPERTY_KEY, Compare.EQUAL,
Id.EntityState.ACTIVE.name());
}
// todo: consider getting
protected Map<String, Object> processPropertyMap(VertexWrapper vertex) {
Map<String, Object> propertyMap = resourceDefinition.filterProperties(
request, vertex.getPropertyMap());
......@@ -99,7 +122,7 @@ public abstract class BaseQuery implements AtlasQuery {
}
}
private Map<String, Object> applyProjections(VertexWrapper vertex, Map<String, Object> propertyMap) {
protected Map<String, Object> applyProjections(VertexWrapper vertex, Map<String, Object> propertyMap) {
for (Projection p : resourceDefinition.getProjections().values()) {
for (ProjectionResult projectionResult : p.values(vertex)) {
if (p.getCardinality() == Projection.Cardinality.MULTIPLE) {
......
......@@ -424,6 +424,27 @@ public class EntityTagResourceProviderTest {
verify(typeSystem, queryFactory, entityQuery, termResourceProvider);
}
@Test
public void testDeleteResourceById() throws Exception {
AtlasTypeSystem typeSystem = createStrictMock(AtlasTypeSystem.class);
// mock expectations
typeSystem.deleteTag("1", "taxonomyName.termName");
replay(typeSystem);
Map<String, Object> requestProperties = new HashMap<>();
requestProperties.put("name", "taxonomyName.termName");
requestProperties.put("id", "1");
Request userRequest = new InstanceRequest(requestProperties);
// instantiate EntityTagResourceProvider and invoke method being tested
EntityTagResourceProvider provider = new EntityTagResourceProvider(typeSystem);
provider.setQueryFactory(null);
provider.deleteResourceById(userRequest);
verify(typeSystem);
}
//todo: test behavior of createResources in case of partial success after behavior is defined
......
......@@ -284,4 +284,93 @@ public class TaxonomyResourceProviderTest {
provider.createResources(userRequest);
}
@Test
public void testDeleteResourceById() throws Exception {
TermResourceProvider termResourceProvider = createStrictMock(TermResourceProvider.class);
AtlasTypeSystem typeSystem = createStrictMock(AtlasTypeSystem.class);
QueryFactory queryFactory = createStrictMock(QueryFactory.class);
AtlasQuery query = createStrictMock(AtlasQuery.class);
Capture<Request> getRequestCapture = newCapture();
Capture<TermPath> termPathCapture = newCapture();
Capture<ResourceDefinition> resourceDefinitionCapture = newCapture();
Capture<Request> deleteRequestCapture = newCapture();
Collection<Map<String, Object>> queryResult = new ArrayList<>();
Map<String, Object> queryResultRow = new HashMap<>();
queryResult.add(queryResultRow);
queryResultRow.put("name", "testTaxonomy");
queryResultRow.put("id", "111-222-333");
// mock expectations
expect(queryFactory.createTaxonomyQuery(capture(getRequestCapture))).andReturn(query);
expect(query.execute()).andReturn(queryResult);
termResourceProvider.deleteChildren(eq("111-222-333"), capture(termPathCapture));
typeSystem.deleteEntity(capture(resourceDefinitionCapture), capture(deleteRequestCapture));
replay(termResourceProvider, typeSystem, queryFactory, query);
TaxonomyResourceProvider provider = new TestTaxonomyResourceProvider(typeSystem, termResourceProvider);
provider.setQueryFactory(queryFactory);
Map<String, Object> requestProperties = new HashMap<>();
requestProperties.put("name", "testTaxonomy");
Request userRequest = new InstanceRequest(requestProperties);
// invoke method being tested
provider.deleteResourceById(userRequest);
Request getRequest = getRequestCapture.getValue();
assertNull(getRequest.getQueryString());
assertEquals(getRequest.getAdditionalSelectProperties().size(), 1);
assertTrue(getRequest.getAdditionalSelectProperties().contains("id"));
assertEquals(getRequest.getProperties().get("name"), "testTaxonomy");
Request deleteRequest = deleteRequestCapture.getValue();
assertNull(deleteRequest.getQueryString());
assertEquals(deleteRequest.getAdditionalSelectProperties().size(), 1);
assertTrue(deleteRequest.getAdditionalSelectProperties().contains("id"));
assertEquals(deleteRequest.getProperties().get("name"), "testTaxonomy");
ResourceDefinition resourceDefinition = resourceDefinitionCapture.getValue();
assertTrue(resourceDefinition instanceof TaxonomyResourceDefinition);
verify(termResourceProvider, typeSystem, queryFactory, query);
}
@Test(expectedExceptions = ResourceNotFoundException.class)
public void testDeleteResourceById_404() throws Exception {
AtlasTypeSystem typeSystem = createStrictMock(AtlasTypeSystem.class);
QueryFactory queryFactory = createStrictMock(QueryFactory.class);
AtlasQuery query = createStrictMock(AtlasQuery.class);
Capture<Request> getRequestCapture = newCapture();
// mock expectations
expect(queryFactory.createTaxonomyQuery(capture(getRequestCapture))).andReturn(query);
expect(query.execute()).andThrow(new ResourceNotFoundException("test msg"));
replay(typeSystem, queryFactory, query);
TaxonomyResourceProvider provider = new TestTaxonomyResourceProvider(typeSystem, null);
provider.setQueryFactory(queryFactory);
Map<String, Object> requestProperties = new HashMap<>();
requestProperties.put("name", "badName");
Request userRequest = new InstanceRequest(requestProperties);
// invoke method being tested
provider.deleteResourceById(userRequest);
}
private static class TestTaxonomyResourceProvider extends TaxonomyResourceProvider {
private final TermResourceProvider termResourceProvider;
public TestTaxonomyResourceProvider(AtlasTypeSystem typeSystem, TermResourceProvider termResourceProvider) {
super(typeSystem);
this.termResourceProvider = termResourceProvider;
}
@Override
protected synchronized TermResourceProvider getTermResourceProvider() {
return termResourceProvider;
}
}
}
......@@ -25,6 +25,7 @@ import org.apache.atlas.catalog.exception.ResourceNotFoundException;
import org.apache.atlas.catalog.query.AtlasQuery;
import org.apache.atlas.catalog.query.QueryFactory;
import org.easymock.Capture;
import org.easymock.CaptureType;
import org.easymock.EasyMock;
import org.testng.annotations.Test;
......@@ -338,18 +339,170 @@ public class TermResourceProviderTest {
provider.createResources(userRequest);
}
@Test
public void testDeleteResourceById() throws Exception {
ResourceProvider taxonomyResourceProvider = createStrictMock(ResourceProvider.class);
ResourceProvider entityResourceProvider = createStrictMock(ResourceProvider.class);
ResourceProvider entityTagResourceProvider = createStrictMock(ResourceProvider.class);
AtlasTypeSystem typeSystem = createStrictMock(AtlasTypeSystem.class);
QueryFactory queryFactory = createStrictMock(QueryFactory.class);
AtlasQuery query = createStrictMock(AtlasQuery.class);
Capture<Request> taxonomyRequestCapture = newCapture();
Capture<Request> termRequestCapture = newCapture();
// root term being deleted
TermPath termPath = new TermPath("testTaxonomy.termName");
// entity requests to get ids of entities tagged with terms
Request entityRequest1 = new CollectionRequest(Collections.<String, Object>emptyMap(),
"tags/name:testTaxonomy.termName.child1");
Request entityRequest2 = new CollectionRequest(Collections.<String, Object>emptyMap(),
"tags/name:testTaxonomy.termName.child2");
Request entityRequest3 = new CollectionRequest(Collections.<String, Object>emptyMap(),
"tags/name:testTaxonomy.termName");
// entity tag requests to delete entity tags
Map<String, Object> entityTagRequestMap1 = new HashMap<>();
entityTagRequestMap1.put("id", "111");
entityTagRequestMap1.put("name", "testTaxonomy.termName.child1");
Request entityTagRequest1 = new InstanceRequest(entityTagRequestMap1);
Map<String, Object> entityTagRequestMap2 = new HashMap<>();
entityTagRequestMap2.put("id", "222");
entityTagRequestMap2.put("name", "testTaxonomy.termName.child1");
Request entityTagRequest2 = new InstanceRequest(entityTagRequestMap2);
Map<String, Object> entityTagRequestMap3 = new HashMap<>();
entityTagRequestMap3.put("id", "333");
entityTagRequestMap3.put("name", "testTaxonomy.termName.child2");
Request entityTagRequest3 = new InstanceRequest(entityTagRequestMap3);
Map<String, Object> requestProperties = new HashMap<>();
requestProperties.put("termPath", termPath);
Request userRequest = new InstanceRequest(requestProperties);
Collection<Map<String, Object>> queryResult = new ArrayList<>();
Map<String, Object> queryResultRow = new HashMap<>();
queryResult.add(queryResultRow);
queryResultRow.put("name", "testTaxonomy.termName");
queryResultRow.put("id", "111-222-333");
Collection<Map<String, Object>> taxonomyResultMaps = new ArrayList<>();
Map<String, Object> taxonomyResultMap = new HashMap<>();
taxonomyResultMap.put("name", "testTaxonomy");
taxonomyResultMap.put("id", "12345");
taxonomyResultMaps.add(taxonomyResultMap);
Result taxonomyResult = new Result(taxonomyResultMaps);
Collection<Map<String, Object>> childResult = new ArrayList<>();
Map<String, Object> childResultRow = new HashMap<>();
childResult.add(childResultRow);
childResultRow.put("name", "testTaxonomy.termName.child1");
childResultRow.put("id", "1-1-1");
Map<String, Object> childResultRow2 = new HashMap<>();
childResult.add(childResultRow2);
childResultRow2.put("name", "testTaxonomy.termName.child2");
childResultRow2.put("id", "2-2-2");
Collection<Map<String, Object>> entityResults1 = new ArrayList<>();
Map<String, Object> entityResult1Map1 = new HashMap<>();
entityResult1Map1.put("name", "entity1");
entityResult1Map1.put("id", "111");
entityResults1.add(entityResult1Map1);
Map<String, Object> entityResult1Map2 = new HashMap<>();
entityResult1Map2.put("name", "entity2");
entityResult1Map2.put("id", "222");
entityResults1.add(entityResult1Map2);
Result entityResult1 = new Result(entityResults1);
Collection<Map<String, Object>> entityResults2 = new ArrayList<>();
Map<String, Object> entityResult2Map = new HashMap<>();
entityResult2Map.put("name", "entity3");
entityResult2Map.put("id", "333");
entityResults2.add(entityResult2Map);
Result entityResult2 = new Result(entityResults2);
// mock expectations
// ensure term exists
expect(queryFactory.createTermQuery(userRequest)).andReturn(query);
expect(query.execute()).andReturn(queryResult);
// taxonomy query
expect(taxonomyResourceProvider.getResourceById(capture(taxonomyRequestCapture))).andReturn(taxonomyResult);
// get term children
expect(queryFactory.createTermQuery(capture(termRequestCapture))).andReturn(query);
expect(query.execute()).andReturn(childResult);
// entities with child1 tag
expect(entityResourceProvider.getResources(eq(entityRequest1))).andReturn(entityResult1);
// typeSystem.deleteTag("111", "testTaxonomy.termName.child1");
// typeSystem.deleteTag("222", "testTaxonomy.termName.child1");
entityTagResourceProvider.deleteResourceById(entityTagRequest1);
entityTagResourceProvider.deleteResourceById(entityTagRequest2);
// delete child1 from taxonomy
typeSystem.deleteTag("12345", "testTaxonomy.termName.child1");
// entities with child2 tag
expect(entityResourceProvider.getResources(eq(entityRequest2))).andReturn(entityResult2);
//typeSystem.deleteTag("333", "testTaxonomy.termName.child2");
entityTagResourceProvider.deleteResourceById(entityTagRequest3);
// delete child2 from taxonomy
typeSystem.deleteTag("12345", "testTaxonomy.termName.child2");
// root term being deleted which has no associated tags
expect(entityResourceProvider.getResources(eq(entityRequest3))).andReturn(
new Result(Collections.<Map<String, Object>>emptyList()));
// delete root term from taxonomy
typeSystem.deleteTag("12345", "testTaxonomy.termName");
replay(taxonomyResourceProvider, entityResourceProvider, entityTagResourceProvider, typeSystem, queryFactory, query);
TermResourceProvider provider = new TestTermResourceProvider(
typeSystem, taxonomyResourceProvider, entityResourceProvider, entityTagResourceProvider);
provider.setQueryFactory(queryFactory);
// invoke method being tested
provider.deleteResourceById(userRequest);
Request taxonomyRequest = taxonomyRequestCapture.getValue();
assertEquals(taxonomyRequest.getProperties().get("name"), "testTaxonomy");
assertEquals(taxonomyRequest.getAdditionalSelectProperties().size(), 1);
assertTrue(taxonomyRequest.getAdditionalSelectProperties().contains("id"));
Request childTermRequest = termRequestCapture.getValue();
assertEquals(childTermRequest.<TermPath>getProperty("termPath").getFullyQualifiedName(), "testTaxonomy.termName.");
verify(taxonomyResourceProvider, entityResourceProvider, entityTagResourceProvider, typeSystem, queryFactory, query);
}
private static class TestTermResourceProvider extends TermResourceProvider {
private ResourceProvider testTaxonomyResourceProvider;
private ResourceProvider testEntityResourceProvider;
private ResourceProvider testEntityTagResourceProvider;
public TestTermResourceProvider(AtlasTypeSystem typeSystem,
ResourceProvider taxonomyResourceProvider) {
super(typeSystem);
testTaxonomyResourceProvider = taxonomyResourceProvider;
}
public TestTermResourceProvider(AtlasTypeSystem typeSystem, ResourceProvider taxonomyResourceProvider) {
public TestTermResourceProvider(AtlasTypeSystem typeSystem,
ResourceProvider taxonomyResourceProvider,
ResourceProvider entityResourceProvider,
ResourceProvider entityTagResourceProvider) {
super(typeSystem);
testTaxonomyResourceProvider = taxonomyResourceProvider;
testEntityResourceProvider = entityResourceProvider;
testEntityTagResourceProvider = entityTagResourceProvider;
}
@Override
protected synchronized ResourceProvider getTaxonomyResourceProvider() {
return testTaxonomyResourceProvider;
}
@Override
protected synchronized ResourceProvider getEntityResourceProvider() {
return testEntityResourceProvider;
}
@Override
protected synchronized ResourceProvider getEntityTagResourceProvider() {
return testEntityTagResourceProvider;
}
}
}
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
* <p/>
* http://www.apache.org/licenses/LICENSE-2.0
* <p/>
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.atlas.catalog.projection;
import com.tinkerpop.blueprints.Vertex;
import org.apache.atlas.repository.Constants;
import org.apache.atlas.typesystem.persistence.Id;
import org.testng.annotations.Test;
import static org.easymock.EasyMock.createStrictMock;
import static org.easymock.EasyMock.expect;
import static org.easymock.EasyMock.replay;
import static org.testng.Assert.assertFalse;
import static org.testng.Assert.assertTrue;
/**
* Unit tests for TagRelation
*/
public class TagRelationTest {
@Test
public void testIsDeleted() {
Vertex v = createStrictMock(Vertex.class);
expect(v.getProperty(Constants.STATE_PROPERTY_KEY)).andReturn(Id.EntityState.ACTIVE.name());
replay(v);
BaseRelation relation = new TagRelation();
assertFalse(relation.isDeleted(v));
}
@Test
public void testIsDeleted_deletedEntity() {
Vertex v = createStrictMock(Vertex.class);
expect(v.getProperty(Constants.STATE_PROPERTY_KEY)).andReturn(Id.EntityState.DELETED.name());
replay(v);
BaseRelation relation = new TagRelation();
assertTrue(relation.isDeleted(v));
}
}
......@@ -43,12 +43,16 @@ public class AtlasEntityQueryTest {
public void testExecute_Collection() throws Exception {
TitanGraph graph = createStrictMock(TitanGraph.class);
QueryExpression expression = createStrictMock(QueryExpression.class);
ResourceDefinition resourceDefinition = createNiceMock(ResourceDefinition.class);
Request request = createNiceMock(Request.class);
Pipe queryExpressionPipe = createStrictMock(Pipe.class);
ResourceDefinition resourceDefinition = createStrictMock(ResourceDefinition.class);
Request request = createStrictMock(Request.class);
GremlinPipeline initialPipeline = createStrictMock(GremlinPipeline.class);
Pipe queryPipe = createStrictMock(Pipe.class);
Pipe expressionPipe = createStrictMock(Pipe.class);
Pipe notDeletedPipe = createStrictMock(Pipe.class);
GremlinPipeline rootPipeline = createStrictMock(GremlinPipeline.class);
GremlinPipeline queryPipeline = createStrictMock(GremlinPipeline.class);
GremlinPipeline expressionPipeline = createStrictMock(GremlinPipeline.class);
GremlinPipeline notDeletedPipeline = createStrictMock(GremlinPipeline.class);
Vertex vertex1 = createStrictMock(Vertex.class);
VertexWrapper vertex1Wrapper = createStrictMock(VertexWrapper.class);
......@@ -63,9 +67,11 @@ public class AtlasEntityQueryTest {
filteredVertex1PropertyMap.put("prop1", "prop1.value1");
// mock expectations
expect(initialPipeline.add(queryPipe)).andReturn(queryPipeline);
expect(initialPipeline.add(notDeletedPipe)).andReturn(notDeletedPipeline);
expect(initialPipeline.as("root")).andReturn(rootPipeline);
expect(expression.asPipe()).andReturn(queryExpressionPipe);
expect(rootPipeline.add(queryExpressionPipe)).andReturn(expressionPipeline);
expect(expression.asPipe()).andReturn(expressionPipe);
expect(rootPipeline.add(expressionPipe)).andReturn(expressionPipeline);
expect(expressionPipeline.back("root")).andReturn(rootPipeline);
expect(rootPipeline.toList()).andReturn(results);
graph.commit();
......@@ -74,12 +80,13 @@ public class AtlasEntityQueryTest {
expect(resourceDefinition.resolveHref(filteredVertex1PropertyMap)).andReturn("/foo/bar");
expect(request.getCardinality()).andReturn(Request.Cardinality.COLLECTION);
replay(graph, expression, resourceDefinition, request, queryExpressionPipe,
initialPipeline, rootPipeline, expressionPipeline, vertex1, vertex1Wrapper);
replay(graph, expression, resourceDefinition, request, initialPipeline, queryPipe, expressionPipe,
notDeletedPipe, rootPipeline, queryPipeline, expressionPipeline, notDeletedPipeline,
vertex1, vertex1Wrapper);
// end mock expectations
AtlasEntityQuery query = new TestAtlasEntityQuery(
expression, resourceDefinition, request, initialPipeline, graph, vertex1Wrapper);
AtlasEntityQuery query = new TestAtlasEntityQuery(expression, resourceDefinition, request,
initialPipeline, queryPipe, notDeletedPipe, graph, vertex1Wrapper);
// invoke method being tested
Collection<Map<String, Object>> queryResults = query.execute();
......@@ -90,36 +97,46 @@ public class AtlasEntityQueryTest {
assertEquals(queryResultMap.get("prop1"), "prop1.value1");
assertEquals(queryResultMap.get("href"), "/foo/bar");
verify(graph, expression, resourceDefinition, request, queryExpressionPipe,
initialPipeline, rootPipeline, expressionPipeline, vertex1, vertex1Wrapper);
verify(graph, expression, resourceDefinition, request, initialPipeline, queryPipe, expressionPipe,
notDeletedPipe, rootPipeline, queryPipeline, expressionPipeline, notDeletedPipeline,
vertex1, vertex1Wrapper);
}
@Test
public void testExecute_Collection_rollbackOnException() throws Exception {
TitanGraph graph = createStrictMock(TitanGraph.class);
QueryExpression expression = createStrictMock(QueryExpression.class);
ResourceDefinition resourceDefinition = createNiceMock(ResourceDefinition.class);
Request request = createNiceMock(Request.class);
Pipe queryExpressionPipe = createStrictMock(Pipe.class);
ResourceDefinition resourceDefinition = createStrictMock(ResourceDefinition.class);
Request request = createStrictMock(Request.class);
GremlinPipeline initialPipeline = createStrictMock(GremlinPipeline.class);
Pipe queryPipe = createStrictMock(Pipe.class);
Pipe expressionPipe = createStrictMock(Pipe.class);
Pipe notDeletedPipe = createStrictMock(Pipe.class);
GremlinPipeline rootPipeline = createStrictMock(GremlinPipeline.class);
GremlinPipeline queryPipeline = createStrictMock(GremlinPipeline.class);
GremlinPipeline expressionPipeline = createStrictMock(GremlinPipeline.class);
GremlinPipeline notDeletedPipeline = createStrictMock(GremlinPipeline.class);
// mock expectations
expect(initialPipeline.add(queryPipe)).andReturn(queryPipeline);
expect(initialPipeline.add(notDeletedPipe)).andReturn(notDeletedPipeline);
expect(initialPipeline.as("root")).andReturn(rootPipeline);
expect(expression.asPipe()).andReturn(queryExpressionPipe);
expect(rootPipeline.add(queryExpressionPipe)).andReturn(expressionPipeline);
expect(expression.asPipe()).andReturn(expressionPipe);
expect(rootPipeline.add(expressionPipe)).andReturn(expressionPipeline);
expect(expressionPipeline.back("root")).andReturn(rootPipeline);
expect(rootPipeline.toList()).andThrow(new RuntimeException("something bad happened"));
graph.rollback();
replay(graph, expression, resourceDefinition, request, queryExpressionPipe,
initialPipeline, rootPipeline, expressionPipeline);
replay(graph, expression, resourceDefinition, request, initialPipeline, queryPipe, expressionPipe,
notDeletedPipe, rootPipeline, queryPipeline, expressionPipeline, notDeletedPipeline);
// end mock expectations
AtlasEntityQuery query = new TestAtlasEntityQuery(
expression, resourceDefinition, request, initialPipeline, graph, null);
AtlasEntityQuery query = new TestAtlasEntityQuery(expression, resourceDefinition, request,
initialPipeline, queryPipe, notDeletedPipe, graph, null);
try {
// invoke method being tested
......@@ -129,13 +146,14 @@ public class AtlasEntityQueryTest {
assertEquals(e.getMessage(), "something bad happened");
}
verify(graph, expression, resourceDefinition, request, queryExpressionPipe,
initialPipeline, rootPipeline, expressionPipeline);
verify(graph, expression, resourceDefinition, request, initialPipeline, queryPipe, expressionPipe,
notDeletedPipe, rootPipeline, queryPipeline, expressionPipeline, notDeletedPipeline);
}
private class TestAtlasEntityQuery extends AtlasEntityQuery {
private final GremlinPipeline initialPipeline;
private final Pipe queryPipe;
private final Pipe notDeletedPipe;
private final TitanGraph graph;
private final VertexWrapper vWrapper;
......@@ -143,21 +161,35 @@ public class AtlasEntityQueryTest {
ResourceDefinition resourceDefinition,
Request request,
GremlinPipeline initialPipeline,
Pipe queryPipe,
Pipe notDeletedPipe,
TitanGraph graph,
VertexWrapper vWrapper) {
super(queryExpression, resourceDefinition, request);
this.initialPipeline = initialPipeline;
this.queryPipe = queryPipe;
this.notDeletedPipe = notDeletedPipe;
this.graph = graph;
this.vWrapper = vWrapper;
}
@Override
protected GremlinPipeline getInitialPipeline() {
protected GremlinPipeline getRootVertexPipeline() {
return initialPipeline;
}
@Override
protected Pipe getQueryPipe() {
return queryPipe;
}
@Override
protected Pipe getNotDeletedPipe() {
return notDeletedPipe;
}
@Override
protected TitanGraph getGraph() {
return graph;
}
......
......@@ -22,6 +22,7 @@ ATLAS-409 Atlas will not import avro tables with schema read from a file (dosset
ATLAS-379 Create sqoop and falcon metadata addons (venkatnrangan,bvellanki,sowmyaramesh via shwethags)
ALL CHANGES:
ATLAS-793 Business Catalog Delete (jspeidel via yhemanth)
ATLAS-846 Atlas UI : Add Pagination to Tags and Terms tabs of asset detailes page (kevalbhatt18 via yhemanth)
ATLAS-503 Lock exceptions occurring due to concurrent updates to backend stores (yhemanth)
ATLAS-766 Atlas policy file does not honour standard hash as comment format ( saqeeb.s via sumasai )
......
......@@ -191,8 +191,22 @@ public interface MetadataService {
*/
void addTrait(String guid, String traitInstanceDefinition) throws AtlasException;
//todo:
/**
* Adds a new trait to an existing entity represented by a guid.
*
* @param guid globally unique identifier for the entity
* @param traitInstance trait instance to add
*
* @throws AtlasException if unable to add the trait instance
*/
void addTrait(String guid, ITypedStruct traitInstance) throws AtlasException;
/**
* Create a typed trait instance.
*
* @param traitInstance trait instance
* @return a typed trait instance
* @throws AtlasException if unable to create the typed trait instance
*/
ITypedStruct createTraitInstance(Struct traitInstance) throws AtlasException;
......
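For context, the two methods documented above are typically used together when attaching a tag: a raw Struct is converted to a typed trait instance and then added to the entity. A minimal, hypothetical caller sketch (the trait type name, guid variable, and metadataService reference are illustrative only):
// Hypothetical caller sketch for createTraitInstance/addTrait (names are illustrative).
Struct traitStruct = new Struct("PII");                        // assumed trait type name
ITypedStruct typedTrait = metadataService.createTraitInstance(traitStruct);
metadataService.addTrait(entityGuid, typedTrait);              // attach the typed trait to the entity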
......@@ -22,6 +22,7 @@ import com.google.gson.Gson;
import com.google.gson.JsonSyntaxException;
import org.apache.atlas.catalog.*;
import org.apache.atlas.catalog.exception.*;
import org.apache.atlas.repository.graph.TitanGraphProvider;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
......@@ -43,6 +44,7 @@ public abstract class BaseService {
protected Result getResource(ResourceProvider provider, Request request)
throws ResourceNotFoundException {
initializeGraphTransaction();
try {
return provider.getResourceById(request);
} catch (RuntimeException e) {
......@@ -53,6 +55,7 @@ public abstract class BaseService {
protected Result getResources(ResourceProvider provider, Request request)
throws ResourceNotFoundException, InvalidQueryException {
initializeGraphTransaction();
try {
return provider.getResources(request);
} catch (RuntimeException e) {
......@@ -61,6 +64,7 @@ public abstract class BaseService {
}
protected void createResource(ResourceProvider provider, Request request) throws CatalogException {
initializeGraphTransaction();
try {
provider.createResource(request);
} catch (RuntimeException e) {
......@@ -68,8 +72,18 @@ public abstract class BaseService {
}
}
protected Collection<String> createResources(ResourceProvider provider, Request request) throws CatalogException {
protected void deleteResource(ResourceProvider provider, Request request) throws CatalogException {
initializeGraphTransaction();
try {
provider.deleteResourceById(request);
} catch (RuntimeException e) {
throw wrapRuntimeException(e);
}
}
protected Collection<String> createResources(ResourceProvider provider, Request request) throws CatalogException {
initializeGraphTransaction();
try {
return provider.createResources(request);
} catch (RuntimeException e) {
......@@ -96,10 +110,6 @@ public abstract class BaseService {
return properties;
}
private RuntimeException wrapRuntimeException(RuntimeException e) {
return e instanceof CatalogRuntimeException ? e : new CatalogRuntimeException(e);
}
protected String decode(String s) throws CatalogException {
try {
return s == null ? null : URLDecoder.decode(s, "UTF-8");
......@@ -108,6 +118,16 @@ public abstract class BaseService {
}
}
private RuntimeException wrapRuntimeException(RuntimeException e) {
return e instanceof CatalogRuntimeException ? e : new CatalogRuntimeException(e);
}
//todo: abstract via AtlasTypeSystem
// ensure that the thread wasn't re-pooled with an existing transaction
private void initializeGraphTransaction() {
TitanGraphProvider.getGraphInstance().rollback();
}
@XmlRootElement
// the name of this class is used as the collection name in the returned json when returning a collection
public static class Results {
......
......@@ -144,4 +144,21 @@ public class EntityService extends BaseService {
return Response.status(Response.Status.CREATED).entity(
new GenericEntity<Collection<Results>>(result) {}).build();
}
@DELETE
@Path("{entityId}/tags/{tag}")
@Produces(Servlets.JSON_MEDIA_TYPE)
public Response deleteEntityTag(@Context HttpHeaders headers,
@Context UriInfo ui,
@PathParam("entityId") String entityId,
@PathParam("tag") String tagName) throws CatalogException {
Map<String, Object> properties = new HashMap<>();
properties.put("id", entityId);
properties.put("name", tagName);
deleteResource(entityTagResourceProvider, new InstanceRequest(properties));
return Response.status(Response.Status.OK).entity(
new Results(ui.getRequestUri().toString(), 200)).build();
}
}
......@@ -91,6 +91,22 @@ public class TaxonomyService extends BaseService {
new Results(ui.getRequestUri().toString(), 201)).build();
}
@DELETE
@Path("{taxonomyName}")
@Produces(Servlets.JSON_MEDIA_TYPE)
public Response deleteTaxonomy(@Context HttpHeaders headers,
@Context UriInfo ui,
@PathParam("taxonomyName") String taxonomyName) throws CatalogException {
Map<String, Object> properties = new HashMap<>();
properties.put("name", taxonomyName);
deleteResource(taxonomyResourceProvider, new InstanceRequest(properties));
return Response.status(Response.Status.OK).entity(
new Results(ui.getRequestUri().toString(), 200)).build();
}
@GET
@Path("{taxonomyName}/terms/{termName}")
@Produces(Servlets.JSON_MEDIA_TYPE)
......@@ -172,6 +188,22 @@ public class TaxonomyService extends BaseService {
new Results(ui.getRequestUri().toString(), 201)).build();
}
@DELETE
@Path("{taxonomyName}/terms/{termName}")
@Produces(Servlets.JSON_MEDIA_TYPE)
public Response deleteTerm(@Context HttpHeaders headers,
@Context UriInfo ui,
@PathParam("taxonomyName") String taxonomyName,
@PathParam("termName") String termName) throws CatalogException {
Map<String, Object> properties = new HashMap<>();
properties.put("termPath", new TermPath(taxonomyName, termName));
deleteResource(termResourceProvider, new InstanceRequest(properties));
return Response.status(Response.Status.OK).entity(
new Results(ui.getRequestUri().toString(), 200)).build();
}
@POST
@Path("{taxonomyName}/terms/{termName}/{remainder:.*}")
@Produces(Servlets.JSON_MEDIA_TYPE)
......
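For reference, the three DELETE endpoints introduced above could be exercised roughly as follows. This is a hedged client-side sketch: the host/port and the /api/atlas/v1 base path, as well as the sample taxonomy, term, and guid values, are assumptions and do not come from this diff.
// Hypothetical client sketch using only the JDK; paths are built from the resources above.
import java.net.HttpURLConnection;
import java.net.URL;

public class CatalogDeleteExample {
    static int delete(String url) throws Exception {
        HttpURLConnection conn = (HttpURLConnection) new URL(url).openConnection();
        conn.setRequestMethod("DELETE");
        int status = conn.getResponseCode();   // the services above return 200 on success
        conn.disconnect();
        return status;
    }

    public static void main(String[] args) throws Exception {
        String base = "http://localhost:21000/api/atlas/v1";                 // assumed base URL
        delete(base + "/taxonomies/myTaxonomy/terms/myTerm");                // delete a term
        delete(base + "/taxonomies/myTaxonomy");                             // delete a taxonomy
        delete(base + "/entities/entity-guid-123/tags/myTaxonomy.myTerm");   // delete an entity tag
    }
}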