Commit 89f70609 by Jeff Hagelberg

ATLAS-1114: Performance improvements for create/update entity (2 of 2)

parent 6cd68119
......@@ -18,17 +18,17 @@
package org.apache.atlas.repository.audit;
import org.apache.atlas.AtlasException;
import org.apache.atlas.EntityAuditEvent;
import com.google.inject.Singleton;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
import java.util.SortedMap;
import java.util.TreeMap;
import org.apache.atlas.AtlasException;
import org.apache.atlas.EntityAuditEvent;
import com.google.inject.Singleton;
/**
* Entity audit repository where audit events are stored in-memory. Used only for integration tests
*/
......@@ -50,8 +50,10 @@ public class InMemoryEntityAuditRepository implements EntityAuditRepository {
}
}
//synchronized to avoid concurrent modification exception that occurs if events are added
//while we are iterating through the map
@Override
public List<EntityAuditEvent> listEvents(String entityId, String startKey, short maxResults)
public synchronized List<EntityAuditEvent> listEvents(String entityId, String startKey, short maxResults)
throws AtlasException {
List<EntityAuditEvent> events = new ArrayList<>();
String myStartKey = startKey;
......
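The synchronization added to listEvents above prevents a ConcurrentModificationException when putEvents writes to the sorted map while another thread is iterating it. A minimal, self-contained sketch of the same pattern, using illustrative class and field names rather than the actual repository code:

```java
import java.util.ArrayList;
import java.util.Collection;
import java.util.List;
import java.util.SortedMap;
import java.util.TreeMap;

// Illustrative only: a sorted in-memory event store where both the write path
// and the iterating read path are synchronized, so a writer cannot trigger a
// ConcurrentModificationException while a reader is walking the map.
class AuditEventStoreSketch {
    private final SortedMap<String, String> eventsByKey = new TreeMap<>();

    public synchronized void putEvent(String key, String event) {
        eventsByKey.put(key, event);
    }

    public synchronized List<String> listEvents(String startKey, int maxResults) {
        List<String> result = new ArrayList<>();
        Collection<String> range = (startKey == null)
                ? eventsByKey.values()
                : eventsByKey.tailMap(startKey).values();
        for (String event : range) {
            if (result.size() >= maxResults) {
                break;
            }
            result.add(event);
        }
        return result;
    }
}
```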
......@@ -22,6 +22,7 @@ import static org.apache.atlas.repository.graph.GraphHelper.EDGE_LABEL_PREFIX;
import static org.apache.atlas.repository.graph.GraphHelper.string;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Collections;
import java.util.HashSet;
import java.util.Iterator;
......@@ -70,7 +71,7 @@ public abstract class DeleteHandler {
* @param instanceVertices
* @throws AtlasException
*/
public void deleteEntities(List<AtlasVertex> instanceVertices) throws AtlasException {
public void deleteEntities(Collection<AtlasVertex> instanceVertices) throws AtlasException {
RequestContext requestContext = RequestContext.get();
Set<AtlasVertex> deletionCandidateVertices = new HashSet<>();
......
......@@ -17,10 +17,15 @@
*/
package org.apache.atlas.repository.graph;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import org.apache.atlas.AtlasException;
import org.apache.atlas.repository.graphdb.AtlasVertex;
import org.apache.atlas.typesystem.ITypedInstance;
import org.apache.atlas.typesystem.ITypedReferenceableInstance;
import org.apache.atlas.typesystem.persistence.Id;
import org.apache.atlas.typesystem.types.AttributeInfo;
import org.apache.atlas.typesystem.types.DataTypes;
import org.apache.atlas.typesystem.types.EnumValue;
......@@ -29,23 +34,22 @@ import org.apache.commons.lang.StringUtils;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
public class FullTextMapper {
private static final Logger LOG = LoggerFactory.getLogger(FullTextMapper.class);
private final GraphToTypedInstanceMapper graphToTypedInstanceMapper;
private final TypedInstanceToGraphMapper typedInstanceToGraphMapper;
private static final GraphHelper graphHelper = GraphHelper.getInstance();
private static final String FULL_TEXT_DELIMITER = " ";
private final Map<String, ITypedReferenceableInstance> instanceCache;
FullTextMapper(GraphToTypedInstanceMapper graphToTypedInstanceMapper) {
FullTextMapper(TypedInstanceToGraphMapper typedInstanceToGraphMapper,
GraphToTypedInstanceMapper graphToTypedInstanceMapper) {
this.graphToTypedInstanceMapper = graphToTypedInstanceMapper;
this.typedInstanceToGraphMapper = typedInstanceToGraphMapper;
instanceCache = new HashMap<>();
}
......@@ -126,8 +130,12 @@ public class FullTextMapper {
case CLASS:
if (followReferences) {
String refGuid = ((ITypedReferenceableInstance) value).getId()._getId();
AtlasVertex refVertex = graphHelper.getVertexForGUID(refGuid);
Id refId = ((ITypedReferenceableInstance) value).getId();
String refGuid = refId._getId();
AtlasVertex refVertex = typedInstanceToGraphMapper.lookupVertex(refId);
if(refVertex == null) {
refVertex = graphHelper.getVertexForGUID(refGuid);
}
return mapRecursive(refVertex, false);
}
break;
......
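The change above consults the vertex lookup held by TypedInstanceToGraphMapper (lookupVertex) before falling back to a GUID query against the graph. A minimal sketch of that cache-then-fallback pattern in isolation; the class and resolver interface here are illustrative, not Atlas APIs:

```java
import java.util.HashMap;
import java.util.Map;

// Illustrative only: check an in-memory, per-request cache before paying for a
// graph lookup by GUID. The cache is expected to hold vertices that were
// already created or resolved earlier in the same request.
class CachedGuidLookup<V> {
    interface GraphResolver<T> {
        T resolveByGuid(String guid);
    }

    private final Map<String, V> requestCache = new HashMap<>();
    private final GraphResolver<V> graphResolver;

    CachedGuidLookup(GraphResolver<V> graphResolver) {
        this.graphResolver = graphResolver;
    }

    void cache(String guid, V vertex) {
        requestCache.put(guid, vertex);
    }

    V lookup(String guid) {
        V vertex = requestCache.get(guid);
        if (vertex == null) {
            vertex = graphResolver.resolveByGuid(guid); // fall back to the graph
        }
        return vertex;
    }
}
```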
......@@ -19,9 +19,11 @@
package org.apache.atlas.repository.graph;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Collections;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import org.apache.atlas.AtlasClient;
import org.apache.atlas.AtlasException;
......@@ -357,8 +359,7 @@ public class GraphBackedMetadataRepository implements MetadataRepository {
instanceToGraphMapper.mapTypedInstanceToGraph(TypedInstanceToGraphMapper.Operation.UPDATE_FULL,
entitiesUpdated);
RequestContext requestContext = RequestContext.get();
return new AtlasClient.EntityResult(requestContext.getCreatedEntityIds(),
requestContext.getUpdatedEntityIds(), requestContext.getDeletedEntityIds());
return createEntityResultFromContext(requestContext);
} catch (AtlasException e) {
throw new RepositoryException(e);
}
......@@ -375,13 +376,14 @@ public class GraphBackedMetadataRepository implements MetadataRepository {
TypedInstanceToGraphMapper instanceToGraphMapper = new TypedInstanceToGraphMapper(graphToInstanceMapper, deleteHandler);
instanceToGraphMapper.mapTypedInstanceToGraph(TypedInstanceToGraphMapper.Operation.UPDATE_PARTIAL, entity);
RequestContext requestContext = RequestContext.get();
return new AtlasClient.EntityResult(requestContext.getCreatedEntityIds(),
requestContext.getUpdatedEntityIds(), requestContext.getDeletedEntityIds());
return createEntityResultFromContext(requestContext);
} catch (AtlasException e) {
throw new RepositoryException(e);
}
}
@Override
@GraphTransaction
public AtlasClient.EntityResult deleteEntities(List<String> guids) throws RepositoryException {
......@@ -390,32 +392,41 @@ public class GraphBackedMetadataRepository implements MetadataRepository {
throw new IllegalArgumentException("guids must be non-null and non-empty");
}
List<AtlasVertex> vertices = new ArrayList<>(guids.size());
for (String guid : guids) {
if (guid == null) {
LOG.warn("deleteEntities: Ignoring null guid");
continue;
}
try {
AtlasVertex instanceVertex = graphHelper.getVertexForGUID(guid);
vertices.add(instanceVertex);
} catch (EntityNotFoundException e) {
// Retrieve vertices for requested guids.
Map<String, AtlasVertex> vertices = graphHelper.getVerticesForGUIDs(guids);
Collection<AtlasVertex> deletionCandidates = vertices.values();
if(LOG.isDebugEnabled()) {
for(String guid : guids) {
if(! vertices.containsKey(guid)) {
// Entity does not exist - treat as non-error, since the caller
// wanted to delete the entity and it's already gone.
LOG.info("Deletion request ignored for non-existent entity with guid {}", guid);
LOG.debug("Deletion request ignored for non-existent entity with guid " + guid);
}
}
}
if (deletionCandidates.isEmpty()) {
LOG.info("No deletion candidate entities were found for guids %s", guids);
return new AtlasClient.EntityResult(Collections.<String>emptyList(), Collections.<String>emptyList(), Collections.<String>emptyList());
}
try {
deleteHandler.deleteEntities(vertices);
deleteHandler.deleteEntities(deletionCandidates);
}
catch (AtlasException e) {
throw new RepositoryException(e);
}
RequestContext requestContext = RequestContext.get();
return new AtlasClient.EntityResult(requestContext.getCreatedEntityIds(),
requestContext.getUpdatedEntityIds(), requestContext.getDeletedEntityIds());
return createEntityResultFromContext(requestContext);
}
private AtlasClient.EntityResult createEntityResultFromContext(RequestContext requestContext) {
return new AtlasClient.EntityResult(
requestContext.getCreatedEntityIds(),
requestContext.getUpdatedEntityIds(),
requestContext.getDeletedEntityIds());
}
public AtlasGraph getGraph() {
......
......@@ -18,9 +18,20 @@
package org.apache.atlas.repository.graph;
import com.google.common.annotations.VisibleForTesting;
import com.google.common.collect.BiMap;
import com.google.common.collect.HashBiMap;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collection;
import java.util.Collections;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.Objects;
import java.util.Set;
import java.util.Stack;
import java.util.UUID;
import org.apache.atlas.ApplicationProperties;
import org.apache.atlas.AtlasException;
import org.apache.atlas.RequestContext;
......@@ -51,13 +62,17 @@ import org.apache.atlas.typesystem.types.Multiplicity;
import org.apache.atlas.typesystem.types.TypeSystem;
import org.apache.atlas.typesystem.types.ValueConversionException;
import org.apache.atlas.typesystem.types.utils.TypesUtil;
import org.apache.atlas.util.AttributeValueMap;
import org.apache.atlas.util.IndexedInstance;
import org.apache.atlas.utils.ParamChecker;
import org.apache.commons.lang.StringUtils;
import org.codehaus.jettison.json.JSONArray;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.util.*;
import com.google.common.annotations.VisibleForTesting;
import com.google.common.collect.BiMap;
import com.google.common.collect.HashBiMap;
/**
* Utility class for graph operations.
......@@ -516,6 +531,60 @@ public final class GraphHelper {
return findVertex(Constants.GUID_PROPERTY_KEY, guid);
}
/**
* Finds the Vertices that correspond to the given property values. Property
* values that are not found in the graph will not be in the map.
*
* @return propertyValue to AtlasVertex map with the result.
*/
private Map<String, AtlasVertex> getVerticesForPropertyValues(String property, List<String> values)
throws RepositoryException {
if(values.isEmpty()) {
return Collections.emptyMap();
}
Collection<String> nonNullValues = new HashSet<>(values.size());
for(String value : values) {
if(value != null) {
nonNullValues.add(value);
}
}
//create graph query that finds vertices with the given property values
AtlasGraphQuery query = graph.query();
query.in(property, nonNullValues);
Iterable<AtlasVertex> results = query.vertices();
Map<String, AtlasVertex> result = new HashMap<>(values.size());
//Process the results, adding each vertex that was found to the result map,
//keyed by the value of the property it was matched on.
for(AtlasVertex vertex : results) {
if(vertex.exists()) {
String propertyValue = vertex.getProperty(property, String.class);
if(LOG.isDebugEnabled()) {
LOG.debug("Found a vertex {} with {} = {}", string(vertex), property, propertyValue);
}
result.put(propertyValue, vertex);
}
}
return result;
}
/**
* Finds the Vertices that correspond to the given GUIDs. GUIDs
* that are not found in the graph will not be in the map.
*
* @return GUID to AtlasVertex map with the result.
*/
public Map<String, AtlasVertex> getVerticesForGUIDs(List<String> guids)
throws RepositoryException {
return getVerticesForPropertyValues(Constants.GUID_PROPERTY_KEY, guids);
}
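A hedged usage sketch for the new batch lookup; the GUID values, the example class, and the exception's package are assumptions for illustration:

```java
import java.util.Arrays;
import java.util.List;
import java.util.Map;

import org.apache.atlas.repository.RepositoryException;
import org.apache.atlas.repository.graph.GraphHelper;
import org.apache.atlas.repository.graphdb.AtlasVertex;

public class BatchGuidLookupExample {
    public static void main(String[] args) throws RepositoryException {
        // Resolve several GUIDs with a single graph query; GUIDs that do not
        // exist in the graph are simply absent from the returned map.
        List<String> guids = Arrays.asList("guid-1", "guid-2", "guid-3");
        Map<String, AtlasVertex> found = GraphHelper.getInstance().getVerticesForGUIDs(guids);

        for (String guid : guids) {
            if (!found.containsKey(guid)) {
                System.out.println("No vertex found for guid " + guid);
            }
        }
    }
}
```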
public static String getQualifiedNameForMapKey(String prefix, String key) {
return prefix + "." + key;
}
......@@ -638,6 +707,112 @@ public final class GraphHelper {
}
/**
* Finds vertices that match at least one unique attribute of the instances specified. The AtlasVertex at a given index in the result corresponds
* to the IReferenceableInstance at that same index that was passed in. The number of elements in the resultant list is guaranteed to match the
* number of instances that were passed in. If no vertex is found for a given instance, that entry will be null in the resultant list.
*
*
* @param classType
* @param instancesForClass
* @return
* @throws AtlasException
*/
public List<AtlasVertex> getVerticesForInstancesByUniqueAttribute(ClassType classType, List<? extends IReferenceableInstance> instancesForClass) throws AtlasException {
//For each attribute, need to figure out what values to search for and which instance(s)
//those values correspond to.
Map<String, AttributeValueMap> map = new HashMap<String, AttributeValueMap>();
for (AttributeInfo attributeInfo : classType.fieldMapping().fields.values()) {
if (attributeInfo.isUnique) {
String propertyKey = getQualifiedFieldName(classType, attributeInfo.name);
AttributeValueMap mapForAttribute = new AttributeValueMap();
for(int idx = 0; idx < instancesForClass.size(); idx++) {
IReferenceableInstance instance = instancesForClass.get(idx);
Object value = instance.get(attributeInfo.name);
mapForAttribute.put(value, instance, idx);
}
map.put(propertyKey, mapForAttribute);
}
}
AtlasVertex[] result = new AtlasVertex[instancesForClass.size()];
if(map.isEmpty()) {
//no unique attributes
return Arrays.asList(result);
}
//construct gremlin query
AtlasGraphQuery query = graph.query();
query.has(Constants.ENTITY_TYPE_PROPERTY_KEY, classType.getName());
query.has(Constants.STATE_PROPERTY_KEY,Id.EntityState.ACTIVE.name());
List<AtlasGraphQuery> orChildren = new ArrayList<AtlasGraphQuery>();
//build up an or expression to find vertices which match at least one of the unique attribute constraints
//For each unique attribute, we add a within clause to match vertices that have a value of that attribute
//that matches the value in some instance.
for(Map.Entry<String, AttributeValueMap> entry : map.entrySet()) {
AtlasGraphQuery orChild = query.createChildQuery();
String propertyName = entry.getKey();
AttributeValueMap valueMap = entry.getValue();
Set<Object> values = valueMap.getAttributeValues();
if(values.size() == 1) {
orChild.has(propertyName, values.iterator().next());
}
else if(values.size() > 1) {
orChild.in(propertyName, values);
}
orChildren.add(orChild);
}
if(orChildren.size() == 1) {
AtlasGraphQuery child = orChildren.get(0);
query.addConditionsFrom(child);
}
else if(orChildren.size() > 1) {
query.or(orChildren);
}
Iterable<AtlasVertex> queryResult = query.vertices();
for(AtlasVertex matchingVertex : queryResult) {
Collection<IndexedInstance> matches = getInstancesForVertex(map, matchingVertex);
for(IndexedInstance wrapper : matches) {
result[wrapper.getIndex()]= matchingVertex;
}
}
return Arrays.asList(result);
}
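A sketch of the index-alignment contract described in the javadoc above: the returned list has one slot per input instance, holding either the matching vertex or null. The wrapper class and the way instances are obtained are illustrative:

```java
import java.util.List;

import org.apache.atlas.AtlasException;
import org.apache.atlas.repository.graph.GraphHelper;
import org.apache.atlas.repository.graphdb.AtlasVertex;
import org.apache.atlas.typesystem.IReferenceableInstance;
import org.apache.atlas.typesystem.types.ClassType;

public class UniqueAttributeLookupSketch {
    // instances are assumed to be already converted/validated by the caller.
    static void resolve(ClassType classType, List<? extends IReferenceableInstance> instances)
            throws AtlasException {
        List<AtlasVertex> vertices = GraphHelper.getInstance()
                .getVerticesForInstancesByUniqueAttribute(classType, instances);

        // vertices.size() == instances.size(); a null entry means no existing
        // vertex matched any unique attribute of the instance at that index.
        for (int i = 0; i < instances.size(); i++) {
            if (vertices.get(i) == null) {
                System.out.println("instance at index " + i + " has no existing vertex");
            }
        }
    }
}
```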
//finds the instance(s) that correspond to the given vertex
private Collection<IndexedInstance> getInstancesForVertex(Map<String, AttributeValueMap> map, AtlasVertex foundVertex) {
//loop through the unique attributes. For each attribute, check to see if the vertex property that
//corresponds to that attribute has a value from one or more of the instances that were passed in.
for(Map.Entry<String, AttributeValueMap> entry : map.entrySet()) {
String propertyName = entry.getKey();
AttributeValueMap valueMap = entry.getValue();
Object vertexValue = foundVertex.getProperty(propertyName, Object.class);
Collection<IndexedInstance> instances = valueMap.get(vertexValue);
if(instances != null && instances.size() > 0) {
//return first match. Let the underlying graph determine if this is a problem
//(i.e. whether the other unique attributes can be changed safely to match what
//the user requested).
return instances;
}
//try another attribute
}
return Collections.emptyList();
}
/**
* Guid and AtlasVertex combo
*/
public static class VertexInfo {
......
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
* <p>
* http://www.apache.org/licenses/LICENSE-2.0
* <p>
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.atlas.repository.graph;
import java.util.ArrayList;
import java.util.Collection;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Set;
import org.apache.atlas.AtlasException;
import org.apache.atlas.typesystem.IReferenceableInstance;
import org.apache.atlas.typesystem.ITypedReferenceableInstance;
import org.apache.atlas.typesystem.persistence.Id;
import org.apache.atlas.typesystem.types.AttributeInfo;
import org.apache.atlas.typesystem.types.ClassType;
import org.apache.atlas.typesystem.types.DataTypes;
import org.apache.atlas.typesystem.types.IDataType;
import org.apache.atlas.typesystem.types.Multiplicity;
import org.apache.atlas.typesystem.types.TypeSystem;
import org.apache.atlas.typesystem.types.DataTypes.TypeCategory;
/**
* Helper class for TypedInstanceToGraphMapper. Determines which instances
* should be loaded by GUID and which ones should be loaded by unique attribute.
* In addition, it sorts the instances that should be loaded by unique
* attribute by class.
*
*/
public class VertexLookupContext {
private final TypedInstanceToGraphMapper mapper;
private static final TypeSystem typeSystem = TypeSystem.getInstance();
private Map<ClassType,List<IReferenceableInstance>> instancesWithoutGuids = new HashMap<>();
private Set<Id> guidsToLookup = new HashSet<>();
/**
* @param typedInstanceToGraphMapper
*/
VertexLookupContext(TypedInstanceToGraphMapper typedInstanceToGraphMapper) {
mapper = typedInstanceToGraphMapper;
}
/**
* Adds an instance to be loaded.
*
*/
public void addInstance(IReferenceableInstance instance) throws AtlasException {
ClassType classType = typeSystem.getDataType(ClassType.class, instance.getTypeName());
ITypedReferenceableInstance newInstance = classType.convert(instance, Multiplicity.REQUIRED);
findReferencedInstancesToPreLoad(newInstance);
Id id = instance.getId();
if(mapper.lookupVertex(id) == null) {
if(id.isAssigned()) {
guidsToLookup.add(id);
}
else {
addToClassMap(classType, instance);
}
}
}
/**
* Returns the instances that should be loaded by unique attribute, sorted by
* class.
*
*/
public Map<ClassType,List<IReferenceableInstance>> getInstancesToLoadByUniqueAttribute() {
return instancesWithoutGuids;
}
/**
* Returns the Ids of the instances that should be loaded by GUID
*
* @return
*/
public Set<Id> getInstancesToLoadByGuid() {
return guidsToLookup;
}
private void addToClassMap(ClassType classType, IReferenceableInstance instance) throws AtlasException {
List<IReferenceableInstance> toUpdate = instancesWithoutGuids.get(classType);
if(toUpdate == null) {
toUpdate = new ArrayList<>();
instancesWithoutGuids.put(classType, toUpdate);
}
toUpdate.add(instance);
}
private void findReferencedInstancesToPreLoad(ITypedReferenceableInstance newInstance) throws AtlasException {
//pre-load vertices for reference fields
for(AttributeInfo info : newInstance.fieldMapping().fields.values()) {
if(info.dataType().getTypeCategory() == TypeCategory.CLASS) {
ITypedReferenceableInstance newAttributeValue = (ITypedReferenceableInstance)newInstance.get(info.name);
addAdditionalInstance(newAttributeValue);
}
if(info.dataType().getTypeCategory() == TypeCategory.ARRAY) {
IDataType elementType = ((DataTypes.ArrayType) info.dataType()).getElemType();
if(elementType.getTypeCategory() == TypeCategory.CLASS) {
List<ITypedReferenceableInstance> newElements = (List) newInstance.get(info.name);
addAdditionalInstances(newElements);
}
}
if(info.dataType().getTypeCategory() == TypeCategory.MAP) {
IDataType elementType = ((DataTypes.MapType) info.dataType()).getValueType();
if(elementType.getTypeCategory() == TypeCategory.CLASS) {
Map<Object, ITypedReferenceableInstance> newAttribute =
(Map<Object, ITypedReferenceableInstance>) newInstance.get(info.name);
if(newAttribute != null) {
addAdditionalInstances(newAttribute.values());
}
}
}
}
}
private void addAdditionalInstance(ITypedReferenceableInstance instance) {
if(instance == null) {
return;
}
Id id = mapper.getExistingId(instance);
if(! id.isAssigned()) {
return;
}
guidsToLookup.add(id);
}
private void addAdditionalInstances(Collection<ITypedReferenceableInstance> newElements) {
if(newElements != null) {
for(ITypedReferenceableInstance instance: newElements) {
addAdditionalInstance(instance);
}
}
}
}
\ No newline at end of file
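A hedged sketch of how the mapper might drive this class; the wrapper method, and the assumption that it lives in the same package as VertexLookupContext (whose constructor is package-private), are illustrative rather than taken from the patch:

```java
package org.apache.atlas.repository.graph; // assumed, so the package-private constructor is visible

import java.util.List;
import java.util.Map;
import java.util.Set;

import org.apache.atlas.AtlasException;
import org.apache.atlas.typesystem.IReferenceableInstance;
import org.apache.atlas.typesystem.persistence.Id;
import org.apache.atlas.typesystem.types.ClassType;

class VertexLookupContextUsageSketch {
    static void planLookups(TypedInstanceToGraphMapper mapper,
                            List<IReferenceableInstance> instances) throws AtlasException {
        VertexLookupContext context = new VertexLookupContext(mapper);
        for (IReferenceableInstance instance : instances) {
            context.addInstance(instance); // buckets the instance and its class-type references
        }

        // Instances with assigned ids can be resolved with one batched GUID query...
        Set<Id> guidsToLookup = context.getInstancesToLoadByGuid();

        // ...while unassigned instances are grouped by class, so each class needs
        // only a single unique-attribute query.
        Map<ClassType, List<IReferenceableInstance>> byUniqueAttribute =
                context.getInstancesToLoadByUniqueAttribute();

        // The real mapper would now fetch vertices for both buckets via GraphHelper
        // and seed its per-request vertex cache before mapping attributes.
    }
}
```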
......@@ -367,6 +367,10 @@ public class DefaultMetadataService implements MetadataService, ActiveStateChang
private void validateUniqueAttribute(String entityType, String attributeName) throws AtlasException {
ClassType type = typeSystem.getDataType(ClassType.class, entityType);
AttributeInfo attribute = type.fieldMapping().fields.get(attributeName);
if(attribute == null) {
throw new IllegalArgumentException(
String.format("%s is not an attribute in %s", attributeName, entityType));
}
if (!attribute.isUnique) {
throw new IllegalArgumentException(
String.format("%s.%s is not a unique attribute", entityType, attributeName));
......
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.atlas.util;
import java.util.Collection;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Map;
import java.util.Set;
import org.apache.atlas.repository.graph.GraphHelper;
import org.apache.atlas.typesystem.IReferenceableInstance;
/**
* Map of attribute values to a collection of IndexedInstances with that attribute value.
*
* @see GraphHelper#getVerticesForInstancesByUniqueAttribute
*
*/
public class AttributeValueMap {
//need collection in case they are adding the same entity twice?
private Map<Object,Collection<IndexedInstance>> valueMap_ = new HashMap<>();
public void put(Object value, IReferenceableInstance instance, int index) {
IndexedInstance wrapper = new IndexedInstance(instance, index);
Collection<IndexedInstance> existingValues = valueMap_.get(value);
if(existingValues == null) {
//only expect 1 value
existingValues = new HashSet<>(1);
valueMap_.put(value, existingValues);
}
existingValues.add(wrapper);
}
public Collection<IndexedInstance> get(Object value) {
return valueMap_.get(value);
}
public Set<Object> getAttributeValues() {
return valueMap_.keySet();
}
}
\ No newline at end of file
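A hedged sketch of the lookup this map supports: index input instances by a unique attribute value, then recover which instance(s), and at which positions, a matching vertex corresponds to. The Referenceable construction and attribute values are illustrative:

```java
import java.util.Collection;

import org.apache.atlas.typesystem.IReferenceableInstance;
import org.apache.atlas.typesystem.Referenceable;
import org.apache.atlas.util.AttributeValueMap;
import org.apache.atlas.util.IndexedInstance;

public class AttributeValueMapExample {
    public static void main(String[] args) {
        // Index two instances by the value of a unique attribute ("name" here),
        // remembering each instance's position in the original input list.
        IReferenceableInstance db0 = new Referenceable("hive_db");
        IReferenceableInstance db1 = new Referenceable("hive_db");

        AttributeValueMap byName = new AttributeValueMap();
        byName.put("sales", db0, 0);
        byName.put("reporting", db1, 1);

        // Given a value read back from a matching vertex, recover the instance(s)
        // and their original indices so the result list can stay index-aligned.
        Collection<IndexedInstance> matches = byName.get("reporting");
        for (IndexedInstance match : matches) {
            System.out.println("matched instance at index " + match.getIndex()
                    + " of type " + match.getInstance().getTypeName());
        }
    }
}
```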
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.atlas.util;
import org.apache.atlas.repository.graph.GraphHelper;
import org.apache.atlas.typesystem.IReferenceableInstance;
/**
* Data structure that stores an IReferenceableInstance and its location within
* a list.
*
* @see GraphHelper#getVerticesForInstancesByUniqueAttribute
*/
public class IndexedInstance {
private final IReferenceableInstance instance_;
private final int index_;
public IndexedInstance(IReferenceableInstance instance, int index) {
super();
this.instance_ = instance;
this.index_ = index;
}
public IReferenceableInstance getInstance() {
return instance_;
}
public int getIndex() {
return index_;
}
@Override
public int hashCode() {
return instance_.hashCode();
}
@Override
public boolean equals(Object other) {
if(!(other instanceof IndexedInstance)) {
return false;
}
IndexedInstance otherInstance = (IndexedInstance)other;
return instance_.equals(otherInstance.getInstance());
}
}
\ No newline at end of file
......@@ -18,15 +18,42 @@
package org.apache.atlas.repository.graph;
import static org.testng.Assert.assertEquals;
import static org.testng.Assert.assertFalse;
import static org.testng.Assert.assertNotNull;
import static org.testng.Assert.assertNull;
import static org.testng.Assert.assertTrue;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.Set;
import javax.inject.Inject;
import org.apache.atlas.AtlasException;
import org.apache.atlas.RepositoryMetadataModule;
import org.apache.atlas.TestUtils;
import org.apache.atlas.repository.graph.GraphHelper.VertexInfo;
import org.apache.atlas.repository.graphdb.AtlasEdge;
import org.apache.atlas.repository.graphdb.AtlasGraph;
import org.apache.atlas.repository.graphdb.AtlasVertex;
import org.apache.atlas.services.MetadataService;
import org.apache.atlas.type.AtlasTypeRegistry;
import org.apache.atlas.typesystem.ITypedReferenceableInstance;
import org.apache.atlas.typesystem.Referenceable;
import org.apache.atlas.typesystem.TypesDef;
import org.apache.atlas.typesystem.exception.TypeNotFoundException;
import org.apache.atlas.typesystem.json.InstanceSerialization;
import org.apache.atlas.typesystem.json.TypesSerialization;
import org.apache.atlas.typesystem.types.ClassType;
import org.apache.atlas.typesystem.types.Multiplicity;
import org.apache.atlas.typesystem.types.TypeSystem;
import org.codehaus.jettison.json.JSONArray;
import org.testng.Assert;
import org.testng.annotations.AfterClass;
import org.testng.annotations.BeforeClass;
......@@ -34,20 +61,6 @@ import org.testng.annotations.DataProvider;
import org.testng.annotations.Guice;
import org.testng.annotations.Test;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Iterator;
import java.util.List;
import java.util.Set;
import javax.inject.Inject;
import static org.testng.Assert.assertEquals;
import static org.testng.Assert.assertFalse;
import static org.testng.Assert.assertNotNull;
import static org.testng.Assert.assertNull;
import static org.testng.Assert.assertTrue;
@Guice(modules = RepositoryMetadataModule.class)
public class GraphHelperTest {
......@@ -69,6 +82,9 @@ public class GraphHelperTest {
}
@Inject
private MetadataService metadataService;
@Inject
private GraphBackedMetadataRepository repositoryService;
private TypeSystem typeSystem;
......@@ -82,7 +98,12 @@ public class GraphHelperTest {
typeSystem.reset();
new GraphBackedSearchIndexer(typeRegistry);
TypesDef typesDef = TestUtils.defineHiveTypes();
try {
metadataService.getTypeDefinition(TestUtils.TABLE_TYPE);
} catch (TypeNotFoundException e) {
metadataService.createType(TypesSerialization.toJson(typesDef));
}
TestUtils.defineDeptEmployeeTypes(typeSystem);
}
......@@ -92,6 +113,43 @@ public class GraphHelperTest {
}
@Test
public void testGetInstancesByUniqueAttributes() throws Exception {
GraphHelper helper = GraphHelper.getInstance();
List<ITypedReferenceableInstance> instances = new ArrayList<>();
List<String> guids = new ArrayList<>();
TypeSystem ts = TypeSystem.getInstance();
ClassType dbType = ts.getDataType(ClassType.class, TestUtils.DATABASE_TYPE);
for(int i = 0; i < 10; i++) {
Referenceable db = TestUtils.createDBEntity();
String guid = createInstance(db);
ITypedReferenceableInstance instance = convert(db, dbType);
instances.add(instance);
guids.add(guid);
}
//lookup vertices via getVertexForInstanceByUniqueAttributes
List<AtlasVertex> vertices = helper.getVerticesForInstancesByUniqueAttribute(dbType, instances);
assertEquals(instances.size(), vertices.size());
//assert vertex matches the vertex we get through getVertexForGUID
for(int i = 0; i < instances.size(); i++) {
String guid = guids.get(i);
AtlasVertex foundVertex = vertices.get(i);
AtlasVertex expectedVertex = helper.getVertexForGUID(guid);
assertEquals(foundVertex, expectedVertex);
}
}
@Test
public void testGetVerticesForGUIDSWithDuplicates() throws Exception {
ITypedReferenceableInstance hrDept = TestUtils.createDeptEg1(TypeSystem.getInstance());
List<String> result = repositoryService.createEntities(hrDept);
String guid = result.get(0);
Map<String, AtlasVertex> verticesForGUIDs = GraphHelper.getInstance().getVerticesForGUIDs(Arrays.asList(guid, guid));
Assert.assertEquals(verticesForGUIDs.size(), 1);
Assert.assertTrue(verticesForGUIDs.containsKey(guid));
}
@Test
public void testGetCompositeGuidsAndVertices() throws Exception {
ITypedReferenceableInstance hrDept = TestUtils.createDeptEg1(typeSystem);
List<String> createdGuids = repositoryService.createEntities(hrDept);
......@@ -144,4 +202,22 @@ public class GraphHelperTest {
assertFalse(iterator.hasNext());
assertFalse(iterator.hasNext());
}
private ITypedReferenceableInstance convert(Referenceable instance, ClassType type) throws AtlasException {
return type.convert(instance, Multiplicity.REQUIRED);
}
private String createInstance(Referenceable entity) throws Exception {
TestUtils.resetRequestContext();
String entityjson = InstanceSerialization.toJson(entity, true);
JSONArray entitiesJson = new JSONArray();
entitiesJson.put(entityjson);
List<String> guids = metadataService.createEntities(entitiesJson.toString());
if (guids != null && guids.size() > 0) {
return guids.get(guids.size() - 1);
}
return null;
}
}
......@@ -77,6 +77,7 @@ atlas.graph.index.search.solr.zookeeper-url=${solr.zk.address}
######### Hive Lineage Configs #########
## Schema
atlas.lineage.schema.query.hive_table=hive_table where __guid='%s'\, columns
atlas.lineage.schema.query.hive_table_v1=hive_table_v1 where __guid='%s'\, columns
######### Notification Configs #########
atlas.notification.embedded=true
......
......@@ -144,7 +144,7 @@ public class EntityResource {
throw new WebApplicationException(Servlets.getErrorResponse(e, Response.Status.CONFLICT));
} catch (ValueConversionException ve) {
LOG.error("Unable to persist entity instance due to a deserialization error entityDef={}", entityJson, ve);
throw new WebApplicationException(Servlets.getErrorResponse(ve.getCause(), Response.Status.BAD_REQUEST));
throw new WebApplicationException(Servlets.getErrorResponse(ve.getCause() != null ? ve.getCause() : ve, Response.Status.BAD_REQUEST));
} catch (AtlasException | IllegalArgumentException e) {
LOG.error("Unable to persist entity instance entityDef={}", entityJson, e);
throw new WebApplicationException(Servlets.getErrorResponse(e, Response.Status.BAD_REQUEST));
......
......@@ -51,8 +51,6 @@ import static org.testng.Assert.assertTrue;
@Guice(modules = NotificationModule.class)
public class EntityNotificationIT extends BaseResourceIT {
private static final String ENTITIES = "api/atlas/entities";
private static final String TRAITS = "traits";
private final String DATABASE_NAME = "db" + randomString();
private final String TABLE_NAME = "table" + randomString();
@Inject
......@@ -66,7 +64,7 @@ public class EntityNotificationIT extends BaseResourceIT {
public void setUp() throws Exception {
super.setUp();
createTypeDefinitionsV1();
Referenceable HiveDBInstance = createHiveDBInstanceV1(DATABASE_NAME);
Referenceable HiveDBInstance = createHiveDBInstanceBuiltIn(DATABASE_NAME);
dbId = createInstance(HiveDBInstance);
List<NotificationConsumer<EntityNotification>> consumers =
......@@ -77,13 +75,13 @@ public class EntityNotificationIT extends BaseResourceIT {
@Test
public void testCreateEntity() throws Exception {
Referenceable tableInstance = createHiveTableInstanceV1(DATABASE_NAME, TABLE_NAME, dbId);
Referenceable tableInstance = createHiveTableInstanceBuiltIn(DATABASE_NAME, TABLE_NAME, dbId);
tableId = createInstance(tableInstance);
final String guid = tableId._getId();
waitForNotification(notificationConsumer, MAX_WAIT_TIME,
newNotificationPredicate(EntityNotification.OperationType.ENTITY_CREATE, HIVE_TABLE_TYPE, guid));
newNotificationPredicate(EntityNotification.OperationType.ENTITY_CREATE, HIVE_TABLE_TYPE_BUILTIN, guid));
}
@Test(dependsOnMethods = "testCreateEntity")
......@@ -96,29 +94,29 @@ public class EntityNotificationIT extends BaseResourceIT {
atlasClientV1.updateEntityAttribute(guid, property, newValue);
waitForNotification(notificationConsumer, MAX_WAIT_TIME,
newNotificationPredicate(EntityNotification.OperationType.ENTITY_UPDATE, HIVE_TABLE_TYPE, guid));
newNotificationPredicate(EntityNotification.OperationType.ENTITY_UPDATE, HIVE_TABLE_TYPE_BUILTIN, guid));
}
@Test
public void testDeleteEntity() throws Exception {
final String tableName = "table-" + randomString();
final String dbName = "db-" + randomString();
Referenceable HiveDBInstance = createHiveDBInstanceV1(dbName);
Referenceable HiveDBInstance = createHiveDBInstanceBuiltIn(dbName);
Id dbId = createInstance(HiveDBInstance);
Referenceable tableInstance = createHiveTableInstanceV1(dbName, tableName, dbId);
Referenceable tableInstance = createHiveTableInstanceBuiltIn(dbName, tableName, dbId);
final Id tableId = createInstance(tableInstance);
final String guid = tableId._getId();
waitForNotification(notificationConsumer, MAX_WAIT_TIME,
newNotificationPredicate(EntityNotification.OperationType.ENTITY_CREATE, HIVE_TABLE_TYPE, guid));
newNotificationPredicate(EntityNotification.OperationType.ENTITY_CREATE, HIVE_TABLE_TYPE_BUILTIN, guid));
final String name = (String) tableInstance.get(AtlasClient.REFERENCEABLE_ATTRIBUTE_NAME);
atlasClientV1.deleteEntity(HIVE_TABLE_TYPE, AtlasClient.REFERENCEABLE_ATTRIBUTE_NAME, name);
atlasClientV1.deleteEntity(HIVE_TABLE_TYPE_BUILTIN, AtlasClient.REFERENCEABLE_ATTRIBUTE_NAME, name);
waitForNotification(notificationConsumer, MAX_WAIT_TIME,
newNotificationPredicate(EntityNotification.OperationType.ENTITY_DELETE, HIVE_TABLE_TYPE, guid));
newNotificationPredicate(EntityNotification.OperationType.ENTITY_DELETE, HIVE_TABLE_TYPE_BUILTIN, guid));
}
@Test(dependsOnMethods = "testCreateEntity")
......@@ -141,7 +139,7 @@ public class EntityNotificationIT extends BaseResourceIT {
atlasClientV1.addTrait(guid, traitInstance);
EntityNotification entityNotification = waitForNotification(notificationConsumer, MAX_WAIT_TIME,
newNotificationPredicate(EntityNotification.OperationType.TRAIT_ADD, HIVE_TABLE_TYPE, guid));
newNotificationPredicate(EntityNotification.OperationType.TRAIT_ADD, HIVE_TABLE_TYPE_BUILTIN, guid));
IReferenceableInstance entity = entityNotification.getEntity();
assertTrue(entity.getTraits().contains(traitName));
......@@ -166,7 +164,7 @@ public class EntityNotificationIT extends BaseResourceIT {
atlasClientV1.addTrait(guid, traitInstance);
entityNotification = waitForNotification(notificationConsumer, MAX_WAIT_TIME,
newNotificationPredicate(EntityNotification.OperationType.TRAIT_ADD, HIVE_TABLE_TYPE, guid));
newNotificationPredicate(EntityNotification.OperationType.TRAIT_ADD, HIVE_TABLE_TYPE_BUILTIN, guid));
allTraits = entityNotification.getAllTraits();
allTraitNames = new LinkedList<>();
......@@ -187,7 +185,7 @@ public class EntityNotificationIT extends BaseResourceIT {
atlasClientV1.deleteTrait(guid, traitName);
EntityNotification entityNotification = waitForNotification(notificationConsumer, MAX_WAIT_TIME,
newNotificationPredicate(EntityNotification.OperationType.TRAIT_DELETE, HIVE_TABLE_TYPE, guid));
newNotificationPredicate(EntityNotification.OperationType.TRAIT_DELETE, HIVE_TABLE_TYPE_BUILTIN, guid));
assertFalse(entityNotification.getEntity().getTraits().contains(traitName));
}
......
......@@ -68,7 +68,7 @@ public class NotificationHookConsumerIT extends BaseResourceIT {
new Referenceable(randomString())));
//send valid message
final Referenceable entity = new Referenceable(DATABASE_TYPE);
final Referenceable entity = new Referenceable(DATABASE_TYPE_BUILTIN);
String dbName = "db" + randomString();
entity.set(NAME, dbName);
entity.set(DESCRIPTION, randomString());
......@@ -79,7 +79,7 @@ public class NotificationHookConsumerIT extends BaseResourceIT {
waitFor(MAX_WAIT_TIME, new Predicate() {
@Override
public boolean evaluate() throws Exception {
JSONArray results = searchByDSL(String.format("%s where name='%s'", DATABASE_TYPE, entity.get(NAME)));
JSONArray results = searchByDSL(String.format("%s where name='%s'", DATABASE_TYPE_BUILTIN, entity.get(NAME)));
return results.length() == 1;
}
});
......@@ -87,7 +87,7 @@ public class NotificationHookConsumerIT extends BaseResourceIT {
@Test
public void testCreateEntity() throws Exception {
final Referenceable entity = new Referenceable(DATABASE_TYPE);
final Referenceable entity = new Referenceable(DATABASE_TYPE_BUILTIN);
String dbName = "db" + randomString();
entity.set(NAME, dbName);
entity.set(DESCRIPTION, randomString());
......@@ -99,13 +99,13 @@ public class NotificationHookConsumerIT extends BaseResourceIT {
waitFor(MAX_WAIT_TIME, new Predicate() {
@Override
public boolean evaluate() throws Exception {
JSONArray results = searchByDSL(String.format("%s where qualifiedName='%s'", DATABASE_TYPE, entity.get(QUALIFIED_NAME)));
JSONArray results = searchByDSL(String.format("%s where qualifiedName='%s'", DATABASE_TYPE_BUILTIN, entity.get(QUALIFIED_NAME)));
return results.length() == 1;
}
});
//Assert that user passed in hook message is used in audit
Referenceable instance = atlasClientV1.getEntity(DATABASE_TYPE, QUALIFIED_NAME, (String) entity.get(QUALIFIED_NAME));
Referenceable instance = atlasClientV1.getEntity(DATABASE_TYPE_BUILTIN, QUALIFIED_NAME, (String) entity.get(QUALIFIED_NAME));
List<EntityAuditEvent> events =
atlasClientV1.getEntityAuditEvents(instance.getId()._getId(), (short) 1);
assertEquals(events.size(), 1);
......@@ -114,7 +114,7 @@ public class NotificationHookConsumerIT extends BaseResourceIT {
@Test
public void testUpdateEntityPartial() throws Exception {
final Referenceable entity = new Referenceable(DATABASE_TYPE);
final Referenceable entity = new Referenceable(DATABASE_TYPE_BUILTIN);
final String dbName = "db" + randomString();
entity.set(NAME, dbName);
entity.set(DESCRIPTION, randomString());
......@@ -123,26 +123,26 @@ public class NotificationHookConsumerIT extends BaseResourceIT {
atlasClientV1.createEntity(entity);
final Referenceable newEntity = new Referenceable(DATABASE_TYPE);
final Referenceable newEntity = new Referenceable(DATABASE_TYPE_BUILTIN);
newEntity.set("owner", randomString());
sendHookMessage(
new HookNotification.EntityPartialUpdateRequest(TEST_USER, DATABASE_TYPE, QUALIFIED_NAME, dbName, newEntity));
new HookNotification.EntityPartialUpdateRequest(TEST_USER, DATABASE_TYPE_BUILTIN, QUALIFIED_NAME, dbName, newEntity));
waitFor(MAX_WAIT_TIME, new Predicate() {
@Override
public boolean evaluate() throws Exception {
Referenceable localEntity = atlasClientV1.getEntity(DATABASE_TYPE, QUALIFIED_NAME, dbName);
Referenceable localEntity = atlasClientV1.getEntity(DATABASE_TYPE_BUILTIN, QUALIFIED_NAME, dbName);
return (localEntity.get("owner") != null && localEntity.get("owner").equals(newEntity.get("owner")));
}
});
//Its partial update and un-set fields are not updated
Referenceable actualEntity = atlasClientV1.getEntity(DATABASE_TYPE, QUALIFIED_NAME, dbName);
Referenceable actualEntity = atlasClientV1.getEntity(DATABASE_TYPE_BUILTIN, QUALIFIED_NAME, dbName);
assertEquals(actualEntity.get(DESCRIPTION), entity.get(DESCRIPTION));
}
@Test
public void testUpdatePartialUpdatingQualifiedName() throws Exception {
final Referenceable entity = new Referenceable(DATABASE_TYPE);
final Referenceable entity = new Referenceable(DATABASE_TYPE_BUILTIN);
final String dbName = "db" + randomString();
entity.set(NAME, dbName);
entity.set(DESCRIPTION, randomString());
......@@ -151,29 +151,29 @@ public class NotificationHookConsumerIT extends BaseResourceIT {
atlasClientV1.createEntity(entity);
final Referenceable newEntity = new Referenceable(DATABASE_TYPE);
final Referenceable newEntity = new Referenceable(DATABASE_TYPE_BUILTIN);
final String newName = "db" + randomString();
newEntity.set(QUALIFIED_NAME, newName);
sendHookMessage(
new HookNotification.EntityPartialUpdateRequest(TEST_USER, DATABASE_TYPE, QUALIFIED_NAME, dbName, newEntity));
new HookNotification.EntityPartialUpdateRequest(TEST_USER, DATABASE_TYPE_BUILTIN, QUALIFIED_NAME, dbName, newEntity));
waitFor(MAX_WAIT_TIME, new Predicate() {
@Override
public boolean evaluate() throws Exception {
JSONArray results = searchByDSL(String.format("%s where qualifiedName='%s'", DATABASE_TYPE, newName));
JSONArray results = searchByDSL(String.format("%s where qualifiedName='%s'", DATABASE_TYPE_BUILTIN, newName));
return results.length() == 1;
}
});
//no entity with the old qualified name
JSONArray results = searchByDSL(String.format("%s where qualifiedName='%s'", DATABASE_TYPE, dbName));
JSONArray results = searchByDSL(String.format("%s where qualifiedName='%s'", DATABASE_TYPE_BUILTIN, dbName));
assertEquals(results.length(), 0);
}
@Test
public void testDeleteByQualifiedName() throws Exception {
Referenceable entity = new Referenceable(DATABASE_TYPE);
Referenceable entity = new Referenceable(DATABASE_TYPE_BUILTIN);
final String dbName = "db" + randomString();
entity.set(NAME, dbName);
entity.set(DESCRIPTION, randomString());
......@@ -183,7 +183,7 @@ public class NotificationHookConsumerIT extends BaseResourceIT {
final String dbId = atlasClientV1.createEntity(entity).get(0);
sendHookMessage(
new HookNotification.EntityDeleteRequest(TEST_USER, DATABASE_TYPE, QUALIFIED_NAME, dbName));
new HookNotification.EntityDeleteRequest(TEST_USER, DATABASE_TYPE_BUILTIN, QUALIFIED_NAME, dbName));
waitFor(MAX_WAIT_TIME, new Predicate() {
@Override
public boolean evaluate() throws Exception {
......@@ -195,7 +195,7 @@ public class NotificationHookConsumerIT extends BaseResourceIT {
@Test
public void testUpdateEntityFullUpdate() throws Exception {
Referenceable entity = new Referenceable(DATABASE_TYPE);
Referenceable entity = new Referenceable(DATABASE_TYPE_BUILTIN);
final String dbName = "db" + randomString();
entity.set(NAME, dbName);
entity.set(DESCRIPTION, randomString());
......@@ -204,7 +204,7 @@ public class NotificationHookConsumerIT extends BaseResourceIT {
atlasClientV1.createEntity(entity);
final Referenceable newEntity = new Referenceable(DATABASE_TYPE);
final Referenceable newEntity = new Referenceable(DATABASE_TYPE_BUILTIN);
newEntity.set(NAME, randomString());
newEntity.set(DESCRIPTION, randomString());
newEntity.set("owner", randomString());
......@@ -216,12 +216,12 @@ public class NotificationHookConsumerIT extends BaseResourceIT {
waitFor(MAX_WAIT_TIME, new Predicate() {
@Override
public boolean evaluate() throws Exception {
JSONArray results = searchByDSL(String.format("%s where qualifiedName='%s'", DATABASE_TYPE, newEntity.get(QUALIFIED_NAME)));
JSONArray results = searchByDSL(String.format("%s where qualifiedName='%s'", DATABASE_TYPE_BUILTIN, newEntity.get(QUALIFIED_NAME)));
return results.length() == 1;
}
});
Referenceable actualEntity = atlasClientV1.getEntity(DATABASE_TYPE, QUALIFIED_NAME, dbName);
Referenceable actualEntity = atlasClientV1.getEntity(DATABASE_TYPE_BUILTIN, QUALIFIED_NAME, dbName);
assertEquals(actualEntity.get(DESCRIPTION), newEntity.get(DESCRIPTION));
assertEquals(actualEntity.get("owner"), newEntity.get("owner"));
}
......
......@@ -95,6 +95,12 @@ public abstract class BaseResourceIT {
@BeforeClass
public void setUp() throws Exception {
//set high timeouts so that tests do not fail due to read timeouts while you
//are stepping through the code in a debugger
ApplicationProperties.get().setProperty("atlas.client.readTimeoutMSecs", "100000000");
ApplicationProperties.get().setProperty("atlas.client.connectTimeoutMSecs", "100000000");
Configuration configuration = ApplicationProperties.get();
atlasUrls = configuration.getStringArray(ATLAS_REST_ADDRESS);
......@@ -221,13 +227,18 @@ public abstract class BaseResourceIT {
try {
if (!update) {
entity = entitiesClientV2.createEntity(atlasEntity);
assertNotNull(entity);
assertNotNull(entity.getEntitiesByOperation(EntityMutations.EntityOperation.CREATE));
assertTrue(entity.getEntitiesByOperation(EntityMutations.EntityOperation.CREATE).size() > 0);
return entity.getEntitiesByOperation(EntityMutations.EntityOperation.CREATE).get(0);
} else {
entity = entitiesClientV2.updateEntity(atlasEntity);
}
assertNotNull(entity);
assertNotNull(entity.getEntitiesByOperation(EntityMutations.EntityOperation.UPDATE));
assertTrue(entity.getEntitiesByOperation(EntityMutations.EntityOperation.UPDATE).size() > 0);
return entity.getEntitiesByOperation(EntityMutations.EntityOperation.UPDATE).get(0);
}
} catch (AtlasServiceException e) {
LOG.error("Entity {} failed", update ? "update" : "creation", entity);
}
......@@ -242,10 +253,20 @@ public abstract class BaseResourceIT {
return modifyEntity(atlasEntity, true);
}
protected static final String DATABASE_TYPE = "hive_db";
protected static final String HIVE_TABLE_TYPE = "hive_table";
protected static final String COLUMN_TYPE = "hive_column";
protected static final String HIVE_PROCESS_TYPE = "hive_process";
protected static final String DATABASE_TYPE_V2 = "hive_db_v2";
protected static final String HIVE_TABLE_TYPE_V2 = "hive_table_v2";
protected static final String COLUMN_TYPE_V2 = "hive_column_v2";
protected static final String HIVE_PROCESS_TYPE_V2 = "hive_process_v2";
protected static final String DATABASE_TYPE = "hive_db_v1";
protected static final String HIVE_TABLE_TYPE = "hive_table_v1";
protected static final String COLUMN_TYPE = "hive_column_v1";
protected static final String HIVE_PROCESS_TYPE = "hive_process_v1";
protected static final String DATABASE_TYPE_BUILTIN = "hive_db";
protected static final String HIVE_TABLE_TYPE_BUILTIN = "hive_table";
protected static final String COLUMN_TYPE_BUILTIN = "hive_column";
protected static final String HIVE_PROCESS_TYPE_BUILTIN = "hive_process";
protected void createTypeDefinitionsV1() throws Exception {
HierarchicalTypeDefinition<ClassType> dbClsDef = TypesUtil
......@@ -323,7 +344,7 @@ public abstract class BaseResourceIT {
protected void createTypeDefinitionsV2() throws Exception {
AtlasEntityDef dbClsTypeDef = AtlasTypeUtil.createClassTypeDef(
DATABASE_TYPE,
DATABASE_TYPE_V2,
null,
AtlasTypeUtil.createUniqueRequiredAttrDef(NAME, "string"),
AtlasTypeUtil.createRequiredAttrDef(DESCRIPTION, "string"),
......@@ -332,7 +353,7 @@ public abstract class BaseResourceIT {
AtlasTypeUtil.createOptionalAttrDef("createTime", "int"));
AtlasEntityDef columnClsDef = AtlasTypeUtil
.createClassTypeDef(COLUMN_TYPE, null,
.createClassTypeDef(COLUMN_TYPE_V2, null,
AtlasTypeUtil.createOptionalAttrDef(NAME, "string"),
AtlasTypeUtil.createOptionalAttrDef("dataType", "string"),
AtlasTypeUtil.createOptionalAttrDef("comment", "string"));
......@@ -348,20 +369,21 @@ public abstract class BaseResourceIT {
));
AtlasEntityDef tblClsDef = AtlasTypeUtil
.createClassTypeDef(HIVE_TABLE_TYPE,
.createClassTypeDef(HIVE_TABLE_TYPE_V2,
ImmutableSet.of("DataSet"),
AtlasTypeUtil.createOptionalAttrDef("owner", "string"),
AtlasTypeUtil.createOptionalAttrDef("createTime", "long"),
AtlasTypeUtil.createOptionalAttrDef("lastAccessTime", "date"),
AtlasTypeUtil.createOptionalAttrDef("temporary", "boolean"),
AtlasTypeUtil.createRequiredAttrDef("db", DATABASE_TYPE),
AtlasTypeUtil.createRequiredAttrDef("columns", DataTypes.arrayTypeName(COLUMN_TYPE)),
AtlasTypeUtil.createRequiredAttrDef("db", DATABASE_TYPE_V2),
//some tests don't set the columns field or set it to null...
AtlasTypeUtil.createOptionalAttrDef("columns", DataTypes.arrayTypeName(COLUMN_TYPE_V2)),
AtlasTypeUtil.createOptionalAttrDef("tableType", "tableType"),
AtlasTypeUtil.createOptionalAttrDef("serde1", "serdeType"),
AtlasTypeUtil.createOptionalAttrDef("serde2", "serdeType"));
AtlasEntityDef loadProcessClsDef = AtlasTypeUtil
.createClassTypeDef(HIVE_PROCESS_TYPE,
.createClassTypeDef(HIVE_PROCESS_TYPE_V2,
ImmutableSet.of("Process"),
AtlasTypeUtil.createOptionalAttrDef("userName", "string"),
AtlasTypeUtil.createOptionalAttrDef("startTime", "int"),
......@@ -415,7 +437,7 @@ public abstract class BaseResourceIT {
return RandomStringUtils.randomAlphabetic(1) + RandomStringUtils.randomAlphanumeric(9);
}
protected Referenceable createHiveTableInstanceV1(String dbName, String tableName, Id dbId) throws Exception {
protected Referenceable createHiveTableInstanceBuiltIn(String dbName, String tableName, Id dbId) throws Exception {
Map<String, Object> values = new HashMap<>();
values.put(NAME, dbName);
values.put(DESCRIPTION, "foo database");
......@@ -426,7 +448,7 @@ public abstract class BaseResourceIT {
values.put("location", "/tmp");
Referenceable databaseInstance = new Referenceable(dbId._getId(), dbId.getTypeName(), values);
Referenceable tableInstance =
new Referenceable(HIVE_TABLE_TYPE, "classification", "pii", "phi", "pci", "sox", "sec", "finance");
new Referenceable(HIVE_TABLE_TYPE_BUILTIN, "classification", "pii", "phi", "pci", "sox", "sec", "finance");
tableInstance.set(NAME, tableName);
tableInstance.set(AtlasClient.REFERENCEABLE_ATTRIBUTE_NAME, tableName);
tableInstance.set("db", databaseInstance);
......@@ -458,7 +480,7 @@ public abstract class BaseResourceIT {
protected AtlasEntityWithAssociations createHiveTableInstanceV2(AtlasEntity databaseInstance, String tableName) throws Exception {
AtlasEntityWithAssociations tableInstance =
new AtlasEntityWithAssociations(HIVE_TABLE_TYPE);
new AtlasEntityWithAssociations(HIVE_TABLE_TYPE_V2);
tableInstance.setClassifications(
Arrays.asList(new AtlasClassification("classification"),
new AtlasClassification("pii"),
......@@ -497,29 +519,34 @@ public abstract class BaseResourceIT {
return tableInstance;
}
protected Referenceable createHiveDBInstanceBuiltIn(String dbName) {
Referenceable databaseInstance = new Referenceable(DATABASE_TYPE_BUILTIN);
databaseInstance.set(NAME, dbName);
databaseInstance.set(QUALIFIED_NAME, dbName);
databaseInstance.set(CLUSTER_NAME, randomString());
databaseInstance.set(DESCRIPTION, "foo database");
return databaseInstance;
}
protected Referenceable createHiveDBInstanceV1(String dbName) {
Referenceable databaseInstance = new Referenceable(DATABASE_TYPE);
databaseInstance.set(NAME, dbName);
databaseInstance.set(QUALIFIED_NAME, dbName);
databaseInstance.set(CLUSTER_NAME, randomString());
databaseInstance.set(DESCRIPTION, "foo database");
return databaseInstance;
}
protected AtlasEntity createHiveDBInstanceV2(String dbName) {
AtlasEntity atlasEntity = new AtlasEntity(DATABASE_TYPE);
AtlasEntity atlasEntity = new AtlasEntity(DATABASE_TYPE_V2);
atlasEntity.setAttribute(NAME, dbName);
atlasEntity.setAttribute(QUALIFIED_NAME, dbName);
atlasEntity.setAttribute(DESCRIPTION, "foo database");
atlasEntity.setAttribute(AtlasClient.REFERENCEABLE_ATTRIBUTE_NAME, dbName);
atlasEntity.setAttribute("owner", "user1");
atlasEntity.setAttribute(CLUSTER_NAME, "cl1");
atlasEntity.setAttribute("parameters", Collections.EMPTY_MAP);
atlasEntity.setAttribute("location", "/tmp");
atlasEntity.setAttribute("locationUri", "/tmp");
atlasEntity.setAttribute("createTime",1000);
return atlasEntity;
}
public interface Predicate {
/**
......
......@@ -149,8 +149,7 @@ public class DataSetLineageJerseyResourceIT extends BaseResourceIT {
LOG.info("JsonRow - {}", row);
Assert.assertNotNull(row.getString("name"));
Assert.assertNotNull(row.getString("comment"));
Assert.assertNotNull(row.getString("type"));
Assert.assertEquals(row.getString("$typeName$"), "hive_column");
Assert.assertEquals(row.getString("$typeName$"), "hive_column_v1");
}
}
......@@ -168,8 +167,7 @@ public class DataSetLineageJerseyResourceIT extends BaseResourceIT {
LOG.info("JsonRow - {}", row);
Assert.assertNotNull(row.getString("name"));
Assert.assertNotNull(row.getString("comment"));
Assert.assertNotNull(row.getString("type"));
Assert.assertEquals(row.getString("$typeName$"), "hive_column");
Assert.assertEquals(row.getString("$typeName$"), "hive_column_v1");
}
}
......
......@@ -56,12 +56,12 @@ public class EntityDiscoveryJerseyResourceIT extends BaseResourceIT {
super.setUp();
dbName = "db" + randomString();
createTypes();
createInstance(createHiveDBInstanceV1(dbName));
createInstance(createHiveDBInstanceBuiltIn(dbName));
}
@Test
public void testSearchByDSL() throws Exception {
String dslQuery = "from "+ DATABASE_TYPE + " " + QUALIFIED_NAME + "=\"" + dbName + "\"";
String dslQuery = "from "+ DATABASE_TYPE_BUILTIN + " " + QUALIFIED_NAME + "=\"" + dbName + "\"";
AtlasSearchResult searchResult = discoveryClientV2.dslSearch(dslQuery);
assertNotNull(searchResult);
......@@ -73,7 +73,7 @@ public class EntityDiscoveryJerseyResourceIT extends BaseResourceIT {
assertEquals(entities.size(), 1);
AtlasEntityHeaderWithAssociations dbEntity = entities.get(0);
assertEquals(dbEntity.getTypeName(), DATABASE_TYPE);
assertEquals(dbEntity.getTypeName(), DATABASE_TYPE_BUILTIN);
assertEquals(dbEntity.getDisplayText(), dbName);
assertEquals(dbEntity.getStatus(), Status.ACTIVE);
assertNotNull(dbEntity.getGuid());
......@@ -83,7 +83,7 @@ public class EntityDiscoveryJerseyResourceIT extends BaseResourceIT {
@Test
public void testSearchDSLLimits() throws Exception {
String dslQuery = "from "+ DATABASE_TYPE + " " + QUALIFIED_NAME + "=\"" + dbName + "\"";
String dslQuery = "from "+ DATABASE_TYPE_BUILTIN + " " + QUALIFIED_NAME + "=\"" + dbName + "\"";
AtlasSearchResult searchResult = discoveryClientV2.dslSearch(dslQuery);
assertNotNull(searchResult);
......@@ -124,7 +124,7 @@ public class EntityDiscoveryJerseyResourceIT extends BaseResourceIT {
@Test
public void testSearchUsingDSL() throws Exception {
String query = "from "+ DATABASE_TYPE + " " + QUALIFIED_NAME + "=\"" + dbName + "\"";
String query = "from "+ DATABASE_TYPE_BUILTIN + " " + QUALIFIED_NAME + "=\"" + dbName + "\"";
AtlasSearchResult searchResult = discoveryClientV2.dslSearch(query);
assertNotNull(searchResult);
......@@ -135,7 +135,7 @@ public class EntityDiscoveryJerseyResourceIT extends BaseResourceIT {
assertEquals(entities.size(), 1);
AtlasEntityHeaderWithAssociations dbEntity = entities.get(0);
assertEquals(dbEntity.getTypeName(), DATABASE_TYPE);
assertEquals(dbEntity.getTypeName(), DATABASE_TYPE_BUILTIN);
assertEquals(dbEntity.getDisplayText(), dbName);
assertEquals(dbEntity.getStatus(), Status.ACTIVE);
......@@ -166,7 +166,7 @@ public class EntityDiscoveryJerseyResourceIT extends BaseResourceIT {
AtlasFullTextResult result = fullTextResults.get(0);
assertNotNull(result.getEntity());
assertEquals(result.getEntity().getTypeName(), DATABASE_TYPE);
assertEquals(result.getEntity().getTypeName(), DATABASE_TYPE_BUILTIN);
assertNotNull(result.getScore());
//API works without limit and offset
......
......@@ -66,7 +66,7 @@ public class MetadataDiscoveryJerseyResourceIT extends BaseResourceIT {
@Test
public void testSearchByDSL() throws Exception {
String dslQuery = "from "+ DATABASE_TYPE + " qualifiedName=\"" + dbName + "\"";
String dslQuery = "from "+ DATABASE_TYPE + " name=\"" + dbName + "\"";
MultivaluedMap<String, String> queryParams = new MultivaluedMapImpl();
queryParams.add("query", dslQuery);
JSONObject response = atlasClientV1.callAPIWithQueryParams(AtlasClient.API.SEARCH_DSL, queryParams);
......@@ -89,7 +89,7 @@ public class MetadataDiscoveryJerseyResourceIT extends BaseResourceIT {
public void testSearchDSLLimits() throws Exception {
//search without new parameters of limit and offset should work
String dslQuery = "from "+ DATABASE_TYPE + " qualifiedName=\"" + dbName + "\"";
String dslQuery = "from "+ DATABASE_TYPE + " name=\"" + dbName + "\"";
MultivaluedMap<String, String> queryParams = new MultivaluedMapImpl();
queryParams.add("query", dslQuery);
JSONObject response = atlasClientV1.callAPIWithQueryParams(AtlasClient.API.SEARCH_DSL, queryParams);
......@@ -146,7 +146,7 @@ public class MetadataDiscoveryJerseyResourceIT extends BaseResourceIT {
@Test
public void testSearchUsingGremlin() throws Exception {
String query = "g.V.has('type', 'hive_db').toList()";
String query = "g.V.has('type', '" + BaseResourceIT.HIVE_TABLE_TYPE + "').toList()";
MultivaluedMap<String, String> queryParams = new MultivaluedMapImpl();
queryParams.add("query", query);
......@@ -162,7 +162,7 @@ public class MetadataDiscoveryJerseyResourceIT extends BaseResourceIT {
@Test
public void testSearchUsingDSL() throws Exception {
//String query = "from dsl_test_type";
String query = "from "+ DATABASE_TYPE + " qualifiedName=\"" + dbName +"\"";
String query = "from "+ DATABASE_TYPE + " name=\"" + dbName +"\"";
MultivaluedMap<String, String> queryParams = new MultivaluedMapImpl();
queryParams.add("query", query);
JSONObject response = atlasClientV1.callAPIWithQueryParams(AtlasClient.API.SEARCH, queryParams);
......