Commit 89f70609 by Jeff Hagelberg

ATLAS-1114: Performance improvements for create/update entity (2 of 2)

parent 6cd68119
@@ -18,17 +18,17 @@
package org.apache.atlas.repository.audit;
-import org.apache.atlas.AtlasException;
-import org.apache.atlas.EntityAuditEvent;
-import com.google.inject.Singleton;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
import java.util.SortedMap;
import java.util.TreeMap;
+import org.apache.atlas.AtlasException;
+import org.apache.atlas.EntityAuditEvent;
+import com.google.inject.Singleton;
/**
* Entity audit repository where audit events are stored in-memory. Used only for integration tests
*/
@@ -50,8 +50,10 @@ public class InMemoryEntityAuditRepository implements EntityAuditRepository {
}
}
+//synchronized to avoid concurrent modification exception that occurs if events are added
+//while we are iterating through the map
@Override
-public List<EntityAuditEvent> listEvents(String entityId, String startKey, short maxResults)
+public synchronized List<EntityAuditEvent> listEvents(String entityId, String startKey, short maxResults)
throws AtlasException {
List<EntityAuditEvent> events = new ArrayList<>();
String myStartKey = startKey;
...
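The synchronized listEvents change above protects the TreeMap-backed store: its iterators are fail-fast, so an event added by another thread while a reader walks the map can trigger ConcurrentModificationException. A minimal sketch of the locking pattern, not taken from this commit (class and method names are illustrative):

    import java.util.ArrayList;
    import java.util.List;
    import java.util.SortedMap;
    import java.util.TreeMap;

    class AuditStoreSketch {
        private final SortedMap<String, String> events = new TreeMap<>();

        // writer and reader share one monitor, so the reader's iterator
        // never observes a structural change mid-iteration
        synchronized void putEvent(String key, String event) {
            events.put(key, event);
        }

        synchronized List<String> listEvents(String startKey) {
            List<String> result = new ArrayList<>();
            for (String event : events.tailMap(startKey).values()) {
                result.add(event);
            }
            return result;
        }
    }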
@@ -22,6 +22,7 @@ import static org.apache.atlas.repository.graph.GraphHelper.EDGE_LABEL_PREFIX;
import static org.apache.atlas.repository.graph.GraphHelper.string;
import java.util.ArrayList;
+import java.util.Collection;
import java.util.Collections;
import java.util.HashSet;
import java.util.Iterator;
@@ -70,7 +71,7 @@ public abstract class DeleteHandler {
* @param instanceVertices
* @throws AtlasException
*/
-public void deleteEntities(List<AtlasVertex> instanceVertices) throws AtlasException {
+public void deleteEntities(Collection<AtlasVertex> instanceVertices) throws AtlasException {
RequestContext requestContext = RequestContext.get();
Set<AtlasVertex> deletionCandidateVertices = new HashSet<>();
...
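Widening deleteEntities from List to Collection lets callers hand over any collection view, for example the values() view of the GUID-to-vertex map built by the repository, without copying into a list first. A small illustrative sketch, not Atlas code (the names are made up):

    import java.util.Collection;
    import java.util.LinkedHashMap;
    import java.util.Map;

    class DeleteCallerSketch {
        // a Collection parameter accepts lists, sets, and map value views alike
        static void deleteAll(Collection<String> vertices) {
            for (String vertex : vertices) {
                System.out.println("deleting " + vertex);
            }
        }

        public static void main(String[] args) {
            Map<String, String> verticesByGuid = new LinkedHashMap<>();
            verticesByGuid.put("guid-1", "vertex-1");
            verticesByGuid.put("guid-2", "vertex-2");
            deleteAll(verticesByGuid.values());   // no copy into an ArrayList needed
        }
    }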
@@ -17,10 +17,15 @@
*/
package org.apache.atlas.repository.graph;
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
import org.apache.atlas.AtlasException;
import org.apache.atlas.repository.graphdb.AtlasVertex;
import org.apache.atlas.typesystem.ITypedInstance;
import org.apache.atlas.typesystem.ITypedReferenceableInstance;
+import org.apache.atlas.typesystem.persistence.Id;
import org.apache.atlas.typesystem.types.AttributeInfo;
import org.apache.atlas.typesystem.types.DataTypes;
import org.apache.atlas.typesystem.types.EnumValue;
@@ -29,23 +34,22 @@ import org.apache.commons.lang.StringUtils;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
-import java.util.HashMap;
-import java.util.List;
-import java.util.Map;
public class FullTextMapper {
private static final Logger LOG = LoggerFactory.getLogger(FullTextMapper.class);
private final GraphToTypedInstanceMapper graphToTypedInstanceMapper;
+private final TypedInstanceToGraphMapper typedInstanceToGraphMapper;
private static final GraphHelper graphHelper = GraphHelper.getInstance();
private static final String FULL_TEXT_DELIMITER = " ";
private final Map<String, ITypedReferenceableInstance> instanceCache;
-FullTextMapper(GraphToTypedInstanceMapper graphToTypedInstanceMapper) {
+FullTextMapper(TypedInstanceToGraphMapper typedInstanceToGraphMapper,
+GraphToTypedInstanceMapper graphToTypedInstanceMapper) {
this.graphToTypedInstanceMapper = graphToTypedInstanceMapper;
+this.typedInstanceToGraphMapper = typedInstanceToGraphMapper;
instanceCache = new HashMap<>();
}
@@ -126,8 +130,12 @@ public class FullTextMapper {
case CLASS:
if (followReferences) {
-String refGuid = ((ITypedReferenceableInstance) value).getId()._getId();
-AtlasVertex refVertex = graphHelper.getVertexForGUID(refGuid);
+Id refId = ((ITypedReferenceableInstance) value).getId();
+String refGuid = refId._getId();
+AtlasVertex refVertex = typedInstanceToGraphMapper.lookupVertex(refId);
+if(refVertex == null) {
+refVertex = graphHelper.getVertexForGUID(refGuid);
+}
return mapRecursive(refVertex, false);
}
break;
...
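The mapRecursive change above tries the mapper's request-scoped vertex lookup first and only queries the graph by GUID on a miss. A generic cache-then-fallback sketch of that pattern (the types and loader function are stand-ins, not Atlas APIs):

    import java.util.HashMap;
    import java.util.Map;
    import java.util.function.Function;

    class LookupWithFallbackSketch<K, V> {
        private final Map<K, V> requestScopedCache = new HashMap<>();
        private final Function<K, V> slowLookup;   // e.g. a graph query by GUID

        LookupWithFallbackSketch(Function<K, V> slowLookup) {
            this.slowLookup = slowLookup;
        }

        V lookup(K key) {
            V cached = requestScopedCache.get(key);
            if (cached != null) {
                return cached;          // fast path: already resolved in this request
            }
            V loaded = slowLookup.apply(key);
            requestScopedCache.put(key, loaded);
            return loaded;
        }
    }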
@@ -19,9 +19,11 @@
package org.apache.atlas.repository.graph;
import java.util.ArrayList;
+import java.util.Collection;
import java.util.Collections;
import java.util.Iterator;
import java.util.List;
+import java.util.Map;
import org.apache.atlas.AtlasClient;
import org.apache.atlas.AtlasException;
@@ -306,7 +308,7 @@ public class GraphBackedMetadataRepository implements MetadataRepository {
if (LOG.isDebugEnabled()) {
LOG.debug("Deleting trait={} from entity={}", traitNameToBeDeleted, guid);
}
AtlasVertex instanceVertex = graphHelper.getVertexForGUID(guid);
List<String> traitNames = GraphHelper.getTraitNames(instanceVertex);
@@ -331,7 +333,7 @@ public class GraphBackedMetadataRepository implements MetadataRepository {
}
}
private void updateTraits(AtlasVertex instanceVertex, List<String> traitNames) {
// remove the key
instanceVertex.removeProperty(Constants.TRAIT_NAMES_PROPERTY_KEY);
@@ -357,8 +359,7 @@ public class GraphBackedMetadataRepository implements MetadataRepository {
instanceToGraphMapper.mapTypedInstanceToGraph(TypedInstanceToGraphMapper.Operation.UPDATE_FULL,
entitiesUpdated);
RequestContext requestContext = RequestContext.get();
-return new AtlasClient.EntityResult(requestContext.getCreatedEntityIds(),
-requestContext.getUpdatedEntityIds(), requestContext.getDeletedEntityIds());
+return createEntityResultFromContext(requestContext);
} catch (AtlasException e) {
throw new RepositoryException(e);
}
@@ -375,13 +376,14 @@
TypedInstanceToGraphMapper instanceToGraphMapper = new TypedInstanceToGraphMapper(graphToInstanceMapper, deleteHandler);
instanceToGraphMapper.mapTypedInstanceToGraph(TypedInstanceToGraphMapper.Operation.UPDATE_PARTIAL, entity);
RequestContext requestContext = RequestContext.get();
-return new AtlasClient.EntityResult(requestContext.getCreatedEntityIds(),
-requestContext.getUpdatedEntityIds(), requestContext.getDeletedEntityIds());
+return createEntityResultFromContext(requestContext);
} catch (AtlasException e) {
throw new RepositoryException(e);
}
}
@Override
@GraphTransaction
public AtlasClient.EntityResult deleteEntities(List<String> guids) throws RepositoryException {
@@ -390,32 +392,41 @@ public class GraphBackedMetadataRepository implements MetadataRepository {
throw new IllegalArgumentException("guids must be non-null and non-empty");
}
-List<AtlasVertex> vertices = new ArrayList<>(guids.size());
-for (String guid : guids) {
-if (guid == null) {
-LOG.warn("deleteEntities: Ignoring null guid");
-continue;
-}
-try {
-AtlasVertex instanceVertex = graphHelper.getVertexForGUID(guid);
-vertices.add(instanceVertex);
-} catch (EntityNotFoundException e) {
-// Entity does not exist - treat as non-error, since the caller
-// wanted to delete the entity and it's already gone.
-LOG.info("Deletion request ignored for non-existent entity with guid {}", guid);
-}
-}
+// Retrieve vertices for requested guids.
+Map<String, AtlasVertex> vertices = graphHelper.getVerticesForGUIDs(guids);
+Collection<AtlasVertex> deletionCandidates = vertices.values();
+if(LOG.isDebugEnabled()) {
+for(String guid : guids) {
+if(! vertices.containsKey(guid)) {
+// Entity does not exist - treat as non-error, since the caller
+// wanted to delete the entity and it's already gone.
+LOG.debug("Deletion request ignored for non-existent entity with guid " + guid);
+}
+}
+}
+if (deletionCandidates.isEmpty()) {
+LOG.info("No deletion candidate entities were found for guids %s", guids);
+return new AtlasClient.EntityResult(Collections.<String>emptyList(), Collections.<String>emptyList(), Collections.<String>emptyList());
+}
try {
-deleteHandler.deleteEntities(vertices);
+deleteHandler.deleteEntities(deletionCandidates);
}
catch (AtlasException e) {
throw new RepositoryException(e);
}
RequestContext requestContext = RequestContext.get();
-return new AtlasClient.EntityResult(requestContext.getCreatedEntityIds(),
-requestContext.getUpdatedEntityIds(), requestContext.getDeletedEntityIds());
+return createEntityResultFromContext(requestContext);
+}
+
+private AtlasClient.EntityResult createEntityResultFromContext(RequestContext requestContext) {
+return new AtlasClient.EntityResult(
+requestContext.getCreatedEntityIds(),
+requestContext.getUpdatedEntityIds(),
+requestContext.getDeletedEntityIds());
}
public AtlasGraph getGraph() {
...
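The deleteEntities rewrite above replaces a per-GUID getVertexForGUID loop with one batched graphHelper.getVerticesForGUIDs(guids) call, and GUIDs that do not exist simply stay absent from the returned map instead of surfacing as per-GUID EntityNotFoundExceptions. A rough before/after sketch of the lookup shape only, with placeholder query methods rather than the Atlas graph API:

    import java.util.ArrayList;
    import java.util.HashMap;
    import java.util.List;
    import java.util.Map;

    class GuidLookupSketch {
        // before: one graph round trip per GUID, with per-GUID not-found handling
        static List<String> lookupOneByOne(List<String> guids) {
            List<String> vertices = new ArrayList<>();
            for (String guid : guids) {
                String vertex = queryGraphByGuid(guid);
                if (vertex != null) {
                    vertices.add(vertex);
                }
            }
            return vertices;
        }

        // after: all GUIDs resolved in a single batched query;
        // GUIDs with no vertex are simply missing from the result map
        static Map<String, String> lookupBatched(List<String> guids) {
            return queryGraphByGuids(guids);
        }

        // placeholders standing in for the graph access layer
        static String queryGraphByGuid(String guid) {
            return "vertex-for-" + guid;
        }

        static Map<String, String> queryGraphByGuids(List<String> guids) {
            Map<String, String> result = new HashMap<>();
            for (String guid : guids) {
                result.put(guid, "vertex-for-" + guid);
            }
            return result;
        }
    }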
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
* <p>
* http://www.apache.org/licenses/LICENSE-2.0
* <p>
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.atlas.repository.graph;
import java.util.ArrayList;
import java.util.Collection;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Set;
import org.apache.atlas.AtlasException;
import org.apache.atlas.typesystem.IReferenceableInstance;
import org.apache.atlas.typesystem.ITypedReferenceableInstance;
import org.apache.atlas.typesystem.persistence.Id;
import org.apache.atlas.typesystem.types.AttributeInfo;
import org.apache.atlas.typesystem.types.ClassType;
import org.apache.atlas.typesystem.types.DataTypes;
import org.apache.atlas.typesystem.types.IDataType;
import org.apache.atlas.typesystem.types.Multiplicity;
import org.apache.atlas.typesystem.types.TypeSystem;
import org.apache.atlas.typesystem.types.DataTypes.TypeCategory;
/**
* Helper class for TypedInstanceGraphMapper. Determines which instances
* should be loaded by GUID and which ones should be loaded by unique attribute.
* In addition, it sorts the instances that should be loaded by unique
* attribute by class.
*
*/
public class VertexLookupContext {
private final TypedInstanceToGraphMapper mapper;
private static final TypeSystem typeSystem = TypeSystem.getInstance();
private Map<ClassType,List<IReferenceableInstance>> instancesWithoutGuids = new HashMap<>();
private Set<Id> guidsToLookup = new HashSet<>();
/**
* @param typedInstanceToGraphMapper
*/
VertexLookupContext(TypedInstanceToGraphMapper typedInstanceToGraphMapper) {
mapper = typedInstanceToGraphMapper;
}
/**
* Adds an instance to be loaded.
*
*/
public void addInstance(IReferenceableInstance instance) throws AtlasException {
ClassType classType = typeSystem.getDataType(ClassType.class, instance.getTypeName());
ITypedReferenceableInstance newInstance = classType.convert(instance, Multiplicity.REQUIRED);
findReferencedInstancesToPreLoad(newInstance);
Id id = instance.getId();
if(mapper.lookupVertex(id) == null) {
if(id.isAssigned()) {
guidsToLookup.add(id);
}
else {
addToClassMap(classType, instance);
}
}
}
/**
* Returns the instances that should be loaded by unique attribute, sorted by
* class.
*
*/
public Map<ClassType,List<IReferenceableInstance>> getInstancesToLoadByUniqueAttribute() {
return instancesWithoutGuids;
}
/**
* Returns the Ids of the instance that should be loaded by GUID
*
* @return
*/
public Set<Id> getInstancesToLoadByGuid() {
return guidsToLookup;
}
private void addToClassMap(ClassType classType, IReferenceableInstance instance) throws AtlasException {
List<IReferenceableInstance> toUpdate = instancesWithoutGuids.get(classType);
if(toUpdate == null) {
toUpdate = new ArrayList<>();
instancesWithoutGuids.put(classType, toUpdate);
}
toUpdate.add(instance);
}
private void findReferencedInstancesToPreLoad(ITypedReferenceableInstance newInstance) throws AtlasException {
//pre-load vertices for reference fields
for(AttributeInfo info : newInstance.fieldMapping().fields.values()) {
if(info.dataType().getTypeCategory() == TypeCategory.CLASS) {
ITypedReferenceableInstance newAttributeValue = (ITypedReferenceableInstance)newInstance.get(info.name);
addAdditionalInstance(newAttributeValue);
}
if(info.dataType().getTypeCategory() == TypeCategory.ARRAY) {
IDataType elementType = ((DataTypes.ArrayType) info.dataType()).getElemType();
if(elementType.getTypeCategory() == TypeCategory.CLASS) {
List<ITypedReferenceableInstance> newElements = (List) newInstance.get(info.name);
addAdditionalInstances(newElements);
}
}
if(info.dataType().getTypeCategory() == TypeCategory.MAP) {
IDataType elementType = ((DataTypes.MapType) info.dataType()).getValueType();
if(elementType.getTypeCategory() == TypeCategory.CLASS) {
Map<Object, ITypedReferenceableInstance> newAttribute =
(Map<Object, ITypedReferenceableInstance>) newInstance.get(info.name);
if(newAttribute != null) {
addAdditionalInstances(newAttribute.values());
}
}
}
}
}
private void addAdditionalInstance(ITypedReferenceableInstance instance) {
if(instance == null) {
return;
}
Id id = mapper.getExistingId(instance);
if(! id.isAssigned()) {
return;
}
guidsToLookup.add(id);
}
private void addAdditionalInstances(Collection<ITypedReferenceableInstance> newElements) {
if(newElements != null) {
for(ITypedReferenceableInstance instance: newElements) {
addAdditionalInstance(instance);
}
}
}
}
\ No newline at end of file
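A simplified, self-contained sketch of the partitioning idea VertexLookupContext implements: references that already carry an assigned GUID are queued for one batched GUID lookup, while the rest are grouped by type for lookup by unique attribute. The types below are stand-ins, not Atlas classes:

    import java.util.ArrayList;
    import java.util.HashMap;
    import java.util.HashSet;
    import java.util.List;
    import java.util.Map;
    import java.util.Set;

    class LookupPartitionSketch {
        // stand-in for an instance reference: it either has an assigned GUID or it does not
        static class Ref {
            final String typeName;
            final String guidOrNull;
            Ref(String typeName, String guidOrNull) {
                this.typeName = typeName;
                this.guidOrNull = guidOrNull;
            }
        }

        private final Set<String> guidsToLookup = new HashSet<>();
        private final Map<String, List<Ref>> withoutGuidsByType = new HashMap<>();

        void add(Ref ref) {
            if (ref.guidOrNull != null) {
                guidsToLookup.add(ref.guidOrNull);      // resolve later in one GUID batch
            } else {
                List<Ref> sameType = withoutGuidsByType.get(ref.typeName);
                if (sameType == null) {
                    sameType = new ArrayList<>();
                    withoutGuidsByType.put(ref.typeName, sameType);
                }
                sameType.add(ref);                      // resolve later per type by unique attribute
            }
        }

        Set<String> guidsToLookup()                    { return guidsToLookup; }
        Map<String, List<Ref>> instancesWithoutGuids() { return withoutGuidsByType; }
    }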
@@ -367,6 +367,10 @@ public class DefaultMetadataService implements MetadataService, ActiveStateChang
private void validateUniqueAttribute(String entityType, String attributeName) throws AtlasException {
ClassType type = typeSystem.getDataType(ClassType.class, entityType);
AttributeInfo attribute = type.fieldMapping().fields.get(attributeName);
+if(attribute == null) {
+throw new IllegalArgumentException(
+String.format("%s is not an attribute in %s", attributeName, entityType));
+}
if (!attribute.isUnique) {
throw new IllegalArgumentException(
String.format("%s.%s is not a unique attribute", entityType, attributeName));
...
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.atlas.util;
import java.util.Collection;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Map;
import java.util.Set;
import org.apache.atlas.repository.graph.GraphHelper;
import org.apache.atlas.typesystem.IReferenceableInstance;
/**
* Map of attribute values to a collection of IndexedInstances with that attribute value.
*
* @see GraphHelper#getVerticesForInstancesByUniqueAttributes
*
*/
public class AttributeValueMap {
//need collection in case they are adding the same entity twice?
private Map<Object,Collection<IndexedInstance>> valueMap_ = new HashMap<>();
public void put(Object value, IReferenceableInstance instance, int index) {
IndexedInstance wrapper = new IndexedInstance(instance, index);
Collection<IndexedInstance> existingValues = valueMap_.get(value);
if(existingValues == null) {
//only expect 1 value
existingValues = new HashSet<>(1);
valueMap_.put(value, existingValues);
}
existingValues.add(wrapper);
}
public Collection<IndexedInstance> get(Object value) {
return valueMap_.get(value);
}
public Set<Object> getAttributeValues() {
return valueMap_.keySet();
}
}
\ No newline at end of file
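AttributeValueMap lets a batched unique-attribute query be matched back to the positions of the instances that asked for each value. A simplified analogue using plain JDK types (the real class stores IndexedInstance wrappers keyed by attribute value):

    import java.util.ArrayList;
    import java.util.HashMap;
    import java.util.List;
    import java.util.Map;

    class ValuePositionSketch {
        public static void main(String[] args) {
            // unique-attribute values, in caller order
            List<String> requestedNames = new ArrayList<>();
            requestedNames.add("sales_db");
            requestedNames.add("hr_db");
            requestedNames.add("sales_db");   // duplicates are possible, hence a collection per value

            // value -> positions in the request, so one batched query result
            // can be written back into the right slots of the answer
            Map<String, List<Integer>> positionsByValue = new HashMap<>();
            for (int i = 0; i < requestedNames.size(); i++) {
                String value = requestedNames.get(i);
                List<Integer> positions = positionsByValue.get(value);
                if (positions == null) {
                    positions = new ArrayList<>();
                    positionsByValue.put(value, positions);
                }
                positions.add(i);
            }

            System.out.println(positionsByValue.get("sales_db"));   // prints [0, 2]
        }
    }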
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.atlas.util;
import org.apache.atlas.repository.graph.GraphHelper;
import org.apache.atlas.typesystem.IReferenceableInstance;
/**
* Data structure that stores an IReferenceableInstance and its location within
* a list.
*
* @see GraphHelper#getVerticesForInstancesByUniqueAttributes
*/
public class IndexedInstance {
private final IReferenceableInstance instance_;
private final int index_;
public IndexedInstance(IReferenceableInstance instance, int index) {
super();
this.instance_ = instance;
this.index_ = index;
}
public IReferenceableInstance getInstance() {
return instance_;
}
public int getIndex() {
return index_;
}
@Override
public int hashCode() {
return instance_.hashCode();
}
@Override
public boolean equals(Object other) {
if(!(other instanceof IndexedInstance)) {
return false;
}
IndexedInstance otherInstance = (IndexedInstance)other;
return instance_.equals(otherInstance.getInstance());
}
}
\ No newline at end of file
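Note that IndexedInstance delegates equals and hashCode to the wrapped instance and deliberately ignores the index, so two wrappers around the same instance collapse to one entry in a hash-based collection. A tiny stand-alone illustration of that equality choice (Wrapper is a stand-in for IndexedInstance):

    import java.util.HashSet;
    import java.util.Set;

    class WrapperEqualitySketch {
        static class Wrapper {
            final String payload;   // stands in for the wrapped IReferenceableInstance
            final int index;
            Wrapper(String payload, int index) { this.payload = payload; this.index = index; }

            // equality intentionally ignores the index
            @Override public boolean equals(Object o) {
                return (o instanceof Wrapper) && payload.equals(((Wrapper) o).payload);
            }
            @Override public int hashCode() { return payload.hashCode(); }
        }

        public static void main(String[] args) {
            Set<Wrapper> set = new HashSet<>();
            set.add(new Wrapper("db-1", 0));
            set.add(new Wrapper("db-1", 3));
            System.out.println(set.size());   // 1: the same payload at another index collapses
        }
    }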
@@ -18,15 +18,42 @@
package org.apache.atlas.repository.graph;
+import static org.testng.Assert.assertEquals;
+import static org.testng.Assert.assertFalse;
+import static org.testng.Assert.assertNotNull;
+import static org.testng.Assert.assertNull;
+import static org.testng.Assert.assertTrue;
+import java.util.ArrayList;
+import java.util.Arrays;
+import java.util.HashMap;
+import java.util.HashSet;
+import java.util.Iterator;
+import java.util.List;
+import java.util.Map;
+import java.util.Set;
+import javax.inject.Inject;
+import org.apache.atlas.AtlasException;
import org.apache.atlas.RepositoryMetadataModule;
import org.apache.atlas.TestUtils;
import org.apache.atlas.repository.graph.GraphHelper.VertexInfo;
import org.apache.atlas.repository.graphdb.AtlasEdge;
import org.apache.atlas.repository.graphdb.AtlasGraph;
import org.apache.atlas.repository.graphdb.AtlasVertex;
+import org.apache.atlas.services.MetadataService;
import org.apache.atlas.type.AtlasTypeRegistry;
import org.apache.atlas.typesystem.ITypedReferenceableInstance;
+import org.apache.atlas.typesystem.Referenceable;
+import org.apache.atlas.typesystem.TypesDef;
+import org.apache.atlas.typesystem.exception.TypeNotFoundException;
+import org.apache.atlas.typesystem.json.InstanceSerialization;
+import org.apache.atlas.typesystem.json.TypesSerialization;
+import org.apache.atlas.typesystem.types.ClassType;
+import org.apache.atlas.typesystem.types.Multiplicity;
import org.apache.atlas.typesystem.types.TypeSystem;
+import org.codehaus.jettison.json.JSONArray;
import org.testng.Assert;
import org.testng.annotations.AfterClass;
import org.testng.annotations.BeforeClass;
@@ -34,20 +61,6 @@ import org.testng.annotations.DataProvider;
import org.testng.annotations.Guice;
import org.testng.annotations.Test;
-import java.util.HashMap;
-import java.util.HashSet;
-import java.util.Iterator;
-import java.util.List;
-import java.util.Set;
-import javax.inject.Inject;
-import static org.testng.Assert.assertEquals;
-import static org.testng.Assert.assertFalse;
-import static org.testng.Assert.assertNotNull;
-import static org.testng.Assert.assertNull;
-import static org.testng.Assert.assertTrue;
@Guice(modules = RepositoryMetadataModule.class)
public class GraphHelperTest {
@@ -69,6 +82,9 @@ public class GraphHelperTest {
}
@Inject
+private MetadataService metadataService;
+
+@Inject
private GraphBackedMetadataRepository repositoryService;
private TypeSystem typeSystem;
@@ -82,7 +98,12 @@
typeSystem.reset();
new GraphBackedSearchIndexer(typeRegistry);
+TypesDef typesDef = TestUtils.defineHiveTypes();
+try {
+metadataService.getTypeDefinition(TestUtils.TABLE_TYPE);
+} catch (TypeNotFoundException e) {
+metadataService.createType(TypesSerialization.toJson(typesDef));
+}
TestUtils.defineDeptEmployeeTypes(typeSystem);
}
@@ -92,6 +113,43 @@
}
@Test
+public void testGetInstancesByUniqueAttributes() throws Exception {
+GraphHelper helper = GraphHelper.getInstance();
+List<ITypedReferenceableInstance> instances = new ArrayList<>();
+List<String> guids = new ArrayList<>();
+TypeSystem ts = TypeSystem.getInstance();
+ClassType dbType = ts.getDataType(ClassType.class, TestUtils.DATABASE_TYPE);
+for(int i = 0; i < 10; i++) {
+Referenceable db = TestUtils.createDBEntity();
+String guid = createInstance(db);
+ITypedReferenceableInstance instance = convert(db, dbType);
+instances.add(instance);
+guids.add(guid);
+}
+//lookup vertices via getVertexForInstanceByUniqueAttributes
+List<AtlasVertex> vertices = helper.getVerticesForInstancesByUniqueAttribute(dbType, instances);
+assertEquals(instances.size(), vertices.size());
+//assert vertex matches the vertex we get through getVertexForGUID
+for(int i = 0; i < instances.size(); i++) {
+String guid = guids.get(i);
+AtlasVertex foundVertex = vertices.get(i);
+AtlasVertex expectedVertex = helper.getVertexForGUID(guid);
+assertEquals(foundVertex, expectedVertex);
+}
+}
+@Test
+public void testGetVerticesForGUIDSWithDuplicates() throws Exception {
+ITypedReferenceableInstance hrDept = TestUtils.createDeptEg1(TypeSystem.getInstance());
+List<String> result = repositoryService.createEntities(hrDept);
+String guid = result.get(0);
+Map<String, AtlasVertex> verticesForGUIDs = GraphHelper.getInstance().getVerticesForGUIDs(Arrays.asList(guid, guid));
+Assert.assertEquals(verticesForGUIDs.size(), 1);
+Assert.assertTrue(verticesForGUIDs.containsKey(guid));
+}
+@Test
public void testGetCompositeGuidsAndVertices() throws Exception {
ITypedReferenceableInstance hrDept = TestUtils.createDeptEg1(typeSystem);
List<String> createdGuids = repositoryService.createEntities(hrDept);
@@ -144,4 +202,22 @@
assertFalse(iterator.hasNext());
assertFalse(iterator.hasNext());
}
+private ITypedReferenceableInstance convert(Referenceable instance, ClassType type) throws AtlasException {
+return type.convert(instance, Multiplicity.REQUIRED);
+}
+private String createInstance(Referenceable entity) throws Exception {
+TestUtils.resetRequestContext();
+String entityjson = InstanceSerialization.toJson(entity, true);
+JSONArray entitiesJson = new JSONArray();
+entitiesJson.put(entityjson);
+List<String> guids = metadataService.createEntities(entitiesJson.toString());
+if (guids != null && guids.size() > 0) {
+return guids.get(guids.size() - 1);
+}
+return null;
+}
}
@@ -77,6 +77,7 @@ atlas.graph.index.search.solr.zookeeper-url=${solr.zk.address}
######### Hive Lineage Configs #########
## Schema
atlas.lineage.schema.query.hive_table=hive_table where __guid='%s'\, columns
+atlas.lineage.schema.query.hive_table_v1=hive_table_v1 where __guid='%s'\, columns
######### Notification Configs #########
atlas.notification.embedded=true
...
@@ -144,7 +144,7 @@ public class EntityResource {
throw new WebApplicationException(Servlets.getErrorResponse(e, Response.Status.CONFLICT));
} catch (ValueConversionException ve) {
LOG.error("Unable to persist entity instance due to a deserialization error entityDef={}", entityJson, ve);
-throw new WebApplicationException(Servlets.getErrorResponse(ve.getCause(), Response.Status.BAD_REQUEST));
+throw new WebApplicationException(Servlets.getErrorResponse(ve.getCause() != null ? ve.getCause() : ve, Response.Status.BAD_REQUEST));
} catch (AtlasException | IllegalArgumentException e) {
LOG.error("Unable to persist entity instance entityDef={}", entityJson, e);
throw new WebApplicationException(Servlets.getErrorResponse(e, Response.Status.BAD_REQUEST));
...
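The guard above matters because a ValueConversionException does not always wrap another exception; handing a null cause to the error response would lose the message. A generic illustration of the fall-back-to-self pattern, unrelated to the Atlas servlet code:

    class CauseFallbackSketch {
        public static void main(String[] args) {
            Exception withCause    = new RuntimeException("outer", new IllegalStateException("inner"));
            Exception withoutCause = new RuntimeException("outer only");

            System.out.println(describe(withCause));      // prints: inner
            System.out.println(describe(withoutCause));   // prints: outer only
        }

        // report the root cause when there is one, otherwise the exception itself
        static String describe(Exception e) {
            Throwable reported = (e.getCause() != null) ? e.getCause() : e;
            return reported.getMessage();
        }
    }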
@@ -51,8 +51,6 @@ import static org.testng.Assert.assertTrue;
@Guice(modules = NotificationModule.class)
public class EntityNotificationIT extends BaseResourceIT {
-private static final String ENTITIES = "api/atlas/entities";
-private static final String TRAITS = "traits";
private final String DATABASE_NAME = "db" + randomString();
private final String TABLE_NAME = "table" + randomString();
@Inject
@@ -66,7 +64,7 @@ public class EntityNotificationIT extends BaseResourceIT {
public void setUp() throws Exception {
super.setUp();
createTypeDefinitionsV1();
-Referenceable HiveDBInstance = createHiveDBInstanceV1(DATABASE_NAME);
+Referenceable HiveDBInstance = createHiveDBInstanceBuiltIn(DATABASE_NAME);
dbId = createInstance(HiveDBInstance);
List<NotificationConsumer<EntityNotification>> consumers =
@@ -77,13 +75,13 @@ public class EntityNotificationIT extends BaseResourceIT {
@Test
public void testCreateEntity() throws Exception {
-Referenceable tableInstance = createHiveTableInstanceV1(DATABASE_NAME, TABLE_NAME, dbId);
+Referenceable tableInstance = createHiveTableInstanceBuiltIn(DATABASE_NAME, TABLE_NAME, dbId);
tableId = createInstance(tableInstance);
final String guid = tableId._getId();
waitForNotification(notificationConsumer, MAX_WAIT_TIME,
-newNotificationPredicate(EntityNotification.OperationType.ENTITY_CREATE, HIVE_TABLE_TYPE, guid));
+newNotificationPredicate(EntityNotification.OperationType.ENTITY_CREATE, HIVE_TABLE_TYPE_BUILTIN, guid));
}
@Test(dependsOnMethods = "testCreateEntity")
@@ -96,29 +94,29 @@ public class EntityNotificationIT extends BaseResourceIT {
atlasClientV1.updateEntityAttribute(guid, property, newValue);
waitForNotification(notificationConsumer, MAX_WAIT_TIME,
-newNotificationPredicate(EntityNotification.OperationType.ENTITY_UPDATE, HIVE_TABLE_TYPE, guid));
+newNotificationPredicate(EntityNotification.OperationType.ENTITY_UPDATE, HIVE_TABLE_TYPE_BUILTIN, guid));
}
@Test
public void testDeleteEntity() throws Exception {
final String tableName = "table-" + randomString();
final String dbName = "db-" + randomString();
-Referenceable HiveDBInstance = createHiveDBInstanceV1(dbName);
+Referenceable HiveDBInstance = createHiveDBInstanceBuiltIn(dbName);
Id dbId = createInstance(HiveDBInstance);
-Referenceable tableInstance = createHiveTableInstanceV1(dbName, tableName, dbId);
+Referenceable tableInstance = createHiveTableInstanceBuiltIn(dbName, tableName, dbId);
final Id tableId = createInstance(tableInstance);
final String guid = tableId._getId();
waitForNotification(notificationConsumer, MAX_WAIT_TIME,
-newNotificationPredicate(EntityNotification.OperationType.ENTITY_CREATE, HIVE_TABLE_TYPE, guid));
+newNotificationPredicate(EntityNotification.OperationType.ENTITY_CREATE, HIVE_TABLE_TYPE_BUILTIN, guid));
final String name = (String) tableInstance.get(AtlasClient.REFERENCEABLE_ATTRIBUTE_NAME);
-atlasClientV1.deleteEntity(HIVE_TABLE_TYPE, AtlasClient.REFERENCEABLE_ATTRIBUTE_NAME, name);
+atlasClientV1.deleteEntity(HIVE_TABLE_TYPE_BUILTIN, AtlasClient.REFERENCEABLE_ATTRIBUTE_NAME, name);
waitForNotification(notificationConsumer, MAX_WAIT_TIME,
-newNotificationPredicate(EntityNotification.OperationType.ENTITY_DELETE, HIVE_TABLE_TYPE, guid));
+newNotificationPredicate(EntityNotification.OperationType.ENTITY_DELETE, HIVE_TABLE_TYPE_BUILTIN, guid));
}
@Test(dependsOnMethods = "testCreateEntity")
@@ -141,7 +139,7 @@ public class EntityNotificationIT extends BaseResourceIT {
atlasClientV1.addTrait(guid, traitInstance);
EntityNotification entityNotification = waitForNotification(notificationConsumer, MAX_WAIT_TIME,
-newNotificationPredicate(EntityNotification.OperationType.TRAIT_ADD, HIVE_TABLE_TYPE, guid));
+newNotificationPredicate(EntityNotification.OperationType.TRAIT_ADD, HIVE_TABLE_TYPE_BUILTIN, guid));
IReferenceableInstance entity = entityNotification.getEntity();
assertTrue(entity.getTraits().contains(traitName));
@@ -166,7 +164,7 @@ public class EntityNotificationIT extends BaseResourceIT {
atlasClientV1.addTrait(guid, traitInstance);
entityNotification = waitForNotification(notificationConsumer, MAX_WAIT_TIME,
-newNotificationPredicate(EntityNotification.OperationType.TRAIT_ADD, HIVE_TABLE_TYPE, guid));
+newNotificationPredicate(EntityNotification.OperationType.TRAIT_ADD, HIVE_TABLE_TYPE_BUILTIN, guid));
allTraits = entityNotification.getAllTraits();
allTraitNames = new LinkedList<>();
@@ -187,7 +185,7 @@ public class EntityNotificationIT extends BaseResourceIT {
atlasClientV1.deleteTrait(guid, traitName);
EntityNotification entityNotification = waitForNotification(notificationConsumer, MAX_WAIT_TIME,
-newNotificationPredicate(EntityNotification.OperationType.TRAIT_DELETE, HIVE_TABLE_TYPE, guid));
+newNotificationPredicate(EntityNotification.OperationType.TRAIT_DELETE, HIVE_TABLE_TYPE_BUILTIN, guid));
assertFalse(entityNotification.getEntity().getTraits().contains(traitName));
}
...
@@ -68,7 +68,7 @@ public class NotificationHookConsumerIT extends BaseResourceIT {
new Referenceable(randomString())));
//send valid message
-final Referenceable entity = new Referenceable(DATABASE_TYPE);
+final Referenceable entity = new Referenceable(DATABASE_TYPE_BUILTIN);
String dbName = "db" + randomString();
entity.set(NAME, dbName);
entity.set(DESCRIPTION, randomString());
@@ -79,7 +79,7 @@ public class NotificationHookConsumerIT extends BaseResourceIT {
waitFor(MAX_WAIT_TIME, new Predicate() {
@Override
public boolean evaluate() throws Exception {
-JSONArray results = searchByDSL(String.format("%s where name='%s'", DATABASE_TYPE, entity.get(NAME)));
+JSONArray results = searchByDSL(String.format("%s where name='%s'", DATABASE_TYPE_BUILTIN, entity.get(NAME)));
return results.length() == 1;
}
});
@@ -87,7 +87,7 @@ public class NotificationHookConsumerIT extends BaseResourceIT {
@Test
public void testCreateEntity() throws Exception {
-final Referenceable entity = new Referenceable(DATABASE_TYPE);
+final Referenceable entity = new Referenceable(DATABASE_TYPE_BUILTIN);
String dbName = "db" + randomString();
entity.set(NAME, dbName);
entity.set(DESCRIPTION, randomString());
@@ -99,13 +99,13 @@ public class NotificationHookConsumerIT extends BaseResourceIT {
waitFor(MAX_WAIT_TIME, new Predicate() {
@Override
public boolean evaluate() throws Exception {
-JSONArray results = searchByDSL(String.format("%s where qualifiedName='%s'", DATABASE_TYPE, entity.get(QUALIFIED_NAME)));
+JSONArray results = searchByDSL(String.format("%s where qualifiedName='%s'", DATABASE_TYPE_BUILTIN, entity.get(QUALIFIED_NAME)));
return results.length() == 1;
}
});
//Assert that user passed in hook message is used in audit
-Referenceable instance = atlasClientV1.getEntity(DATABASE_TYPE, QUALIFIED_NAME, (String) entity.get(QUALIFIED_NAME));
+Referenceable instance = atlasClientV1.getEntity(DATABASE_TYPE_BUILTIN, QUALIFIED_NAME, (String) entity.get(QUALIFIED_NAME));
List<EntityAuditEvent> events =
atlasClientV1.getEntityAuditEvents(instance.getId()._getId(), (short) 1);
assertEquals(events.size(), 1);
@@ -114,7 +114,7 @@ public class NotificationHookConsumerIT extends BaseResourceIT {
@Test
public void testUpdateEntityPartial() throws Exception {
-final Referenceable entity = new Referenceable(DATABASE_TYPE);
+final Referenceable entity = new Referenceable(DATABASE_TYPE_BUILTIN);
final String dbName = "db" + randomString();
entity.set(NAME, dbName);
entity.set(DESCRIPTION, randomString());
@@ -123,26 +123,26 @@ public class NotificationHookConsumerIT extends BaseResourceIT {
atlasClientV1.createEntity(entity);
-final Referenceable newEntity = new Referenceable(DATABASE_TYPE);
+final Referenceable newEntity = new Referenceable(DATABASE_TYPE_BUILTIN);
newEntity.set("owner", randomString());
sendHookMessage(
-new HookNotification.EntityPartialUpdateRequest(TEST_USER, DATABASE_TYPE, QUALIFIED_NAME, dbName, newEntity));
+new HookNotification.EntityPartialUpdateRequest(TEST_USER, DATABASE_TYPE_BUILTIN, QUALIFIED_NAME, dbName, newEntity));
waitFor(MAX_WAIT_TIME, new Predicate() {
@Override
public boolean evaluate() throws Exception {
-Referenceable localEntity = atlasClientV1.getEntity(DATABASE_TYPE, QUALIFIED_NAME, dbName);
+Referenceable localEntity = atlasClientV1.getEntity(DATABASE_TYPE_BUILTIN, QUALIFIED_NAME, dbName);
return (localEntity.get("owner") != null && localEntity.get("owner").equals(newEntity.get("owner")));
}
});
//Its partial update and un-set fields are not updated
-Referenceable actualEntity = atlasClientV1.getEntity(DATABASE_TYPE, QUALIFIED_NAME, dbName);
+Referenceable actualEntity = atlasClientV1.getEntity(DATABASE_TYPE_BUILTIN, QUALIFIED_NAME, dbName);
assertEquals(actualEntity.get(DESCRIPTION), entity.get(DESCRIPTION));
}
@Test
public void testUpdatePartialUpdatingQualifiedName() throws Exception {
-final Referenceable entity = new Referenceable(DATABASE_TYPE);
+final Referenceable entity = new Referenceable(DATABASE_TYPE_BUILTIN);
final String dbName = "db" + randomString();
entity.set(NAME, dbName);
entity.set(DESCRIPTION, randomString());
@@ -151,29 +151,29 @@ public class NotificationHookConsumerIT extends BaseResourceIT {
atlasClientV1.createEntity(entity);
-final Referenceable newEntity = new Referenceable(DATABASE_TYPE);
+final Referenceable newEntity = new Referenceable(DATABASE_TYPE_BUILTIN);
final String newName = "db" + randomString();
newEntity.set(QUALIFIED_NAME, newName);
sendHookMessage(
-new HookNotification.EntityPartialUpdateRequest(TEST_USER, DATABASE_TYPE, QUALIFIED_NAME, dbName, newEntity));
+new HookNotification.EntityPartialUpdateRequest(TEST_USER, DATABASE_TYPE_BUILTIN, QUALIFIED_NAME, dbName, newEntity));
waitFor(MAX_WAIT_TIME, new Predicate() {
@Override
public boolean evaluate() throws Exception {
-JSONArray results = searchByDSL(String.format("%s where qualifiedName='%s'", DATABASE_TYPE, newName));
+JSONArray results = searchByDSL(String.format("%s where qualifiedName='%s'", DATABASE_TYPE_BUILTIN, newName));
return results.length() == 1;
}
});
//no entity with the old qualified name
-JSONArray results = searchByDSL(String.format("%s where qualifiedName='%s'", DATABASE_TYPE, dbName));
+JSONArray results = searchByDSL(String.format("%s where qualifiedName='%s'", DATABASE_TYPE_BUILTIN, dbName));
assertEquals(results.length(), 0);
}
@Test
public void testDeleteByQualifiedName() throws Exception {
-Referenceable entity = new Referenceable(DATABASE_TYPE);
+Referenceable entity = new Referenceable(DATABASE_TYPE_BUILTIN);
final String dbName = "db" + randomString();
entity.set(NAME, dbName);
entity.set(DESCRIPTION, randomString());
@@ -183,7 +183,7 @@ public class NotificationHookConsumerIT extends BaseResourceIT {
final String dbId = atlasClientV1.createEntity(entity).get(0);
sendHookMessage(
-new HookNotification.EntityDeleteRequest(TEST_USER, DATABASE_TYPE, QUALIFIED_NAME, dbName));
+new HookNotification.EntityDeleteRequest(TEST_USER, DATABASE_TYPE_BUILTIN, QUALIFIED_NAME, dbName));
waitFor(MAX_WAIT_TIME, new Predicate() {
@Override
public boolean evaluate() throws Exception {
@@ -195,7 +195,7 @@ public class NotificationHookConsumerIT extends BaseResourceIT {
@Test
public void testUpdateEntityFullUpdate() throws Exception {
-Referenceable entity = new Referenceable(DATABASE_TYPE);
+Referenceable entity = new Referenceable(DATABASE_TYPE_BUILTIN);
final String dbName = "db" + randomString();
entity.set(NAME, dbName);
entity.set(DESCRIPTION, randomString());
@@ -204,7 +204,7 @@ public class NotificationHookConsumerIT extends BaseResourceIT {
atlasClientV1.createEntity(entity);
-final Referenceable newEntity = new Referenceable(DATABASE_TYPE);
+final Referenceable newEntity = new Referenceable(DATABASE_TYPE_BUILTIN);
newEntity.set(NAME, randomString());
newEntity.set(DESCRIPTION, randomString());
newEntity.set("owner", randomString());
@@ -216,12 +216,12 @@ public class NotificationHookConsumerIT extends BaseResourceIT {
waitFor(MAX_WAIT_TIME, new Predicate() {
@Override
public boolean evaluate() throws Exception {
-JSONArray results = searchByDSL(String.format("%s where qualifiedName='%s'", DATABASE_TYPE, newEntity.get(QUALIFIED_NAME)));
+JSONArray results = searchByDSL(String.format("%s where qualifiedName='%s'", DATABASE_TYPE_BUILTIN, newEntity.get(QUALIFIED_NAME)));
return results.length() == 1;
}
});
-Referenceable actualEntity = atlasClientV1.getEntity(DATABASE_TYPE, QUALIFIED_NAME, dbName);
+Referenceable actualEntity = atlasClientV1.getEntity(DATABASE_TYPE_BUILTIN, QUALIFIED_NAME, dbName);
assertEquals(actualEntity.get(DESCRIPTION), newEntity.get(DESCRIPTION));
assertEquals(actualEntity.get("owner"), newEntity.get("owner"));
}
...
...@@ -95,6 +95,12 @@ public abstract class BaseResourceIT { ...@@ -95,6 +95,12 @@ public abstract class BaseResourceIT {
@BeforeClass @BeforeClass
public void setUp() throws Exception { public void setUp() throws Exception {
//set high timeouts so that tests do not fail due to read timeouts while you
//are stepping through the code in a debugger
ApplicationProperties.get().setProperty("atlas.client.readTimeoutMSecs", "100000000");
ApplicationProperties.get().setProperty("atlas.client.connectTimeoutMSecs", "100000000");
Configuration configuration = ApplicationProperties.get(); Configuration configuration = ApplicationProperties.get();
atlasUrls = configuration.getStringArray(ATLAS_REST_ADDRESS); atlasUrls = configuration.getStringArray(ATLAS_REST_ADDRESS);
...@@ -221,13 +227,18 @@ public abstract class BaseResourceIT { ...@@ -221,13 +227,18 @@ public abstract class BaseResourceIT {
try { try {
if (!update) { if (!update) {
entity = entitiesClientV2.createEntity(atlasEntity); entity = entitiesClientV2.createEntity(atlasEntity);
assertNotNull(entity);
assertNotNull(entity.getEntitiesByOperation(EntityMutations.EntityOperation.CREATE));
assertTrue(entity.getEntitiesByOperation(EntityMutations.EntityOperation.CREATE).size() > 0);
return entity.getEntitiesByOperation(EntityMutations.EntityOperation.CREATE).get(0);
} else { } else {
entity = entitiesClientV2.updateEntity(atlasEntity); entity = entitiesClientV2.updateEntity(atlasEntity);
assertNotNull(entity);
assertNotNull(entity.getEntitiesByOperation(EntityMutations.EntityOperation.UPDATE));
assertTrue(entity.getEntitiesByOperation(EntityMutations.EntityOperation.UPDATE).size() > 0);
return entity.getEntitiesByOperation(EntityMutations.EntityOperation.UPDATE).get(0);
} }
assertNotNull(entity);
assertNotNull(entity.getEntitiesByOperation(EntityMutations.EntityOperation.UPDATE));
assertTrue(entity.getEntitiesByOperation(EntityMutations.EntityOperation.UPDATE).size() > 0);
return entity.getEntitiesByOperation(EntityMutations.EntityOperation.UPDATE).get(0);
} catch (AtlasServiceException e) { } catch (AtlasServiceException e) {
LOG.error("Entity {} failed", update ? "update" : "creation", entity); LOG.error("Entity {} failed", update ? "update" : "creation", entity);
} }
...@@ -242,10 +253,20 @@ public abstract class BaseResourceIT { ...@@ -242,10 +253,20 @@ public abstract class BaseResourceIT {
return modifyEntity(atlasEntity, true); return modifyEntity(atlasEntity, true);
} }
protected static final String DATABASE_TYPE = "hive_db"; protected static final String DATABASE_TYPE_V2 = "hive_db_v2";
protected static final String HIVE_TABLE_TYPE = "hive_table"; protected static final String HIVE_TABLE_TYPE_V2 = "hive_table_v2";
protected static final String COLUMN_TYPE = "hive_column"; protected static final String COLUMN_TYPE_V2 = "hive_column_v2";
-    protected static final String HIVE_PROCESS_TYPE = "hive_process";
+    protected static final String HIVE_PROCESS_TYPE_V2 = "hive_process_v2";
+    protected static final String DATABASE_TYPE = "hive_db_v1";
+    protected static final String HIVE_TABLE_TYPE = "hive_table_v1";
+    protected static final String COLUMN_TYPE = "hive_column_v1";
+    protected static final String HIVE_PROCESS_TYPE = "hive_process_v1";
+    protected static final String DATABASE_TYPE_BUILTIN = "hive_db";
+    protected static final String HIVE_TABLE_TYPE_BUILTIN = "hive_table";
+    protected static final String COLUMN_TYPE_BUILTIN = "hive_column";
+    protected static final String HIVE_PROCESS_TYPE_BUILTIN = "hive_process";

     protected void createTypeDefinitionsV1() throws Exception {
         HierarchicalTypeDefinition<ClassType> dbClsDef = TypesUtil

@@ -323,7 +344,7 @@ public abstract class BaseResourceIT {
     protected void createTypeDefinitionsV2() throws Exception {
         AtlasEntityDef dbClsTypeDef = AtlasTypeUtil.createClassTypeDef(
-                DATABASE_TYPE,
+                DATABASE_TYPE_V2,
                 null,
                 AtlasTypeUtil.createUniqueRequiredAttrDef(NAME, "string"),
                 AtlasTypeUtil.createRequiredAttrDef(DESCRIPTION, "string"),

@@ -332,7 +353,7 @@ public abstract class BaseResourceIT {
                 AtlasTypeUtil.createOptionalAttrDef("createTime", "int"));
         AtlasEntityDef columnClsDef = AtlasTypeUtil
-                .createClassTypeDef(COLUMN_TYPE, null,
+                .createClassTypeDef(COLUMN_TYPE_V2, null,
                         AtlasTypeUtil.createOptionalAttrDef(NAME, "string"),
                         AtlasTypeUtil.createOptionalAttrDef("dataType", "string"),
                         AtlasTypeUtil.createOptionalAttrDef("comment", "string"));

@@ -348,20 +369,21 @@ public abstract class BaseResourceIT {
                 ));
         AtlasEntityDef tblClsDef = AtlasTypeUtil
-                .createClassTypeDef(HIVE_TABLE_TYPE,
+                .createClassTypeDef(HIVE_TABLE_TYPE_V2,
                         ImmutableSet.of("DataSet"),
                         AtlasTypeUtil.createOptionalAttrDef("owner", "string"),
                         AtlasTypeUtil.createOptionalAttrDef("createTime", "long"),
                         AtlasTypeUtil.createOptionalAttrDef("lastAccessTime", "date"),
                         AtlasTypeUtil.createOptionalAttrDef("temporary", "boolean"),
-                        AtlasTypeUtil.createRequiredAttrDef("db", DATABASE_TYPE),
+                        AtlasTypeUtil.createRequiredAttrDef("db", DATABASE_TYPE_V2),
-                        AtlasTypeUtil.createRequiredAttrDef("columns", DataTypes.arrayTypeName(COLUMN_TYPE)),
+                        //some tests don't set the columns field or set it to null...
+                        AtlasTypeUtil.createOptionalAttrDef("columns", DataTypes.arrayTypeName(COLUMN_TYPE_V2)),
                         AtlasTypeUtil.createOptionalAttrDef("tableType", "tableType"),
                         AtlasTypeUtil.createOptionalAttrDef("serde1", "serdeType"),
                         AtlasTypeUtil.createOptionalAttrDef("serde2", "serdeType"));
         AtlasEntityDef loadProcessClsDef = AtlasTypeUtil
-                .createClassTypeDef(HIVE_PROCESS_TYPE,
+                .createClassTypeDef(HIVE_PROCESS_TYPE_V2,
                         ImmutableSet.of("Process"),
                         AtlasTypeUtil.createOptionalAttrDef("userName", "string"),
                         AtlasTypeUtil.createOptionalAttrDef("startTime", "int"),

@@ -415,7 +437,7 @@ public abstract class BaseResourceIT {
         return RandomStringUtils.randomAlphabetic(1) + RandomStringUtils.randomAlphanumeric(9);
     }

-    protected Referenceable createHiveTableInstanceV1(String dbName, String tableName, Id dbId) throws Exception {
+    protected Referenceable createHiveTableInstanceBuiltIn(String dbName, String tableName, Id dbId) throws Exception {
         Map<String, Object> values = new HashMap<>();
         values.put(NAME, dbName);
         values.put(DESCRIPTION, "foo database");

@@ -426,7 +448,7 @@ public abstract class BaseResourceIT {
         values.put("location", "/tmp");
         Referenceable databaseInstance = new Referenceable(dbId._getId(), dbId.getTypeName(), values);
         Referenceable tableInstance =
-                new Referenceable(HIVE_TABLE_TYPE, "classification", "pii", "phi", "pci", "sox", "sec", "finance");
+                new Referenceable(HIVE_TABLE_TYPE_BUILTIN, "classification", "pii", "phi", "pci", "sox", "sec", "finance");
         tableInstance.set(NAME, tableName);
         tableInstance.set(AtlasClient.REFERENCEABLE_ATTRIBUTE_NAME, tableName);
         tableInstance.set("db", databaseInstance);

@@ -458,7 +480,7 @@ public abstract class BaseResourceIT {
     protected AtlasEntityWithAssociations createHiveTableInstanceV2(AtlasEntity databaseInstance, String tableName) throws Exception {
         AtlasEntityWithAssociations tableInstance =
-                new AtlasEntityWithAssociations(HIVE_TABLE_TYPE);
+                new AtlasEntityWithAssociations(HIVE_TABLE_TYPE_V2);
         tableInstance.setClassifications(
                 Arrays.asList(new AtlasClassification("classification"),
                         new AtlasClassification("pii"),

@@ -497,29 +519,34 @@ public abstract class BaseResourceIT {
         return tableInstance;
     }
+    protected Referenceable createHiveDBInstanceBuiltIn(String dbName) {
+        Referenceable databaseInstance = new Referenceable(DATABASE_TYPE_BUILTIN);
+        databaseInstance.set(NAME, dbName);
+        databaseInstance.set(QUALIFIED_NAME, dbName);
+        databaseInstance.set(CLUSTER_NAME, randomString());
+        databaseInstance.set(DESCRIPTION, "foo database");
+        return databaseInstance;
+    }

     protected Referenceable createHiveDBInstanceV1(String dbName) {
         Referenceable databaseInstance = new Referenceable(DATABASE_TYPE);
         databaseInstance.set(NAME, dbName);
-        databaseInstance.set(QUALIFIED_NAME, dbName);
-        databaseInstance.set(CLUSTER_NAME, randomString());
         databaseInstance.set(DESCRIPTION, "foo database");
         return databaseInstance;
     }

     protected AtlasEntity createHiveDBInstanceV2(String dbName) {
-        AtlasEntity atlasEntity = new AtlasEntity(DATABASE_TYPE);
+        AtlasEntity atlasEntity = new AtlasEntity(DATABASE_TYPE_V2);
         atlasEntity.setAttribute(NAME, dbName);
-        atlasEntity.setAttribute(QUALIFIED_NAME, dbName);
         atlasEntity.setAttribute(DESCRIPTION, "foo database");
-        atlasEntity.setAttribute(AtlasClient.REFERENCEABLE_ATTRIBUTE_NAME, dbName);
         atlasEntity.setAttribute("owner", "user1");
-        atlasEntity.setAttribute(CLUSTER_NAME, "cl1");
-        atlasEntity.setAttribute("parameters", Collections.EMPTY_MAP);
-        atlasEntity.setAttribute("location", "/tmp");
+        atlasEntity.setAttribute("locationUri", "/tmp");
+        atlasEntity.setAttribute("createTime",1000);
         return atlasEntity;
     }

     public interface Predicate {
         /**
......
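For context on the BaseResourceIT changes above (this sketch is not part of the commit), the split between the *_BUILTIN constants and the renamed v1 test types would be exercised roughly as follows; the literal database name is illustrative, and createInstance(...) is the existing helper already used in the setUp() methods further down this diff:

    // Built-in hive_db instance: keeps qualifiedName and clusterName, which the
    // bundled hive_db model expects.
    Referenceable builtinDb = createHiveDBInstanceBuiltIn("reporting_db");
    // Test-local hive_db_v1 instance: only name and description, so the test type
    // no longer collides with the built-in hive_db definition.
    Referenceable testDb = createHiveDBInstanceV1("reporting_db");
    // Registered the same way the integration tests below do in setUp().
    createInstance(builtinDb);
    createInstance(testDb);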
@@ -149,8 +149,7 @@ public class DataSetLineageJerseyResourceIT extends BaseResourceIT {
             LOG.info("JsonRow - {}", row);
             Assert.assertNotNull(row.getString("name"));
             Assert.assertNotNull(row.getString("comment"));
-            Assert.assertNotNull(row.getString("type"));
-            Assert.assertEquals(row.getString("$typeName$"), "hive_column");
+            Assert.assertEquals(row.getString("$typeName$"), "hive_column_v1");
         }
     }

@@ -168,8 +167,7 @@ public class DataSetLineageJerseyResourceIT extends BaseResourceIT {
             LOG.info("JsonRow - {}", row);
             Assert.assertNotNull(row.getString("name"));
             Assert.assertNotNull(row.getString("comment"));
-            Assert.assertNotNull(row.getString("type"));
-            Assert.assertEquals(row.getString("$typeName$"), "hive_column");
+            Assert.assertEquals(row.getString("$typeName$"), "hive_column_v1");
         }
     }
......
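The schema assertions above now pin the expected type name to the renamed test column type. An equivalent, hypothetical way to write the same check is to reuse the COLUMN_TYPE constant from BaseResourceIT (now "hive_column_v1"), so a future rename only touches one place:

    // Hypothetical variant of the assertion in the hunks above; behaviour is unchanged.
    Assert.assertEquals(row.getString("$typeName$"), COLUMN_TYPE);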
@@ -56,12 +56,12 @@ public class EntityDiscoveryJerseyResourceIT extends BaseResourceIT {
         super.setUp();
         dbName = "db" + randomString();
         createTypes();
-        createInstance(createHiveDBInstanceV1(dbName));
+        createInstance(createHiveDBInstanceBuiltIn(dbName));
     }

     @Test
     public void testSearchByDSL() throws Exception {
-        String dslQuery = "from "+ DATABASE_TYPE + " " + QUALIFIED_NAME + "=\"" + dbName + "\"";
+        String dslQuery = "from "+ DATABASE_TYPE_BUILTIN + " " + QUALIFIED_NAME + "=\"" + dbName + "\"";
         AtlasSearchResult searchResult = discoveryClientV2.dslSearch(dslQuery);
         assertNotNull(searchResult);

@@ -73,7 +73,7 @@ public class EntityDiscoveryJerseyResourceIT extends BaseResourceIT {
         assertEquals(entities.size(), 1);
         AtlasEntityHeaderWithAssociations dbEntity = entities.get(0);
-        assertEquals(dbEntity.getTypeName(), DATABASE_TYPE);
+        assertEquals(dbEntity.getTypeName(), DATABASE_TYPE_BUILTIN);
         assertEquals(dbEntity.getDisplayText(), dbName);
         assertEquals(dbEntity.getStatus(), Status.ACTIVE);
         assertNotNull(dbEntity.getGuid());

@@ -83,7 +83,7 @@ public class EntityDiscoveryJerseyResourceIT extends BaseResourceIT {
     @Test
     public void testSearchDSLLimits() throws Exception {
-        String dslQuery = "from "+ DATABASE_TYPE + " " + QUALIFIED_NAME + "=\"" + dbName + "\"";
+        String dslQuery = "from "+ DATABASE_TYPE_BUILTIN + " " + QUALIFIED_NAME + "=\"" + dbName + "\"";
         AtlasSearchResult searchResult = discoveryClientV2.dslSearch(dslQuery);
         assertNotNull(searchResult);

@@ -124,7 +124,7 @@ public class EntityDiscoveryJerseyResourceIT extends BaseResourceIT {
     @Test
     public void testSearchUsingDSL() throws Exception {
-        String query = "from "+ DATABASE_TYPE + " " + QUALIFIED_NAME + "=\"" + dbName + "\"";
+        String query = "from "+ DATABASE_TYPE_BUILTIN + " " + QUALIFIED_NAME + "=\"" + dbName + "\"";
         AtlasSearchResult searchResult = discoveryClientV2.dslSearch(query);
         assertNotNull(searchResult);

@@ -135,7 +135,7 @@ public class EntityDiscoveryJerseyResourceIT extends BaseResourceIT {
         assertEquals(entities.size(), 1);
         AtlasEntityHeaderWithAssociations dbEntity = entities.get(0);
-        assertEquals(dbEntity.getTypeName(), DATABASE_TYPE);
+        assertEquals(dbEntity.getTypeName(), DATABASE_TYPE_BUILTIN);
         assertEquals(dbEntity.getDisplayText(), dbName);
         assertEquals(dbEntity.getStatus(), Status.ACTIVE);

@@ -166,7 +166,7 @@ public class EntityDiscoveryJerseyResourceIT extends BaseResourceIT {
         AtlasFullTextResult result = fullTextResults.get(0);
         assertNotNull(result.getEntity());
-        assertEquals(result.getEntity().getTypeName(), DATABASE_TYPE);
+        assertEquals(result.getEntity().getTypeName(), DATABASE_TYPE_BUILTIN);
         assertNotNull(result.getScore());
         //API works without limit and offset
......
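For readers not running the suite, the DSL queries in EntityDiscoveryJerseyResourceIT above now target the built-in type. Assuming QUALIFIED_NAME resolves to "qualifiedName" and dbName to a value such as "db4KtB2qX9r", the query the tests build expands as sketched here (illustrative only):

    // Builds, for example: from hive_db qualifiedName="db4KtB2qX9r"
    String dslQuery = "from " + DATABASE_TYPE_BUILTIN + " " + QUALIFIED_NAME + "=\"" + dbName + "\"";
    // Same V2 client call the tests use; exactly one hive_db entity is expected back.
    AtlasSearchResult searchResult = discoveryClientV2.dslSearch(dslQuery);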
@@ -66,7 +66,7 @@ public class MetadataDiscoveryJerseyResourceIT extends BaseResourceIT {
     @Test
     public void testSearchByDSL() throws Exception {
-        String dslQuery = "from "+ DATABASE_TYPE + " qualifiedName=\"" + dbName + "\"";
+        String dslQuery = "from "+ DATABASE_TYPE + " name=\"" + dbName + "\"";
         MultivaluedMap<String, String> queryParams = new MultivaluedMapImpl();
         queryParams.add("query", dslQuery);
         JSONObject response = atlasClientV1.callAPIWithQueryParams(AtlasClient.API.SEARCH_DSL, queryParams);

@@ -89,7 +89,7 @@ public class MetadataDiscoveryJerseyResourceIT extends BaseResourceIT {
     public void testSearchDSLLimits() throws Exception {
         //search without new parameters of limit and offset should work
-        String dslQuery = "from "+ DATABASE_TYPE + " qualifiedName=\"" + dbName + "\"";
+        String dslQuery = "from "+ DATABASE_TYPE + " name=\"" + dbName + "\"";
         MultivaluedMap<String, String> queryParams = new MultivaluedMapImpl();
         queryParams.add("query", dslQuery);
         JSONObject response = atlasClientV1.callAPIWithQueryParams(AtlasClient.API.SEARCH_DSL, queryParams);

@@ -146,7 +146,7 @@ public class MetadataDiscoveryJerseyResourceIT extends BaseResourceIT {
     @Test
     public void testSearchUsingGremlin() throws Exception {
-        String query = "g.V.has('type', 'hive_db').toList()";
+        String query = "g.V.has('type', '" + BaseResourceIT.HIVE_TABLE_TYPE + "').toList()";
         MultivaluedMap<String, String> queryParams = new MultivaluedMapImpl();
         queryParams.add("query", query);

@@ -162,7 +162,7 @@ public class MetadataDiscoveryJerseyResourceIT extends BaseResourceIT {
     @Test
     public void testSearchUsingDSL() throws Exception {
         //String query = "from dsl_test_type";
-        String query = "from "+ DATABASE_TYPE + " qualifiedName=\"" + dbName +"\"";
+        String query = "from "+ DATABASE_TYPE + " name=\"" + dbName +"\"";
         MultivaluedMap<String, String> queryParams = new MultivaluedMapImpl();
         queryParams.add("query", query);
         JSONObject response = atlasClientV1.callAPIWithQueryParams(AtlasClient.API.SEARCH, queryParams);
......
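For context on the last two hunks (illustrative, not part of the change): with HIVE_TABLE_TYPE now set to "hive_table_v1" and DATABASE_TYPE to "hive_db_v1" in BaseResourceIT, the Gremlin and DSL queries sent through the V1 client expand roughly as follows:

    // Gremlin: g.V.has('type', 'hive_table_v1').toList()
    String gremlinQuery = "g.V.has('type', '" + BaseResourceIT.HIVE_TABLE_TYPE + "').toList()";
    // DSL: from hive_db_v1 name="<dbName>" -- filtering on name rather than qualifiedName,
    // presumably because the test-local hive_db_v1 type keys on name instead of qualifiedName.
    String dslQuery = "from " + DATABASE_TYPE + " name=\"" + dbName + "\"";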