Commit 9a067d38 by Pinal Shah Committed by nixonrodrigues

ATLAS-3782 : Support NOT_CONTAINS operator in basic search

parent ec314fde
...@@ -195,11 +195,15 @@ public class AtlasSolrQueryBuilder { ...@@ -195,11 +195,15 @@ public class AtlasSolrQueryBuilder {
if (!indexAttributes.contains(indexAttributeName)) { if (!indexAttributes.contains(indexAttributeName)) {
StringBuilder sb = new StringBuilder(); StringBuilder sb = new StringBuilder();
if (attributeName.equals(CUSTOM_ATTRIBUTES_PROPERTY_KEY) && operator.equals(Operator.CONTAINS)) { if (attributeName.equals(CUSTOM_ATTRIBUTES_PROPERTY_KEY)) {
// CustomAttributes stores key value pairs in String format, so ideally it should be 'contains' operator to search for one pair, // CustomAttributes stores key value pairs in String format, so ideally it should be 'contains' operator to search for one pair,
// for use-case, E1 having key1=value1 and E2 having key1=value2, searching key1=value1 results both E1,E2 // for use-case, E1 having key1=value1 and E2 having key1=value2, searching key1=value1 results both E1,E2
// surrounding inverted commas to attributeValue works // surrounding inverted commas to attributeValue works
operator = Operator.EQ; if (operator.equals(Operator.CONTAINS)) {
operator = Operator.EQ;
} else if (operator.equals(Operator.NOT_CONTAINS)) {
operator = Operator.NEQ;
}
attributeValue = getIndexQueryAttributeValue(attributeValue); attributeValue = getIndexQueryAttributeValue(attributeValue);
} }
...@@ -261,6 +265,9 @@ public class AtlasSolrQueryBuilder { ...@@ -261,6 +265,9 @@ public class AtlasSolrQueryBuilder {
case CONTAINS: case CONTAINS:
withContains(queryBuilder, indexFieldName, attributeValue); withContains(queryBuilder, indexFieldName, attributeValue);
break; break;
case NOT_CONTAINS:
withNotContains(queryBuilder, indexFieldName, attributeValue);
break;
case IS_NULL: case IS_NULL:
withIsNull(queryBuilder, indexFieldName); withIsNull(queryBuilder, indexFieldName);
break; break;
...@@ -388,6 +395,10 @@ public class AtlasSolrQueryBuilder { ...@@ -388,6 +395,10 @@ public class AtlasSolrQueryBuilder {
queryBuilder.append("+").append(indexFieldName).append(":*").append(attributeValue).append("* "); queryBuilder.append("+").append(indexFieldName).append(":*").append(attributeValue).append("* ");
} }
/**
 * Appends a NOT_CONTAINS clause for the given index field to the Solr query.
 *
 * Solr cannot evaluate a purely negative query, so the clause is anchored with
 * the match-all term ({@code *:*}) before excluding documents whose field
 * contains {@code attributeValue} as a substring (wildcards on both sides).
 *
 * @param queryBuilder   builder accumulating the full Solr query string
 * @param indexFieldName Solr index field to filter on
 * @param attributeValue substring that must NOT appear in the field value
 */
private void withNotContains(StringBuilder queryBuilder, String indexFieldName, String attributeValue) {
    String clause = "*:* -" + indexFieldName + ":*" + attributeValue + "* ";

    queryBuilder.append(clause);
}
private void withIsNull(StringBuilder queryBuilder, String indexFieldName) { private void withIsNull(StringBuilder queryBuilder, String indexFieldName) {
queryBuilder.append("-").append(indexFieldName).append(":*").append(" "); queryBuilder.append("-").append(indexFieldName).append(":*").append(" ");
} }
......
...@@ -464,9 +464,9 @@ public abstract class SearchProcessor { ...@@ -464,9 +464,9 @@ public abstract class SearchProcessor {
AtlasType attributeType = structType.getAttributeType(filterCriteria.getAttributeName()); AtlasType attributeType = structType.getAttributeType(filterCriteria.getAttributeName());
if (AtlasBaseTypeDef.ATLAS_TYPE_STRING.equals(attributeType.getTypeName())) { if (AtlasBaseTypeDef.ATLAS_TYPE_STRING.equals(attributeType.getTypeName())) {
if (filterCriteria.getOperator() == SearchParameters.Operator.NEQ) { if (filterCriteria.getOperator() == SearchParameters.Operator.NEQ || filterCriteria.getOperator() == SearchParameters.Operator.NOT_CONTAINS) {
if (LOG.isDebugEnabled()) { if (LOG.isDebugEnabled()) {
LOG.debug("NEQ operator found for string attribute {}, deferring to in-memory or graph query (might cause poor performance)", qualifiedName); LOG.debug("{} operator found for string attribute {}, deferring to in-memory or graph query (might cause poor performance)", filterCriteria.getOperator(), qualifiedName);
} }
ret = false; ret = false;
...@@ -633,6 +633,7 @@ public abstract class SearchProcessor { ...@@ -633,6 +633,7 @@ public abstract class SearchProcessor {
op = SearchParameters.Operator.NOT_CONTAINS; op = SearchParameters.Operator.NOT_CONTAINS;
break; break;
case CONTAINS: case CONTAINS:
case NOT_CONTAINS:
if (attrName.equals(CUSTOM_ATTRIBUTES_PROPERTY_KEY)) { if (attrName.equals(CUSTOM_ATTRIBUTES_PROPERTY_KEY)) {
attrVal = getCustomAttributeIndexQueryValue(attrVal, true); attrVal = getCustomAttributeIndexQueryValue(attrVal, true);
} }
...@@ -849,6 +850,8 @@ public abstract class SearchProcessor { ...@@ -849,6 +850,8 @@ public abstract class SearchProcessor {
case NOT_NULL: case NOT_NULL:
innerQry.has(qualifiedName, AtlasGraphQuery.ComparisionOperator.NOT_EQUAL, null); innerQry.has(qualifiedName, AtlasGraphQuery.ComparisionOperator.NOT_EQUAL, null);
break; break;
case NOT_CONTAINS:
break;
default: default:
LOG.warn("{}: unsupported operator. Ignored", operator); LOG.warn("{}: unsupported operator. Ignored", operator);
break; break;
......
...@@ -19,10 +19,12 @@ package org.apache.atlas; ...@@ -19,10 +19,12 @@ package org.apache.atlas;
import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableList;
import org.apache.atlas.exception.AtlasBaseException; import org.apache.atlas.exception.AtlasBaseException;
import org.apache.atlas.glossary.GlossaryService;
import org.apache.atlas.model.discovery.SearchParameters; import org.apache.atlas.model.discovery.SearchParameters;
import org.apache.atlas.model.instance.AtlasClassification; import org.apache.atlas.model.glossary.AtlasGlossary;
import org.apache.atlas.model.instance.AtlasEntity; import org.apache.atlas.model.glossary.AtlasGlossaryTerm;
import org.apache.atlas.model.instance.AtlasObjectId; import org.apache.atlas.model.glossary.relations.AtlasGlossaryHeader;
import org.apache.atlas.model.instance.*;
import org.apache.atlas.model.typedef.*; import org.apache.atlas.model.typedef.*;
import org.apache.atlas.repository.store.graph.AtlasEntityStore; import org.apache.atlas.repository.store.graph.AtlasEntityStore;
import org.apache.atlas.repository.store.graph.v2.AtlasEntityStream; import org.apache.atlas.repository.store.graph.v2.AtlasEntityStream;
...@@ -42,6 +44,7 @@ import static org.testng.Assert.fail; ...@@ -42,6 +44,7 @@ import static org.testng.Assert.fail;
public abstract class BasicTestSetup { public abstract class BasicTestSetup {
// Entity type //
protected static final String DATABASE_TYPE = "hive_db"; protected static final String DATABASE_TYPE = "hive_db";
protected static final String HIVE_TABLE_TYPE = "hive_table"; protected static final String HIVE_TABLE_TYPE = "hive_table";
private static final String COLUMN_TYPE = "hive_column"; private static final String COLUMN_TYPE = "hive_column";
...@@ -50,6 +53,7 @@ public abstract class BasicTestSetup { ...@@ -50,6 +53,7 @@ public abstract class BasicTestSetup {
private static final String VIEW_TYPE = "hive_process"; private static final String VIEW_TYPE = "hive_process";
protected static final String DATASET_SUBTYPE = "Asset"; protected static final String DATASET_SUBTYPE = "Asset";
//Classification type //
public static final String DIMENSION_CLASSIFICATION = "Dimension"; public static final String DIMENSION_CLASSIFICATION = "Dimension";
public static final String FACT_CLASSIFICATION = "Fact"; public static final String FACT_CLASSIFICATION = "Fact";
public static final String PII_CLASSIFICATION = "PII"; public static final String PII_CLASSIFICATION = "PII";
...@@ -59,14 +63,21 @@ public abstract class BasicTestSetup { ...@@ -59,14 +63,21 @@ public abstract class BasicTestSetup {
public static final String LOGDATA_CLASSIFICATION = "Log Data"; public static final String LOGDATA_CLASSIFICATION = "Log Data";
public static final String DIMENSIONAL_CLASSIFICATION = "Dimensional"; public static final String DIMENSIONAL_CLASSIFICATION = "Dimensional";
// Glossary type //
public static final String SALES_GLOSSARY = "salesGlossary";
public static final String SALES_TERM = "salesTerm";
@Inject @Inject
protected AtlasTypeRegistry typeRegistry; protected AtlasTypeRegistry typeRegistry;
@Inject @Inject
protected AtlasTypeDefStore typeDefStore; protected AtlasTypeDefStore typeDefStore;
@Inject @Inject
protected AtlasEntityStore entityStore; protected AtlasEntityStore entityStore;
@Inject
protected GlossaryService glossaryService;
private boolean baseLoaded = false; private boolean baseLoaded = false;
private EntityMutationResponse hiveEntities;
protected void setupTestData() { protected void setupTestData() {
loadBaseModels(); loadBaseModels();
...@@ -77,6 +88,7 @@ public abstract class BasicTestSetup { ...@@ -77,6 +88,7 @@ public abstract class BasicTestSetup {
private void loadBaseModels() { private void loadBaseModels() {
try { try {
loadModelFromJson("0000-Area0/0010-base_model.json", typeDefStore, typeRegistry); loadModelFromJson("0000-Area0/0010-base_model.json", typeDefStore, typeRegistry);
loadModelFromJson("0000-Area0/0011-glossary_model.json", typeDefStore, typeRegistry);
baseLoaded = true; baseLoaded = true;
} catch (IOException | AtlasBaseException e) { } catch (IOException | AtlasBaseException e) {
fail("Base model setup is required for test to run!"); fail("Base model setup is required for test to run!");
...@@ -97,7 +109,7 @@ public abstract class BasicTestSetup { ...@@ -97,7 +109,7 @@ public abstract class BasicTestSetup {
AtlasEntity.AtlasEntitiesWithExtInfo hiveTestEntities = hiveTestEntities(); AtlasEntity.AtlasEntitiesWithExtInfo hiveTestEntities = hiveTestEntities();
try { try {
entityStore.createOrUpdate(new AtlasEntityStream(hiveTestEntities), false); hiveEntities = entityStore.createOrUpdate(new AtlasEntityStream(hiveTestEntities), false);
} catch (AtlasBaseException e) { } catch (AtlasBaseException e) {
fail("Hive entities need to be created for test to run!"); fail("Hive entities need to be created for test to run!");
} }
...@@ -450,12 +462,13 @@ public abstract class BasicTestSetup { ...@@ -450,12 +462,13 @@ public abstract class BasicTestSetup {
return datasetSubType; return datasetSubType;
} }
public void createDummyEntity(String name, String type, String... traitNames) throws AtlasBaseException { public EntityMutationResponse createDummyEntity(String name, String type, String... traitNames) throws AtlasBaseException {
AtlasEntity entity = new AtlasEntity(type); AtlasEntity entity = new AtlasEntity(type);
entity.setAttribute("name", name); entity.setAttribute("name", name);
entity.setAttribute(AtlasClient.REFERENCEABLE_ATTRIBUTE_NAME, name); entity.setAttribute(AtlasClient.REFERENCEABLE_ATTRIBUTE_NAME, name);
entity.setClassifications(Stream.of(traitNames).map(AtlasClassification::new).collect(Collectors.toList())); entity.setClassifications(Stream.of(traitNames).map(AtlasClassification::new).collect(Collectors.toList()));
entityStore.createOrUpdate(new AtlasEntityStream(new AtlasEntity.AtlasEntitiesWithExtInfo(entity)), false); EntityMutationResponse resp = entityStore.createOrUpdate(new AtlasEntityStream(new AtlasEntity.AtlasEntitiesWithExtInfo(entity)), false);
return resp;
} }
public SearchParameters.FilterCriteria getSingleFilterCondition(String attName, SearchParameters.Operator op, String attrValue) { public SearchParameters.FilterCriteria getSingleFilterCondition(String attName, SearchParameters.Operator op, String attrValue) {
...@@ -472,4 +485,22 @@ public abstract class BasicTestSetup { ...@@ -472,4 +485,22 @@ public abstract class BasicTestSetup {
return filterCriteria; return filterCriteria;
} }
/**
 * Creates the sales glossary and sales term, then assigns the term to every
 * hive_table entity that was created by the hive test-data setup.
 *
 * Note: relies on {@code hiveEntities} having been populated by the hive
 * entity setup before this is called.
 *
 * @throws AtlasBaseException if glossary/term creation or term assignment fails
 */
public void assignGlossary() throws AtlasBaseException {
    // Create the glossary first; the term needs its GUID as an anchor.
    AtlasGlossary salesGlossary = new AtlasGlossary();
    salesGlossary.setName(SALES_GLOSSARY);
    salesGlossary = glossaryService.createGlossary(salesGlossary);

    AtlasGlossaryTerm salesTerm = new AtlasGlossaryTerm();
    salesTerm.setAnchor(new AtlasGlossaryHeader(salesGlossary.getGuid()));
    salesTerm.setName(SALES_TERM);
    salesTerm = glossaryService.createTerm(salesTerm);

    // Collect related-object ids for all hive_table entities created during setup.
    List<AtlasRelatedObjectId> tableObjectIds = hiveEntities.getCreatedEntities().stream()
            .filter(header -> header.getTypeName().equals(HIVE_TABLE_TYPE))
            .map(header -> {
                AtlasRelatedObjectId objectId = new AtlasRelatedObjectId();
                objectId.setGuid(header.getGuid());
                objectId.setTypeName(header.getTypeName());
                return objectId;
            })
            .collect(Collectors.toList());

    glossaryService.assignTermToEntities(salesTerm.getGuid(), tableObjectIds);
}
} }
...@@ -23,11 +23,13 @@ import org.apache.atlas.SortOrder; ...@@ -23,11 +23,13 @@ import org.apache.atlas.SortOrder;
import org.apache.atlas.TestModules; import org.apache.atlas.TestModules;
import org.apache.atlas.exception.AtlasBaseException; import org.apache.atlas.exception.AtlasBaseException;
import org.apache.atlas.model.discovery.SearchParameters; import org.apache.atlas.model.discovery.SearchParameters;
import org.apache.atlas.repository.graph.AtlasGraphProvider;
import org.apache.atlas.repository.graph.GraphBackedSearchIndexer; import org.apache.atlas.repository.graph.GraphBackedSearchIndexer;
import org.apache.atlas.repository.graphdb.AtlasGraph; import org.apache.atlas.repository.graphdb.AtlasGraph;
import org.apache.atlas.repository.graphdb.AtlasVertex; import org.apache.atlas.repository.graphdb.AtlasVertex;
import org.apache.atlas.repository.store.graph.v2.EntityGraphRetriever; import org.apache.atlas.repository.store.graph.v2.EntityGraphRetriever;
import org.apache.atlas.type.AtlasTypeRegistry; import org.apache.atlas.type.AtlasTypeRegistry;
import org.testng.annotations.AfterClass;
import org.testng.annotations.BeforeClass; import org.testng.annotations.BeforeClass;
import org.testng.annotations.Guice; import org.testng.annotations.Guice;
import org.testng.annotations.Test; import org.testng.annotations.Test;
...@@ -60,6 +62,7 @@ public class EntitySearchProcessorTest extends BasicTestSetup { ...@@ -60,6 +62,7 @@ public class EntitySearchProcessorTest extends BasicTestSetup {
@Inject @Inject
public GraphBackedSearchIndexer indexer; public GraphBackedSearchIndexer indexer;
private String expectedEntityName = "hive_Table_Null_tableType";
@Test @Test
public void searchTablesByClassification() throws AtlasBaseException { public void searchTablesByClassification() throws AtlasBaseException {
...@@ -132,7 +135,6 @@ public class EntitySearchProcessorTest extends BasicTestSetup { ...@@ -132,7 +135,6 @@ public class EntitySearchProcessorTest extends BasicTestSetup {
@Test(priority = -1) @Test(priority = -1)
public void searchWithNEQ_stringAttr() throws AtlasBaseException { public void searchWithNEQ_stringAttr() throws AtlasBaseException {
String expectedEntityName = "hive_Table_Null_tableType";
createDummyEntity(expectedEntityName,HIVE_TABLE_TYPE); createDummyEntity(expectedEntityName,HIVE_TABLE_TYPE);
SearchParameters params = new SearchParameters(); SearchParameters params = new SearchParameters();
params.setTypeName(HIVE_TABLE_TYPE); params.setTypeName(HIVE_TABLE_TYPE);
...@@ -154,7 +156,7 @@ public class EntitySearchProcessorTest extends BasicTestSetup { ...@@ -154,7 +156,7 @@ public class EntitySearchProcessorTest extends BasicTestSetup {
assertTrue(nameList.contains(expectedEntityName)); assertTrue(nameList.contains(expectedEntityName));
} }
@Test(dependsOnMethods = "searchWithNEQ_stringAttr") @Test
public void searchWithNEQ_pipeSeperatedAttr() throws AtlasBaseException { public void searchWithNEQ_pipeSeperatedAttr() throws AtlasBaseException {
SearchParameters params = new SearchParameters(); SearchParameters params = new SearchParameters();
params.setTypeName(HIVE_TABLE_TYPE); params.setTypeName(HIVE_TABLE_TYPE);
...@@ -173,10 +175,10 @@ public class EntitySearchProcessorTest extends BasicTestSetup { ...@@ -173,10 +175,10 @@ public class EntitySearchProcessorTest extends BasicTestSetup {
nameList.add((String) entityRetriever.toAtlasEntityHeader(vertex, Collections.singleton("name")).getAttribute("name")); nameList.add((String) entityRetriever.toAtlasEntityHeader(vertex, Collections.singleton("name")).getAttribute("name"));
} }
assertTrue(nameList.contains("hive_Table_Null_tableType")); assertTrue(nameList.contains(expectedEntityName));
} }
@Test(dependsOnMethods = "searchWithNEQ_stringAttr") @Test
public void searchWithNEQ_doubleAttr() throws AtlasBaseException { public void searchWithNEQ_doubleAttr() throws AtlasBaseException {
SearchParameters params = new SearchParameters(); SearchParameters params = new SearchParameters();
params.setTypeName(HIVE_TABLE_TYPE); params.setTypeName(HIVE_TABLE_TYPE);
...@@ -309,4 +311,53 @@ public class EntitySearchProcessorTest extends BasicTestSetup { ...@@ -309,4 +311,53 @@ public class EntitySearchProcessorTest extends BasicTestSetup {
assertEquals(processor.execute().size(), 2); assertEquals(processor.execute().size(), 2);
} }
@Test
public void searchWithNotContains_stringAttr() throws AtlasBaseException {
    // NOT_CONTAINS on a plain string attribute: every hive_table whose
    // tableType does not contain "Managed" should match — including the
    // dummy entity (null tableType) created by another test in this class.
    SearchParameters searchParams = new SearchParameters();
    searchParams.setTypeName(HIVE_TABLE_TYPE);
    searchParams.setEntityFilters(getSingleFilterCondition("tableType", SearchParameters.Operator.NOT_CONTAINS, "Managed"));
    searchParams.setLimit(20);

    SearchContext searchContext = new SearchContext(searchParams, typeRegistry, graph, indexer.getVertexIndexKeys());
    List<AtlasVertex> results = new EntitySearchProcessor(searchContext).execute();

    assertEquals(results.size(), 3);

    List<String> resultNames = new ArrayList<>();
    for (AtlasVertex result : results) {
        resultNames.add((String) entityRetriever.toAtlasEntityHeader(result, Collections.singleton("name")).getAttribute("name"));
    }

    assertTrue(resultNames.contains(expectedEntityName));
}
@Test
public void searchWithNotContains_pipeSeperatedAttr() throws AtlasBaseException {
    // NOT_CONTAINS against the pipe-separated __classificationNames system
    // attribute: tables NOT tagged with the Metric classification should
    // match, including the untagged dummy entity.
    SearchParameters searchParams = new SearchParameters();
    searchParams.setTypeName(HIVE_TABLE_TYPE);
    searchParams.setEntityFilters(getSingleFilterCondition("__classificationNames", SearchParameters.Operator.NOT_CONTAINS, METRIC_CLASSIFICATION));
    searchParams.setLimit(20);

    SearchContext searchContext = new SearchContext(searchParams, typeRegistry, graph, indexer.getVertexIndexKeys());
    List<AtlasVertex> results = new EntitySearchProcessor(searchContext).execute();

    assertEquals(results.size(), 7);

    List<String> resultNames = new ArrayList<>();
    for (AtlasVertex result : results) {
        resultNames.add((String) entityRetriever.toAtlasEntityHeader(result, Collections.singleton("name")).getAttribute("name"));
    }

    assertTrue(resultNames.contains(expectedEntityName));
}
/**
 * Drops the shared test graph after all tests in this class have run, so that
 * entities and glossary data created here do not leak into other test classes
 * executing in the same JVM.
 */
@AfterClass
public void teardown() {
AtlasGraphProvider.cleanup();
}
} }
Markdown is supported
0% or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment