Commit 8b5cb9d9 by Pinal Shah, committed by Sarath Subramanian

ATLAS-3838: Support multiple tag/classification in basic/quick search API

ATLAS-3652: Quick Search: API requirement for GET request on multiple entity types

Both JIRAs are addressed in the same commit.

Signed-off-by: Sarath Subramanian <sarath@apache.org>
parent e691a32c
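As an illustration of the API change (a minimal sketch, not part of this commit; the classification names are hypothetical), SearchParameters for basic/quick search can now carry comma-separated entity-type and classification names, mirroring the new tests and the searchparameters2Types.json fixture further below:

SearchParameters params = new SearchParameters();
params.setTypeName("hive_table,hive_db");   // multiple entity types, comma-separated
params.setClassification("PII,Dimension");  // multiple tags/classifications (hypothetical names)
params.setQuery("t");
params.setLimit(25);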
......@@ -25,14 +25,14 @@ import java.util.Map;
import java.util.Set;
public class AggregationContext {
private final String queryString;
private final FilterCriteria filterCriteria;
private final AtlasEntityType searchForEntityType;
private final Set<String> aggregationFieldNames;
private final Set<AtlasAttribute> aggregationAttributes;
private final Map<String, String> indexFieldNameCache;
private final boolean excludeDeletedEntities;
private final boolean includeSubTypes;
private final String queryString;
private final FilterCriteria filterCriteria;
private final Set<AtlasEntityType> searchForEntityTypes;
private final Set<String> aggregationFieldNames;
private final Set<AtlasAttribute> aggregationAttributes;
private final Map<String, String> indexFieldNameCache;
private final boolean excludeDeletedEntities;
private final boolean includeSubTypes;
/**
* @param queryString the query string whose aggregation metrics need to be retrieved.
......@@ -41,17 +41,17 @@ public class AggregationContext {
* @param indexFieldNameCache a map from attribute property keys to their index field names
* @param excludeDeletedEntities a boolean flag to indicate if the deleted entities need to be excluded in search
*/
public AggregationContext(String queryString,
FilterCriteria filterCriteria,
AtlasEntityType searchForEntityType,
Set<String> aggregationFieldNames,
Set<AtlasAttribute> aggregationAttributes,
Map<String, String> indexFieldNameCache,
boolean excludeDeletedEntities,
boolean includeSubTypes) {
public AggregationContext(String queryString,
FilterCriteria filterCriteria,
Set<AtlasEntityType> searchForEntityType,
Set<String> aggregationFieldNames,
Set<AtlasAttribute> aggregationAttributes,
Map<String, String> indexFieldNameCache,
boolean excludeDeletedEntities,
boolean includeSubTypes) {
this.queryString = queryString;
this.filterCriteria = filterCriteria;
this.searchForEntityType = searchForEntityType;
this.searchForEntityTypes = searchForEntityType;
this.aggregationFieldNames = aggregationFieldNames;
this.aggregationAttributes = aggregationAttributes;
this.indexFieldNameCache = indexFieldNameCache;
......@@ -67,8 +67,8 @@ public class AggregationContext {
return filterCriteria;
}
public AtlasEntityType getSearchForEntityType() {
return searchForEntityType;
public Set<AtlasEntityType> getSearchForEntityTypes() {
return searchForEntityTypes;
}
public Set<String> getAggregationFieldNames() {
......
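A minimal caller-side sketch of the updated constructor (assumed usage, not from this diff; typeRegistry and the remaining arguments are taken to be in scope), showing that a Set of entity types is now passed instead of a single AtlasEntityType:

Set<AtlasEntityType> entityTypes = new HashSet<>();
entityTypes.add(typeRegistry.getEntityTypeByName("hive_table"));
entityTypes.add(typeRegistry.getEntityTypeByName("hive_db"));
AggregationContext aggregationContext = new AggregationContext(queryString, filterCriteria, entityTypes,
        aggregationFieldNames, aggregationAttributes, indexFieldNameCache,
        excludeDeletedEntities, includeSubTypes);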
......@@ -175,7 +175,7 @@ public class AtlasJanusGraphIndexClient implements AtlasGraphIndexClient {
Map<String, String> indexFieldName2PropertyKeyNameMap = new HashMap<>();
AtlasSolrQueryBuilder solrQueryBuilder = new AtlasSolrQueryBuilder();
solrQueryBuilder.withEntityType(aggregationContext.getSearchForEntityType())
solrQueryBuilder.withEntityTypes(aggregationContext.getSearchForEntityTypes())
.withQueryString(aggregationContext.getQueryString())
.withCriteria(aggregationContext.getFilterCriteria())
.withExcludedDeletedEntities(aggregationContext.isExcludeDeletedEntities())
......
......@@ -29,31 +29,28 @@ import org.apache.commons.lang.StringUtils;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.*;
import static org.apache.atlas.repository.Constants.CUSTOM_ATTRIBUTES_PROPERTY_KEY;
public class AtlasSolrQueryBuilder {
private static final Logger LOG = LoggerFactory.getLogger(AtlasSolrQueryBuilder.class);
private AtlasEntityType entityType;
private String queryString;
private FilterCriteria criteria;
private boolean excludeDeletedEntities;
private boolean includeSubtypes;
private Map<String, String> indexFieldNameCache;
public static final char CUSTOM_ATTR_SEPARATOR = '=';
public static final String CUSTOM_ATTR_SEARCH_FORMAT = "\"\\\"%s\\\":\\\"%s\\\"\"";
private Set<AtlasEntityType> entityTypes;
private String queryString;
private FilterCriteria criteria;
private boolean excludeDeletedEntities;
private boolean includeSubtypes;
private Map<String, String> indexFieldNameCache;
public static final char CUSTOM_ATTR_SEPARATOR = '=';
public static final String CUSTOM_ATTR_SEARCH_FORMAT = "\"\\\"%s\\\":\\\"%s\\\"\"";
public AtlasSolrQueryBuilder() {
}
public AtlasSolrQueryBuilder withEntityType(AtlasEntityType searchForEntityType) {
this.entityType = searchForEntityType;
public AtlasSolrQueryBuilder withEntityTypes(Set<AtlasEntityType> searchForEntityTypes) {
this.entityTypes = searchForEntityTypes;
return this;
}
......@@ -112,7 +109,7 @@ public class AtlasSolrQueryBuilder {
isAndNeeded = true;
}
if (entityType != null) {
if (CollectionUtils.isNotEmpty(entityTypes)) {
if (isAndNeeded) {
queryBuilder.append(" AND ");
}
......@@ -140,29 +137,25 @@ public class AtlasSolrQueryBuilder {
}
private void buildForEntityType(StringBuilder queryBuilder) {
if (LOG.isDebugEnabled()) {
LOG.debug("Search is being done for entities of type {}", entityType.getTypeName());
}
String typeIndexFieldName = indexFieldNameCache.get(Constants.ENTITY_TYPE_PROPERTY_KEY);
queryBuilder.append(" +")
.append(typeIndexFieldName)
.append(":(")
.append(entityType.getTypeName())
.append(" ");
.append(":(");
if (includeSubtypes) {
Set<String> allSubTypes = entityType.getAllSubTypes();
Set<String> typesToSearch = new HashSet<>();
for (AtlasEntityType type : entityTypes) {
if(allSubTypes.size() != 0 ) {
for(String subTypeName: allSubTypes) {
queryBuilder.append(subTypeName).append(" ");
}
if (includeSubtypes) {
typesToSearch.addAll(type.getTypeAndAllSubTypes());
} else {
typesToSearch.add(type.getTypeName());
}
}
queryBuilder.append(" ) ");
queryBuilder.append(StringUtils.join(typesToSearch, " ")).append(" ) ");
}
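For reference, a condensed sketch of the new type-clause assembly with assumed inputs (it mirrors the loop above; set ordering is not guaranteed, and the index field name is the one used in the tests below):

Set<String> typesToSearch = new HashSet<>(Arrays.asList("hive_table", "hive_db"));
queryBuilder.append(" +").append("__typeName__index").append(":(")
            .append(StringUtils.join(typesToSearch, " ")).append(" ) ");
// yields roughly: " +__typeName__index:(hive_table hive_db ) "
// with includeSubtypes enabled, type.getTypeAndAllSubTypes() expands each entry to the type plus all of its subtypes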
private void dropDeletedEntities(StringBuilder queryBuilder) throws AtlasBaseException {
......@@ -173,9 +166,8 @@ public class AtlasSolrQueryBuilder {
String indexFieldName = indexFieldNameCache.get(Constants.STATE_PROPERTY_KEY);
if (indexFieldName == null) {
String msg = String.format("There is no index field name defined for attribute '%s' for entity '%s'",
Constants.STATE_PROPERTY_KEY,
entityType.getTypeName());
String msg = String.format("There is no index field name defined for attribute '%s'",
Constants.STATE_PROPERTY_KEY);
LOG.error(msg);
......@@ -187,10 +179,46 @@ public class AtlasSolrQueryBuilder {
private AtlasSolrQueryBuilder withCriteria(StringBuilder queryBuilder, FilterCriteria criteria) throws AtlasBaseException {
List<FilterCriteria> criterion = criteria.getCriterion();
Set<String> indexAttributes = new HashSet<>();
if (StringUtils.isNotEmpty(criteria.getAttributeName()) && CollectionUtils.isEmpty(criterion)) { // no child criterion
String attributeName = criteria.getAttributeName();
String attributeValue = criteria.getAttributeValue();
Operator operator = criteria.getOperator();
ArrayList<StringBuilder> orExpQuery = new ArrayList<>();
for (AtlasEntityType type : entityTypes) {
String indexAttributeName = getIndexAttributeName(type, attributeName);
// skip duplicate conditions when multiple types resolve the attribute to the same index field (e.g. name)
if (!indexAttributes.contains(indexAttributeName)) {
StringBuilder sb = new StringBuilder();
if (attributeName.equals(CUSTOM_ATTRIBUTES_PROPERTY_KEY) && operator.equals(Operator.CONTAINS)) {
// Custom attributes store key=value pairs in a single String, so a 'contains' search for one pair can over-match:
// with E1 having key1=value1 and E2 having key1=value2, searching for key1=value1 returns both E1 and E2.
// Switching to EQ and wrapping attributeValue in escaped quotes restricts the match to the exact pair.
operator = Operator.EQ;
attributeValue = getIndexQueryAttributeValue(attributeValue);
}
withPropertyCondition(sb, indexAttributeName, operator, attributeValue);
indexAttributes.add(indexAttributeName);
orExpQuery.add(sb);
}
}
if (CollectionUtils.isNotEmpty(orExpQuery)) {
if (orExpQuery.size() > 1) {
String orExpStr = StringUtils.join(orExpQuery, FilterCriteria.Condition.OR.name());
queryBuilder.append(" ( ").append(orExpStr).append(" ) ");
} else {
queryBuilder.append(orExpQuery.iterator().next());
}
}
if(criterion == null || CollectionUtils.isEmpty(criteria.getCriterion())) { // no child criterion
withPropertyCondition(queryBuilder, criteria.getAttributeName(), criteria.getOperator(), criteria.getAttributeValue());
} else {
} else if (CollectionUtils.isNotEmpty(criterion)) {
beginCriteria(queryBuilder);
for (Iterator<FilterCriteria> iterator = criterion.iterator(); iterator.hasNext(); ) {
......@@ -209,40 +237,12 @@ public class AtlasSolrQueryBuilder {
return this;
}
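When a single attribute filter is applied across several selected types, the per-type conditions are built individually and OR-ed together, with duplicates skipped when types share an index field. A rough, whitespace-simplified sketch of the appended fragment, using hypothetical index field names:

// assumed: 'name' resolves to name_index for one type and name_index2 for the other (hypothetical names)
//   ( ( +name_index:t10 )OR( +name_index2:t10 ) )
// if every selected type resolves to the same index field, the single condition is appended without the OR wrapper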
private void withPropertyCondition(StringBuilder queryBuilder, String attributeName, Operator operator, String attributeValue) throws AtlasBaseException {
if (StringUtils.isNotEmpty(attributeName) && operator != null) {
private void withPropertyCondition(StringBuilder queryBuilder, String indexFieldName, Operator operator, String attributeValue) throws AtlasBaseException {
if (StringUtils.isNotEmpty(indexFieldName) && operator != null) {
if (attributeValue != null) {
attributeValue = attributeValue.trim();
}
if (attributeName.equals(CUSTOM_ATTRIBUTES_PROPERTY_KEY) && operator.equals(Operator.CONTAINS)) {
// CustomAttributes stores key value pairs in String format, so ideally it should be 'contains' operator to search for one pair,
// for use-case, E1 having key1=value1 and E2 having key1=value2, searching key1=value1 results both E1,E2
// surrounding inverted commas to attributeValue works
operator = Operator.EQ;
attributeValue = getIndexQueryAttributeValue(attributeValue);
}
AtlasAttribute attribute = entityType.getAttribute(attributeName);
if (attribute == null) {
String msg = String.format("Received unknown attribute '%s' for type '%s'.", attributeName, entityType.getTypeName());
LOG.error(msg);
throw new AtlasBaseException(msg);
}
String indexFieldName = attribute.getIndexFieldName();
if (indexFieldName == null) {
String msg = String.format("Received non-index attribute %s for type %s.", attributeName, entityType.getTypeName());
LOG.error(msg);
throw new AtlasBaseException(msg);
}
beginCriteria(queryBuilder);
switch (operator) {
......@@ -308,6 +308,29 @@ public class AtlasSolrQueryBuilder {
return attributeValue;
}
private String getIndexAttributeName(AtlasEntityType type, String attrName) throws AtlasBaseException {
AtlasAttribute ret = type.getAttribute(attrName);
if (ret == null) {
String msg = String.format("Received unknown attribute '%s' for type '%s'.", attrName, type.getTypeName());
LOG.error(msg);
throw new AtlasBaseException(msg);
}
String indexFieldName = ret.getIndexFieldName();
if (indexFieldName == null) {
String msg = String.format("Received non-index attribute %s for type %s.", attrName, type.getTypeName());
LOG.error(msg);
throw new AtlasBaseException(msg);
}
return indexFieldName;
}
private void beginCriteria(StringBuilder queryBuilder) {
queryBuilder.append("( ");
......
......@@ -33,8 +33,10 @@ import org.testng.annotations.Test;
import java.io.FileInputStream;
import java.io.IOException;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Set;
import static org.mockito.Mockito.when;
......@@ -45,6 +47,9 @@ public class AtlasSolrQueryBuilderTest {
private AtlasEntityType hiveTableEntityTypeMock;
@Mock
private AtlasEntityType hiveTableEntityTypeMock2;
@Mock
private AtlasStructType.AtlasAttribute nameAttributeMock;
@Mock
......@@ -95,6 +100,7 @@ public class AtlasSolrQueryBuilderTest {
when(hiveTableEntityTypeMock.getTypeName()).thenReturn("hive_table");
when(hiveTableEntityTypeMock2.getTypeName()).thenReturn("hive_db");
when(nameAttributeMock.getIndexFieldName()).thenReturn("name_index");
when(commentAttributeMock.getIndexFieldName()).thenReturn("comment_index");
......@@ -114,7 +120,7 @@ public class AtlasSolrQueryBuilderTest {
processSearchParameters(fileName, underTest);
Assert.assertEquals(underTest.build(), "+t AND -__state_index:DELETED AND +__typeName__index:(hive_table ) AND ( ( +name_index:t10 ) OR ( +comment_index:*t10* ) )");
Assert.assertEquals(underTest.build(), "+t AND -__state_index:DELETED AND +__typeName__index:(hive_table ) AND ( ( +name_index:t10 ) OR ( +comment_index:*t10* ) )");
}
@Test
......@@ -124,7 +130,7 @@ public class AtlasSolrQueryBuilderTest {
processSearchParameters(fileName, underTest);
Assert.assertEquals(underTest.build(), "+t AND -__state_index:DELETED AND +__typeName__index:(hive_table ) AND ( ( +name_index:t10 ) )");
Assert.assertEquals(underTest.build(), "+t AND -__state_index:DELETED AND +__typeName__index:(hive_table ) AND ( ( +name_index:t10 ) )");
}
@Test
......@@ -134,7 +140,7 @@ public class AtlasSolrQueryBuilderTest {
processSearchParameters(fileName, underTest);
Assert.assertEquals(underTest.build(), "+t AND -__state_index:DELETED AND +__typeName__index:(hive_table ) AND ( ( +name_index:t10 ) AND ( +comment_index:*t10* ) )");
Assert.assertEquals(underTest.build(), "+t AND -__state_index:DELETED AND +__typeName__index:(hive_table ) AND ( ( +name_index:t10 ) AND ( +comment_index:*t10* ) )");
}
@Test
......@@ -144,7 +150,7 @@ public class AtlasSolrQueryBuilderTest {
processSearchParameters(fileName, underTest);
Assert.assertEquals(underTest.build(), "+t AND -__state_index:DELETED AND +__typeName__index:(hive_table ) AND ( ( +name_index:t10 ) )");
Assert.assertEquals(underTest.build(), "+t AND -__state_index:DELETED AND +__typeName__index:(hive_table ) AND ( ( +name_index:t10 ) )");
}
@Test
......@@ -154,7 +160,7 @@ public class AtlasSolrQueryBuilderTest {
processSearchParameters(fileName, underTest);
Assert.assertEquals(underTest.build(), "+t AND -__state_index:DELETED AND +__typeName__index:(hive_table ) AND ( +name_index:t10 )");
Assert.assertEquals(underTest.build(), "+t AND -__state_index:DELETED AND +__typeName__index:(hive_table ) AND ( +name_index:t10 )");
}
@Test
......@@ -164,7 +170,7 @@ public class AtlasSolrQueryBuilderTest {
processSearchParameters(fileName, underTest);
Assert.assertEquals(underTest.build(), "+t10 AND -__state_index:DELETED AND +__typeName__index:(hive_table ) AND ( ( +comment_index:*United States* ) AND ( +descrption__index:*nothing* ) AND ( +name_index:*t100* ) )");
Assert.assertEquals(underTest.build(), "+t10 AND -__state_index:DELETED AND +__typeName__index:(hive_table ) AND ( ( +comment_index:*United States* ) AND ( +descrption__index:*nothing* ) AND ( +name_index:*t100* ) )");
}
@Test
......@@ -174,7 +180,7 @@ public class AtlasSolrQueryBuilderTest {
processSearchParameters(fileName, underTest);
Assert.assertEquals(underTest.build(), "+t10 AND -__state_index:DELETED AND +__typeName__index:(hive_table ) AND ( ( +created__index:{ 100 TO * ] ) )");
Assert.assertEquals(underTest.build(), "+t10 AND -__state_index:DELETED AND +__typeName__index:(hive_table ) AND ( ( +created__index:{ 100 TO * ] ) )");
}
@Test
......@@ -184,7 +190,7 @@ public class AtlasSolrQueryBuilderTest {
processSearchParameters(fileName, underTest);
Assert.assertEquals(underTest.build(), "+t10 AND -__state_index:DELETED AND +__typeName__index:(hive_table ) AND ( ( +created__index:[ 100 TO * ] ) AND ( +started__index:[ 100 TO * ] ) )");
Assert.assertEquals(underTest.build(), "+t10 AND -__state_index:DELETED AND +__typeName__index:(hive_table ) AND ( ( +created__index:[ 100 TO * ] ) AND ( +started__index:[ 100 TO * ] ) )");
}
@Test
......@@ -194,7 +200,7 @@ public class AtlasSolrQueryBuilderTest {
processSearchParameters(fileName, underTest);
Assert.assertEquals(underTest.build(), "+t10 AND -__state_index:DELETED AND +__typeName__index:(hive_table ) AND ( ( +created__index:[ * TO100} ) )");
Assert.assertEquals(underTest.build(), "+t10 AND -__state_index:DELETED AND +__typeName__index:(hive_table ) AND ( ( +created__index:[ * TO100} ) )");
}
@Test
......@@ -204,7 +210,7 @@ public class AtlasSolrQueryBuilderTest {
processSearchParameters(fileName, underTest);
Assert.assertEquals(underTest.build(), "+t10 AND -__state_index:DELETED AND +__typeName__index:(hive_table ) AND ( ( +created__index:[ * TO 100 ] ) AND ( +started__index:[ * TO 100 ] ) )");
Assert.assertEquals(underTest.build(), "+t10 AND -__state_index:DELETED AND +__typeName__index:(hive_table ) AND ( ( +created__index:[ * TO 100 ] ) AND ( +started__index:[ * TO 100 ] ) )");
}
@Test
......@@ -214,9 +220,18 @@ public class AtlasSolrQueryBuilderTest {
processSearchParameters(fileName, underTest);
Assert.assertEquals(underTest.build(), " -__state_index:DELETED AND +__typeName__index:(hive_table ) AND ( ( +qualifiedName__index:testdb.t1* ) )");
Assert.assertEquals(underTest.build(), " -__state_index:DELETED AND +__typeName__index:(hive_table ) AND ( ( +qualifiedName__index:testdb.t1* ) )");
}
@Test
public void testGenerateSolrQueryString2TypeNames() throws IOException, AtlasBaseException {
final String fileName = "src/test/resources/searchparameters2Types.json";
AtlasSolrQueryBuilder underTest = new AtlasSolrQueryBuilder();
processSearchParametersForMultipleTypeNames(fileName, underTest);
Assert.assertEquals(underTest.build(), "+t AND -__state_index:DELETED AND +__typeName__index:(hive_table hive_db ) ");
}
......@@ -243,8 +258,23 @@ public class AtlasSolrQueryBuilderTest {
ObjectMapper mapper = new ObjectMapper();
SearchParameters searchParameters = mapper.readValue(new FileInputStream(fileName), SearchParameters.class);
Set<AtlasEntityType> hiveTableEntityTypeMocks = new HashSet<>();
hiveTableEntityTypeMocks.add(hiveTableEntityTypeMock);
underTest.withEntityTypes(hiveTableEntityTypeMocks)
.withQueryString(searchParameters.getQuery())
.withCriteria(searchParameters.getEntityFilters())
.withExcludedDeletedEntities(searchParameters.getExcludeDeletedEntities())
.withCommonIndexFieldNames(indexFieldNamesMap);
}
private void processSearchParametersForMultipleTypeNames(String fileName, AtlasSolrQueryBuilder underTest) throws IOException, AtlasBaseException {
ObjectMapper mapper = new ObjectMapper();
SearchParameters searchParameters = mapper.readValue(new FileInputStream(fileName), SearchParameters.class);
underTest.withEntityType(hiveTableEntityTypeMock)
Set<AtlasEntityType> hiveTableEntityTypeMocks = new HashSet<>();
hiveTableEntityTypeMocks.add(hiveTableEntityTypeMock);
hiveTableEntityTypeMocks.add(hiveTableEntityTypeMock2);
underTest.withEntityTypes(hiveTableEntityTypeMocks)
.withQueryString(searchParameters.getQuery())
.withCriteria(searchParameters.getEntityFilters())
.withExcludedDeletedEntities(searchParameters.getExcludeDeletedEntities())
......
{
"excludeDeletedEntities":true,
"includeSubClassifications":true,
"includeSubTypes":true,
"includeClassificationAttributes":true,
"entityFilters":{
},
"tagFilters":null,
"attributes":[
"comment"
],
"query":"t",
"limit":25,
"offset":0,
"typeName":"hive_table,hive_db",
"classification":null,
"termName":null
}
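The fixture above passes typeName as a comma-separated list; resolving that list into a Set<AtlasEntityType> happens in SearchContext, which is outside this hunk. A rough sketch of such resolution (assumed, for illustration only; the error message format mirrors the new EntitySearchProcessorTest expectation):

Set<AtlasEntityType> entityTypes = new HashSet<>();
for (String typeName : searchParameters.getTypeName().split(",")) {
    AtlasEntityType entityType = typeRegistry.getEntityTypeByName(typeName.trim());
    if (entityType == null) {
        throw new AtlasBaseException(typeName + ": Unknown/invalid typename");
    }
    entityTypes.add(entityType);
}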
......@@ -67,17 +67,16 @@ public class ClassificationSearchProcessor extends SearchProcessor {
public ClassificationSearchProcessor(SearchContext context) {
super(context);
final AtlasClassificationType classificationType = context.getClassificationType();
final FilterCriteria filterCriteria = context.getSearchParameters().getTagFilters();
final Set<String> indexAttributes = new HashSet<>();
final Set<String> graphAttributes = new HashSet<>();
final Set<String> allAttributes = new HashSet<>();
final Set<String> typeAndSubTypes = context.getClassificationTypes();
final Set<String> typeAndSubTypes = context.getClassificationTypeNames();
final String typeAndSubTypesQryStr = context.getClassificationTypesQryStr();
final boolean isBuiltInType = context.isBuiltInClassificationType();
final boolean isWildcardSearch = context.isWildCardSearch();
final Set<AtlasClassificationType> classificationTypes = context.getClassificationTypes();
processSearchAttributes(classificationType, filterCriteria, indexAttributes, graphAttributes, allAttributes);
processSearchAttributes(classificationTypes, filterCriteria, indexAttributes, graphAttributes, allAttributes);
/* For classification search, if any attribute cannot be handled by the index query, switch to filtering everything via the graph query.
There are four cases in the classification type :
......@@ -87,19 +86,25 @@ public class ClassificationSearchProcessor extends SearchProcessor {
4. classification is not present in the search parameter
each of the above cases may come with or without tagFilters
*/
final boolean useIndexSearchForEntity = (classificationType != null || isWildcardSearch) &&
final boolean useIndexSearchForEntity = (CollectionUtils.isNotEmpty(classificationTypes) || isWildcardSearch) &&
!context.hasAttributeFilter(filterCriteria) &&
(typeAndSubTypesQryStr.length() <= MAX_QUERY_STR_LENGTH_TAGS);
/* If classification's attributes can be applied index filter, we can use direct index
* to query classification index as well.
*/
final boolean useIndexSearchForClassification = (!isBuiltInType && !isWildcardSearch) &&
final boolean useIndexSearchForClassification = (CollectionUtils.isNotEmpty(classificationTypes) &&
classificationTypes.iterator().next() != SearchContext.MATCH_ALL_NOT_CLASSIFIED &&
!isWildcardSearch) &&
(typeAndSubTypesQryStr.length() <= MAX_QUERY_STR_LENGTH_TAGS) &&
CollectionUtils.isNotEmpty(indexAttributes) &&
canApplyIndexFilter(classificationType, filterCriteria, false);
canApplyIndexFilter(classificationTypes, filterCriteria, false);
traitPredicate = buildTraitPredict(classificationType);
final boolean useGraphSearchForClassification = (CollectionUtils.isNotEmpty(classificationTypes) &&
classificationTypes.iterator().next() != SearchContext.MATCH_ALL_NOT_CLASSIFIED &&
!isWildcardSearch && CollectionUtils.isNotEmpty(graphAttributes));
traitPredicate = buildTraitPredict(classificationTypes);
isEntityPredicate = SearchPredicateUtil.generateIsEntityVertexPredicate(context.getTypeRegistry());
AtlasGraph graph = context.getGraph();
......@@ -115,8 +120,7 @@ public class ClassificationSearchProcessor extends SearchProcessor {
// tagFilters is not allowed in wildcard search
graphIndexQueryBuilder.addClassificationTypeFilter(queryString);
} else {
if (isBuiltInType) {
if (classificationTypes.iterator().next() == SearchContext.MATCH_ALL_NOT_CLASSIFIED) {
// tagFilters is not allowed in unique classificationType search
graphIndexQueryBuilder.addClassificationFilterForBuiltInTypes(queryString);
......@@ -146,7 +150,7 @@ public class ClassificationSearchProcessor extends SearchProcessor {
graphIndexQueryBuilder.addActiveStateQueryFilter(queryString);
graphIndexQueryBuilder.addTypeAndSubTypesQueryFilter(queryString, typeAndSubTypesQryStr);
constructFilterQuery(queryString, classificationType, filterCriteria, indexAttributes);
constructFilterQuery(queryString, classificationTypes, filterCriteria, indexAttributes);
String indexQueryString = STRAY_AND_PATTERN.matcher(queryString).replaceAll(")");
indexQueryString = STRAY_OR_PATTERN.matcher(indexQueryString).replaceAll(")");
......@@ -158,7 +162,7 @@ public class ClassificationSearchProcessor extends SearchProcessor {
inMemoryPredicate = inMemoryPredicate == null ? typeNamePredicate : PredicateUtils.andPredicate(inMemoryPredicate, typeNamePredicate);
}
Predicate attributePredicate = constructInMemoryPredicate(classificationType, filterCriteria, indexAttributes);
Predicate attributePredicate = constructInMemoryPredicate(classificationTypes, filterCriteria, indexAttributes);
if (attributePredicate != null) {
inMemoryPredicate = inMemoryPredicate == null ? attributePredicate : PredicateUtils.andPredicate(inMemoryPredicate, attributePredicate);
......@@ -170,7 +174,7 @@ public class ClassificationSearchProcessor extends SearchProcessor {
}
// only registered classification will search with tag filters
if (!isWildcardSearch && !isBuiltInType && !graphAttributes.isEmpty()) {
if (useGraphSearchForClassification) {
AtlasGremlinQueryProvider queryProvider = AtlasGremlinQueryProvider.INSTANCE;
AtlasGraphQuery query = graph.query();
......@@ -179,7 +183,7 @@ public class ClassificationSearchProcessor extends SearchProcessor {
query.in(Constants.TYPE_NAME_PROPERTY_KEY, typeAndSubTypes);
}
tagGraphQueryWithAttributes = toGraphFilterQuery(classificationType, filterCriteria, allAttributes, query);
tagGraphQueryWithAttributes = toGraphFilterQuery(classificationTypes, filterCriteria, allAttributes, query);
gremlinQueryBindings = new HashMap<>();
StringBuilder gremlinQuery = new StringBuilder();
......@@ -188,7 +192,7 @@ public class ClassificationSearchProcessor extends SearchProcessor {
gremlinQuery.append(".as('e').filter(out()");
gremlinQuery.append(queryProvider.getQuery(AtlasGremlinQueryProvider.AtlasGremlinQuery.BASIC_SEARCH_TYPE_FILTER));
constructGremlinFilterQuery(gremlinQuery, gremlinQueryBindings, context.getClassificationType(), context.getSearchParameters().getTagFilters());
// constructGremlinFilterQuery(gremlinQuery, gremlinQueryBindings, context.getClassificationType(), context.getSearchParameters().getTagFilters());
// After filtering on tags go back to e and output the list of entity vertices
gremlinQuery.append(").toList()");
......
......@@ -486,9 +486,11 @@ public class EntityDiscoveryService implements AtlasDiscoveryService {
resultAttributes.addAll(searchContext.getEntityAttributes());
}
AtlasEntityType entityType = searchContext.getEntityType();
if (entityType != null) {
for (String resultAttribute : resultAttributes) {
if (CollectionUtils.isNotEmpty(searchContext.getEntityTypes())) {
AtlasEntityType entityType = searchContext.getEntityTypes().iterator().next();
for (String resultAttribute : resultAttributes) {
AtlasAttribute attribute = entityType.getAttribute(resultAttribute);
if (attribute == null) {
......
......@@ -31,27 +31,15 @@ import org.apache.atlas.utils.AtlasPerfTracer;
import org.apache.commons.collections.CollectionUtils;
import org.apache.commons.collections.Predicate;
import org.apache.commons.collections.PredicateUtils;
import org.apache.commons.lang.StringUtils;
import org.apache.tinkerpop.gremlin.process.traversal.Order;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.util.ArrayList;
import java.util.HashSet;
import java.util.Iterator;
import java.util.LinkedList;
import java.util.List;
import java.util.Set;
import java.util.*;
import java.util.stream.StreamSupport;
import static org.apache.atlas.SortOrder.ASCENDING;
import static org.apache.atlas.discovery.SearchContext.MATCH_ALL_CLASSIFICATION_TYPES;
import static org.apache.atlas.discovery.SearchContext.MATCH_ALL_CLASSIFIED;
import static org.apache.atlas.discovery.SearchContext.MATCH_ALL_NOT_CLASSIFIED;
import static org.apache.atlas.discovery.SearchContext.MATCH_ALL_WILDCARD_CLASSIFICATION;
import static org.apache.atlas.repository.Constants.PROPAGATED_TRAIT_NAMES_PROPERTY_KEY;
import static org.apache.atlas.repository.Constants.TRAIT_NAMES_PROPERTY_KEY;
import static org.apache.atlas.repository.Constants.TYPE_NAME_PROPERTY_KEY;
import static org.apache.atlas.discovery.SearchContext.*;
import static org.apache.atlas.repository.Constants.*;
import static org.apache.atlas.repository.graphdb.AtlasGraphQuery.ComparisionOperator.EQUAL;
import static org.apache.atlas.repository.graphdb.AtlasGraphQuery.ComparisionOperator.NOT_EQUAL;
import static org.apache.atlas.repository.graphdb.AtlasGraphQuery.SortOrder.ASC;
......@@ -69,28 +57,28 @@ public class EntitySearchProcessor extends SearchProcessor {
public EntitySearchProcessor(SearchContext context) {
super(context);
final AtlasEntityType entityType = context.getEntityType();
final Set<AtlasEntityType> entityTypes = context.getEntityTypes();
final FilterCriteria filterCriteria = context.getSearchParameters().getEntityFilters();
final Set<String> indexAttributes = new HashSet<>();
final Set<String> graphAttributes = new HashSet<>();
final Set<String> allAttributes = new HashSet<>();
final Set<String> typeAndSubTypes = context.getEntityTypes();
final Set<String> typeAndSubTypes = context.getEntityTypeNames();
final String typeAndSubTypesQryStr = context.getEntityTypesQryStr();
final String sortBy = context.getSearchParameters().getSortBy();
final SortOrder sortOrder = context.getSearchParameters().getSortOrder();
final AtlasClassificationType classificationType = context.getClassificationType();
final Set<String> classificationTypeAndSubTypes = context.getClassificationTypes();
final boolean filterClassification;
final Set<AtlasClassificationType> classificationTypes = context.getClassificationTypes();
final Set<String> classificationTypeAndSubTypes = context.getClassificationTypeNames();
final boolean filterClassification;
if (classificationType != null) {
if (CollectionUtils.isNotEmpty(classificationTypes)) {
filterClassification = !context.needClassificationProcessor();
} else {
filterClassification = false;
}
final Predicate typeNamePredicate;
final Predicate traitPredicate = buildTraitPredict(classificationType);
final Predicate traitPredicate = buildTraitPredict(classificationTypes);
final Predicate activePredicate = SearchPredicateUtil.getEQPredicateGenerator()
.generatePredicate(Constants.STATE_PROPERTY_KEY, "ACTIVE", String.class);
......@@ -101,10 +89,10 @@ public class EntitySearchProcessor extends SearchProcessor {
typeNamePredicate = SearchPredicateUtil.generateIsEntityVertexPredicate(context.getTypeRegistry());
}
processSearchAttributes(entityType, filterCriteria, indexAttributes, graphAttributes, allAttributes);
processSearchAttributes(entityTypes, filterCriteria, indexAttributes, graphAttributes, allAttributes);
final boolean typeSearchByIndex = !filterClassification && typeAndSubTypesQryStr.length() <= MAX_QUERY_STR_LENGTH_TYPES;
final boolean attrSearchByIndex = !filterClassification && CollectionUtils.isNotEmpty(indexAttributes) && canApplyIndexFilter(entityType, filterCriteria, false);
final boolean attrSearchByIndex = !filterClassification && CollectionUtils.isNotEmpty(indexAttributes) && canApplyIndexFilter(entityTypes, filterCriteria, false);
StringBuilder indexQuery = new StringBuilder();
......@@ -116,9 +104,9 @@ public class EntitySearchProcessor extends SearchProcessor {
}
if (attrSearchByIndex) {
constructFilterQuery(indexQuery, entityType, filterCriteria, indexAttributes);
constructFilterQuery(indexQuery, entityTypes, filterCriteria, indexAttributes);
Predicate attributePredicate = constructInMemoryPredicate(entityType, filterCriteria, indexAttributes);
Predicate attributePredicate = constructInMemoryPredicate(entityTypes, filterCriteria, indexAttributes);
if (attributePredicate != null) {
inMemoryPredicate = PredicateUtils.andPredicate(inMemoryPredicate, attributePredicate);
}
......@@ -149,6 +137,7 @@ public class EntitySearchProcessor extends SearchProcessor {
// If we need to filter on the trait names then we need to build the query and equivalent in-memory predicate
if (filterClassification) {
AtlasClassificationType classificationType = classificationTypes.iterator().next();
List<AtlasGraphQuery> orConditions = new LinkedList<>();
if (classificationType == MATCH_ALL_WILDCARD_CLASSIFICATION || classificationType == MATCH_ALL_CLASSIFIED || classificationType == MATCH_ALL_CLASSIFICATION_TYPES) {
......@@ -176,10 +165,10 @@ public class EntitySearchProcessor extends SearchProcessor {
}
}
graphQuery = toGraphFilterQuery(entityType, filterCriteria, graphAttributes, query);
graphQuery = toGraphFilterQuery(entityTypes, filterCriteria, graphAttributes, query);
// Prepare in-memory predicate for attribute filtering
Predicate attributePredicate = constructInMemoryPredicate(entityType, filterCriteria, graphAttributes);
Predicate attributePredicate = constructInMemoryPredicate(entityTypes, filterCriteria, graphAttributes);
if (attributePredicate != null) {
if (graphQueryPredicate != null) {
......@@ -199,7 +188,8 @@ public class EntitySearchProcessor extends SearchProcessor {
}
}
if (sortBy != null && !sortBy.isEmpty()) {
AtlasAttribute sortByAttribute = context.getEntityType().getAttribute(sortBy);
final AtlasEntityType entityType = context.getEntityTypes().iterator().next();
AtlasAttribute sortByAttribute = entityType.getAttribute(sortBy);
if (sortByAttribute != null) {
AtlasGraphQuery.SortOrder qrySortOrder = sortOrder == SortOrder.ASCENDING ? ASC : DESC;
......@@ -215,8 +205,7 @@ public class EntitySearchProcessor extends SearchProcessor {
// Prepare the graph query and in-memory filter for the filtering phase
filterGraphQueryPredicate = typeNamePredicate;
Predicate attributesPredicate = constructInMemoryPredicate(entityType, filterCriteria, allAttributes);
Predicate attributesPredicate = constructInMemoryPredicate(entityTypes, filterCriteria, allAttributes);
if (attributesPredicate != null) {
filterGraphQueryPredicate = filterGraphQueryPredicate == null ? attributesPredicate :
......@@ -265,7 +254,7 @@ public class EntitySearchProcessor extends SearchProcessor {
SortOrder sortOrder = context.getSearchParameters().getSortOrder();
String sortBy = context.getSearchParameters().getSortBy();
final AtlasEntityType entityType = context.getEntityType();
final AtlasEntityType entityType = context.getEntityTypes().iterator().next();
AtlasAttribute sortByAttribute = entityType.getAttribute(sortBy);
if (sortByAttribute == null) {
sortBy = null;
......
......@@ -18,7 +18,6 @@
package org.apache.atlas.discovery;
import org.apache.atlas.model.discovery.SearchParameters;
import org.apache.atlas.model.instance.AtlasEntity;
import org.apache.atlas.repository.Constants;
import org.apache.atlas.repository.graph.GraphHelper;
import org.apache.atlas.repository.graphdb.*;
......@@ -53,13 +52,13 @@ public class FreeTextSearchProcessor extends SearchProcessor {
queryString.append(searchParameters.getQuery());
if (CollectionUtils.isNotEmpty(context.getEntityTypes()) && context.getEntityTypesQryStr().length() <= MAX_QUERY_STR_LENGTH_TYPES) {
if (CollectionUtils.isNotEmpty(context.getEntityTypeNames()) && context.getEntityTypesQryStr().length() <= MAX_QUERY_STR_LENGTH_TYPES) {
queryString.append(AND_STR).append(context.getEntityTypesQryStr());
}
graphIndexQueryBuilder.addActiveStateQueryFilter(queryString);
if (CollectionUtils.isNotEmpty(context.getClassificationTypes()) && context.getClassificationTypesQryStr().length() <= MAX_QUERY_STR_LENGTH_TYPES) {
if (CollectionUtils.isNotEmpty(context.getClassificationTypeNames()) && context.getClassificationTypesQryStr().length() <= MAX_QUERY_STR_LENGTH_TYPES) {
queryString.append(AND_STR).append(context.getClassificationTypesQryStr());
}
......@@ -138,7 +137,7 @@ public class FreeTextSearchProcessor extends SearchProcessor {
continue;
}
if (context.getClassificationType() != null) {
if (CollectionUtils.isNotEmpty(context.getClassificationTypes())) {
List<String> entityClassifications = GraphHelper.getAllTraitNames(vertex);
if (!context.includeClassificationTypes(entityClassifications)) {
......
......@@ -24,6 +24,7 @@ import org.apache.atlas.repository.graphdb.AtlasIndexQuery;
import org.apache.atlas.repository.graphdb.AtlasVertex;
import org.apache.atlas.repository.store.graph.v2.AtlasGraphUtilsV2;
import org.apache.atlas.utils.AtlasPerfTracer;
import org.apache.commons.collections.CollectionUtils;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
......@@ -31,10 +32,7 @@ import java.util.ArrayList;
import java.util.Iterator;
import java.util.List;
import static org.apache.atlas.discovery.SearchContext.MATCH_ALL_CLASSIFIED;
import static org.apache.atlas.discovery.SearchContext.MATCH_ALL_ENTITY_TYPES;
import static org.apache.atlas.discovery.SearchContext.MATCH_ALL_NOT_CLASSIFIED;
import static org.apache.atlas.discovery.SearchContext.MATCH_ALL_WILDCARD_CLASSIFICATION;
public class FullTextSearchProcessor extends SearchProcessor {
......@@ -53,29 +51,28 @@ public class FullTextSearchProcessor extends SearchProcessor {
// if search includes entity-type criteria, adding a filter here can help avoid unnecessary
// processing (and rejection) by subsequent EntitySearchProcessor
if (context.getEntityType() != null && context.getEntityType() != MATCH_ALL_ENTITY_TYPES) {
String typeAndSubTypeNamesQryStr = context.getEntityType().getTypeAndAllSubTypesQryStr();
if (CollectionUtils.isNotEmpty(context.getEntityTypes())) {
String typeAndSubTypeNamesQryStr = context.getEntityTypesQryStr();
if (typeAndSubTypeNamesQryStr.length() <= MAX_QUERY_STR_LENGTH_TYPES) {
queryString.append(AND_STR).append(typeAndSubTypeNamesQryStr);
} else {
LOG.warn("'{}' has too many subtypes (query-string-length={}) to include in index-query; might cause poor performance",
context.getEntityType().getTypeName(), typeAndSubTypeNamesQryStr.length());
searchParameters.getTypeName(), typeAndSubTypeNamesQryStr.length());
}
}
// if search includes classification criteria, adding a filter here can help avoid unnecessary
// processing (and rejection) by subsequent ClassificationSearchProcessor or EntitySearchProcessor
if (context.getClassificationType() != null && context.getClassificationType() != MATCH_ALL_WILDCARD_CLASSIFICATION &&
context.getClassificationType() != MATCH_ALL_CLASSIFIED &&
context.getClassificationType() != MATCH_ALL_NOT_CLASSIFIED) {
String typeAndSubTypeNamesStr = context.getClassificationType().getTypeAndAllSubTypesQryStr();
if (CollectionUtils.isNotEmpty(context.getClassificationTypes()) &&
context.getClassificationTypes().iterator().next() != MATCH_ALL_NOT_CLASSIFIED) {
String typeAndSubTypeNamesStr = context.getClassificationTypesQryStr();
if (typeAndSubTypeNamesStr.length() <= MAX_QUERY_STR_LENGTH_TAGS) {
queryString.append(AND_STR).append(typeAndSubTypeNamesStr);
} else {
LOG.warn("'{}' has too many subtypes (query-string-length={}) to include in index-query; might cause poor performance",
context.getClassificationType().getTypeName(), typeAndSubTypeNamesStr.length());
searchParameters.getClassification(), typeAndSubTypeNamesStr.length());
}
}
......
......@@ -26,6 +26,8 @@ import static org.apache.atlas.repository.Constants.PROPAGATED_CLASSIFICATION_NA
import static org.apache.atlas.repository.Constants.STATE_PROPERTY_KEY;
import org.apache.atlas.repository.Constants;
import org.apache.atlas.type.AtlasStructType;
import org.apache.commons.collections.CollectionUtils;
import org.apache.commons.lang3.StringUtils;
public class GraphIndexQueryBuilder {
......@@ -36,20 +38,20 @@ public class GraphIndexQueryBuilder {
}
void addClassificationTypeFilter(StringBuilder indexQuery) {
if (indexQuery != null && StringUtils.isNotEmpty(context.getSearchParameters().getClassification())) {
String classificationName = context.getSearchParameters().getClassification();
if (indexQuery != null && CollectionUtils.isNotEmpty(context.getClassificationNames())) {
String classificationNames = AtlasStructType.AtlasAttribute.escapeIndexQueryValue(context.getClassificationNames());
if (indexQuery.length() != 0) {
indexQuery.append(" AND ");
}
indexQuery.append("(").append(INDEX_SEARCH_PREFIX).append('\"').append(CLASSIFICATION_NAMES_KEY).append('\"').append(':').append(classificationName)
indexQuery.append("(").append(INDEX_SEARCH_PREFIX).append('\"').append(CLASSIFICATION_NAMES_KEY).append('\"').append(':').append(classificationNames)
.append(" OR ").append(INDEX_SEARCH_PREFIX).append('\"').append(PROPAGATED_CLASSIFICATION_NAMES_KEY)
.append('\"').append(':').append(classificationName).append(")");
.append('\"').append(':').append(classificationNames).append(")");
}
}
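For reference, addClassificationTypeFilter now ORs the escaped classification names against both the direct and the propagated classification-name fields; the appended fragment has roughly this shape, written in terms of the constants used above:

// (<INDEX_SEARCH_PREFIX>"<CLASSIFICATION_NAMES_KEY>":<names> OR <INDEX_SEARCH_PREFIX>"<PROPAGATED_CLASSIFICATION_NAMES_KEY>":<names>)
// where <names> is AtlasAttribute.escapeIndexQueryValue(context.getClassificationNames())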
void addClassificationAndSubTypesQueryFilter(StringBuilder indexQuery) {
if (indexQuery != null && StringUtils.isNotEmpty(context.getSearchParameters().getClassification())) {
if (indexQuery != null && CollectionUtils.isNotEmpty(context.getClassificationTypes())) {
String classificationTypesQryStr = context.getClassificationTypesQryStr();
if (indexQuery.length() != 0) {
......@@ -63,17 +65,8 @@ public class GraphIndexQueryBuilder {
}
void addClassificationFilterForBuiltInTypes(StringBuilder indexQuery) {
if (indexQuery != null && context.getClassificationType() != null) {
if (context.getClassificationType() == MATCH_ALL_WILDCARD_CLASSIFICATION || context.getClassificationType() == MATCH_ALL_CLASSIFIED) {
if (indexQuery.length() != 0) {
indexQuery.append(" AND ");
}
indexQuery.append("(").append(INDEX_SEARCH_PREFIX).append("\"")
.append(CLASSIFICATION_NAMES_KEY).append("\"").append(":" + "[* TO *]")
.append(" OR ").append(INDEX_SEARCH_PREFIX).append("\"")
.append(PROPAGATED_CLASSIFICATION_NAMES_KEY).append("\"").append(":" + "[* TO *]").append(")");
} else if (context.getClassificationType() == MATCH_ALL_NOT_CLASSIFIED) {
if (indexQuery != null && CollectionUtils.isNotEmpty(context.getClassificationTypes())) {
if (context.getClassificationTypes().iterator().next() == MATCH_ALL_NOT_CLASSIFIED) {
if (indexQuery.length() != 0) {
indexQuery.append(" AND ");
}
......
......@@ -56,12 +56,7 @@ public class SearchAggregatorImpl implements SearchAggregator {
try {
AtlasGraphIndexClient graphIndexClient = graph.getGraphIndexClient();
String searchedOnTypeName = searchParameters.getTypeName();
AtlasEntityType searchForEntityType = null;
if (searchedOnTypeName != null) {
searchForEntityType = typeRegistry.getEntityTypeByName(searchedOnTypeName);
}
Set<AtlasEntityType> searchForEntityType = searchContext.getEntityTypes();
Map<String, String> indexFieldNameCache = new HashMap<>();
......
......@@ -130,7 +130,7 @@ public class EntitySearchProcessorTest extends BasicTestSetup {
new EntitySearchProcessor(context);
}
@Test
@Test(priority = -1)
public void searchWithNEQ_stringAttr() throws AtlasBaseException {
String expectedEntityName = "hive_Table_Null_tableType";
createDummyEntity(expectedEntityName,HIVE_TABLE_TYPE);
......@@ -197,4 +197,116 @@ public class EntitySearchProcessorTest extends BasicTestSetup {
assertTrue(nameList.contains("hive_Table_Null_tableType"));
}
@Test
public void ALLEntityType() throws AtlasBaseException {
SearchParameters params = new SearchParameters();
params.setTypeName(SearchParameters.ALL_ENTITY_TYPES);
params.setLimit(20);
SearchContext context = new SearchContext(params, typeRegistry, graph, Collections.<String>emptySet());
EntitySearchProcessor processor = new EntitySearchProcessor(context);
assertEquals(processor.execute().size(), 20);
}
@Test
public void ALLEntityTypeWithTag() throws AtlasBaseException {
SearchParameters params = new SearchParameters();
params.setTypeName(SearchParameters.ALL_ENTITY_TYPES);
params.setClassification(FACT_CLASSIFICATION);
params.setLimit(20);
SearchContext context = new SearchContext(params, typeRegistry, graph, Collections.<String>emptySet());
EntitySearchProcessor processor = new EntitySearchProcessor(context);
assertEquals(processor.execute().size(), 5);
}
@Test
public void entityType() throws AtlasBaseException {
SearchParameters params = new SearchParameters();
params.setTypeName(DATABASE_TYPE);
params.setLimit(20);
SearchContext context = new SearchContext(params, typeRegistry, graph, Collections.<String>emptySet());
EntitySearchProcessor processor = new EntitySearchProcessor(context);
assertEquals(processor.execute().size(), 3);
}
@Test
public void entityTypes() throws AtlasBaseException {
SearchParameters params = new SearchParameters();
params.setTypeName(DATABASE_TYPE+","+HIVE_TABLE_TYPE);
params.setLimit(20);
SearchContext context = new SearchContext(params, typeRegistry, graph, Collections.<String>emptySet());
EntitySearchProcessor processor = new EntitySearchProcessor(context);
assertEquals(processor.execute().size(), 14);
}
@Test(expectedExceptions = AtlasBaseException.class, expectedExceptionsMessageRegExp = "Not_Exists: Unknown/invalid typename")
public void entityTypesNotAllowed() throws AtlasBaseException {
SearchParameters params = new SearchParameters();
params.setTypeName(DATABASE_TYPE+",Not_Exists");
params.setLimit(20);
SearchContext context = new SearchContext(params, typeRegistry, graph, Collections.<String>emptySet());
}
@Test(expectedExceptions = AtlasBaseException.class, expectedExceptionsMessageRegExp = "Attribute tableType not found for type "+DATABASE_TYPE)
public void entityFiltersNotAllowed() throws AtlasBaseException {
SearchParameters params = new SearchParameters();
params.setTypeName(DATABASE_TYPE+","+HIVE_TABLE_TYPE);
SearchParameters.FilterCriteria filterCriteria = getSingleFilterCondition("tableType", SearchParameters.Operator.CONTAINS, "ETL");
params.setEntityFilters(filterCriteria);
params.setLimit(20);
SearchContext context = new SearchContext(params, typeRegistry, graph, Collections.<String>emptySet());
}
@Test
public void entityTypesAndTag() throws AtlasBaseException {
SearchParameters params = new SearchParameters();
params.setTypeName(DATABASE_TYPE+","+HIVE_TABLE_TYPE);
params.setClassification(FACT_CLASSIFICATION);
params.setLimit(20);
SearchContext context = new SearchContext(params, typeRegistry, graph, Collections.<String>emptySet());
EntitySearchProcessor processor = new EntitySearchProcessor(context);
assertEquals(processor.execute().size(), 3);
}
@Test
public void searchWithEntityTypesAndEntityFilters() throws AtlasBaseException {
SearchParameters params = new SearchParameters();
params.setTypeName(DATABASE_TYPE+","+HIVE_TABLE_TYPE);
SearchParameters.FilterCriteria filterCriteria = getSingleFilterCondition("owner", SearchParameters.Operator.CONTAINS, "ETL");
params.setEntityFilters(filterCriteria);
params.setLimit(20);
SearchContext context = new SearchContext(params, typeRegistry, graph, Collections.<String>emptySet());
EntitySearchProcessor processor = new EntitySearchProcessor(context);
assertEquals(processor.execute().size(), 4);
}
@Test
public void searchWithEntityTypesAndEntityFiltersAndTag() throws AtlasBaseException {
SearchParameters params = new SearchParameters();
params.setTypeName(DATABASE_TYPE+","+HIVE_TABLE_TYPE);
SearchParameters.FilterCriteria filterCriteria = getSingleFilterCondition("owner", SearchParameters.Operator.CONTAINS, "ETL");
params.setEntityFilters(filterCriteria);
params.setClassification(LOGDATA_CLASSIFICATION);
params.setLimit(20);
SearchContext context = new SearchContext(params, typeRegistry, graph, Collections.<String>emptySet());
EntitySearchProcessor processor = new EntitySearchProcessor(context);
assertEquals(processor.execute().size(), 2);
}
}