Commit 96da2306 by Sarath Subramanian Committed by Madhan Neethiraj

ATLAS-2216: Remove Catalog/Taxonomy feature from Atlas

parent a9928f90
{
"enumDefs": [],
"structDefs": [],
"classificationDefs": [
{
"name": "TaxonomyTerm",
"superTypes": [],
"typeVersion": "1.0",
"attributeDefs": [
{
"name": "atlas.taxonomy",
"typeName": "string",
"cardinality": "SINGLE",
"isIndexable": false,
"isOptional": true,
"isUnique": false
}
]
}
],
"classificationDefs": [],
"entityDefs": [
{
"name": "Referenceable",
......
......@@ -19,5 +19,5 @@
package org.apache.atlas.authorize;
public enum AtlasResourceTypes {
UNKNOWN, ENTITY, TYPE, OPERATION, TAXONOMY, TERM, RELATIONSHIP
UNKNOWN, ENTITY, TYPE, OPERATION, RELATIONSHIP
}
......@@ -111,9 +111,6 @@ public class AtlasAuthorizationUtils {
* entities,lineage and discovery apis are mapped with AtlasResourceTypes.ENTITY eg :- /api/atlas/lineage/hive/table/*
* /api/atlas/entities/{guid}* /api/atlas/discovery/*
*
* taxonomy API are also mapped to AtlasResourceTypes.TAXONOMY & AtlasResourceTypes.ENTITY and its terms APIs have
* added AtlasResourceTypes.TERM associations.
*
* unprotected types are mapped with AtlasResourceTypes.UNKNOWN, access to these are allowed.
*/
public static Set<AtlasResourceTypes> getAtlasResourceType(String contextPath) {
......@@ -132,13 +129,6 @@ public class AtlasAuthorizationUtils {
} else if (api.startsWith("entities") || api.startsWith("lineage") ||
api.startsWith("discovery") || api.startsWith("entity") || api.startsWith("search")) {
resourceTypes.add(AtlasResourceTypes.ENTITY);
} else if (api.startsWith("taxonomies")) {
resourceTypes.add(AtlasResourceTypes.TAXONOMY);
// taxonomies are modeled as entities
resourceTypes.add(AtlasResourceTypes.ENTITY);
if (contextPath.contains("/terms")) {
resourceTypes.add(AtlasResourceTypes.TERM);
}
} else if (api.startsWith("relationship")) {
resourceTypes.add(AtlasResourceTypes.RELATIONSHIP);
} else {
......
......@@ -226,10 +226,6 @@ public class PolicyParser {
resourceType = AtlasResourceTypes.OPERATION;
} else if (type.equalsIgnoreCase("TYPE")) {
resourceType = AtlasResourceTypes.TYPE;
} else if (type.equalsIgnoreCase("TAXONOMY")) {
resourceType = AtlasResourceTypes.TAXONOMY;
} else if (type.equalsIgnoreCase("TERM")) {
resourceType = AtlasResourceTypes.TERM;
} else if (type.equalsIgnoreCase("RELATIONSHIP")) {
resourceType = AtlasResourceTypes.RELATIONSHIP;
} else {
......
......@@ -92,19 +92,6 @@ public class AtlasAuthorizationUtilsTest {
assertEquals(resourceTypes.size(), 1);
assertTrue(resourceTypes.contains(AtlasResourceTypes.ENTITY));
contextPath = "/api/atlas/v1/taxonomies";
resourceTypes = AtlasAuthorizationUtils.getAtlasResourceType(contextPath);
assertEquals(resourceTypes.size(), 2);
assertTrue(resourceTypes.contains(AtlasResourceTypes.TAXONOMY));
assertTrue(resourceTypes.contains(AtlasResourceTypes.ENTITY));
contextPath = "/api/atlas/v1/taxonomies/taxonomy1/terms";
resourceTypes = AtlasAuthorizationUtils.getAtlasResourceType(contextPath);
assertEquals(resourceTypes.size(), 3);
assertTrue(resourceTypes.contains(AtlasResourceTypes.TAXONOMY));
assertTrue(resourceTypes.contains(AtlasResourceTypes.ENTITY));
assertTrue(resourceTypes.contains(AtlasResourceTypes.TERM));
contextPath = "/api/atlas/v1/entities/111";
resourceTypes = AtlasAuthorizationUtils.getAtlasResourceType(contextPath);
assertEquals(resourceTypes.size(), 1);
......
<?xml version="1.0" encoding="UTF-8"?>
<!--
~ Licensed to the Apache Software Foundation (ASF) under one
~ or more contributor license agreements. See the NOTICE file
~ distributed with this work for additional information
~ regarding copyright ownership. The ASF licenses this file
~ to you under the Apache License, Version 2.0 (the
~ "License"); you may not use this file except in compliance
~ with the License. You may obtain a copy of the License at
~
~ http://www.apache.org/licenses/LICENSE-2.0
~
~ Unless required by applicable law or agreed to in writing, software
~ distributed under the License is distributed on an "AS IS" BASIS,
~ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
~ See the License for the specific language governing permissions and
~ limitations under the License.
-->
<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/maven-v4_0_0.xsd">
<modelVersion>4.0.0</modelVersion>
<parent>
<groupId>org.apache.atlas</groupId>
<artifactId>apache-atlas</artifactId>
<version>1.0.0-SNAPSHOT</version>
</parent>
<artifactId>atlas-catalog</artifactId>
<description>Apache Atlas Business Catalog Module</description>
<name>Apache Atlas Business Catalog</name>
<packaging>jar</packaging>
<properties>
<tinkerpop.version>2.6.0</tinkerpop.version>
<titan.version>0.5.4</titan.version>
<lucene.version>4.8.1</lucene.version>
</properties>
<profiles>
<profile>
<id>titan1</id>
<activation>
<activeByDefault>false</activeByDefault>
</activation>
<properties>
<lucene.version>4.10.4</lucene.version>
</properties>
</profile>
</profiles>
<dependencies>
<dependency>
<groupId>org.apache.atlas</groupId>
<artifactId>atlas-repository</artifactId>
</dependency>
<dependency>
<groupId>org.apache.atlas</groupId>
<artifactId>atlas-typesystem</artifactId>
</dependency>
<dependency>
<groupId>org.apache.atlas</groupId>
<artifactId>atlas-server-api</artifactId>
</dependency>
<dependency>
<groupId>org.apache.atlas</groupId>
<artifactId>atlas-graphdb-api</artifactId>
</dependency>
<!-- for now, keep the titan 0.5.4 / TP 2 dependencies. This will need to be changed to use a
more generic query framework that supports both TP2/TP3. Maybe the DSL translation could be changed to use
that as well...
-->
<dependency>
<groupId>com.tinkerpop.blueprints</groupId>
<artifactId>blueprints-core</artifactId>
<version>${tinkerpop.version}</version>
</dependency>
<dependency>
<groupId>com.tinkerpop.gremlin</groupId>
<artifactId>gremlin-java</artifactId>
<version>${tinkerpop.version}</version>
</dependency>
<dependency>
<groupId>com.thinkaurelius.titan</groupId>
<artifactId>titan-core</artifactId>
<scope>provided</scope>
</dependency>
<dependency>
<groupId>org.apache.lucene</groupId>
<artifactId>lucene-core</artifactId>
<version>${lucene.version}</version>
</dependency>
<dependency>
<groupId>org.apache.lucene</groupId>
<artifactId>lucene-queryparser</artifactId>
<version>${lucene.version}</version>
</dependency>
<dependency>
<groupId>org.apache.lucene</groupId>
<artifactId>lucene-analyzers-common</artifactId>
<version>${lucene.version}</version>
</dependency>
<dependency>
<groupId>joda-time</groupId>
<artifactId>joda-time</artifactId>
</dependency>
<dependency>
<groupId>com.googlecode.json-simple</groupId>
<artifactId>json-simple</artifactId>
</dependency>
<!-- testing -->
<dependency>
<groupId>org.easymock</groupId>
<artifactId>easymock</artifactId>
<version>3.4</version>
<scope>test</scope>
</dependency>
<dependency>
<groupId>org.testng</groupId>
<artifactId>testng</artifactId>
</dependency>
<dependency>
<groupId>org.mockito</groupId>
<artifactId>mockito-all</artifactId>
</dependency>
<dependency>
<groupId>org.apache.atlas</groupId>
<artifactId>atlas-typesystem</artifactId>
<classifier>tests</classifier>
<scope>test</scope>
</dependency>
</dependencies>
<build>
<plugins>
<plugin>
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-jar-plugin</artifactId>
<version>2.4</version>
<configuration>
<excludes>
<exclude>**/log4j.xml</exclude>
</excludes>
</configuration>
</plugin>
</plugins>
</build>
</project>
/**
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 * <p/>
 * http://www.apache.org/licenses/LICENSE-2.0
 * <p/>
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.atlas.catalog;

import org.apache.atlas.catalog.definition.ResourceDefinition;
import org.apache.atlas.catalog.exception.ResourceAlreadyExistsException;
import org.apache.atlas.catalog.exception.ResourceNotFoundException;

import java.util.Map;

/**
 * Abstraction for Atlas Type System.
 */
public interface AtlasTypeSystem {

    /**
     * Create a class type in the Atlas Type System.
     *
     * @param resourceDefinition resource definition for type being created
     * @param name               type name
     * @param description        description of the type being created
     *
     * @throws ResourceAlreadyExistsException if a type with the given name already exists
     */
    void createClassType(ResourceDefinition resourceDefinition, String name, String description)
            throws ResourceAlreadyExistsException;

    /**
     * Create an entity in the Atlas Type System for the provided request and resource definition.
     * If the type associated with the entity doesn't already exist, it is created.
     *
     * @param definition the definition of the resource for which we are creating the entity
     * @param request    the user request
     *
     * @throws ResourceAlreadyExistsException if the entity already exists
     */
    String createEntity(ResourceDefinition definition, Request request)
            throws ResourceAlreadyExistsException;

    /**
     * Delete an entity from the Atlas type system.
     *
     * @param definition definition of the resource being deleted
     * @param request    user request
     *
     * @throws ResourceNotFoundException if the resource to delete doesn't exist
     */
    void deleteEntity(ResourceDefinition definition, Request request) throws ResourceNotFoundException;

    /**
     * Create a trait type in the Atlas Type System.
     *
     * @param resourceDefinition resource definition for trait type being created
     * @param name               type name
     * @param description        description of the type being created
     *
     * @throws ResourceAlreadyExistsException if the type already exists
     */
    void createTraitType(ResourceDefinition resourceDefinition, String name, String description)
            throws ResourceAlreadyExistsException;

    /**
     * Create a trait instance in the Atlas Type System and associate it with the entity identified
     * by the provided guid.
     *
     * @param guid       id of the entity which will be associated with the trait instance
     * @param typeName   type name of the trait
     * @param properties property map used to populate the trait instance
     *
     * @throws ResourceAlreadyExistsException if the trait instance is already associated with the entity
     */
    void createTraitInstance(String guid, String typeName, Map<String, Object> properties)
            throws ResourceAlreadyExistsException;

    /**
     * Delete a tag (trait) instance from an entity.
     *
     * @param guid      associated entity guid
     * @param traitName name of the trait to delete
     *
     * @throws ResourceNotFoundException if the specified trait doesn't exist for the specified entity
     */
    void deleteTag(String guid, String traitName) throws ResourceNotFoundException;
}
/**
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 * <p/>
 * http://www.apache.org/licenses/LICENSE-2.0
 * <p/>
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.atlas.catalog;

import java.util.Collection;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Map;
import java.util.Objects;

/**
 * Base user API request.
 * Holds the query properties, optional update properties, the raw query string and
 * any additional select properties requested for the result set.
 */
public abstract class BaseRequest implements Request {
    // defensive internal copies; populated in the constructor and never reassigned
    private final Map<String, Object> queryProperties = new HashMap<>();
    private final Map<String, Object> updateProperties = new HashMap<>();
    private final String queryString;
    private final Collection<String> additionalSelectProperties = new HashSet<>();

    /**
     * Constructor for requests without update properties.
     *
     * @param queryProperties properties used to identify the resource(s); may be null
     * @param queryString     raw query string; may be null
     */
    protected BaseRequest(Map<String, Object> queryProperties, String queryString) {
        this(queryProperties, queryString, null);
    }

    /**
     * Constructor.
     *
     * @param queryProperties  properties used to identify the resource(s); may be null
     * @param queryString      raw query string; may be null
     * @param updateProperties properties to apply in an update request; may be null
     */
    protected BaseRequest(Map<String, Object> queryProperties, String queryString, Map<String, Object> updateProperties) {
        if (queryProperties != null) {
            this.queryProperties.putAll(queryProperties);
        }
        if (updateProperties != null) {
            this.updateProperties.putAll(updateProperties);
        }
        this.queryString = queryString;
    }

    /**
     * @return the (mutable, internal) map of query properties
     */
    public Map<String, Object> getQueryProperties() {
        return queryProperties;
    }

    /**
     * @return the (mutable, internal) map of update properties
     */
    public Map<String, Object> getUpdateProperties() {
        return updateProperties;
    }

    /**
     * Get a single query property, cast to the caller's expected type.
     *
     * @param name property name
     * @param <T>  expected property type
     * @return the property value, or null if not present
     */
    @SuppressWarnings("unchecked")  // caller asserts the expected type; value is stored as Object
    public <T> T getProperty(String name) {
        return (T) queryProperties.get(name);
    }

    /**
     * @return the raw query string, or null if none was supplied
     */
    public String getQueryString() {
        return queryString;
    }

    @Override
    public void addAdditionalSelectProperties(Collection<String> resultProperties) {
        additionalSelectProperties.addAll(resultProperties);
    }

    @Override
    public Collection<String> getAdditionalSelectProperties() {
        return additionalSelectProperties;
    }

    @Override
    public boolean equals(Object o) {
        if (this == o) return true;
        if (o == null || getClass() != o.getClass()) return false;

        BaseRequest that = (BaseRequest) o;
        return Objects.equals(queryProperties, that.queryProperties) &&
               Objects.equals(updateProperties, that.updateProperties) &&
               Objects.equals(queryString, that.queryString) &&
               Objects.equals(additionalSelectProperties, that.additionalSelectProperties);
    }

    @Override
    public int hashCode() {
        return Objects.hash(queryProperties, updateProperties, queryString, additionalSelectProperties);
    }
}
/**
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 * <p/>
 * http://www.apache.org/licenses/LICENSE-2.0
 * <p/>
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.atlas.catalog;

import org.apache.atlas.catalog.definition.ResourceDefinition;
import org.apache.atlas.catalog.exception.InvalidPayloadException;
import org.apache.atlas.catalog.exception.ResourceNotFoundException;
import org.apache.atlas.catalog.query.QueryFactory;

/**
 * Base class for resource providers.
 * Supplies the common collaborators (type system, resource definition, query factory)
 * and default "unsupported" behavior for delete/update operations.
 */
public abstract class BaseResourceProvider implements ResourceProvider {
    protected final AtlasTypeSystem typeSystem;
    protected final ResourceDefinition resourceDefinition;
    protected QueryFactory queryFactory = new QueryFactory();

    /**
     * Constructor.
     *
     * @param atlasTypeSystem type system facade used by the provider
     * @param definition      definition of the resource this provider serves
     */
    protected BaseResourceProvider(AtlasTypeSystem atlasTypeSystem, ResourceDefinition definition) {
        this.typeSystem = atlasTypeSystem;
        this.resourceDefinition = definition;
    }

    /**
     * Replace the query factory (primarily for testing).
     *
     * @param factory factory to use for building queries
     */
    protected void setQueryFactory(QueryFactory factory) {
        queryFactory = factory;
    }

    // subclasses that support deletion override this
    @Override
    public void deleteResourceById(Request request) throws ResourceNotFoundException, InvalidPayloadException {
        throw new InvalidPayloadException("Delete is not supported for this resource type");
    }

    // subclasses that support updates override this
    @Override
    public void updateResourceById(Request request) throws ResourceNotFoundException, InvalidPayloadException {
        throw new InvalidPayloadException("Update is not supported for this resource type");
    }
}
/**
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 * <p/>
 * http://www.apache.org/licenses/LICENSE-2.0
 * <p/>
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.atlas.catalog;

import java.util.Map;

/**
 * A request for a collection resource.
 * Identical to {@link BaseRequest} except that its cardinality is COLLECTION.
 */
public class CollectionRequest extends BaseRequest {

    /**
     * Constructor for read-only collection requests.
     *
     * @param queryProperties properties identifying the collection; may be null
     * @param queryString     raw query string; may be null
     */
    public CollectionRequest(Map<String, Object> queryProperties, String queryString) {
        this(queryProperties, queryString, null);
    }

    /**
     * Constructor for collection requests that carry update properties.
     *
     * @param queryProperties  properties identifying the collection; may be null
     * @param queryString      raw query string; may be null
     * @param updateProperties properties to apply in an update; may be null
     */
    public CollectionRequest(Map<String, Object> queryProperties, String queryString, Map<String, Object> updateProperties) {
        super(queryProperties, queryString, updateProperties);
    }

    @Override
    public Cardinality getCardinality() {
        return Cardinality.COLLECTION;
    }
}
/**
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 * <p/>
 * http://www.apache.org/licenses/LICENSE-2.0
 * <p/>
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.atlas.catalog;

import java.text.SimpleDateFormat;
import java.util.Calendar;
import java.util.GregorianCalendar;

/**
 * Format a date field which is represented as a long (epoch milliseconds).
 */
public class DefaultDateFormatter implements PropertyValueFormatter<Long, String> {
    //todo: obtain format from atlas proper
    // NOTE: SimpleDateFormat is not thread-safe; all access to this shared instance
    // must be synchronized (the field is kept public for backward compatibility)
    public static final SimpleDateFormat DATE_FORMAT = new SimpleDateFormat("yyyy-MM-dd:HH:mm:ss");

    /**
     * Format an epoch-millisecond timestamp as "yyyy-MM-dd:HH:mm:ss" in the default time zone.
     *
     * @param l timestamp in milliseconds since the epoch
     * @return formatted date string
     */
    @Override
    public String format(Long l) {
        Calendar calendar = new GregorianCalendar();
        calendar.setTimeInMillis(l);
        // guard the shared formatter: concurrent format() calls on an unsynchronized
        // SimpleDateFormat can corrupt its internal state and produce garbage output
        synchronized (DATE_FORMAT) {
            return DATE_FORMAT.format(calendar.getTime());
        }
    }
}
/**
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 * <p/>
 * http://www.apache.org/licenses/LICENSE-2.0
 * <p/>
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.atlas.catalog;

import org.apache.atlas.AtlasException;
import org.apache.atlas.catalog.exception.CatalogRuntimeException;
import org.apache.atlas.repository.Constants;
import org.apache.atlas.typesystem.types.FieldMapping;
import org.apache.atlas.typesystem.types.HierarchicalType;
import org.apache.atlas.typesystem.types.TypeSystem;

import java.util.Collections;
import java.util.HashMap;
import java.util.Map;

/**
 * Default property mapper which translates property names to/from name exposed in API to internal fully qualified name.
 */
public class DefaultPropertyMapper implements PropertyMapper {
    //todo: abstract HierarchicalType
    // cache of type name -> resolved type instance; populated lazily in getDataType()
    private Map<String, HierarchicalType> typeInstances = new HashMap<>();
    // explicit overrides, consulted before falling back to the type's field mapping
    private final Map<String, String> m_qualifiedToCleanMap = new HashMap<>();
    private final Map<String, String> m_cleanToQualifiedMap = new HashMap<>();

    /**
     * Constructor using only the built-in default mappings for internal "__*" properties.
     */
    public DefaultPropertyMapper() {
        this(Collections.<String, String>emptyMap(), Collections.<String, String>emptyMap());
    }

    /**
     * Constructor.
     * Default mappings are installed first, so the supplied maps can override them.
     *
     * @param qualifiedToCleanMap extra fully-qualified-name -> clean-name mappings
     * @param cleanToQualifiedMap extra clean-name -> fully-qualified-name mappings
     */
    public DefaultPropertyMapper(Map<String, String> qualifiedToCleanMap,
                                 Map<String, String> cleanToQualifiedMap) {
        setDefaultMappings();

        m_qualifiedToCleanMap.putAll(qualifiedToCleanMap);
        m_cleanToQualifiedMap.putAll(cleanToQualifiedMap);
    }

    /**
     * Translate a (possibly fully qualified) property name to its "clean" API name.
     * Resolution order: explicit override map, then — if the name isn't a direct field
     * of the type and looks dotted — the substring after the last '.' provided it is a
     * known field of the type. Falls back to returning the input unchanged.
     */
    @Override
    public String toCleanName(String propName, String type) {
        HierarchicalType dataType = getDataType(type);
        String replacement = m_qualifiedToCleanMap.get(propName);
        if (replacement == null && dataType != null) {
            FieldMapping fieldMap = dataType.fieldMapping();
            if (! fieldMap.fields.containsKey(propName) && propName.contains(".")) {
                String cleanName = propName.substring(propName.lastIndexOf('.') + 1);
                if (fieldMap.fields.containsKey(cleanName)) {
                    replacement = cleanName;
                }
            }
        }
        if (replacement == null) {
            replacement = propName;
        }
        return replacement;
    }

    /**
     * Translate a clean API property name to its fully qualified internal name.
     * Resolution order: explicit override map, then the type system's qualified name
     * for a known field of the type. Falls back to returning the input unchanged.
     *
     * @throws CatalogRuntimeException if the type system fails to resolve the qualified name
     */
    @Override
    public String toFullyQualifiedName(String propName, String type) {
        HierarchicalType dataType = getDataType(type);
        String replacement = m_cleanToQualifiedMap.get(propName);
        if (replacement == null && dataType != null) {
            FieldMapping fieldMap = dataType.fieldMapping();
            if (fieldMap.fields.containsKey(propName)) {
                try {
                    replacement = dataType.getQualifiedName(propName);
                } catch (AtlasException e) {
                    throw new CatalogRuntimeException(String.format(
                            "Unable to resolve fully qualified property name for type '%s': %s", type, e), e);
                }
            }
        }
        if (replacement == null) {
            replacement = propName;
        }
        return replacement;
    }

    //todo: abstract this via AtlasTypeSystem
    // synchronized: guards lazy population of the typeInstances cache
    protected synchronized HierarchicalType getDataType(String type) {
        HierarchicalType dataType = typeInstances.get(type);
        //todo: are there still cases where type can be null?
        if (dataType == null) {
            dataType = createDataType(type);
            typeInstances.put(type, dataType);
        }

        return dataType;
    }

    // resolves the type instance from the global type system; overridable for testing
    protected HierarchicalType createDataType(String type) {
        try {
            return TypeSystem.getInstance().getDataType(HierarchicalType.class, type);
        } catch (AtlasException e) {
            throw new CatalogRuntimeException("Unable to get type instance from type system for type: " + type, e);
        }
    }

    // installs the bidirectional mappings for Atlas-internal properties
    private void setDefaultMappings() {
        //todo: these are all internal "__*" properties
        //todo: should be able to ask type system for the "clean" name for these
        m_qualifiedToCleanMap.put(Constants.GUID_PROPERTY_KEY, "id");
        m_cleanToQualifiedMap.put("id", Constants.GUID_PROPERTY_KEY);

        m_qualifiedToCleanMap.put(Constants.TIMESTAMP_PROPERTY_KEY, "creation_time");
        m_cleanToQualifiedMap.put("creation_time", Constants.TIMESTAMP_PROPERTY_KEY);

        m_qualifiedToCleanMap.put(Constants.MODIFICATION_TIMESTAMP_PROPERTY_KEY, "modified_time");
        m_cleanToQualifiedMap.put("modified_time", Constants.MODIFICATION_TIMESTAMP_PROPERTY_KEY);

        m_qualifiedToCleanMap.put(Constants.ENTITY_TYPE_PROPERTY_KEY, "type");
        m_cleanToQualifiedMap.put("type", Constants.ENTITY_TYPE_PROPERTY_KEY);

        m_qualifiedToCleanMap.put(Constants.VERSION_PROPERTY_KEY, "version");
        m_cleanToQualifiedMap.put("version", Constants.VERSION_PROPERTY_KEY);

        m_qualifiedToCleanMap.put(Constants.TRAIT_NAMES_PROPERTY_KEY, "trait_names");
        m_cleanToQualifiedMap.put("trait_names", Constants.TRAIT_NAMES_PROPERTY_KEY);

        m_qualifiedToCleanMap.put(Constants.SUPER_TYPES_PROPERTY_KEY, "super_types");
        m_cleanToQualifiedMap.put("super_types", Constants.SUPER_TYPES_PROPERTY_KEY);

        m_qualifiedToCleanMap.put(Constants.STATE_PROPERTY_KEY, "state");
        m_cleanToQualifiedMap.put("state", Constants.STATE_PROPERTY_KEY);

        m_qualifiedToCleanMap.put(Constants.CREATED_BY_KEY, "created_by");
        m_cleanToQualifiedMap.put("created_by", Constants.CREATED_BY_KEY);

        m_qualifiedToCleanMap.put(Constants.MODIFIED_BY_KEY, "modified_by");
        m_cleanToQualifiedMap.put("modified_by", Constants.MODIFIED_BY_KEY);
    }
}
/**
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 * <p/>
 * http://www.apache.org/licenses/LICENSE-2.0
 * <p/>
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.atlas.catalog;

import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableSet;
import org.apache.atlas.AtlasErrorCode;
import org.apache.atlas.AtlasException;
import org.apache.atlas.catalog.definition.ResourceDefinition;
import org.apache.atlas.catalog.exception.CatalogRuntimeException;
import org.apache.atlas.catalog.exception.ResourceAlreadyExistsException;
import org.apache.atlas.catalog.exception.ResourceNotFoundException;
import org.apache.atlas.exception.AtlasBaseException;
import org.apache.atlas.model.typedef.AtlasClassificationDef;
import org.apache.atlas.model.typedef.AtlasEntityDef;
import org.apache.atlas.model.typedef.AtlasEnumDef;
import org.apache.atlas.model.typedef.AtlasStructDef;
import org.apache.atlas.model.typedef.AtlasTypesDef;
import org.apache.atlas.repository.converters.TypeConverterUtil;
import org.apache.atlas.services.MetadataService;
import org.apache.atlas.store.AtlasTypeDefStore;
import org.apache.atlas.typesystem.ITypedReferenceableInstance;
import org.apache.atlas.typesystem.Referenceable;
import org.apache.atlas.typesystem.Struct;
import org.apache.atlas.typesystem.exception.EntityExistsException;
import org.apache.atlas.typesystem.exception.EntityNotFoundException;
import org.apache.atlas.typesystem.exception.TraitNotFoundException;
import org.apache.atlas.typesystem.types.AttributeDefinition;
import org.apache.atlas.typesystem.types.ClassType;
import org.apache.atlas.typesystem.types.HierarchicalType;
import org.apache.atlas.typesystem.types.TraitType;

import java.util.ArrayList;
import java.util.Collection;
import java.util.Collections;
import java.util.List;
import java.util.Map;

/**
 * Default implementation.
 * Delegates entity operations to {@link MetadataService} and type creation to
 * {@link AtlasTypeDefStore}, translating their exceptions into catalog exceptions.
 */
public class DefaultTypeSystem implements AtlasTypeSystem {
    private final MetadataService metadataService;
    private final AtlasTypeDefStore typeDefStore;

    /**
     * Constructor.
     *
     * @param metadataService atlas metadata service
     * @param typeDefStore    atlas type definition store used for creating types
     */
    public DefaultTypeSystem(MetadataService metadataService, AtlasTypeDefStore typeDefStore) throws AtlasBaseException {
        this.metadataService = metadataService;
        this.typeDefStore = typeDefStore;
    }

    /**
     * Create an entity for the given resource definition, first ensuring its class type exists.
     *
     * @return guid of the created entity, or null if the service reports no created entities
     * @throws ResourceAlreadyExistsException if an entity with the same unique attributes already exists
     */
    @Override
    public String createEntity(ResourceDefinition definition, Request request) throws ResourceAlreadyExistsException {
        String typeName = definition.getTypeName();
        try {
            createClassType(definition, typeName, typeName + " Definition");
        } catch (ResourceAlreadyExistsException e) {
            // ok if type already exists
        }

        try {
            Referenceable entity = new Referenceable(typeName, request.getQueryProperties());
            //add Taxonomy Namespace
            entity.set(TaxonomyResourceProvider.NAMESPACE_ATTRIBUTE_NAME, TaxonomyResourceProvider.TAXONOMY_NS);

            ITypedReferenceableInstance typedInstance = metadataService.getTypedReferenceableInstance(entity);
            ITypedReferenceableInstance[] entitiesToCreate = Collections.singletonList(typedInstance).toArray(new ITypedReferenceableInstance[1]);
            final List<String> entities = metadataService.createEntities(entitiesToCreate).getCreatedEntities();
            // return the guid of the first created entity, if any
            return entities != null && entities.size() > 0 ? entities.get(0) : null;
        } catch (EntityExistsException e) {
            throw new ResourceAlreadyExistsException(
                    "Attempted to create an entity which already exists: " + request.getQueryProperties());
        } catch (AtlasException e) {
            throw new CatalogRuntimeException("An expected exception occurred creating an entity: " + e, e);
        }
    }

    /**
     * Delete an entity identified by its type and unique id attribute.
     *
     * @throws ResourceNotFoundException if no entity matches the given type/id
     */
    @Override
    public void deleteEntity(ResourceDefinition definition, Request request) throws ResourceNotFoundException {
        String typeName = definition.getTypeName();
        String cleanIdPropName = definition.getIdPropertyName();
        String idValue = request.getProperty(cleanIdPropName);
        try {
            // transaction handled by atlas repository
            metadataService.deleteEntityByUniqueAttribute(typeName, cleanIdPropName, idValue);
        } catch (EntityNotFoundException e) {
            throw new ResourceNotFoundException(String.format("The specified entity doesn't exist: type=%s, %s=%s",
                    typeName, cleanIdPropName, idValue));
        } catch (AtlasException e) {
            throw new CatalogRuntimeException(String.format(
                    "An unexpected error occurred while attempting to delete entity: type=%s, %s=%s : %s",
                    typeName, cleanIdPropName, idValue, e), e);
        }
    }

    /**
     * Create a class (entity) type from the resource definition's property definitions.
     */
    @Override
    public void createClassType(ResourceDefinition resourceDefinition, String name, String description)
            throws ResourceAlreadyExistsException {
        createType(resourceDefinition.getPropertyDefinitions(), ClassType.class, name, description, false);
    }

    /**
     * Create a trait (classification) type from the resource definition's property definitions.
     */
    @Override
    public void createTraitType(ResourceDefinition resourceDefinition, String name, String description)
            throws ResourceAlreadyExistsException {
        createType(resourceDefinition.getPropertyDefinitions(), TraitType.class, name, description, true);
    }

    /**
     * Create a trait instance and associate it with the entity identified by guid.
     *
     * @throws ResourceAlreadyExistsException if the trait is already associated with the entity
     */
    @Override
    public void createTraitInstance(String guid, String typeName, Map<String, Object> properties)
            throws ResourceAlreadyExistsException {
        try {
            // not using the constructor with properties argument because it is marked 'InterfaceAudience.Private'
            Struct struct = new Struct(typeName);
            for (Map.Entry<String, Object> propEntry : properties.entrySet()) {
                struct.set(propEntry.getKey(), propEntry.getValue());
            }
            //add Taxonomy Namespace
            struct.set(TaxonomyResourceProvider.NAMESPACE_ATTRIBUTE_NAME, TaxonomyResourceProvider.TAXONOMY_NS);

            metadataService.addTrait(guid, metadataService.createTraitInstance(struct));
        } catch (IllegalArgumentException e) {
            //todo: unfortunately, IllegalArgumentException can be thrown for other reasons
            // fragile: relies on the message text of the underlying exception to detect duplicates
            if (e.getMessage().contains("is already defined for entity")) {
                throw new ResourceAlreadyExistsException(
                        String.format("Tag '%s' already associated with the entity", typeName));
            } else {
                throw e;
            }
        } catch (AtlasException e) {
            throw new CatalogRuntimeException(String.format(
                    "Unable to create trait instance '%s' in type system: %s", typeName, e), e);
        }
    }

    /**
     * Remove a trait from the entity identified by guid.
     *
     * @throws ResourceNotFoundException if the entity doesn't carry the named trait
     */
    @Override
    public void deleteTag(String guid, String traitName) throws ResourceNotFoundException {
        try {
            metadataService.deleteTrait(guid, traitName);
        } catch (TraitNotFoundException e) {
            throw new ResourceNotFoundException(String.format(
                    "The trait '%s' doesn't exist for entity '%s'", traitName, guid));
        } catch (AtlasException e) {
            throw new CatalogRuntimeException(String.format(
                    "Unable to delete tag '%s' from entity '%s'", traitName, guid), e);
        }
    }

    /**
     * Create either an entity type or a classification type (with the TaxonomyTerm supertype)
     * in the type def store.
     *
     * @param attributes  attribute definitions for the new type
     * @param type        legacy type class (ClassType/TraitType); retained for signature compatibility
     * @param name        type name
     * @param description type description
     * @param isTrait     true to create a classification type, false for an entity type
     *
     * @throws ResourceAlreadyExistsException if the type already exists
     */
    private <T extends HierarchicalType> void createType(Collection<AttributeDefinition> attributes,
                                                         Class<T> type,
                                                         String name,
                                                         String description,
                                                         boolean isTrait)
            throws ResourceAlreadyExistsException {
        try {
            List<AtlasStructDef.AtlasAttributeDef> attrDefs = new ArrayList<>();
            for (AttributeDefinition attrDefinition : attributes) {
                attrDefs.add(TypeConverterUtil.toAtlasAttributeDef(attrDefinition));
            }

            if ( isTrait) {
                // traits are created as classifications extending the TaxonomyTerm classification
                AtlasClassificationDef classificationDef = new AtlasClassificationDef(name, description, "1.0", attrDefs, ImmutableSet.of(TaxonomyResourceProvider.TAXONOMY_TERM_TYPE));
                AtlasTypesDef typesDef = new AtlasTypesDef(ImmutableList.<AtlasEnumDef>of(), ImmutableList.<AtlasStructDef>of(),
                        ImmutableList.of(classificationDef),
                        ImmutableList.<AtlasEntityDef>of());
                typeDefStore.createTypesDef(typesDef);
            } else {
                AtlasEntityDef entityDef = new AtlasEntityDef(name, description, "1.0", attrDefs);
                AtlasTypesDef typesDef = new AtlasTypesDef(ImmutableList.<AtlasEnumDef>of(), ImmutableList.<AtlasStructDef>of(),
                        ImmutableList.<AtlasClassificationDef>of(),
                        ImmutableList.of(entityDef));
                typeDefStore.createTypesDef(typesDef);
            }
        } catch (AtlasBaseException e) {
            if ( e.getAtlasErrorCode() == AtlasErrorCode.TYPE_ALREADY_EXISTS) {
                throw new ResourceAlreadyExistsException(String.format("Type '%s' already exists", name));
            } else {
                throw new CatalogRuntimeException(String.format(
                        "Unable to create type '%s' in type system: %s", name, e), e);
            }
        }
    }
}
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
* <p/>
* http://www.apache.org/licenses/LICENSE-2.0
* <p/>
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.atlas.catalog;
import org.apache.atlas.catalog.definition.EntityResourceDefinition;
import org.apache.atlas.catalog.exception.*;
import org.apache.atlas.catalog.query.AtlasQuery;
import java.util.*;
/**
* Provider for entity resources.
*/
public class EntityResourceProvider extends BaseResourceProvider implements ResourceProvider {
public EntityResourceProvider(AtlasTypeSystem typeSystem) {
super(typeSystem, new EntityResourceDefinition());
}
@Override
public Result getResourceById(Request request) throws ResourceNotFoundException {
AtlasQuery atlasQuery;
try {
atlasQuery = queryFactory.createEntityQuery(request);
} catch (InvalidQueryException e) {
throw new CatalogRuntimeException("Unable to compile internal Entity query: " + e, e);
}
Collection<Map<String, Object>> results = atlasQuery.execute();
if (results.isEmpty()) {
throw new ResourceNotFoundException(String.format("Entity '%s' not found.",
request.getProperty(resourceDefinition.getIdPropertyName())));
}
return new Result(results);
}
@Override
public Result getResources(Request request) throws InvalidQueryException, ResourceNotFoundException {
AtlasQuery atlasQuery = queryFactory.createEntityQuery(request);
return new Result(atlasQuery.execute());
}
@Override
public void createResource(Request request)
throws InvalidPayloadException, ResourceAlreadyExistsException, ResourceNotFoundException {
// creation of entities is currently unsupported
throw new UnsupportedOperationException("Creation of entities is not currently supported");
}
@Override
public Collection<String> createResources(Request request) throws InvalidQueryException, ResourceNotFoundException {
throw new UnsupportedOperationException("Creation of entities is not currently supported");
}
}
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
* <p/>
* http://www.apache.org/licenses/LICENSE-2.0
* <p/>
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.atlas.catalog;
import org.apache.atlas.catalog.definition.EntityTagResourceDefinition;
import org.apache.atlas.catalog.exception.*;
import org.apache.atlas.catalog.query.AtlasQuery;
import java.util.*;
/**
 * Provider for entity tag resources.
 * Tags are trait instances cloned from taxonomy terms and attached to entities.
 */
public class EntityTagResourceProvider extends BaseResourceProvider implements ResourceProvider {
    // lazily created; always access via getTermResourceProvider()
    private TermResourceProvider termResourceProvider;
    public EntityTagResourceProvider(AtlasTypeSystem typeSystem) {
        super(typeSystem, new EntityTagResourceDefinition());
    }
    /**
     * Get a single tag attached to an entity.
     *
     * @param request request containing the identifying id properties
     * @return result holding the matching tag's property maps
     * @throws ResourceNotFoundException if the entity doesn't carry the requested tag
     */
    @Override
    public Result getResourceById(Request request) throws ResourceNotFoundException {
        AtlasQuery atlasQuery;
        try {
            atlasQuery = queryFactory.createEntityTagQuery(request);
        } catch (InvalidQueryException e) {
            // internally built query failed to compile: programming error, not a user error
            throw new CatalogRuntimeException("Unable to compile internal Entity Tag query: " + e, e);
        }
        Collection<Map<String, Object>> results = atlasQuery.execute();
        if (results.isEmpty()) {
            throw new ResourceNotFoundException(String.format("Tag '%s' not found.",
                    request.getProperty(resourceDefinition.getIdPropertyName())));
        }
        return new Result(results);
    }
    /**
     * Get all tags on an entity which match the request query.
     */
    @Override
    public Result getResources(Request request) throws InvalidQueryException, ResourceNotFoundException {
        AtlasQuery atlasQuery = queryFactory.createEntityTagQuery(request);
        return new Result(atlasQuery.execute());
    }
    /**
     * Tag a single entity with a taxonomy term.
     * The term must exist and must be flagged as available for tagging.
     *
     * @throws InvalidPayloadException   if the payload is invalid or the term is not taggable
     * @throws ResourceNotFoundException if the named term doesn't exist
     */
    @Override
    public void createResource(Request request)
            throws InvalidPayloadException, ResourceAlreadyExistsException, ResourceNotFoundException {
        // 'id' identifies the target entity; remove it so payload validation sees only tag properties
        String entityId = String.valueOf(request.getQueryProperties().remove("id"));
        resourceDefinition.validateCreatePayload(request);
        Result termResult = getTermQueryResult(request.<String>getProperty("name"));
        Map<String, Object> termProperties = termResult.getPropertyMaps().iterator().next();
        //todo: use constant for property name
        if (String.valueOf(termProperties.get("available_as_tag")).equals("false")) {
            throw new InvalidPayloadException(
                    "Attempted to tag an entity with a term which is not available to be tagged");
        }
        tagEntities(Collections.singleton(entityId), termProperties);
    }
    //todo: response for case mixed case where some subset of creations fail
    /**
     * Tag every entity matching the request query with each term in the 'tags' payload.
     *
     * @return relative URLs of the created entity tags
     */
    @Override
    public Collection<String> createResources(Request request)
            throws InvalidQueryException, ResourceNotFoundException, ResourceAlreadyExistsException {
        Collection<String> relativeUrls = new ArrayList<>();
        AtlasQuery atlasQuery = queryFactory.createEntityQuery(request);
        Collection<String> guids = new ArrayList<>();
        for (Map<String, Object> entityMap: atlasQuery.execute()) {
            guids.add(String.valueOf(entityMap.get("id")));
        }
        Collection<Map<String, String>> tagMaps = request.getProperty("tags");
        for (Map<String, String> tagMap : tagMaps) {
            Result termResult = getTermQueryResult(tagMap.get("name"));
            relativeUrls.addAll(tagEntities(guids, termResult.getPropertyMaps().iterator().next()));
        }
        return relativeUrls;
    }
    /**
     * Remove the named tag from the entity identified by 'id'.
     */
    @Override
    public void deleteResourceById(Request request) throws ResourceNotFoundException, InvalidPayloadException {
        typeSystem.deleteTag(request.<String>getProperty("id"), request.<String>getProperty("name"));
    }
    // resolve a taxonomy term by name (wrapped in a TermPath) via the term provider;
    // throws ResourceNotFoundException if the term doesn't exist
    private Result getTermQueryResult(String termName) throws ResourceNotFoundException {
        Request tagRequest = new InstanceRequest(
                Collections.<String, Object>singletonMap("termPath", new TermPath(termName)));
        tagRequest.addAdditionalSelectProperties(Collections.singleton("type"));
        return getTermResourceProvider().getResourceById(tagRequest);
    }
    /**
     * Create a trait instance for the given term on each entity.
     *
     * @param entityGuids    guids of the entities to tag
     * @param termProperties properties of the source term (name/description are copied)
     * @return relative URLs of the created tags
     * @throws ResourceAlreadyExistsException if an entity is already tagged with the term
     */
    private Collection<String> tagEntities(Collection<String> entityGuids, Map<String, Object> termProperties)
            throws ResourceAlreadyExistsException {
        Collection<String> relativeUrls = new ArrayList<>();
        for (String guid : entityGuids) {
            //createTermEdge(entity, Collections.singleton(termVertex));
            // copy term properties from trait associated with taxonomy to be set
            // on trait associated with new entity (basically clone at time of tag event)
            //todo: any changes to 'singleton' trait won't be reflected in new trait
            //todo: iterate over properties in term definition instead of hard coding here
            Map<String, Object> properties = new HashMap<>();
            String termName = String.valueOf(termProperties.get("name"));
            properties.put("name", termName);
            properties.put("description", termProperties.get("description"));
            typeSystem.createTraitInstance(guid, termName, properties);
            //todo: *** shouldn't know anything about href structure in this class ***
            relativeUrls.add(String.format("v1/entities/%s/tags/%s", guid, termName));
        }
        return relativeUrls;
    }
    // lazy accessor; synchronized so the provider is created at most once
    protected synchronized ResourceProvider getTermResourceProvider() {
        if (termResourceProvider == null) {
            termResourceProvider = new TermResourceProvider(typeSystem);
        }
        return termResourceProvider;
    }
}
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
* <p/>
* http://www.apache.org/licenses/LICENSE-2.0
* <p/>
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.atlas.catalog;
import java.util.Map;
/**
 * A request for an instance resource.
 * Instance requests identify a single resource via query properties and therefore
 * never carry a user query string (the null passed to super below).
 */
public class InstanceRequest extends BaseRequest {
    /**
     * Constructor for a read/delete style instance request.
     *
     * @param queryProperties properties identifying the instance
     */
    public InstanceRequest(Map<String, Object> queryProperties) {
        super(queryProperties, null);
    }
    /**
     * Constructor for an instance request which also carries update properties.
     *
     * @param queryProperties  properties identifying the instance
     * @param updateProperties properties to apply to the resolved instance
     */
    public InstanceRequest(Map<String, Object> queryProperties, Map<String, Object> updateProperties) {
        // NOTE(review): second arg is presumably the query string in BaseRequest — confirm
        super(queryProperties, null, updateProperties);
    }
    @Override
    public Cardinality getCardinality() {
        return Cardinality.INSTANCE;
    }
}
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
* <p/>
* http://www.apache.org/licenses/LICENSE-2.0
* <p/>
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.atlas.catalog;
import com.google.gson.stream.JsonWriter;
import org.apache.atlas.catalog.exception.CatalogRuntimeException;
import javax.ws.rs.core.UriInfo;
import java.io.IOException;
import java.io.StringWriter;
import java.io.Writer;
import java.util.Collection;
import java.util.LinkedHashMap;
import java.util.Map;
/**
 * JSON serializer.
 * Renders a {@link Result} as indented JSON; scalar map entries are written before
 * nested (map/collection) entries, and 'href' values are prefixed with the base URL.
 */
public class JsonSerializer {
    /**
     * Serialize a result to a JSON string.
     *
     * @param result result to serialize
     * @param ui     request URI info; supplies the base URL used to absolutize 'href' values
     * @return JSON representation of the result
     */
    public String serialize(Result result, UriInfo ui) {
        StringWriter out = new StringWriter();
        JsonWriter jsonWriter = new JsonWriter(out);
        jsonWriter.setIndent("    ");
        try {
            writeValue(jsonWriter, result.getPropertyMaps(), ui.getBaseUri().toASCIIString());
        } catch (IOException e) {
            throw new CatalogRuntimeException("Unable to write JSON response.", e);
        }
        return out.toString();
    }

    // recursively write any supported value type; nulls, maps, collections,
    // numbers and booleans are written natively, everything else as a string
    private void writeValue(JsonWriter writer, Object value, String baseUrl) throws IOException {
        if (value == null) {
            writer.nullValue();
        } else if (value instanceof Map) {
            writeMap(writer, (Map<String, Object>) value, baseUrl);
        } else if (value instanceof Collection) {
            writer.beginArray();
            for (Object element : (Collection) value) {
                writeValue(writer, element, baseUrl);
            }
            writer.endArray();
        } else if (value instanceof Number) {
            writer.value((Number) value);
        } else if (value instanceof Boolean) {
            writer.value((Boolean) value);
        } else {
            // everything else is String
            writer.value(String.valueOf(value));
        }
    }

    // write a map as a JSON object: scalar entries first (preserving encounter order),
    // then nested map/collection entries, so simple fields lead the object
    private void writeMap(JsonWriter writer, Map<String, Object> map, String baseUrl) throws IOException {
        writer.beginObject();
        Map<String, Object> nestedEntries = new LinkedHashMap<>();
        for (Map.Entry<String, Object> entry : map.entrySet()) {
            String key = entry.getKey();
            Object val = entry.getValue();
            if (val instanceof Collection || val instanceof Map) {
                nestedEntries.put(key, val);
            } else {
                //todo: use a token in value instead of prop name
                if (key.equals("href")) {
                    val = baseUrl + String.valueOf(val);
                }
                writer.name(key);
                writeValue(writer, val, baseUrl);
            }
        }
        for (Map.Entry<String, Object> entry : nestedEntries.entrySet()) {
            writer.name(entry.getKey());
            writeValue(writer, entry.getValue(), baseUrl);
        }
        writer.endObject();
    }
}
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
* <p/>
* http://www.apache.org/licenses/LICENSE-2.0
* <p/>
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.atlas.catalog;
/**
 * Translates property names to/from name exposed in API to internal fully qualified name.
 * Implementations are expected to be inverse operations of one another for a given type
 * (round-tripping a name through both methods yields the original) — confirm per implementation.
 */
public interface PropertyMapper {
    /**
     * Translate a qualified name to a clean name.
     *
     * @param propName property name to translate (fully qualified form)
     * @param type     resource type the property belongs to
     *
     * @return clean property name as exposed in the API
     */
    String toCleanName(String propName, String type);
    /**
     * Translate a clean name to a fully qualified name.
     *
     * @param propName property name to translate (clean, API-facing form)
     * @param type     resource type the property belongs to
     *
     * @return fully qualified property name used internally
     */
    String toFullyQualifiedName(String propName, String type);
}
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
* <p/>
* http://www.apache.org/licenses/LICENSE-2.0
* <p/>
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.atlas.catalog;
/**
 * A rule for translating a property value.
 *
 * @param <T> type of the raw (pre-format) property value
 * @param <V> type of the formatted property value
 */
public interface PropertyValueFormatter <T,V> {
    /**
     * Format a property value.
     *
     * @param value property value to format
     *
     * @return formatted property value
     */
    V format(T value);
}
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
* <p/>
* http://www.apache.org/licenses/LICENSE-2.0
* <p/>
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.atlas.catalog;
import java.util.Collection;
import java.util.Map;
/**
 * Represents a user request.
 * A request carries identifying/query properties, optional update properties,
 * an optional user query string, and a cardinality (single instance vs collection).
 */
public interface Request {
    /**
     * Request cardinality enum.
     * INSTANCE targets exactly one resource; COLLECTION targets 0..n resources.
     */
    enum Cardinality {INSTANCE, COLLECTION}
    /**
     * Get query properties of request.
     * These are the properties which are used to build the query.
     *
     * @return query property map
     */
    Map<String, Object> getQueryProperties();
    /**
     * Get update properties of request.
     * These properties are updated on all resources which are returned from the query.
     *
     * @return update property map
     */
    Map<String, Object> getUpdateProperties();
    /**
     * Get the value of a specified property.
     *
     * @param name property name
     * @param <T>  value type the caller expects (unchecked cast by implementations)
     *
     * @return value for the requested property or null if property not in map
     */
    <T> T getProperty(String name);
    /**
     * Get the query string.
     *
     * @return the user specified query string or null
     */
    String getQueryString();
    /**
     * Get the cardinality of the request.
     *
     * @return the request cardinality
     */
    Cardinality getCardinality();
    /**
     * Add additional property names which should be returned in the result.
     *
     * @param resultProperties collection of property names
     */
    void addAdditionalSelectProperties(Collection<String> resultProperties);
    /**
     * Get any additional property names which should be included in the result.
     *
     * @return collection of added property names or an empty collection
     */
    Collection<String> getAdditionalSelectProperties();
}
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
* <p/>
* http://www.apache.org/licenses/LICENSE-2.0
* <p/>
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.atlas.catalog;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
import java.util.Comparator;
import java.util.List;
/**
 * Provides key ordering for resource property maps.
 * Ordering can be defined explicitly for specific properties,
 * otherwise natural ordering is used.
 */
public class ResourceComparator implements Comparator<String> {
    // Keys listed here sort before all others, in list order.
    // Declared final and immutable so the ordering cannot drift at runtime.
    //todo: each resource definition can provide its own ordering list
    private static final List<String> ORDERING = Collections.unmodifiableList(
            Arrays.asList("href", "name", "id", "description", "type"));

    /**
     * Compare two property keys.
     * Keys with an explicit position sort first (by list position); two keys
     * without an explicit position fall back to natural String ordering.
     *
     * @param s1 first key
     * @param s2 second key
     * @return negative, zero or positive per the {@link Comparator} contract
     */
    @Override
    public int compare(String s1, String s2) {
        if (s1.equals(s2)) {
            return 0;
        }
        int s1Order = ORDERING.indexOf(s1);
        int s2Order = ORDERING.indexOf(s2);
        // neither key is explicitly ordered: natural ordering
        if (s1Order == -1 && s2Order == -1) {
            return s1.compareTo(s2);
        }
        // both keys explicitly ordered: compare list positions
        if (s1Order != -1 && s2Order != -1) {
            return Integer.compare(s1Order, s2Order);
        }
        // exactly one key is explicitly ordered: it sorts first
        return s1Order == -1 ? 1 : -1;
    }
}
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
* <p/>
* http://www.apache.org/licenses/LICENSE-2.0
* <p/>
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.atlas.catalog;
import org.apache.atlas.catalog.exception.*;
import java.util.Collection;
/**
 * Provider for a resource type.
 * Each implementation handles CRUD operations for one catalog resource type;
 * operations a provider does not support throw UnsupportedOperationException.
 */
public interface ResourceProvider {
    /**
     * Get a resource by primary key.
     *
     * @param request request instance which contains the required id properties and no query string
     * @return result containing the requested resource; never null
     *
     * @throws ResourceNotFoundException if the requested resource isn't found
     */
    Result getResourceById(Request request) throws ResourceNotFoundException;
    /**
     * Get all resources which match the provider query.
     *
     * @param request request instance which will include a query string and possibly properties
     * @return result containing collection of matching resources. If no resources match
     *         a result is returned with no resources
     *
     * @throws InvalidQueryException if the user query contains invalid syntax
     * @throws ResourceNotFoundException if a parent resource of the requested resource doesn't exist
     */
    Result getResources(Request request) throws InvalidQueryException, ResourceNotFoundException;
    /**
     * Create a single resource.
     *
     * @param request request instance containing the contents of the resource to create
     *
     * @throws InvalidPayloadException if the payload or any other part of the user request is invalid
     * @throws ResourceAlreadyExistsException if the resource already exists
     * @throws ResourceNotFoundException if a parent of the resource to create doesn't exist
     */
    void createResource(Request request)
            throws InvalidPayloadException, ResourceAlreadyExistsException, ResourceNotFoundException;
    /**
     * Delete a single resource.
     *
     * @param request request instance containing the id of the resource to delete.
     *
     * @throws ResourceNotFoundException if the resource doesn't exist
     * @throws InvalidPayloadException if the request is invalid
     */
    void deleteResourceById(Request request) throws ResourceNotFoundException, InvalidPayloadException;
    //todo: define the behavior for partial success
    /**
     * Create multiple resources.
     *
     * @param request request instance containing the contents of 1..n resources
     * @return collection of relative urls for the created resources
     *
     * @throws InvalidPayloadException if the payload or any other part of the user request is invalid
     * @throws ResourceAlreadyExistsException if the resource already exists
     * @throws ResourceNotFoundException if a parent of the resource to create doesn't exist
     */
    Collection<String> createResources(Request request) throws CatalogException;
    /**
     * Update a single resource.
     *
     * @param request request instance containing the contents of the resource to update
     *
     * @throws ResourceNotFoundException if the resource doesn't exist
     * @throws InvalidPayloadException if the request payload is invalid
     */
    void updateResourceById(Request request) throws ResourceNotFoundException, InvalidPayloadException;
}
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
* <p/>
* http://www.apache.org/licenses/LICENSE-2.0
* <p/>
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.atlas.catalog;
import java.util.Collection;
import java.util.Map;
/**
 * Resource provider result.
 * Simple immutable holder for the collection of property maps produced by a query.
 */
public class Result {
    // Collection of property maps, one per resource in the result.
    // Held by reference (not copied); final so it is assigned exactly once.
    private final Collection<Map<String, Object>> propertyMaps;

    /**
     * Constructor.
     *
     * @param propertyMaps collection of property maps; stored by reference, not copied
     */
    public Result(Collection<Map<String, Object>> propertyMaps) {
        this.propertyMaps = propertyMaps;
    }

    /**
     * Obtain the result property maps.
     *
     * @return the same collection instance supplied at construction
     */
    public Collection<Map<String, Object>> getPropertyMaps() {
        return propertyMaps;
    }
}
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
* <p/>
* http://www.apache.org/licenses/LICENSE-2.0
* <p/>
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.atlas.catalog;
import org.apache.atlas.ApplicationProperties;
import org.apache.atlas.AtlasException;
import org.apache.atlas.catalog.definition.TaxonomyResourceDefinition;
import org.apache.atlas.catalog.exception.*;
import org.apache.atlas.catalog.query.AtlasQuery;
import org.apache.commons.configuration.Configuration;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.util.*;
/**
* Provider for taxonomy resources.
*/
public class TaxonomyResourceProvider extends BaseResourceProvider implements ResourceProvider {
private static final Logger LOG = LoggerFactory.getLogger(TaxonomyResourceProvider.class);
public static final String DEFAULT_TAXONOMY_NAME = "Catalog";
public static final String DEFAULT_TAXONOMY_DESCRIPTION = "Business Catalog";
public static final String NAMESPACE_ATTRIBUTE_NAME = "taxonomy.namespace";
// Taxonomy Term type
public static final String TAXONOMY_TERM_TYPE = "TaxonomyTerm";
// Taxonomy Namespace
public static final String TAXONOMY_NS = "atlas.taxonomy";
private final TermResourceProvider termResourceProvider;
// This is a cached value to prevent checking for taxonomy objects in every API call.
// It is updated once per lifetime of the application.
// TODO: If a taxonomy is deleted outside of this application, this value is not updated
// TODO: and there is no way in which a taxonomy will be auto-created.
// TODO: Assumption is that if a taxonomy is deleted externally, it will be created externally as well.
private static boolean taxonomyAutoInitializationChecked = false;
public TaxonomyResourceProvider(AtlasTypeSystem typeSystem) {
super(typeSystem, new TaxonomyResourceDefinition());
termResourceProvider = new TermResourceProvider(typeSystem);
}
@Override
public Result getResourceById(Request request) throws ResourceNotFoundException {
synchronized (TaxonomyResourceProvider.class) {
createDefaultTaxonomyIfNeeded();
}
return doGetResourceById(request);
}
@Override
public Result getResources(Request request) throws InvalidQueryException, ResourceNotFoundException {
synchronized (TaxonomyResourceProvider.class) {
createDefaultTaxonomyIfNeeded();
}
return doGetResources(request);
}
@Override
public void createResource(Request request)
throws InvalidPayloadException, ResourceAlreadyExistsException {
// not checking for default taxonomy in create per requirements
resourceDefinition.validateCreatePayload(request);
synchronized (TaxonomyResourceProvider.class) {
ensureTaxonomyDoesntExist(request);
doCreateResource(request);
}
}
@Override
public Collection<String> createResources(Request request) throws InvalidQueryException, ResourceNotFoundException {
throw new UnsupportedOperationException(
"Creating multiple Taxonomies in a request is not currently supported");
}
@Override
public void deleteResourceById(Request request) throws ResourceNotFoundException, InvalidPayloadException {
String taxonomyId = getResourceId(request);
getTermResourceProvider().deleteChildren(taxonomyId, new TermPath(request.<String>getProperty("name")));
typeSystem.deleteEntity(resourceDefinition, request);
}
@Override
public void updateResourceById(Request request) throws ResourceNotFoundException, InvalidPayloadException {
resourceDefinition.validateUpdatePayload(request);
AtlasQuery atlasQuery;
try {
atlasQuery = queryFactory.createTaxonomyQuery(request);
} catch (InvalidQueryException e) {
throw new CatalogRuntimeException("Unable to compile internal Term query: " + e, e);
}
synchronized (TaxonomyResourceProvider.class) {
createDefaultTaxonomyIfNeeded();
if (atlasQuery.execute(request.getUpdateProperties()).isEmpty()) {
throw new ResourceNotFoundException(String.format("Taxonomy '%s' not found.",
request.getQueryProperties().get("name")));
}
}
}
private String getResourceId(Request request) throws ResourceNotFoundException {
request.addAdditionalSelectProperties(Collections.singleton("id"));
// will result in expected ResourceNotFoundException if taxonomy doesn't exist
Result result = getResourceById(request);
return String.valueOf(result.getPropertyMaps().iterator().next().get("id"));
}
//todo: this is currently required because the expected exception isn't thrown by the Atlas repository
//todo: when an attempt is made to create an entity that already exists
// must be called from within class monitor
private void ensureTaxonomyDoesntExist(Request request) throws ResourceAlreadyExistsException {
try {
doGetResourceById(request);
throw new ResourceAlreadyExistsException(String.format("Taxonomy '%s' already exists.",
request.getProperty("name")));
} catch (ResourceNotFoundException e) {
// expected case
}
}
// must be called from within class monitor
private Result doGetResourceById(Request request) throws ResourceNotFoundException {
AtlasQuery atlasQuery;
try {
atlasQuery = queryFactory.createTaxonomyQuery(request);
} catch (InvalidQueryException e) {
throw new CatalogRuntimeException("Unable to compile internal Taxonomy query: " + e, e);
}
Collection<Map<String, Object>> resultSet = atlasQuery.execute();
if (resultSet.isEmpty()) {
throw new ResourceNotFoundException(String.format("Taxonomy '%s' not found.",
request.getProperty(resourceDefinition.getIdPropertyName())));
}
return new Result(resultSet);
}
// must be called from within class monitor
private Result doGetResources(Request request) throws InvalidQueryException, ResourceNotFoundException {
AtlasQuery atlasQuery = queryFactory.createTaxonomyQuery(request);
return new Result(atlasQuery.execute());
}
// must be called from within class monitor
private void doCreateResource(Request request) throws ResourceAlreadyExistsException {
typeSystem.createEntity(resourceDefinition, request);
taxonomyAutoInitializationChecked = true;
}
    /**
     * Create the default taxonomy if no taxonomy exists yet.
     * <p>
     * The default name is read from the "atlas.taxonomy.default.name"
     * configuration property, falling back to DEFAULT_TAXONOMY_NAME.
     * Failures are logged but never propagated; when the existence query fails,
     * the auto-initialization flag stays unset so the check runs again on a
     * subsequent call.
     */
    // must be called from within class monitor
    private void createDefaultTaxonomyIfNeeded() {
        if (! autoInitializationChecked()) {
            try {
                LOG.info("Checking if default taxonomy needs to be created.");
                // if any business taxonomy has been created, don't create one more - hence searching to
                // see if any taxonomy exists.
                if (doGetResources(new CollectionRequest(null, null)).getPropertyMaps().isEmpty()) {
                    LOG.info("No taxonomies found - going to create default taxonomy.");
                    Map<String, Object> requestProperties = new HashMap<>();
                    String defaultTaxonomyName = DEFAULT_TAXONOMY_NAME;
                    try {
                        Configuration configuration = ApplicationProperties.get();
                        defaultTaxonomyName = configuration.getString("atlas.taxonomy.default.name",
                                defaultTaxonomyName);
                    } catch (AtlasException e) {
                        // configuration is optional here; fall back to the hard-coded default name
                        LOG.warn("Unable to read Atlas configuration, will use {} as default taxonomy name",
                                defaultTaxonomyName, e);
                    }
                    requestProperties.put("name", defaultTaxonomyName);
                    requestProperties.put("description", DEFAULT_TAXONOMY_DESCRIPTION);
                    doCreateResource(new InstanceRequest(requestProperties));
                    LOG.info("Successfully created default taxonomy {}.", defaultTaxonomyName);
                } else {
                    // another taxonomy already exists, so auto-initialization is unnecessary
                    taxonomyAutoInitializationChecked = true;
                    LOG.info("Some taxonomy exists, not creating default taxonomy");
                }
            } catch (InvalidQueryException | ResourceNotFoundException e) {
                LOG.error("Unable to query for existing taxonomies due to internal error.", e);
            } catch (ResourceAlreadyExistsException e) {
                // benign race: the taxonomy was created between the check and the create
                LOG.info("Attempted to create default taxonomy and it already exists.");
            }
        }
    }
    /**
     * Whether default-taxonomy auto-initialization has already been performed
     * (or determined to be unnecessary).
     *
     * @return true once the auto-initialization check has completed
     */
    protected boolean autoInitializationChecked() {
        return taxonomyAutoInitializationChecked;
    }
    /**
     * Get the term resource provider associated with this taxonomy provider.
     *
     * @return the term resource provider
     */
    protected TermResourceProvider getTermResourceProvider() {
        return termResourceProvider;
    }
}
\ No newline at end of file
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
* <p/>
* http://www.apache.org/licenses/LICENSE-2.0
* <p/>
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.atlas.catalog;
/**
 * Term path information.
 * <p>
 * Wraps a fully qualified term name of the form TAXONOMY_NAME.TERM_NAME where
 * TERM_NAME may itself contain '.' separated segments representing the term
 * hierarchy (e.g. "Catalog.parent.child").  A value without any '.' denotes
 * the taxonomy itself (no term name).
 */
//todo: split between Term and TermPath
public class TermPath {
    private final String m_taxonomy;   // taxonomy name (first '.' delimited segment)
    private final String m_fqn;        // fully qualified name: taxonomy[.term[.subTerm]*]
    private final String m_name;       // term name relative to the taxonomy, or null for taxonomy-only paths
    private final String[] m_paths;    // individual term segments; empty for taxonomy-only paths

    /**
     * Construct a term path from an absolute name.
     *
     * @param fullyQualifiedName absolute term name: "taxonomy[.term[.subTerm]*]"
     */
    public TermPath(String fullyQualifiedName) {
        m_fqn = fullyQualifiedName;
        //todo: validation
        int idx = fullyQualifiedName.indexOf('.');
        if (idx != -1) {
            m_taxonomy = fullyQualifiedName.substring(0, idx);
            m_name = fullyQualifiedName.substring(idx + 1);
            m_paths = m_name.split("\\.");
        } else {
            // no term segments: this path refers to the taxonomy itself
            m_taxonomy = fullyQualifiedName;
            m_name = null;
            m_paths = new String[0];
        }
    }

    /**
     * Construct a term path from a taxonomy name and a relative term name.
     *
     * @param taxonomyName taxonomy name
     * @param termName     '.' separated term name relative to the taxonomy;
     *                     null or empty denotes the taxonomy itself
     */
    public TermPath(String taxonomyName, String termName) {
        m_taxonomy = taxonomyName;
        m_name = termName != null && termName.isEmpty() ? null : termName;
        if (m_name != null) {
            m_fqn = String.format("%s.%s", taxonomyName, termName);
            m_paths = termName.split("\\.");
        } else {
            m_fqn = taxonomyName;
            m_paths = new String[0];
        }
    }

    /**
     * Get the absolute term name which is in the form of TAXONOMY_NAME.TERM_NAME
     *
     * @return absolute term name which includes the taxonomy name
     */
    public String getFullyQualifiedName() {
        return m_fqn;
    }

    /**
     * Get the term name. This differs from the absolute name in that it doesn't
     * include the taxonomy name.
     *
     * @return the term name, or null for a taxonomy-only path
     */
    public String getName() {
        return m_name;
    }

    /**
     * Get the short name for the term which doesn't include any taxonomy or parent information.
     *
     * @return term short name
     */
    public String getShortName() {
        // NOTE(review): throws ArrayIndexOutOfBoundsException for a taxonomy-only
        // path (no term segments); callers are expected to check getName() first
        return m_paths[m_paths.length - 1];
    }

    /**
     * Get the parent hierarchy of this term rendered with '/' separators:
     * e.g. the path of "tax.a.b.c" is "/a/b".  Taxonomy-only paths and root
     * level terms return "/".
     *
     * @return the parent hierarchy path with '/' separators
     */
    public String getPath() {
        if (m_name == null) {
            return "/";
        } else {
            int idx = m_fqn.indexOf('.');
            int lastIdx = m_fqn.lastIndexOf('.');
            return idx == lastIdx ? "/" :
                    m_fqn.substring(idx, lastIdx).replaceAll("\\.", "/");
        }
    }

    /**
     * Get the parent term path.  For a root level term the parent is the
     * taxonomy itself (a path with a null term name).
     * Previously a root level term raised StringIndexOutOfBoundsException here.
     *
     * @return the parent term path
     */
    public TermPath getParent() {
        //todo: throw a checked exception for a taxonomy-only path (m_name == null)
        int idx = m_name.lastIndexOf('.');
        // a root level term ("tax.term") has the taxonomy itself as its parent
        return new TermPath(m_taxonomy, idx == -1 ? "" : m_name.substring(0, idx));
    }

    /**
     * Get the name of the taxonomy which contains this term.
     *
     * @return taxonomy name
     */
    public String getTaxonomyName() {
        return m_taxonomy;
    }

    /**
     * Get the individual term path segments, ordered from root to leaf.
     *
     * @return term path segments; empty for a taxonomy-only path
     */
    public String[] getPathSegments() {
        return m_paths;
    }
}
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
* <p/>
* http://www.apache.org/licenses/LICENSE-2.0
* <p/>
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.atlas.catalog;
import org.apache.atlas.catalog.definition.TermResourceDefinition;
import org.apache.atlas.catalog.exception.*;
import org.apache.atlas.catalog.query.AtlasQuery;
import java.util.*;
/**
 * Provider for Term resources.
 * <p>
 * A term is persisted as a trait type named by its fully qualified name
 * (TAXONOMY.term[.subTerm]*) together with a trait instance attached to the
 * owning taxonomy entity (see createResource()).
 */
public class TermResourceProvider extends BaseResourceProvider implements ResourceProvider {
    // collaborating providers, created lazily via the synchronized getters below
    private ResourceProvider taxonomyResourceProvider;
    private ResourceProvider entityResourceProvider;
    private ResourceProvider entityTagResourceProvider;

    public TermResourceProvider(AtlasTypeSystem typeSystem) {
        super(typeSystem, new TermResourceDefinition());
    }

    /**
     * Get a single term identified by the fully qualified name of the
     * 'termPath' request property.
     *
     * @param request  instance request containing a 'termPath' property
     * @return result containing the matching term's property map
     * @throws ResourceNotFoundException if no term matches
     */
    @Override
    public Result getResourceById(Request request) throws ResourceNotFoundException {
        //todo: shouldn't need to add this here
        request.getQueryProperties().put("name", request.<TermPath>getProperty("termPath").getFullyQualifiedName());
        AtlasQuery atlasQuery;
        try {
            atlasQuery = queryFactory.createTermQuery(request);
        } catch (InvalidQueryException e) {
            // query is generated internally so a compile failure is a programming error
            throw new CatalogRuntimeException("Unable to compile internal Term query: " + e, e);
        }
        Collection<Map<String, Object>> results = atlasQuery.execute();
        if (results.isEmpty()) {
            throw new ResourceNotFoundException(String.format("Term '%s' not found.",
                    request.<TermPath>getProperty("termPath").getFullyQualifiedName()));
        }
        return new Result(results);
    }

    /**
     * Get all terms matching the request query string, scoped by 'termPath'.
     *
     * @param request  collection request with a 'termPath' property and optional query string
     */
    public Result getResources(Request request)
            throws InvalidQueryException, ResourceNotFoundException {
        TermPath termPath = request.getProperty("termPath");
        // expand special 'hierarchy/path' tokens before compiling the query
        String queryString = doQueryStringConversions(termPath, request.getQueryString());
        Request queryRequest = new CollectionRequest(request.getQueryProperties(), queryString);
        AtlasQuery atlasQuery = queryFactory.createTermQuery(queryRequest);
        Collection<Map<String, Object>> result = atlasQuery.execute();
        return new Result(result);
    }

    /**
     * Create a term.  Ensures the owning taxonomy (and, for nested terms, the
     * parent term) exists, then creates the trait type and attaches a trait
     * instance to the taxonomy entity.
     *
     * @throws ResourceNotFoundException      if the taxonomy or parent term doesn't exist
     * @throws ResourceAlreadyExistsException if the term already exists
     * @throws InvalidPayloadException        if the request payload is invalid
     */
    public void createResource(Request request)
            throws InvalidPayloadException, ResourceAlreadyExistsException, ResourceNotFoundException {
        TermPath termPath = (TermPath) request.getQueryProperties().remove("termPath");
        String qualifiedTermName = termPath.getFullyQualifiedName();
        request.getQueryProperties().put("name", qualifiedTermName);
        resourceDefinition.validateCreatePayload(request);
        // get taxonomy
        Request taxonomyRequest = new InstanceRequest(
                Collections.<String, Object>singletonMap("name", termPath.getTaxonomyName()));
        taxonomyRequest.addAdditionalSelectProperties(Collections.singleton("id"));
        Result taxonomyResult = getTaxonomyResourceProvider().getResourceById(taxonomyRequest);
        Map<String, Object> taxonomyPropertyMap = taxonomyResult.getPropertyMaps().iterator().next();
        // ensure that parent exists if not a root level term
        if (! termPath.getPath().equals("/")) {
            Map<String, Object> parentProperties = new HashMap<>(request.getQueryProperties());
            parentProperties.put("termPath", termPath.getParent());
            getResourceById(new InstanceRequest(parentProperties));
        }
        typeSystem.createTraitType(resourceDefinition, qualifiedTermName,
                request.<String>getProperty("description"));
        typeSystem.createTraitInstance(String.valueOf(taxonomyPropertyMap.get("id")),
                qualifiedTermName, request.getQueryProperties());
    }

    @Override
    public Collection<String> createResources(Request request) throws InvalidQueryException, ResourceNotFoundException {
        throw new UnsupportedOperationException("Creating multiple Terms in a request is not currently supported");
    }

    /**
     * Update a term identified by 'termPath'.  When the 'description' property
     * is updated, the description of all entity tags created from this term is
     * updated as well.
     *
     * @throws ResourceNotFoundException if the term doesn't exist
     * @throws InvalidPayloadException   if the update payload is invalid
     */
    @Override
    public void updateResourceById(Request request) throws ResourceNotFoundException, InvalidPayloadException {
        resourceDefinition.validateUpdatePayload(request);
        String termName = request.<TermPath>getProperty("termPath").getFullyQualifiedName();
        request.getQueryProperties().put("name", termName);
        AtlasQuery atlasQuery;
        try {
            atlasQuery = queryFactory.createTermQuery(request);
        } catch (InvalidQueryException e) {
            throw new CatalogRuntimeException("Unable to compile internal Term query: " + e, e);
        }
        Map<String, Object> updateProperties = request.getUpdateProperties();
        Collection<Map<String, Object>> results = atlasQuery.execute(updateProperties);
        if (results.isEmpty()) {
            throw new ResourceNotFoundException(String.format("Term '%s' not found.",
                    termName));
        }
        // only the term 'description' property is set on entity tags
        if (updateProperties.containsKey("description")) {
            // 'description' property is being updated so we need to update tags
            String tagQueryString = String.format("name:%s", termName);
            Request tagRequest = new CollectionRequest(
                    Collections.<String, Object>singletonMap("id", "*"), tagQueryString, null);
            AtlasQuery tagQuery;
            try {
                tagQuery = queryFactory.createEntityTagQuery(tagRequest);
            } catch (InvalidQueryException e) {
                throw new CatalogRuntimeException("Unable to compile internal Entity Tag query: " + e, e);
            }
            tagQuery.execute(Collections.singletonMap("description", updateProperties.get("description")));
        }
    }

    /**
     * Delete a term along with all of its child terms and any entity tags
     * created from them.
     *
     * @throws ResourceNotFoundException if the term doesn't exist
     */
    @Override
    public void deleteResourceById(Request request) throws ResourceNotFoundException, InvalidPayloadException {
        // will result in expected ResourceNotFoundException if term doesn't exist
        getResourceById(request);
        TermPath termPath = (TermPath) request.getQueryProperties().get("termPath");
        String taxonomyId = getTaxonomyId(termPath);
        deleteChildren(taxonomyId, termPath);
        deleteTerm(taxonomyId, termPath);
    }

    /**
     * Delete all child terms of the given term.
     */
    protected void deleteChildren(String taxonomyId, TermPath termPath)
            throws ResourceNotFoundException, InvalidPayloadException {
        // NOTE(review): the trailing '.' appears intended to make the term query
        // match only descendant names — confirm against the query factory
        TermPath collectionTermPath = new TermPath(termPath.getFullyQualifiedName() + ".");
        Request queryRequest = new CollectionRequest(Collections.<String, Object>singletonMap("termPath",
                collectionTermPath), null);
        AtlasQuery collectionQuery;
        try {
            collectionQuery = queryFactory.createTermQuery(queryRequest);
        } catch (InvalidQueryException e) {
            throw new CatalogRuntimeException("Failed to compile internal predicate: " + e, e);
        }
        Collection<Map<String, Object>> children = collectionQuery.execute();
        for (Map<String, Object> childMap : children) {
            deleteTerm(taxonomyId, new TermPath(String.valueOf(childMap.get("name"))));
        }
    }

    /**
     * Delete a single term: removes the entity tags created from it, then the
     * trait instance attached to the taxonomy entity.
     */
    private void deleteTerm(String taxonomyId, TermPath termPath)
            throws ResourceNotFoundException, InvalidPayloadException {
        String fullyQualifiedName = termPath.getFullyQualifiedName();
        deleteEntityTagsForTerm(fullyQualifiedName);
        // delete term instance associated with the taxonomy
        typeSystem.deleteTag(taxonomyId, fullyQualifiedName);
        //todo: Currently no way to delete type via MetadataService or MetadataRepository
    }

    /**
     * Delete all entity tags created from the named term, across all entities.
     */
    private void deleteEntityTagsForTerm(String fullyQualifiedName) throws ResourceNotFoundException {
        String entityQueryStr = String.format("tags/name:%s", fullyQualifiedName);
        Request entityRequest = new CollectionRequest(Collections.<String, Object>emptyMap(), entityQueryStr);
        Result entityResult;
        try {
            entityResult = getEntityResourceProvider().getResources(entityRequest);
        } catch (InvalidQueryException e) {
            throw new CatalogRuntimeException(String.format(
                    "Failed to compile internal predicate for query '%s': %s", entityQueryStr, e), e);
        }
        for (Map<String, Object> entityResultMap : entityResult.getPropertyMaps()) {
            Map<String, Object> tagRequestProperties = new HashMap<>();
            tagRequestProperties.put("id", String.valueOf(entityResultMap.get("id")));
            tagRequestProperties.put("name", fullyQualifiedName);
            try {
                getEntityTagResourceProvider().deleteResourceById(new InstanceRequest(tagRequestProperties));
            } catch (InvalidPayloadException e) {
                throw new CatalogRuntimeException(
                        "An internal error occurred while trying to delete an entity tag: " + e, e);
            }
        }
    }

    /**
     * Resolve the id of the taxonomy which owns the given term.
     *
     * @throws ResourceNotFoundException if the taxonomy doesn't exist
     */
    private String getTaxonomyId(TermPath termPath) throws ResourceNotFoundException {
        Request taxonomyRequest = new InstanceRequest(Collections.<String, Object>singletonMap(
                "name", termPath.getTaxonomyName()));
        taxonomyRequest.addAdditionalSelectProperties(Collections.singleton("id"));
        // will result in proper ResourceNotFoundException if taxonomy doesn't exist
        Result taxonomyResult = getTaxonomyResourceProvider().getResourceById(taxonomyRequest);
        Map<String, Object> taxonomyResultMap = taxonomyResult.getPropertyMaps().iterator().next();
        return String.valueOf(taxonomyResultMap.get("id"));
    }

    //todo: add generic support for pre-query modification of expected value
    //todo: similar path parsing code is used in several places in this class
    /**
     * Replace the special 'hierarchy/path:.' token in a user query string with
     * the actual parent path of the current term.
     */
    private String doQueryStringConversions(TermPath termPath, String queryStr) throws InvalidQueryException {
        String hierarchyPathProp = "hierarchy/path";
        // replace "."
        if (queryStr != null && queryStr.contains(String.format("%s:.", hierarchyPathProp))) {
            //todo: regular expression replacement
            queryStr = queryStr.replaceAll(String.format("%s:.", hierarchyPathProp),
                    String.format("%s:%s", hierarchyPathProp, termPath.getPath()));
        }
        return queryStr;
    }

    // lazy init; synchronized to guard against concurrent construction
    protected synchronized ResourceProvider getTaxonomyResourceProvider() {
        if (taxonomyResourceProvider == null) {
            taxonomyResourceProvider = new TaxonomyResourceProvider(typeSystem);
        }
        return taxonomyResourceProvider;
    }

    // lazy init; synchronized to guard against concurrent construction
    protected synchronized ResourceProvider getEntityResourceProvider() {
        if (entityResourceProvider == null) {
            entityResourceProvider = new EntityResourceProvider(typeSystem);
        }
        return entityResourceProvider;
    }

    // lazy init; synchronized to guard against concurrent construction
    protected synchronized ResourceProvider getEntityTagResourceProvider() {
        if (entityTagResourceProvider == null) {
            entityTagResourceProvider = new EntityTagResourceProvider(typeSystem);
        }
        return entityTagResourceProvider;
    }
}
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
* <p/>
* http://www.apache.org/licenses/LICENSE-2.0
* <p/>
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.atlas.catalog;
import com.tinkerpop.blueprints.Vertex;
import org.apache.atlas.catalog.definition.EntityTagResourceDefinition;
/**
 * Wrapper for term vertices.
 * <p>
 * Delegates property mapping and value formatting to the entity tag resource
 * definition — presumably because term instances share the entity tag vertex
 * representation; verify against EntityTagResourceDefinition.
 */
public class TermVertexWrapper extends VertexWrapper {
    /**
     * @param v  vertex backing the term instance
     */
    public TermVertexWrapper(Vertex v) {
        super(v, new EntityTagResourceDefinition());
    }
}
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
* <p/>
* http://www.apache.org/licenses/LICENSE-2.0
* <p/>
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.atlas.catalog;
import com.tinkerpop.blueprints.Vertex;
import org.apache.atlas.catalog.definition.ResourceDefinition;
import org.apache.atlas.repository.Constants;
import java.util.*;
/**
 * Wrapper for vertices which provides additional information.
 * <p>
 * Translates between clean and fully qualified property names, applies
 * registered per-property value formatters, and supports "soft" removal of
 * properties: removed names are hidden from reads without mutating the
 * underlying vertex.
 */
public class VertexWrapper {
    private final Vertex vertex;
    private final String vertexType;
    // names hidden from getProperty()/getPropertyKeys()/getPropertyMap()
    private final Set<String> removedProperties = new HashSet<>();
    private final PropertyMapper propertyMapper;
    private final Map<String, PropertyValueFormatter> propertyValueFormatters;
    protected ResourceComparator resourceComparator = new ResourceComparator();

    public VertexWrapper(Vertex v, ResourceDefinition resourceDefinition) {
        this(v, resourceDefinition.getPropertyMapper(), resourceDefinition.getPropertyValueFormatters());
    }

    public VertexWrapper(Vertex v,
                         PropertyMapper mapper,
                         Map<String, PropertyValueFormatter> formatters) {
        vertex = v;
        vertexType = getVertexType(v);
        propertyMapper = mapper;
        propertyValueFormatters = formatters;
    }

    public Vertex getVertex() {
        return vertex;
    }

    /**
     * Get a property value by clean name.  Returns null for removed properties;
     * otherwise reads the fully qualified property from the vertex and applies
     * any registered value formatter.
     *
     * @param name clean property name
     * @return formatted property value, or null if absent or removed
     */
    @SuppressWarnings("unchecked") // formatter output can't be typed until the property mapper is generified
    public <T> T getProperty(String name) {
        T val;
        if (removedProperties.contains(name)) {
            val = null;
        } else {
            val = vertex.getProperty(propertyMapper.toFullyQualifiedName(name, vertexType));
            if (propertyValueFormatters.containsKey(name)) {
                //todo: fix typing of property mapper
                val = (T) propertyValueFormatters.get(name).format(val);
            }
        }
        return val;
    }

    /**
     * Set a property on the underlying vertex using the clean name, which is
     * translated to its fully qualified form.
     */
    public void setProperty(String name, Object value) {
        vertex.setProperty(propertyMapper.toFullyQualifiedName(name, vertexType), value);
    }

    /**
     * Get the clean names of all non-removed vertex properties, ordered by the
     * resource comparator.
     */
    public Collection<String> getPropertyKeys() {
        Collection<String> propertyKeys = new TreeSet<>(resourceComparator);
        for (String p : vertex.getPropertyKeys()) {
            String cleanName = propertyMapper.toCleanName(p, vertexType);
            if (! removedProperties.contains(cleanName)) {
                propertyKeys.add(cleanName);
            }
        }
        return propertyKeys;
    }

    /**
     * Get a map of clean property name to formatted value for all non-removed
     * properties, ordered by the resource comparator.
     */
    public Map<String, Object> getPropertyMap() {
        Map<String, Object> props = new TreeMap<>(resourceComparator);
        for (String p : vertex.getPropertyKeys()) {
            String cleanName = propertyMapper.toCleanName(p, vertexType);
            if (! removedProperties.contains(cleanName)) {
                Object val = vertex.getProperty(p);
                if (propertyValueFormatters.containsKey(cleanName)) {
                    val = propertyValueFormatters.get(cleanName).format(val);
                }
                props.put(cleanName, val);
            }
        }
        return props;
    }

    /**
     * Hide a property from subsequent reads.  The underlying vertex is not modified.
     */
    public void removeProperty(String name) {
        removedProperties.add(name);
    }

    public boolean isPropertyRemoved(String name) {
        return removedProperties.contains(name);
    }

    @Override
    public String toString() {
        return String.format("VertexWrapper[name=%s]", getProperty("name"));
    }

    // static: safe to invoke from the constructor before instance state is assigned
    private static String getVertexType(Vertex v) {
        return v.getProperty(Constants.ENTITY_TYPE_PROPERTY_KEY);
    }
}
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
* <p/>
* http://www.apache.org/licenses/LICENSE-2.0
* <p/>
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.atlas.catalog.definition;
import org.apache.atlas.AtlasException;
import org.apache.atlas.catalog.*;
import org.apache.atlas.catalog.exception.CatalogRuntimeException;
import org.apache.atlas.catalog.exception.InvalidPayloadException;
import org.apache.atlas.catalog.projection.Projection;
import org.apache.atlas.catalog.projection.Relation;
import org.apache.atlas.typesystem.types.AttributeDefinition;
import org.apache.atlas.typesystem.types.AttributeInfo;
import org.apache.atlas.typesystem.types.Multiplicity;
import org.apache.atlas.typesystem.types.TypeSystem;
import java.util.*;
/**
 * Base class for resource definitions.
 * <p>
 * Maintains registered property metadata, projections, relations, the property
 * name mapper and per-property value formatters shared by concrete resource
 * definitions, and provides default create/update payload validation.
 */
public abstract class BaseResourceDefinition implements ResourceDefinition {
    // shared type system singleton used when building attribute metadata
    protected static final TypeSystem typeSystem = TypeSystem.getInstance();
    // property names returned for instance (single resource) responses
    protected final Set<String> instanceProperties = new HashSet<>();
    // property names returned for collection responses
    protected final Set<String> collectionProperties = new HashSet<>();
    // attribute definitions keyed by property name
    protected Map<String, AttributeDefinition> propertyDefs = new HashMap<>();
    // attribute runtime info keyed by property name
    protected Map<String, AttributeInfo> properties = new HashMap<>();
    protected final Map<String, Projection> projections = new HashMap<>();
    protected final Map<String, Relation> relations = new HashMap<>();
    protected final PropertyMapper propertyMapper;
    protected final Map<String, PropertyValueFormatter> propertyValueFormatters = new HashMap<>();

    public BaseResourceDefinition() {
        // creation_time and modified_time are rendered via a common date formatter
        DefaultDateFormatter defaultDateFormatter = new DefaultDateFormatter();
        registerPropertyValueFormatter("creation_time", defaultDateFormatter);
        registerPropertyValueFormatter("modified_time", defaultDateFormatter);
        this.propertyMapper = createPropertyMapper();
    }

    /**
     * Validate a create payload: all properties with REQUIRED multiplicity must
     * be present and no properties outside the registered set may be supplied.
     *
     * @throws InvalidPayloadException listing the missing and unknown properties
     */
    @Override
    public void validateCreatePayload(Request request) throws InvalidPayloadException {
        Collection<String> propKeys = new HashSet<>(request.getQueryProperties().keySet());
        Collection<String> missingProperties = new HashSet<>();
        for (AttributeInfo property : properties.values()) {
            String name = property.name;
            if (property.multiplicity == Multiplicity.REQUIRED) {
                if (request.getProperty(name) == null) {
                    missingProperties.add(name);
                }
            }
            // any keys remaining after this loop are unknown properties
            propKeys.remove(name);
        }
        if (! missingProperties.isEmpty() || ! propKeys.isEmpty()) {
            throw new InvalidPayloadException(missingProperties, propKeys);
        }
        //todo: property type validation
    }

    /**
     * Validate an update payload: only registered properties other than the
     * immutable 'name' property may be updated.
     *
     * @throws InvalidPayloadException listing the invalid update properties
     */
    @Override
    public void validateUpdatePayload(Request request) throws InvalidPayloadException {
        Collection<String> updateKeys = new HashSet<>(request.getUpdateProperties().keySet());
        Collection<String> validProperties = new HashSet<>(properties.keySet());
        // currently updating 'name' property for any resource is unsupported
        validProperties.remove("name");
        updateKeys.removeAll(validProperties);
        if (! updateKeys.isEmpty()) {
            throw new InvalidPayloadException(Collections.<String>emptySet(), updateKeys);
        }
    }

    @Override
    public Collection<AttributeDefinition> getPropertyDefinitions() {
        return propertyDefs.values();
    }

    /**
     * Filter a property map down to the properties appropriate for the request
     * cardinality (collection vs instance) plus any explicitly requested
     * additional select properties.  The map is modified in place.
     */
    @Override
    public Map<String, Object> filterProperties(Request request, Map<String, Object> propertyMap) {
        Request.Cardinality cardinality = request.getCardinality();
        Collection<String> requestProperties = request.getAdditionalSelectProperties();
        Iterator<Map.Entry<String, Object>> propIter = propertyMap.entrySet().iterator();
        while(propIter.hasNext()) {
            Map.Entry<String, Object> propEntry = propIter.next();
            String prop = propEntry.getKey();
            if (! requestProperties.contains(prop)) {
                if (cardinality == Request.Cardinality.COLLECTION) {
                    if (! collectionProperties.contains(prop)) {
                        propIter.remove();
                    }
                } else {
                    // an empty instanceProperties set means "include everything"
                    if (! instanceProperties.isEmpty() && ! instanceProperties.contains(prop)) {
                        propIter.remove();
                    }
                }
            }
        }
        return propertyMap;
    }

    @Override
    public Map<String, Projection> getProjections() {
        return projections;
    }

    @Override
    public Map<String, Relation> getRelations() {
        return relations;
    }

    @Override
    public synchronized PropertyMapper getPropertyMapper() {
        return propertyMapper;
    }

    @Override
    public Map<String, PropertyValueFormatter> getPropertyValueFormatters() {
        return propertyValueFormatters;
    }

    /**
     * Register a property definition for this resource type.
     */
    protected void registerProperty(AttributeDefinition propertyDefinition) {
        try {
            propertyDefs.put(propertyDefinition.name, propertyDefinition);
            properties.put(propertyDefinition.name, new AttributeInfo(typeSystem, propertyDefinition, null));
        } catch (AtlasException e) {
            throw new CatalogRuntimeException("Unable to create attribute: " + propertyDefinition.name, e);
        }
    }

    /**
     * Register a formatter applied to the named property when rendering values.
     */
    protected void registerPropertyValueFormatter(String property, PropertyValueFormatter valueFormatter) {
        propertyValueFormatters.put(property, valueFormatter);
    }

    /**
     * Create a new property mapper instance.
     * Should be overridden in children where the default implementation isn't sufficient.
     *
     * @return a new property mapper instance
     */
    protected PropertyMapper createPropertyMapper() {
        return new DefaultPropertyMapper();
    }
}
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
* <p/>
* http://www.apache.org/licenses/LICENSE-2.0
* <p/>
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.atlas.catalog.definition;
import com.tinkerpop.pipes.PipeFunction;
import com.tinkerpop.pipes.transform.TransformFunctionPipe;
import org.apache.atlas.catalog.Request;
import org.apache.atlas.catalog.exception.InvalidPayloadException;
import org.apache.atlas.catalog.projection.*;
import java.util.Arrays;
import java.util.Collection;
import java.util.Collections;
import java.util.Map;
/**
 * Entity resource definition.
 * <p>
 * Registers the collection-level properties and the 'tags', 'traits' and
 * 'default' (generic relation) projections used when rendering entities.
 */
public class EntityResourceDefinition extends BaseResourceDefinition {
    public EntityResourceDefinition() {
        collectionProperties.addAll(Arrays.asList("name", "id", "type"));

        RelationProjection tags = getTagProjection();
        RelationProjection traits = getTraitProjection();
        projections.put("tags", tags);
        projections.put("traits", traits);
        projections.put("default", getDefaultRelationProjection());

        relations.put(tags.getName(), tags.getRelation());
        relations.put(traits.getName(), traits.getRelation());
    }

    @Override
    public String getIdPropertyName() {
        return "id";
    }

    /** Entities span many types, so no single type name applies. */
    @Override
    public String getTypeName() {
        return null;
    }

    @Override
    public void validateCreatePayload(Request request) throws InvalidPayloadException {
        // no op for entities as we don't currently create entities and
        // each entity type is different
    }

    @Override
    public String resolveHref(Map<String, Object> properties) {
        Object id = properties.get("id");
        if (id == null) {
            return null;
        }
        return String.format("v1/entities/%s", id);
    }

    /** Projection exposing entity tags, each decorated with an href. */
    private RelationProjection getTagProjection() {
        RelationProjection projection = new RelationProjection("tags", Collections.singleton("name"),
                new TagRelation(), Projection.Cardinality.MULTIPLE);
        projection.addPipe(new TransformFunctionPipe<>(
                new PipeFunction<Collection<ProjectionResult>, Collection<ProjectionResult>>() {
                    @Override
                    public Collection<ProjectionResult> compute(Collection<ProjectionResult> results) {
                        // add an href for each tag, rooted at the owning entity's id
                        for (ProjectionResult result : results) {
                            for (Map<String, Object> properties : result.getPropertyMaps()) {
                                properties.put("href", String.format("v1/entities/%s/tags/%s",
                                        result.getStartingVertex().getProperty("id"), properties.get("name")));
                            }
                        }
                        return results;
                    }
                }));
        return projection;
    }

    /** Projection exposing entity traits, with no selected properties. */
    private RelationProjection getTraitProjection() {
        return new RelationProjection("traits", Collections.<String>emptySet(),
                new TraitRelation(), Projection.Cardinality.MULTIPLE);
    }

    /** Generic relation projection decorating each related entity with an href. */
    private RelationProjection getDefaultRelationProjection() {
        RelationProjection projection = new RelationProjection(
                "relations",
                Arrays.asList("type", "id", "name"),
                new GenericRelation(this), Projection.Cardinality.MULTIPLE);
        projection.addPipe(new TransformFunctionPipe<>(
                new PipeFunction<Collection<ProjectionResult>, Collection<ProjectionResult>>() {
                    @Override
                    public Collection<ProjectionResult> compute(Collection<ProjectionResult> results) {
                        for (ProjectionResult result : results) {
                            for (Map<String, Object> properties : result.getPropertyMaps()) {
                                properties.put("href", String.format("v1/entities/%s", properties.get("id")));
                            }
                        }
                        return results;
                    }
                }));
        return projection;
    }
}
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
* <p/>
* http://www.apache.org/licenses/LICENSE-2.0
* <p/>
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.atlas.catalog.definition;
import com.tinkerpop.pipes.PipeFunction;
import com.tinkerpop.pipes.transform.TransformFunctionPipe;
import org.apache.atlas.catalog.DefaultPropertyMapper;
import org.apache.atlas.catalog.PropertyMapper;
import org.apache.atlas.catalog.ResourceComparator;
import org.apache.atlas.catalog.VertexWrapper;
import org.apache.atlas.catalog.projection.Projection;
import org.apache.atlas.catalog.projection.ProjectionResult;
import org.apache.atlas.repository.Constants;
import org.apache.atlas.typesystem.types.DataTypes;
import org.apache.atlas.typesystem.types.utils.TypesUtil;
import java.util.*;
/**
 * Entity Tag resource definition.
 */
public class EntityTagResourceDefinition extends BaseResourceDefinition {
    // key under which the owning entity's guid is supplied in tag property maps
    public static final String ENTITY_GUID_PROPERTY = "entity-guid";

    public EntityTagResourceDefinition() {
        registerProperty(TypesUtil.createRequiredAttrDef("name", DataTypes.STRING_TYPE));
        instanceProperties.add("name");
        instanceProperties.add("description");
        instanceProperties.add("creation_time");
        collectionProperties.add("name");
        collectionProperties.add("description");
        // each tag exposes a link to the term it was created from
        projections.put("terms", getTermProjection());
    }

    /** Entity tags are identified by name. */
    @Override
    public String getIdPropertyName() {
        return "name";
    }

    //not meaningful for entity tags
    @Override
    public String getTypeName() {
        return null;
    }

    @Override
    public String resolveHref(Map<String, Object> properties) {
        return String.format("v1/entities/%s/tags/%s", properties.get(ENTITY_GUID_PROPERTY), properties.get("name"));
    }

    /**
     * Build the 'term' projection which renders an href to the term the tag was
     * created from.  The tag vertex's type property holds the fully qualified
     * term name: the first '.' delimited segment is the taxonomy and the
     * remaining segments are nested term names.
     */
    private Projection getTermProjection() {
        return new Projection("term", Projection.Cardinality.SINGLE,
                new TransformFunctionPipe<>(new PipeFunction<VertexWrapper, Collection<ProjectionResult>>() {
                    @Override
                    public Collection<ProjectionResult> compute(VertexWrapper start) {
                        Map<String, Object> map = new TreeMap<>(new ResourceComparator());
                        StringBuilder sb = new StringBuilder();
                        sb.append("v1/taxonomies/");
                        String fullyQualifiedName = start.getVertex().getProperty(Constants.ENTITY_TYPE_PROPERTY_KEY);
                        String[] paths = fullyQualifiedName.split("\\.");
                        // first path segment is the taxonomy
                        sb.append(paths[0]);
                        for (int i = 1; i < paths.length; ++i) {
                            String path = paths[i];
                            if (path != null && !path.isEmpty()) {
                                sb.append("/terms/");
                                sb.append(path);
                            }
                        }
                        map.put("href", sb.toString());
                        return Collections.singleton(new ProjectionResult("term", start,
                                Collections.singleton(map)));
                    }
                }));
    }

    /**
     * Map the vertex type-name system property to the clean name 'name' and
     * back, since the tag's name is carried in the vertex type property.
     */
    @Override
    protected PropertyMapper createPropertyMapper() {
        return new DefaultPropertyMapper(Collections.singletonMap(Constants.ENTITY_TYPE_PROPERTY_KEY, "name"),
                Collections.singletonMap("name", Constants.ENTITY_TYPE_PROPERTY_KEY));
    }
}
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
* <p/>
* http://www.apache.org/licenses/LICENSE-2.0
* <p/>
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.atlas.catalog.definition;
import org.apache.atlas.catalog.PropertyMapper;
import org.apache.atlas.catalog.PropertyValueFormatter;
import org.apache.atlas.catalog.Request;
import org.apache.atlas.catalog.exception.InvalidPayloadException;
import org.apache.atlas.catalog.projection.Projection;
import org.apache.atlas.catalog.projection.Relation;
import org.apache.atlas.typesystem.types.AttributeDefinition;
import java.util.Collection;
import java.util.Map;
/**
* Resource definition.
*/
/**
 * Resource definition.
 * Describes the metadata for a catalog resource type: its type name, id
 * property, property definitions, projections, relations and the validation
 * applied to create/update payloads.
 */
public interface ResourceDefinition {
    /**
     * The type name of the resource.
     *
     * @return the resources type name; may be null for resources that are not
     *         backed by a concrete type (e.g. entity tags)
     */
    String getTypeName();
    /**
     * Validate a user create request payload.
     *
     * @param request user create request
     *
     * @throws InvalidPayloadException if the request payload is invalid in any way
     */
    void validateCreatePayload(Request request) throws InvalidPayloadException;
    /**
     * Validate a user update request payload.
     *
     * @param request user update request
     *
     * @throws InvalidPayloadException if the request payload is invalid in any way
     */
    void validateUpdatePayload(Request request) throws InvalidPayloadException;
    /**
     * Get the name of the resources id property.
     *
     * @return the id property name
     */
    String getIdPropertyName();
    /**
     * Get the property definitions for the resource.
     *
     * @return resource property definitions
     */
    //todo: abstract usage of AttributeDefinition
    Collection<AttributeDefinition> getPropertyDefinitions();
    /**
     * Filter out properties which shouldn't be returned in the result.
     * The passed in map is directly modified as well as returned.
     *
     * @param request user request
     * @param propertyMap property map to filter
     *
     * @return the filtered property map (the same instance that was passed in)
     */
    Map<String, Object> filterProperties(Request request, Map<String, Object> propertyMap);
    /**
     * Generate an href for the resource from the provided resource property map.
     *
     * @param properties resource property map
     *
     * @return a URL to be used as an href property value for the resource
     */
    String resolveHref(Map<String, Object> properties);
    /**
     * Get map of resource projections, keyed by projection name.
     *
     * @return map of resource projections
     */
    Map<String, Projection> getProjections();
    /**
     * Get map of resource relations, keyed by relation name.
     *
     * @return map of resource relations
     */
    Map<String, Relation> getRelations();
    /**
     * Get the property mapper associated with the resource.
     *
     * @return associated property mapper
     */
    PropertyMapper getPropertyMapper();
    /**
     * Get the registered property value formatters.
     * @return map of property name to property value formatter
     */
    Map<String, PropertyValueFormatter> getPropertyValueFormatters();
}
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
* <p/>
* http://www.apache.org/licenses/LICENSE-2.0
* <p/>
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.atlas.catalog.definition;
import com.tinkerpop.pipes.PipeFunction;
import com.tinkerpop.pipes.transform.TransformFunctionPipe;
import org.apache.atlas.catalog.Request;
import org.apache.atlas.catalog.TaxonomyResourceProvider;
import org.apache.atlas.catalog.VertexWrapper;
import org.apache.atlas.catalog.exception.InvalidPayloadException;
import org.apache.atlas.catalog.projection.Projection;
import org.apache.atlas.catalog.projection.ProjectionResult;
import org.apache.atlas.typesystem.types.DataTypes;
import org.apache.atlas.typesystem.types.utils.TypesUtil;
import java.util.*;
/**
* Taxonomy resource definition.
*/
/**
 * Taxonomy resource definition.
 * Describes the top level taxonomy resource and its "terms" projection.
 */
public class TaxonomyResourceDefinition extends BaseResourceDefinition {
    public TaxonomyResourceDefinition() {
        registerProperty(TypesUtil.createUniqueRequiredAttrDef("name", DataTypes.STRING_TYPE));
        registerProperty(TypesUtil.createOptionalAttrDef("description", DataTypes.STRING_TYPE));
        registerProperty(TypesUtil.createOptionalAttrDef(TaxonomyResourceProvider.NAMESPACE_ATTRIBUTE_NAME, DataTypes.STRING_TYPE));

        //todo: combine with above registrations
        instanceProperties.add("name");
        instanceProperties.add("description");
        instanceProperties.add("creation_time");

        collectionProperties.add("name");
        collectionProperties.add("description");

        projections.put("terms", getTermsProjection());
    }

    @Override
    public void validateCreatePayload(Request request) throws InvalidPayloadException {
        super.validateCreatePayload(request);

        // '.' is reserved as the separator in fully qualified term names
        Object name = request.getQueryProperties().get("name");
        if (String.valueOf(name).contains(".")) {
            throw new InvalidPayloadException("The \"name\" property may not contain the character '.'");
        }
    }

    @Override
    public String getTypeName() {
        return "Taxonomy";
    }

    @Override
    public String getIdPropertyName() {
        return "name";
    }

    @Override
    public String resolveHref(Map<String, Object> properties) {
        return String.format("v1/taxonomies/%s", properties.get("name"));
    }

    /**
     * Projection exposing the href of this taxonomy's terms collection.
     */
    private Projection getTermsProjection() {
        final String projectionName = "terms";
        return new Projection(projectionName, Projection.Cardinality.SINGLE,
                new TransformFunctionPipe<>(new PipeFunction<VertexWrapper, Collection<ProjectionResult>>() {
                    private String baseHref = "v1/taxonomies/";

                    @Override
                    public Collection<ProjectionResult> compute(VertexWrapper vertex) {
                        Map<String, Object> hrefMap = new HashMap<>();
                        hrefMap.put("href", baseHref + vertex.getProperty("name") + "/terms");
                        return Collections.singleton(new ProjectionResult(projectionName, vertex,
                                Collections.singleton(hrefMap)));
                    }
                }));
    }
}
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
* <p/>
* http://www.apache.org/licenses/LICENSE-2.0
* <p/>
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.atlas.catalog.definition;
import com.tinkerpop.pipes.PipeFunction;
import com.tinkerpop.pipes.transform.TransformFunctionPipe;
import org.apache.atlas.catalog.Request;
import org.apache.atlas.catalog.ResourceComparator;
import org.apache.atlas.catalog.TermPath;
import org.apache.atlas.catalog.VertexWrapper;
import org.apache.atlas.catalog.exception.InvalidPayloadException;
import org.apache.atlas.catalog.projection.Projection;
import org.apache.atlas.catalog.projection.ProjectionResult;
import org.apache.atlas.repository.Constants;
import org.apache.atlas.typesystem.types.*;
import org.apache.atlas.typesystem.types.utils.TypesUtil;
import java.util.*;
/**
* Term resource definition.
*/
/**
 * Term resource definition.
 * Describes taxonomy term resources, their payload validation and the
 * "terms" (children) and "hierarchy" projections.
 */
public class TermResourceDefinition extends BaseResourceDefinition {
    public TermResourceDefinition() {
        registerProperty(TypesUtil.createRequiredAttrDef("name", DataTypes.STRING_TYPE));
        registerProperty(TypesUtil.createOptionalAttrDef("description", DataTypes.STRING_TYPE));
        registerProperty(TypesUtil.createOptionalAttrDef("available_as_tag", DataTypes.BOOLEAN_TYPE));
        registerProperty(TypesUtil.createOptionalAttrDef("acceptable_use", DataTypes.STRING_TYPE));

        instanceProperties.add("name");
        instanceProperties.add("description");
        instanceProperties.add("creation_time");
        instanceProperties.add("available_as_tag");
        instanceProperties.add("acceptable_use");

        collectionProperties.add("name");
        collectionProperties.add("description");

        projections.put("terms", getSubTermProjection());
        projections.put("hierarchy", getHierarchyProjection());
    }

    @Override
    public void validateCreatePayload(Request request) throws InvalidPayloadException {
        super.validateCreatePayload(request);

        String name = request.getProperty("name");
        // name will be in the fully qualified form: taxonomyName.termName[.subTermName]*
        if (! name.contains(".")) {
            throw new InvalidPayloadException("Term name must be in the form 'taxonomyName.termName.subTermName'");
        }

        // by default a term is available for use as an entity tag
        if (! request.getQueryProperties().containsKey("available_as_tag")) {
            request.getQueryProperties().put("available_as_tag", true);
        }
    }

    @Override
    public String getTypeName() {
        return "Term";
    }

    @Override
    public String getIdPropertyName() {
        return "name";
    }

    //todo
    @Override
    public String resolveHref(Map<String, Object> properties) {
        return buildTermHref(new TermPath(String.valueOf(properties.get("name"))));
    }

    /**
     * Projection exposing the term's position in the taxonomy: its path,
     * short name and owning taxonomy.
     */
    private Projection getHierarchyProjection() {
        final String projectionName = "hierarchy";
        return new Projection(projectionName, Projection.Cardinality.SINGLE,
                new TransformFunctionPipe<>(new PipeFunction<VertexWrapper, Collection<ProjectionResult>>() {
                    @Override
                    public Collection<ProjectionResult> compute(VertexWrapper start) {
                        // the term's fully qualified name is stored as the vertex type
                        TermPath termPath = new TermPath(start.getVertex().<String>getProperty(
                                Constants.ENTITY_TYPE_PROPERTY_KEY));

                        Map<String, Object> map = new TreeMap<>(new ResourceComparator());
                        map.put("path", termPath.getPath());
                        map.put("short_name", termPath.getShortName());
                        map.put("taxonomy", termPath.getTaxonomyName());
                        return Collections.singleton(new ProjectionResult(projectionName, start,
                                Collections.singleton(map)));
                    }
                }));
    }

    /**
     * Projection exposing the href of this term's child terms collection.
     */
    private Projection getSubTermProjection() {
        //todo: combine with other term projections
        final String termsProjectionName = "terms";
        return new Projection(termsProjectionName, Projection.Cardinality.SINGLE,
                new TransformFunctionPipe<>(new PipeFunction<VertexWrapper, Collection<ProjectionResult>>() {
                    @Override
                    public Collection<ProjectionResult> compute(VertexWrapper start) {
                        TermPath termPath = new TermPath(start.getVertex().<String>getProperty(
                                Constants.ENTITY_TYPE_PROPERTY_KEY));

                        Map<String, Object> map = new TreeMap<>(new ResourceComparator());
                        // child terms live under the parent term's href
                        map.put("href", buildTermHref(termPath) + "/terms");
                        return Collections.singleton(new ProjectionResult(termsProjectionName, start,
                                Collections.singleton(map)));
                    }
                }));
    }

    /**
     * Build the canonical href for a term from its parsed path:
     * {@code v1/taxonomies/{taxonomy}/terms/{segment}[/terms/{segment}]...}
     * Shared by {@link #resolveHref(Map)} and the sub-term projection.
     *
     * @param termPath parsed term path
     * @return href for the term
     */
    private static String buildTermHref(TermPath termPath) {
        StringBuilder sb = new StringBuilder("v1/taxonomies/");
        sb.append(termPath.getTaxonomyName());
        for (String segment : termPath.getPathSegments()) {
            //todo: shouldn't need to check for null or empty after TermPath addition
            if (segment != null && !segment.isEmpty()) {
                sb.append("/terms/");
                sb.append(segment);
            }
        }
        return sb.toString();
    }
}
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
* <p/>
* http://www.apache.org/licenses/LICENSE-2.0
* <p/>
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.atlas.catalog.exception;
/**
* Base checked catalog exception.
*/
/**
 * Base checked catalog exception.
 * Carries the HTTP status code to be returned for the failure.
 */
public class CatalogException extends Exception {
    // final: the status is fixed at construction and never changes
    private final int status;

    /**
     * Constructor.
     *
     * @param message error message
     * @param status  HTTP status code associated with this failure
     */
    public CatalogException(String message, int status) {
        super(message);
        this.status = status;
    }

    /**
     * Get the HTTP status code associated with this exception.
     *
     * @return HTTP status code
     */
    public int getStatus() {
        return status;
    }
}
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
* <p/>
* http://www.apache.org/licenses/LICENSE-2.0
* <p/>
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.atlas.catalog.exception;
/**
* Base runtime catalog exception.
*/
/**
 * Base runtime catalog exception.
 * Carries the HTTP status code to be returned for the failure; defaults to 500.
 */
public class CatalogRuntimeException extends RuntimeException {
    /** Default status for unexpected server-side failures. */
    private static final int DEFAULT_STATUS_CODE = 500;

    // private final: was package-private and mutable; status is fixed at construction
    private final int statusCode;

    /**
     * Wrap an underlying cause; status defaults to 500.
     *
     * @param e underlying cause
     */
    public CatalogRuntimeException(Exception e) {
        super("", e);
        this.statusCode = DEFAULT_STATUS_CODE;
    }

    /**
     * Wrap an underlying cause with a message; status defaults to 500.
     *
     * @param message error message
     * @param e       underlying cause
     */
    public CatalogRuntimeException(String message, Exception e) {
        super(message, e);
        this.statusCode = DEFAULT_STATUS_CODE;
    }

    /**
     * Constructor with an explicit status code.
     *
     * @param message    error message
     * @param statusCode HTTP status code associated with this failure
     */
    public CatalogRuntimeException(String message, int statusCode) {
        super(message);
        this.statusCode = statusCode;
    }

    /**
     * Get the HTTP status code associated with this exception.
     *
     * @return HTTP status code
     */
    public int getStatusCode() {
        return statusCode;
    }
}
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
* <p/>
* http://www.apache.org/licenses/LICENSE-2.0
* <p/>
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.atlas.catalog.exception;
import java.util.Collection;
/**
* Exception used for invalid API payloads.
*/
/**
 * Exception used for invalid API payloads.
 * The message enumerates missing required properties and unsupported
 * properties; the status is always 400 (Bad Request).
 */
public class InvalidPayloadException extends CatalogException {
    private final static String baseMsg = "Invalid Request.";
    private final static String missingMsg = " The following required properties are missing: %s.";
    private final static String unknownMsg = " The following properties are not supported: %s";

    public InvalidPayloadException(Collection<String> missingProperties, Collection<String> unknownProperties) {
        super(buildMessage(missingProperties, unknownProperties), 400);
    }

    public InvalidPayloadException(String msg) {
        super(msg, 400);
    }

    /** Assemble the detail message from the missing/unknown property lists. */
    private static String buildMessage(Collection<String> missing, Collection<String> unknown) {
        StringBuilder sb = new StringBuilder(baseMsg);
        if (!missing.isEmpty()) {
            sb.append(String.format(missingMsg, missing));
        }
        if (!unknown.isEmpty()) {
            sb.append(String.format(unknownMsg, unknown));
        }
        return sb.toString();
    }
}
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
* <p/>
* http://www.apache.org/licenses/LICENSE-2.0
* <p/>
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.atlas.catalog.exception;
/**
* Exception for invalid user query.
*/
/**
 * Exception for invalid user query.
 * Always mapped to HTTP 400 (Bad Request).
 */
public class InvalidQueryException extends CatalogException {
    private static final int STATUS_BAD_REQUEST = 400;

    public InvalidQueryException(String message) {
        super("Unable to parse query: " + message, STATUS_BAD_REQUEST);
    }
}
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
* <p/>
* http://www.apache.org/licenses/LICENSE-2.0
* <p/>
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.atlas.catalog.exception;
/**
* Exception used when an attempt is made to create a resource which already exists.
*/
/**
 * Exception used when an attempt is made to create a resource which already exists.
 * Always mapped to HTTP 409 (Conflict).
 */
public class ResourceAlreadyExistsException extends CatalogException {
    private static final int STATUS_CONFLICT = 409;

    public ResourceAlreadyExistsException(String message) {
        super(message, STATUS_CONFLICT);
    }
}
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
* <p/>
* http://www.apache.org/licenses/LICENSE-2.0
* <p/>
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.atlas.catalog.exception;
/**
* Exception used when an explicitly requested resource doesn't exist.
*/
/**
 * Exception used when an explicitly requested resource doesn't exist.
 * Always mapped to HTTP 404 (Not Found).
 */
public class ResourceNotFoundException extends CatalogException {
    private static final int STATUS_NOT_FOUND = 404;

    public ResourceNotFoundException(String message) {
        super(message, STATUS_NOT_FOUND);
    }
}
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
* <p/>
* http://www.apache.org/licenses/LICENSE-2.0
* <p/>
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.atlas.catalog.projection;
import com.tinkerpop.blueprints.Vertex;
import org.apache.atlas.repository.Constants;
import org.apache.atlas.typesystem.persistence.Id;
/**
* Provides functionality common across implementations.
*/
/**
 * Provides functionality common across relation implementations.
 */
public abstract class BaseRelation implements Relation {
    /**
     * Determine whether the entity backing a vertex has been soft-deleted,
     * i.e. its state property is anything other than ACTIVE.
     *
     * @param v vertex to check
     * @return true if the vertex is not in the ACTIVE state
     */
    protected boolean isDeleted(Vertex v) {
        String state = v.getProperty(Constants.STATE_PROPERTY_KEY);
        return !Id.EntityState.ACTIVE.name().equals(state);
    }
}
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
* <p/>
* http://www.apache.org/licenses/LICENSE-2.0
* <p/>
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.atlas.catalog.projection;
import com.tinkerpop.blueprints.Direction;
import com.tinkerpop.blueprints.Edge;
import com.tinkerpop.blueprints.Vertex;
import com.tinkerpop.pipes.Pipe;
import org.apache.atlas.catalog.VertexWrapper;
import org.apache.atlas.catalog.definition.ResourceDefinition;
import org.apache.atlas.repository.Constants;
import java.util.ArrayList;
import java.util.Collection;
import java.util.HashMap;
import java.util.Map;
/**
* Represents a generic relation
*/
/**
 * Represents a generic relation.
 * Traverses outgoing edges whose label is prefixed with the internal property
 * key prefix plus the source vertex type, grouping target vertices by the
 * relation name embedded in the edge label.
 */
public class GenericRelation extends BaseRelation {
    private final ResourceDefinition resourceDefinition;

    public GenericRelation(ResourceDefinition resourceDefinition) {
        this.resourceDefinition = resourceDefinition;
    }

    @Override
    public Collection<RelationSet> traverse(VertexWrapper vWrapper) {
        Vertex vertex = vWrapper.getVertex();
        String vertexType = vertex.getProperty(Constants.ENTITY_TYPE_PROPERTY_KEY);
        // edges belonging to this relation all share the same label prefix
        String edgePrefix = String.format("%s%s.", Constants.INTERNAL_PROPERTY_KEY_PREFIX, vertexType);

        Map<String, Collection<VertexWrapper>> verticesByRelation = new HashMap<>();
        for (Edge edge : vertex.getEdges(Direction.OUT)) {
            String label = edge.getLabel();
            if (!label.startsWith(edgePrefix)) {
                continue;
            }
            Vertex adjacentVertex = edge.getVertex(Direction.IN);
            if (isDeleted(adjacentVertex)) {
                // skip soft-deleted targets
                continue;
            }
            String relationName = label.substring(edgePrefix.length());
            Collection<VertexWrapper> group = verticesByRelation.get(relationName);
            if (group == null) {
                group = new ArrayList<>();
                verticesByRelation.put(relationName, group);
            }
            group.add(new VertexWrapper(adjacentVertex, resourceDefinition));
        }

        Collection<RelationSet> relations = new ArrayList<>();
        for (Map.Entry<String, Collection<VertexWrapper>> entry : verticesByRelation.entrySet()) {
            relations.add(new RelationSet(entry.getKey(), entry.getValue()));
        }
        return relations;
    }

    // no pipe representation for generic relations
    @Override
    public Pipe asPipe() {
        return null;
    }

    @Override
    public ResourceDefinition getResourceDefinition() {
        return resourceDefinition;
    }
}
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
* <p/>
* http://www.apache.org/licenses/LICENSE-2.0
* <p/>
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.atlas.catalog.projection;
import com.tinkerpop.pipes.Pipe;
import com.tinkerpop.pipes.util.Pipeline;
import org.apache.atlas.catalog.VertexWrapper;
import java.util.Collection;
import java.util.Collections;
/**
* Projection representation.
* Used to project properties onto a resource from another source.
*/
/**
 * Projection representation.
 * Used to project properties onto a resource from another source by running a
 * pipeline over a starting vertex.
 */
public class Projection {
    /** Whether a projection yields a single value or multiple values. */
    public enum Cardinality {SINGLE, MULTIPLE}

    private final String name;
    private final Cardinality cardinality;
    // protected: subclasses may extend the pipeline; name retained for compatibility
    protected Pipeline<VertexWrapper, Collection<ProjectionResult>> m_pipeline = new Pipeline<>();

    /**
     * Constructor for a projection with an initially empty pipeline.
     *
     * @param name        projection name
     * @param cardinality projection cardinality
     */
    public Projection(String name, Cardinality cardinality) {
        this.name = name;
        this.cardinality = cardinality;
    }

    /**
     * Constructor seeding the pipeline with an initial pipe.
     *
     * @param name        projection name
     * @param cardinality projection cardinality
     * @param pipe        initial pipe
     */
    public Projection(String name, Cardinality cardinality, Pipe<VertexWrapper, Collection<ProjectionResult>> pipe) {
        this(name, cardinality);
        m_pipeline.addPipe(pipe);
    }

    /**
     * Run the pipeline from the given starting vertex and return its results.
     *
     * @param start starting vertex
     * @return projection results
     */
    public Collection<ProjectionResult> values(VertexWrapper start) {
        m_pipeline.setStarts(Collections.singleton(start));
        return m_pipeline.iterator().next();
    }

    /** Append a pipe to the projection pipeline. */
    public void addPipe(Pipe<Collection<ProjectionResult>, Collection<ProjectionResult>> p) {
        m_pipeline.addPipe(p);
    }

    public String getName() {
        return name;
    }

    public Cardinality getCardinality() {
        return cardinality;
    }
}
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
* <p/>
* http://www.apache.org/licenses/LICENSE-2.0
* <p/>
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.atlas.catalog.projection;
import org.apache.atlas.catalog.VertexWrapper;
import java.util.Collection;
import java.util.Map;
/**
* Result of a projection.
*/
/**
 * Result of a projection: a name, the vertex the projection started from, and
 * the projected property maps.
 */
public class ProjectionResult {
    private final String name;
    private final VertexWrapper startingVertex;
    private final Collection<Map<String, Object>> propertyMaps;

    /**
     * Constructor.
     *
     * @param name           projection name
     * @param startingVertex vertex the projection was computed from
     * @param propertyMaps   projected property maps
     */
    public ProjectionResult(String name, VertexWrapper startingVertex, Collection<Map<String, Object>> propertyMaps) {
        this.name = name;
        this.startingVertex = startingVertex;
        this.propertyMaps = propertyMaps;
    }

    public String getName() {
        return name;
    }

    public VertexWrapper getStartingVertex() {
        return startingVertex;
    }

    public Collection<Map<String, Object>> getPropertyMaps() {
        return propertyMaps;
    }
}
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
* <p/>
* http://www.apache.org/licenses/LICENSE-2.0
* <p/>
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.atlas.catalog.projection;
import com.tinkerpop.pipes.Pipe;
import org.apache.atlas.catalog.VertexWrapper;
import org.apache.atlas.catalog.definition.ResourceDefinition;
import java.util.Collection;
/**
* Represents the relationship from one vertex to another via an edge.
*/
/**
 * Represents the relationship from one vertex to another via an edge.
 * Implementations define how a traversal is performed and which resource
 * definition describes the related vertices.
 */
public interface Relation {
    /**
     * Traverse the relation.
     *
     * @param vWrapper vertex to start traversal from
     *
     * @return results of the traversal, grouped by relation name
     */
    Collection<RelationSet> traverse(VertexWrapper vWrapper);
    /**
     * Get the pipe representation of the traversal.
     *
     * @return pipe representation; may be null when the relation has no pipe form
     */
    Pipe asPipe();
    /**
     * Get the associated resource definition.
     *
     * @return associated resource definition
     */
    ResourceDefinition getResourceDefinition();
}
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
* <p/>
* http://www.apache.org/licenses/LICENSE-2.0
* <p/>
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.atlas.catalog.projection;
import com.tinkerpop.pipes.PipeFunction;
import com.tinkerpop.pipes.transform.TransformFunctionPipe;
import org.apache.atlas.catalog.ResourceComparator;
import org.apache.atlas.catalog.VertexWrapper;
import java.util.*;
/**
* Projection based on a relation.
*/
/**
 * Projection based on a relation.
 * Traverses the relation from the starting vertex and projects either the
 * requested fields or, when no fields are specified, all vertex properties.
 */
public class RelationProjection extends Projection {
    private Relation relation;

    public RelationProjection(String name, final Collection<String> fields, final Relation relation, Cardinality cardinality) {
        super(name, cardinality, new TransformFunctionPipe<>(
                new PipeFunction<VertexWrapper, Collection<ProjectionResult>>() {
                    @Override
                    public Collection<ProjectionResult> compute(VertexWrapper start) {
                        Collection<ProjectionResult> results = new ArrayList<>();
                        for (RelationSet relationSet : relation.traverse(start)) {
                            Collection<Map<String, Object>> propertyMaps = new ArrayList<>();
                            for (VertexWrapper wrapper : relationSet.getVertices()) {
                                Map<String, Object> propertyMap = new TreeMap<>(new ResourceComparator());
                                if (fields.isEmpty()) {
                                    // no explicit field list: project every property
                                    for (String key : wrapper.getPropertyKeys()) {
                                        propertyMap.put(key, wrapper.<String>getProperty(key));
                                    }
                                } else {
                                    for (String key : fields) {
                                        propertyMap.put(key, wrapper.<String>getProperty(key));
                                    }
                                }
                                propertyMaps.add(propertyMap);
                            }
                            results.add(new ProjectionResult(relationSet.getName(), start, propertyMaps));
                        }
                        return results;
                    }
                }));
        this.relation = relation;
    }

    public Relation getRelation() {
        return relation;
    }
}
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
* <p/>
* http://www.apache.org/licenses/LICENSE-2.0
* <p/>
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.atlas.catalog.projection;
import org.apache.atlas.catalog.VertexWrapper;
import java.util.*;
/**
* Encapsulates the response of a relation traversal.
*/
/**
 * Encapsulates the response of a relation traversal: the relation name and the
 * vertices that were reached.
 */
public class RelationSet {
    private final String name;
    private final Collection<VertexWrapper> vertices;

    /**
     * Constructor.
     *
     * @param name     relation name
     * @param vertices vertices reached by the traversal
     */
    public RelationSet(String name, Collection<VertexWrapper> vertices) {
        this.name = name;
        this.vertices = vertices;
    }

    public String getName() {
        return name;
    }

    /**
     * Get the traversal result vertices.
     *
     * @return unmodifiable view of the result vertices
     */
    public Collection<VertexWrapper> getVertices() {
        return Collections.unmodifiableCollection(vertices);
    }
}
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
* <p/>
* http://www.apache.org/licenses/LICENSE-2.0
* <p/>
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.atlas.catalog.projection;
import com.tinkerpop.blueprints.Direction;
import com.tinkerpop.blueprints.Edge;
import com.tinkerpop.blueprints.Vertex;
import com.tinkerpop.pipes.Pipe;
import com.tinkerpop.pipes.PipeFunction;
import com.tinkerpop.pipes.filter.FilterFunctionPipe;
import org.apache.atlas.catalog.TermVertexWrapper;
import org.apache.atlas.catalog.VertexWrapper;
import org.apache.atlas.catalog.definition.EntityTagResourceDefinition;
import org.apache.atlas.catalog.definition.ResourceDefinition;
import org.apache.atlas.repository.Constants;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Collections;
/**
* Relation for adjacent Tag vertices.
*/
public class TagRelation extends BaseRelation {
    // entity tags share the entity-tag resource definition
    private static ResourceDefinition resourceDefinition = new EntityTagResourceDefinition();

    /**
     * Traverse from an entity vertex to its adjacent tag vertices.
     * Only active trait vertices marked "available_as_tag" are included.
     *
     * @param vWrapper  entity vertex wrapper to start from
     * @return a single RelationSet named "tags" containing the adjacent tag vertices
     */
    @Override
    public Collection<RelationSet> traverse(VertexWrapper vWrapper) {
        Vertex vertex = vWrapper.getVertex();
        Collection<VertexWrapper> tagVertices = new ArrayList<>();

        for (Edge edge : vertex.getEdges(Direction.OUT)) {
            // trait edges are labeled with the entity type as a prefix
            if (edge.getLabel().startsWith(vertex.<String>getProperty(Constants.ENTITY_TYPE_PROPERTY_KEY))) {
                VertexWrapper trait = new TermVertexWrapper(edge.getVertex(Direction.IN));
                // include only non-deleted traits that are exposed as tags
                if (trait.getPropertyKeys().contains("available_as_tag") && ! isDeleted(trait.getVertex())) {
                    tagVertices.add(trait);
                }
            }
        }
        return Collections.singletonList(new RelationSet("tags", tagVertices));
    }

    /**
     * Pipe form of the same traversal: filters edges down to those leading to
     * active tag vertices.
     */
    @Override
    public Pipe asPipe() {
        return new FilterFunctionPipe<>(new PipeFunction<Edge, Boolean>() {
            @Override
            public Boolean compute(Edge edge) {
                String entityType = edge.getVertex(Direction.OUT).getProperty(Constants.ENTITY_TYPE_PROPERTY_KEY);
                if (! edge.getLabel().startsWith(entityType)) {
                    return false;
                }
                VertexWrapper trait = new TermVertexWrapper(edge.getVertex(Direction.IN));
                return trait.getPropertyKeys().contains("available_as_tag") && ! isDeleted(trait.getVertex());
            }
        });
    }

    /**
     * @return the entity-tag resource definition
     */
    @Override
    public ResourceDefinition getResourceDefinition() {
        return resourceDefinition;
    }
}
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
* <p/>
* http://www.apache.org/licenses/LICENSE-2.0
* <p/>
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.atlas.catalog.projection;
import com.tinkerpop.blueprints.Direction;
import com.tinkerpop.blueprints.Edge;
import com.tinkerpop.blueprints.Vertex;
import com.tinkerpop.pipes.Pipe;
import com.tinkerpop.pipes.PipeFunction;
import com.tinkerpop.pipes.filter.FilterFunctionPipe;
import org.apache.atlas.catalog.TermVertexWrapper;
import org.apache.atlas.catalog.VertexWrapper;
import org.apache.atlas.catalog.definition.EntityTagResourceDefinition;
import org.apache.atlas.catalog.definition.ResourceDefinition;
import org.apache.atlas.repository.Constants;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Collections;
/**
* Trait specific relation.
*/
//todo: combine with TagRelation
public class TraitRelation extends BaseRelation {
    //todo: for now using entity tag resource definition
    private static ResourceDefinition resourceDefinition = new EntityTagResourceDefinition();

    /**
     * Traverse from an entity vertex to its adjacent trait vertices.
     * Only active traits NOT marked "available_as_tag" are included — the
     * complement of what TagRelation returns.
     *
     * @param vWrapper  entity vertex wrapper to start from
     * @return a single RelationSet named "traits" containing the adjacent trait vertices
     */
    @Override
    public Collection<RelationSet> traverse(VertexWrapper vWrapper) {
        Vertex vertex = vWrapper.getVertex();
        Collection<VertexWrapper> traitVertices = new ArrayList<>();

        for (Edge edge : vertex.getEdges(Direction.OUT)) {
            // trait edges are labeled with the entity type as a prefix
            if (edge.getLabel().startsWith(vertex.<String>getProperty(Constants.ENTITY_TYPE_PROPERTY_KEY))) {
                VertexWrapper trait = new TermVertexWrapper(edge.getVertex(Direction.IN));
                // include only non-deleted traits that are NOT exposed as tags
                if (! trait.getPropertyKeys().contains("available_as_tag") && ! isDeleted(trait.getVertex())) {
                    traitVertices.add(trait);
                }
            }
        }
        return Collections.singletonList(new RelationSet("traits", traitVertices));
    }

    /**
     * Pipe form of the same traversal: filters edges down to those leading to
     * active non-tag trait vertices.
     */
    @Override
    public Pipe asPipe() {
        return new FilterFunctionPipe<>(new PipeFunction<Edge, Boolean>() {
            @Override
            public Boolean compute(Edge edge) {
                String entityType = edge.getVertex(Direction.OUT).getProperty(Constants.ENTITY_TYPE_PROPERTY_KEY);
                if (! edge.getLabel().startsWith(entityType)) {
                    return false;
                }
                VertexWrapper trait = new TermVertexWrapper(edge.getVertex(Direction.IN));
                return ! trait.getPropertyKeys().contains("available_as_tag") && ! isDeleted(trait.getVertex());
            }
        });
    }

    /**
     * @return the (shared) entity-tag resource definition
     */
    @Override
    public ResourceDefinition getResourceDefinition() {
        return resourceDefinition;
    }
}
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
* <p/>
* http://www.apache.org/licenses/LICENSE-2.0
* <p/>
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.atlas.catalog.query;
import com.tinkerpop.pipes.Pipe;
import org.apache.atlas.catalog.VertexWrapper;
/**
* Query expression which always returns true.
*/
public class AlwaysQueryExpression extends BaseQueryExpression {
    protected AlwaysQueryExpression() {
        // no field, expected value or resource definition is needed
        super(null, null, null);
    }

    /**
     * No filtering required; callers handle a null pipe as "match everything"
     * (see BaseQuery.executeQuery()).
     */
    @Override
    public Pipe asPipe() {
        return null;
    }

    /**
     * Always true, unless the expression has been negated.
     */
    @Override
    public boolean evaluate(VertexWrapper vWrapper) {
        return !negate;
    }

    /**
     * Any value matches.
     */
    @Override
    public boolean evaluate(Object value) {
        return true;
    }
}
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
* <p/>
* http://www.apache.org/licenses/LICENSE-2.0
* <p/>
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.atlas.catalog.query;
import com.tinkerpop.gremlin.java.GremlinPipeline;
import com.tinkerpop.pipes.Pipe;
import org.apache.atlas.catalog.Request;
import org.apache.atlas.catalog.definition.ResourceDefinition;
import org.apache.atlas.repository.Constants;
/**
* Entity resource query.
*/
public class AtlasEntityQuery extends BaseQuery {
public AtlasEntityQuery(QueryExpression queryExpression, ResourceDefinition resourceDefinition, Request request) {
super(queryExpression, resourceDefinition, request);
}
protected Pipe getQueryPipe() {
return new GremlinPipeline().has(Constants.ENTITY_TEXT_PROPERTY_KEY).
hasNot(Constants.ENTITY_TYPE_PROPERTY_KEY, "Taxonomy");
}
}
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
* <p/>
* http://www.apache.org/licenses/LICENSE-2.0
* <p/>
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.atlas.catalog.query;
import com.tinkerpop.blueprints.Direction;
import com.tinkerpop.blueprints.Edge;
import com.tinkerpop.gremlin.java.GremlinPipeline;
import com.tinkerpop.pipes.Pipe;
import com.tinkerpop.pipes.PipeFunction;
import com.tinkerpop.pipes.filter.FilterFunctionPipe;
import org.apache.atlas.catalog.Request;
import org.apache.atlas.catalog.TermVertexWrapper;
import org.apache.atlas.catalog.VertexWrapper;
import org.apache.atlas.catalog.definition.EntityTagResourceDefinition;
import org.apache.atlas.catalog.definition.ResourceDefinition;
import org.apache.atlas.repository.Constants;
import java.util.HashMap;
import java.util.Map;
/**
* Entity Tag resource query.
*/
public class AtlasEntityTagQuery extends BaseQuery {
    /** Entity guid, or "*" to query tags across all entities. */
    private final String guid;

    /**
     * Constructor.
     *
     * @param queryExpression     expression used to filter entity tags
     * @param resourceDefinition  entity-tag resource definition
     * @param guid                entity guid, or "*" for all entities
     * @param request             user request
     */
    public AtlasEntityTagQuery(QueryExpression queryExpression, ResourceDefinition resourceDefinition, String guid, Request request) {
        super(queryExpression, resourceDefinition, request);
        this.guid = guid;
    }

    /**
     * Select the source entity vertices, walk outgoing edges, keep only edges
     * leading to tag vertices, then step to those tag vertices.
     */
    @Override
    protected Pipe getQueryPipe() {
        GremlinPipeline pipeline;
        if (guid.equals("*")) {
            // all entities except the internal "Taxonomy" type
            pipeline = new GremlinPipeline().has(Constants.ENTITY_TEXT_PROPERTY_KEY).
                    hasNot(Constants.ENTITY_TYPE_PROPERTY_KEY, "Taxonomy").outE();
        } else {
            // a single entity selected by guid
            pipeline = new GremlinPipeline().has(Constants.GUID_PROPERTY_KEY, guid).outE();
        }
        //todo: this is basically the same pipeline used in TagRelation.asPipe()
        pipeline.add(new FilterFunctionPipe<>(new PipeFunction<Edge, Boolean>() {
            @Override
            public Boolean compute(Edge edge) {
                String entityType = edge.getVertex(Direction.OUT).getProperty(Constants.ENTITY_TYPE_PROPERTY_KEY);
                VertexWrapper tagVertex = new TermVertexWrapper(edge.getVertex(Direction.IN));
                return edge.getLabel().startsWith(entityType) && tagVertex.getPropertyKeys().contains("available_as_tag");
            }
        }));
        return pipeline.inV();
    }

    //todo: duplication of effort with resource definition
    /**
     * Resolve and attach the "href" property; the owning entity guid is needed to
     * build the href, so it is added to a working copy of the property map.
     */
    @Override
    protected void addHref(VertexWrapper vWrapper, Map<String, Object> filteredPropertyMap) {
        Map<String, Object> hrefProperties = new HashMap<>(filteredPropertyMap);
        if (guid.equals("*")) {
            // derive the owning entity guid from the tag vertex's incoming edge
            Object entityGuid = vWrapper.getVertex().getEdges(Direction.IN).
                    iterator().next().getVertex(Direction.OUT).getProperty(Constants.GUID_PROPERTY_KEY);
            hrefProperties.put(EntityTagResourceDefinition.ENTITY_GUID_PROPERTY, entityGuid);
        } else {
            hrefProperties.put(EntityTagResourceDefinition.ENTITY_GUID_PROPERTY, guid);
        }
        String href = resourceDefinition.resolveHref(hrefProperties);
        if (href != null) {
            filteredPropertyMap.put("href", href);
        }
    }
}
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
* <p/>
* http://www.apache.org/licenses/LICENSE-2.0
* <p/>
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.atlas.catalog.query;
import org.apache.atlas.catalog.exception.ResourceNotFoundException;
import java.util.Collection;
import java.util.Map;
/**
* Query functionality.
*/
public interface AtlasQuery {
    /**
     * Execute the query.
     *
     * @return collection of property maps, one per matching resource
     * @throws ResourceNotFoundException if an explicitly specified resource doesn't exist
     */
    Collection<Map<String, Object>> execute() throws ResourceNotFoundException;
    /**
     * Execute the query and update the matching resources with the provided
     * properties before returning their property maps.
     *
     * @param updateProperties property name/values to set on each query result
     *
     * @return collection of property maps, one per matching (updated) resource
     * @throws ResourceNotFoundException if an explicitly specified resource doesn't exist
     */
    Collection<Map<String, Object>> execute(Map<String, Object> updateProperties) throws ResourceNotFoundException;
}
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
* <p/>
* http://www.apache.org/licenses/LICENSE-2.0
* <p/>
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.atlas.catalog.query;
import com.tinkerpop.gremlin.java.GremlinPipeline;
import com.tinkerpop.pipes.Pipe;
import org.apache.atlas.catalog.Request;
import org.apache.atlas.catalog.definition.ResourceDefinition;
/**
* Taxonomy resource query.
*/
public class AtlasTaxonomyQuery extends BaseQuery {
    /**
     * Constructor.
     *
     * @param queryExpression     expression used to filter taxonomies
     * @param resourceDefinition  taxonomy resource definition
     * @param request             user request
     */
    public AtlasTaxonomyQuery(QueryExpression queryExpression, ResourceDefinition resourceDefinition, Request request) {
        super(queryExpression, resourceDefinition, request);
    }

    /**
     * Root pipe selecting vertices whose type is "Taxonomy".
     */
    @Override
    protected Pipe getQueryPipe() {
        // NOTE(review): literal "__typeName" presumably equals
        // Constants.ENTITY_TYPE_PROPERTY_KEY used by sibling queries — confirm
        // and prefer the constant.
        return new GremlinPipeline().has("__typeName", "Taxonomy");
    }
}
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
* <p/>
* http://www.apache.org/licenses/LICENSE-2.0
* <p/>
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.atlas.catalog.query;
import com.thinkaurelius.titan.core.attribute.Text;
import com.tinkerpop.gremlin.java.GremlinPipeline;
import com.tinkerpop.pipes.Pipe;
import org.apache.atlas.catalog.Request;
import org.apache.atlas.catalog.TermPath;
import org.apache.atlas.catalog.definition.ResourceDefinition;
import org.apache.atlas.repository.Constants;
/**
* Term resource query.
*/
public class AtlasTermQuery extends BaseQuery {
    /** Path identifying the taxonomy (possibly "*") and term being queried. */
    private final TermPath termPath;

    /**
     * Constructor.
     *
     * @param queryExpression     expression used to filter terms
     * @param resourceDefinition  term resource definition
     * @param termPath            path of the term(s) being queried
     * @param request             user request
     */
    public AtlasTermQuery(QueryExpression queryExpression, ResourceDefinition resourceDefinition, TermPath termPath, Request request) {
        super(queryExpression, resourceDefinition, request);
        this.termPath = termPath;
    }

    /**
     * Root pipe selecting term vertices reachable from the requested taxonomy.
     */
    @Override
    protected Pipe getQueryPipe() {
        GremlinPipeline pipeline;
        if (termPath.getTaxonomyName().equals("*")) {
            // all taxonomies: start from every vertex carrying a "Taxonomy.name" property
            pipeline = new GremlinPipeline().has("Taxonomy.name").out();
        } else {
            // one taxonomy: select it by name, then restrict adjacent vertices to
            // types prefixed with the term's fully qualified name
            pipeline = new GremlinPipeline().has("Taxonomy.name", termPath.getTaxonomyName()).out().
                    has(Constants.ENTITY_TYPE_PROPERTY_KEY, Text.PREFIX, termPath.getFullyQualifiedName());
        }
        return pipeline;
    }
}
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
* <p/>
* http://www.apache.org/licenses/LICENSE-2.0
* <p/>
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.atlas.catalog.query;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.NoSuchElementException;
import org.apache.atlas.catalog.Request;
import org.apache.atlas.catalog.VertexWrapper;
import org.apache.atlas.catalog.definition.ResourceDefinition;
import org.apache.atlas.catalog.exception.ResourceNotFoundException;
import org.apache.atlas.catalog.projection.Projection;
import org.apache.atlas.catalog.projection.ProjectionResult;
import org.apache.atlas.repository.Constants;
import org.apache.atlas.repository.graph.AtlasGraphProvider;
import org.apache.atlas.repository.graphdb.AtlasElement;
import org.apache.atlas.repository.graphdb.AtlasGraph;
import org.apache.atlas.repository.graphdb.AtlasVertex;
import org.apache.atlas.typesystem.persistence.Id;
import com.tinkerpop.blueprints.Compare;
import com.tinkerpop.blueprints.Vertex;
import com.tinkerpop.gremlin.java.GremlinPipeline;
import com.tinkerpop.pipes.Pipe;
import com.tinkerpop.pipes.filter.PropertyFilterPipe;
/**
* Base Query implementation.
*/
public abstract class BaseQuery implements AtlasQuery {
    // expression used to filter query results
    protected final QueryExpression queryExpression;
    // definition of the resource type being queried
    protected final ResourceDefinition resourceDefinition;
    // user request (cardinality, requested fields, ...)
    protected final Request request;

    /**
     * Constructor.
     *
     * @param queryExpression     expression used to filter results
     * @param resourceDefinition  definition of the resource being queried
     * @param request             user request
     */
    public BaseQuery(QueryExpression queryExpression, ResourceDefinition resourceDefinition, Request request) {
        this.queryExpression = queryExpression;
        this.resourceDefinition = resourceDefinition;
        this.request = request;
    }

    @Override   // was missing: implements AtlasQuery.execute()
    public Collection<Map<String, Object>> execute() throws ResourceNotFoundException {
        Collection<Map<String, Object>> resultMaps = new ArrayList<>();
        try {
            for (Vertex vertex : executeQuery()) {
                resultMaps.add(processPropertyMap(wrapVertex(vertex)));
            }
            getGraph().commit();
        } catch (Throwable t) {
            // always roll back on failure so the graph transaction isn't left open
            getGraph().rollback();
            throw t;
        }
        return resultMaps;
    }

    @Override
    public Collection<Map<String, Object>> execute(Map<String, Object> updateProperties)
            throws ResourceNotFoundException {
        Collection<Map<String, Object>> resultMaps = new ArrayList<>();
        try {
            for (Vertex vertex : executeQuery()) {
                VertexWrapper vWrapper = wrapVertex(vertex);
                // apply the requested updates plus a modification timestamp to each match
                for (Map.Entry<String, Object> property : updateProperties.entrySet()) {
                    vWrapper.setProperty(property.getKey(), property.getValue());
                    vWrapper.setProperty(Constants.MODIFICATION_TIMESTAMP_PROPERTY_KEY, System.currentTimeMillis());
                }
                resultMaps.add(processPropertyMap(vWrapper));
            }
            getGraph().commit();
        } catch (Throwable e) {
            getGraph().rollback();
            throw e;
        }
        return resultMaps;
    }

    // run the composed pipeline and collect the matching vertices
    private List<Vertex> executeQuery() {
        GremlinPipeline pipeline = buildPipeline().as("root");
        Pipe expressionPipe = queryExpression.asPipe();
        // AlwaysQuery returns null for pipe
        return expressionPipe == null ? pipeline.toList() :
                pipeline.add(expressionPipe).back("root").toList();
    }

    // compose root vertices + subclass query pipe + active-state filter
    protected GremlinPipeline buildPipeline() {
        GremlinPipeline pipeline = getRootVertexPipeline();
        Pipe queryPipe = getQueryPipe();
        if (queryPipe != null) {
            pipeline.add(queryPipe);
        }
        pipeline.add(getNotDeletedPipe());
        return pipeline;
    }

    /**
     * Subclass-provided pipe which narrows the root vertices to the resource type
     * being queried.
     */
    protected abstract Pipe getQueryPipe();

    protected GremlinPipeline getRootVertexPipeline() {
        return new GremlinPipeline(unWrapVertices());
    }

    /**
     * Expose the underlying (unwrapped) graph vertices as an Iterable.
     * Returns a fresh iterator per iteration; the previous implementation cached a
     * single iterator, so a second traversal of the Iterable would appear empty,
     * violating the Iterable contract.
     */
    protected Iterable<Object> unWrapVertices() {
        final Iterable<AtlasVertex> vertices = getGraph().getVertices();
        return new Iterable<Object>() {
            @Override
            public Iterator<Object> iterator() {
                return new Iterator<Object>() {
                    private final Iterator<AtlasVertex> wrapperIterator = vertices.iterator();
                    @Override
                    public boolean hasNext() {
                        return wrapperIterator.hasNext();
                    }
                    @Override
                    public Object next() {
                        if (hasNext()) {
                            // unwrap down to the raw blueprints element for the pipeline
                            return ((AtlasElement) wrapperIterator.next().getV()).getWrappedElement();
                        } else {
                            throw new NoSuchElementException();
                        }
                    }
                    @Override
                    public void remove() {
                        throw new UnsupportedOperationException("Remove not supported");
                    }
                };
            }
        };
    }

    // filter which excludes soft-deleted vertices
    protected Pipe getNotDeletedPipe() {
        return new PropertyFilterPipe(Constants.STATE_PROPERTY_KEY, Compare.EQUAL,
                Id.EntityState.ACTIVE.name());
    }

    // filter, href-decorate and (for instance requests) project a vertex's properties
    protected Map<String, Object> processPropertyMap(VertexWrapper vertex) {
        Map<String, Object> propertyMap = resourceDefinition.filterProperties(
                request, vertex.getPropertyMap());
        addHref(vertex, propertyMap);

        return request.getCardinality() == Request.Cardinality.INSTANCE ?
                applyProjections(vertex, propertyMap) :
                propertyMap;
    }

    // resolve and attach the "href" property when the definition can produce one
    protected void addHref(VertexWrapper vWrapper, Map<String, Object> filteredPropertyMap) {
        String href = resourceDefinition.resolveHref(filteredPropertyMap);
        if (href != null) {
            filteredPropertyMap.put("href", href);
        }
    }

    // apply the resource definition's projections; MULTIPLE cardinality keeps all
    // maps, otherwise the last projection map wins for the projection's name
    protected Map<String, Object> applyProjections(VertexWrapper vertex, Map<String, Object> propertyMap) {
        for (Projection p : resourceDefinition.getProjections().values()) {
            for (ProjectionResult projectionResult : p.values(vertex)) {
                if (p.getCardinality() == Projection.Cardinality.MULTIPLE) {
                    propertyMap.put(projectionResult.getName(), projectionResult.getPropertyMaps());
                } else {
                    for (Map<String, Object> projectionMap : projectionResult.getPropertyMaps()) {
                        propertyMap.put(projectionResult.getName(), projectionMap);
                    }
                }
            }
        }
        return propertyMap;
    }

    protected QueryExpression getQueryExpression() {
        return queryExpression;
    }

    protected ResourceDefinition getResourceDefinition() {
        return resourceDefinition;
    }

    protected Request getRequest() {
        return request;
    }

    // Underlying method is synchronized and caches the graph in a static field
    protected AtlasGraph getGraph() {
        return AtlasGraphProvider.getGraphInstance();
    }

    protected VertexWrapper wrapVertex(Vertex v) {
        return new VertexWrapper(v, resourceDefinition);
    }
}
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
* <p/>
* http://www.apache.org/licenses/LICENSE-2.0
* <p/>
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.atlas.catalog.query;
import com.tinkerpop.blueprints.Vertex;
import com.tinkerpop.pipes.Pipe;
import com.tinkerpop.pipes.PipeFunction;
import com.tinkerpop.pipes.filter.FilterFunctionPipe;
import org.apache.atlas.catalog.VertexWrapper;
import org.apache.atlas.catalog.definition.ResourceDefinition;
import java.util.Collection;
import java.util.HashSet;
/**
* Base query expression class.
*/
public abstract class BaseQueryExpression implements QueryExpression {
    // field (property key) the expression applies to; may be reassigned via setField()
    protected String m_field;
    // value (or prefix/pattern text) the field is compared against
    protected final String m_expectedValue;
    // definition of the resource being queried
    protected final ResourceDefinition resourceDefinition;
    // when true, the result of evaluation is inverted
    protected boolean negate = false;
    // property keys referenced by this expression
    protected Collection<String> properties = new HashSet<>();

    /**
     * Constructor.
     *
     * @param field               property key, or null for expressions without a field
     * @param expectedValue       expected property value
     * @param resourceDefinition  definition of the resource being queried
     */
    protected BaseQueryExpression(String field, String expectedValue, ResourceDefinition resourceDefinition) {
        m_field = field;
        if (field != null) {
            properties.add(field);
        }
        m_expectedValue = expectedValue;
        this.resourceDefinition = resourceDefinition;
    }

    @Override
    public boolean evaluate(VertexWrapper vWrapper) {
        boolean result = evaluate(vWrapper.getProperty(m_field));
        // invert the result when the expression is negated
        return negate ? !result : result;
    }

    @Override
    public Collection<String> getProperties() {
        return properties;
    }

    @Override
    public boolean evaluate(Object value) {
        // subclasses which don't override evaluate(VertexWrapper) should implement this
        return false;
    }

    //todo: use 'has' instead of closure where possible for performance
    @Override
    public Pipe asPipe() {
        return new FilterFunctionPipe(new PipeFunction<Vertex, Boolean>() {
            @Override
            public Boolean compute(Vertex vertex) {
                return evaluate(new VertexWrapper(vertex, resourceDefinition));
            }
        });
    }

    @Override
    public String getField() {
        return m_field;
    }

    @Override
    public String getExpectedValue() {
        return m_expectedValue;
    }

    @Override
    public void setField(String field) {
        m_field = field;
    }

    @Override
    public void setNegate() {
        this.negate = true;
    }

    @Override
    public boolean isNegate() {
        return negate;
    }

    @Override
    public boolean isProjectionExpression() {
        // projection expressions reference a path such as "relation/field"
        String field = getField();
        return field != null && field.contains(QueryFactory.PATH_SEP_TOKEN);
    }
}
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
* <p/>
* http://www.apache.org/licenses/LICENSE-2.0
* <p/>
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.atlas.catalog.query;
import com.tinkerpop.pipes.Pipe;
import com.tinkerpop.pipes.filter.AndFilterPipe;
import com.tinkerpop.pipes.filter.OrFilterPipe;
import org.apache.atlas.catalog.definition.ResourceDefinition;
import org.apache.lucene.search.BooleanClause;
import org.apache.lucene.search.BooleanQuery;
import java.util.*;
/**
* Expression where operands are other expressions and operator is logical AND or OR
*/
public class BooleanQueryExpression extends BaseQueryExpression {
    // parsed lucene clauses; each carries an Occur of MUST, SHOULD or MUST_NOT
    private final BooleanClause[] clauses;
    // factory used to build expressions for nested sub-queries
    private final QueryFactory queryFactory;
    /**
     * Constructor.
     *
     * @param query               parsed lucene boolean query
     * @param resourceDefinition  definition of the resource being queried
     * @param queryFactory        factory used to create nested query expressions
     */
    public BooleanQueryExpression(BooleanQuery query, ResourceDefinition resourceDefinition, QueryFactory queryFactory) {
        super(null, null, resourceDefinition);
        clauses = query.getClauses();
        this.queryFactory = queryFactory;
    }
    /**
     * Build a pipe combining all clause pipes: MUST and MUST_NOT clauses are
     * AND'ed together; SHOULD clauses (if any) are OR'ed with each other and,
     * when present, with the combined AND pipe.
     */
    @Override
    public Pipe asPipe() {
        Map<BooleanClause.Occur, Collection<BooleanClause>> groupedClauses = groupClauses();
        Pipe andPipe = null;
        Collection<Pipe> andPipes = processAndClauses(groupedClauses);
        andPipes.addAll(processNotClauses(groupedClauses));
        if (! andPipes.isEmpty()) {
            andPipe = new AndFilterPipe(andPipes.toArray(new Pipe[andPipes.size()]));
        }
        Collection<Pipe> orPipes = processOrClauses(groupedClauses);
        if (! orPipes.isEmpty()) {
            if (andPipe != null) {
                // the combined AND pipe participates as one operand of the OR
                orPipes.add(andPipe);
            }
            return new OrFilterPipe(orPipes.toArray(new Pipe[orPipes.size()]));
        } else {
            return andPipe;
        }
    }
    // group clauses by their effective Occur value (negation taken into account)
    private Map<BooleanClause.Occur, Collection<BooleanClause>> groupClauses() {
        Map<BooleanClause.Occur, Collection<BooleanClause>> groupedClauses = new HashMap<>();
        for (BooleanClause clause : clauses) {
            BooleanClause.Occur occur = resolveClauseOccur(clause);
            Collection<BooleanClause> clauseGrouping = groupedClauses.get(occur);
            if (clauseGrouping == null) {
                clauseGrouping = new ArrayList<>();
                groupedClauses.put(occur, clauseGrouping);
            }
            clauseGrouping.add(clause);
        }
        return groupedClauses;
    }
    // when this expression is negated, remap each clause's Occur per De Morgan:
    // NOT(A AND B) == NOT A OR NOT B, so MUST and MUST_NOT become SHOULD (the
    // sub-expression negation itself is applied later in processOrClauses), and
    // SHOULD becomes MUST_NOT
    private BooleanClause.Occur resolveClauseOccur(BooleanClause clause) {
        BooleanClause.Occur occur = clause.getOccur();
        if (negate) {
            switch (occur) {
                case SHOULD:
                    occur = BooleanClause.Occur.MUST_NOT;
                    break;
                case MUST:
                    occur = BooleanClause.Occur.SHOULD;
                    break;
                case MUST_NOT:
                    occur = BooleanClause.Occur.SHOULD;
                    break;
            }
        }
        return occur;
    }
    // build one pipe per effective-MUST clause
    private Collection<Pipe> processAndClauses(Map<BooleanClause.Occur, Collection<BooleanClause>> groupedClauses) {
        Collection<BooleanClause> andClauses = groupedClauses.get(BooleanClause.Occur.MUST);
        Collection<Pipe> andPipes = new ArrayList<>();
        if (andClauses != null) {
            for (BooleanClause andClause : andClauses) {
                QueryExpression queryExpression = queryFactory.create(andClause.getQuery(), resourceDefinition);
                properties.addAll(queryExpression.getProperties());
                andPipes.add(queryExpression.asPipe());
            }
        }
        return andPipes;
    }
    // build one pipe per effective-SHOULD clause; clauses remapped here by
    // negation (originally MUST) get their sub-expression negated, while clauses
    // remapped from MUST_NOT are already logically inverted and are left as-is
    private Collection<Pipe> processOrClauses(Map<BooleanClause.Occur, Collection<BooleanClause>> groupedClauses) {
        Collection<BooleanClause> shouldClauses = groupedClauses.get(BooleanClause.Occur.SHOULD);
        Collection<Pipe> orPipes = new ArrayList<>();
        if (shouldClauses != null) {
            for (BooleanClause shouldClause : shouldClauses) {
                QueryExpression queryExpression = queryFactory.create(shouldClause.getQuery(), resourceDefinition);
                // don't negate expression if we negated MUST_NOT -> SHOULD
                if (negate && shouldClause.getOccur() != BooleanClause.Occur.MUST_NOT) {
                    queryExpression.setNegate();
                }
                properties.addAll(queryExpression.getProperties());
                orPipes.add(queryExpression.asPipe());
            }
        }
        return orPipes;
    }
    // build one negated pipe per effective-MUST_NOT clause
    private Collection<Pipe> processNotClauses(Map<BooleanClause.Occur, Collection<BooleanClause>> groupedClauses) {
        Collection<BooleanClause> notClauses = groupedClauses.get(BooleanClause.Occur.MUST_NOT);
        Collection<Pipe> notPipes = new ArrayList<>();
        if (notClauses != null) {
            for (BooleanClause notClause : notClauses) {
                QueryExpression queryExpression = queryFactory.create(notClause.getQuery(), resourceDefinition);
                queryExpression.setNegate();
                properties.addAll(queryExpression.getProperties());
                notPipes.add(queryExpression.asPipe());
            }
        }
        return notPipes;
    }
}
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
* <p/>
* http://www.apache.org/licenses/LICENSE-2.0
* <p/>
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.atlas.catalog.query;
import org.apache.atlas.catalog.definition.ResourceDefinition;
import org.apache.lucene.search.PrefixQuery;
/**
* Expression that evaluates whether a property starts with a prefix.
*/
public class PrefixQueryExpression extends BaseQueryExpression {
    /**
     * Constructor.  A lucene query such as 'f*' is parsed into a PrefixQuery.
     *
     * @param query               lucene prefix query
     * @param resourceDefinition  definition of the resource being queried
     */
    public PrefixQueryExpression(PrefixQuery query, ResourceDefinition resourceDefinition) {
        super(query.getPrefix().field(), query.getPrefix().text(), resourceDefinition);
    }

    /**
     * Evaluate whether the value's string form starts with the expected prefix.
     * A null value never matches.
     */
    @Override
    public boolean evaluate(Object value) {
        if (value == null) {
            return false;
        }
        return String.valueOf(value).startsWith(getExpectedValue());
    }
}
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
* <p/>
* http://www.apache.org/licenses/LICENSE-2.0
* <p/>
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.atlas.catalog.query;
import com.thinkaurelius.titan.core.attribute.Text;
import com.tinkerpop.gremlin.java.GremlinPipeline;
import com.tinkerpop.pipes.Pipe;
import com.tinkerpop.pipes.PipeFunction;
import com.tinkerpop.pipes.filter.FilterFunctionPipe;
import org.apache.atlas.catalog.VertexWrapper;
import org.apache.atlas.catalog.definition.ResourceDefinition;
import org.apache.atlas.catalog.projection.ProjectionResult;
import org.apache.atlas.catalog.projection.Relation;
import java.util.*;
/**
 * Query expression wrapper which handles projection queries.
 * A projection query targets a field of a related (projected) resource, expressed
 * as a path of segments separated by QueryFactory.PATH_SEP_TOKEN. The wrapper
 * traverses the relation edge(s) and applies the underlying expression to the
 * terminal field.
 */
public class ProjectionQueryExpression extends BaseQueryExpression {
    // expression evaluated against the projected (child) field
    private final QueryExpression underlyingExpression;
    private final ResourceDefinition resourceDefinition;
    // field path split on PATH_SEP_TOKEN; segment 0 names the relation/projection
    private final String[] fieldSegments;

    /**
     * Constructor.
     *
     * @param underlyingExpression expression to apply to the projected field
     * @param resourceDefinition   definition of the root resource being queried
     */
    protected ProjectionQueryExpression(QueryExpression underlyingExpression, ResourceDefinition resourceDefinition) {
        super(underlyingExpression.getField(), underlyingExpression.getExpectedValue(), resourceDefinition);
        this.underlyingExpression = underlyingExpression;
        this.resourceDefinition = resourceDefinition;
        this.fieldSegments = getField().split(QueryFactory.PATH_SEP_TOKEN);
    }

    /**
     * Build the pipe: traverse the relation edge named by the first path segment,
     * then recursively apply the remainder of the path via the underlying expression.
     *
     * @return pipe representation; wrapped in an exclusion filter when negated
     */
    @Override
    public Pipe asPipe() {
        //todo: encapsulate all of this path logic including path sep escaping and normalizing
        final int sepIdx = getField().indexOf(QueryFactory.PATH_SEP_TOKEN);
        final String edgeToken = getField().substring(0, sepIdx);
        GremlinPipeline pipeline = new GremlinPipeline();

        Relation relation = resourceDefinition.getRelations().get(fieldSegments[0]);
        if (relation != null) {
            // explicit relation registered for this segment: follow its edge pipe
            pipeline = pipeline.outE();
            pipeline.add(relation.asPipe()).inV();
        } else {
            if (resourceDefinition.getProjections().get(fieldSegments[0]) != null) {
                // segment is a projection (not a relation): fall back to base behavior
                return super.asPipe();
            } else {
                //todo: default Relation implementation
                // no registered relation/projection: match edges by label suffix
                pipeline = pipeline.outE().has("label", Text.REGEX, String.format(".*\\.%s", edgeToken)).inV();
            }
        }

        //todo: set resource definition from relation on underlying expression where appropriate
        // NOTE: the underlying expression's field is mutated to the remaining path
        // before building the child pipe; recursion handles multi-segment paths
        String childFieldName = getField().substring(sepIdx + QueryFactory.PATH_SEP_TOKEN.length());
        underlyingExpression.setField(childFieldName);

        Pipe childPipe;
        if (childFieldName.contains(QueryFactory.PATH_SEP_TOKEN)) {
            childPipe = new ProjectionQueryExpression(underlyingExpression, resourceDefinition).asPipe();
        } else {
            childPipe = underlyingExpression.asPipe();
        }
        pipeline.add(childPipe);

        return negate ? new FilterFunctionPipe(new ExcludePipeFunction(pipeline)) : pipeline;
    }

    /**
     * Evaluate the expression in-memory: iterate the projection's property maps for
     * the vertex and test the projected field's value with the underlying expression.
     *
     * @param vWrapper vertex wrapper that expression is applied to
     * @return evaluation result, XOR'd with the negate flag
     */
    @Override
    public boolean evaluate(VertexWrapper vWrapper) {
        boolean result = false;
        Iterator<ProjectionResult> projectionIterator = resourceDefinition.getProjections().
                get(fieldSegments[0]).values(vWrapper).iterator();

        // short-circuits on the first property map whose value satisfies the expression
        while (! result && projectionIterator.hasNext()) {
            ProjectionResult projectionResult = projectionIterator.next();
            for (Map<String, Object> propertyMap : projectionResult.getPropertyMaps()) {
                Object val = propertyMap.get(fieldSegments[1]);
                // value is escaped so '/' comparisons match the parsed query tokens
                if (val != null && underlyingExpression.evaluate(QueryFactory.escape(val))) {
                    result = true;
                    break;
                }
            }
        }
        return negate ^ result;
    }

    /**
     * Pipe function which inverts a pipeline: computes true when the wrapped
     * pipeline produces no results for the given vertices.
     */
    private static class ExcludePipeFunction implements PipeFunction<Object, Boolean> {
        private final GremlinPipeline excludePipeline;

        public ExcludePipeFunction(GremlinPipeline excludePipeline) {
            this.excludePipeline = excludePipeline;
        }

        @Override
        public Boolean compute(Object vertices) {
            GremlinPipeline p = new GremlinPipeline(Collections.singleton(vertices));
            p.add(excludePipeline);
            return p.gather().toList().isEmpty();
        }
    }

    // exposed for subclasses/tests
    protected QueryExpression getUnderlyingExpression() {
        return underlyingExpression;
    }
}
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
* <p/>
* http://www.apache.org/licenses/LICENSE-2.0
* <p/>
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.atlas.catalog.query;
import com.tinkerpop.pipes.Pipe;
import org.apache.atlas.catalog.VertexWrapper;
import java.util.Collection;
/**
 * Represents a query expression.
 */
public interface QueryExpression {
    /**
     * Evaluate the expression based on properties of the provided vertex.
     *
     * @param vWrapper vertex wrapper that expression is applied to
     * @return result of expression evaluation
     */
    boolean evaluate(VertexWrapper vWrapper);

    /**
     * Evaluate the expression based on the provided value.
     *
     * @param value value used to evaluate expression
     * @return result of expression evaluation
     */
    boolean evaluate(Object value);

    /**
     * Get the complete set of properties which are contained in the expression.
     *
     * @return collection of expression properties
     */
    Collection<String> getProperties();

    /**
     * Get the pipe representation of the expression.
     *
     * @return pipe representation
     */
    Pipe asPipe();

    /**
     * Negate the expression.
     */
    void setNegate();

    /**
     * Get the negate status of the expression.
     *
     * @return true if the expression is negated, false otherwise
     */
    boolean isNegate();

    /**
     * Determine whether the expression is being applied to a projection.
     *
     * @return true if expression is being applied to a projection, false otherwise
     */
    boolean isProjectionExpression();

    /**
     * Get the field name used in the expression.
     *
     * @return expression field name or null if there is no field name
     */
    String getField();

    /**
     * Set the expression's field name.
     *
     * @param fieldName field name
     */
    void setField(String fieldName);

    /**
     * Get the expected value for the expression.
     *
     * @return expected value or null if there isn't an expected value
     */
    String getExpectedValue();
}
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
* <p/>
* http://www.apache.org/licenses/LICENSE-2.0
* <p/>
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.atlas.catalog.query;
import org.apache.atlas.catalog.Request;
import org.apache.atlas.catalog.TermPath;
import org.apache.atlas.catalog.definition.*;
import org.apache.atlas.catalog.exception.CatalogRuntimeException;
import org.apache.atlas.catalog.exception.InvalidQueryException;
import org.apache.lucene.analysis.core.KeywordAnalyzer;
import org.apache.lucene.queryparser.classic.ParseException;
import org.apache.lucene.queryparser.classic.QueryParser;
import org.apache.lucene.sandbox.queries.regex.RegexQuery;
import org.apache.lucene.search.*;
import org.apache.lucene.util.Version;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.util.HashMap;
import java.util.Map;
/**
 * Factory used to create QueryAdapter instances.
 * Parses lucene query syntax into Query objects and maps each concrete
 * Query type to a catalog QueryExpression via a registry of create functions.
 */
public class QueryFactory {
    private static final Logger LOG = LoggerFactory.getLogger(QueryFactory.class);

    // token substituted for '/' so path values survive lucene parsing
    public static final String PATH_SEP_TOKEN = "__slash__";

    // registry: lucene Query class -> function that creates the matching expression
    private final Map<Class<? extends Query>, ExpressionCreateFunction<? extends Query>>
            expressionCreateFunctions = new HashMap<>();

    public QueryFactory() {
        registerExpressionCreateFunctions();
    }

    /**
     * Create a query over taxonomy resources.
     *
     * @param request user request
     * @return taxonomy query
     * @throws InvalidQueryException if the request's query string can't be parsed
     */
    public AtlasQuery createTaxonomyQuery(Request request) throws InvalidQueryException {
        ResourceDefinition taxonomyDefinition = new TaxonomyResourceDefinition();
        QueryExpression queryExpression = create(request, taxonomyDefinition);
        return new AtlasTaxonomyQuery(queryExpression, taxonomyDefinition, request);
    }

    /**
     * Create a query over taxonomy term resources.
     *
     * @param request user request; must carry a "termPath" property
     * @return term query
     * @throws InvalidQueryException if the request's query string can't be parsed
     */
    public AtlasQuery createTermQuery(Request request) throws InvalidQueryException {
        ResourceDefinition termDefinition = new TermResourceDefinition();
        QueryExpression queryExpression = create(request, termDefinition);
        TermPath termPath = request.getProperty("termPath");
        return new AtlasTermQuery(queryExpression, termDefinition, termPath, request);
    }

    /**
     * Create a query over entity resources.
     *
     * @param request user request
     * @return entity query
     * @throws InvalidQueryException if the request's query string can't be parsed
     */
    public AtlasQuery createEntityQuery(Request request) throws InvalidQueryException {
        ResourceDefinition entityDefinition = new EntityResourceDefinition();
        QueryExpression queryExpression = create(request, entityDefinition);
        return new AtlasEntityQuery(queryExpression, entityDefinition, request);
    }

    /**
     * Create a query over an entity's tags.
     *
     * @param request user request; must carry an "id" property with the entity guid
     * @return entity tag query
     * @throws InvalidQueryException if the request's query string can't be parsed
     */
    public AtlasQuery createEntityTagQuery(Request request) throws InvalidQueryException {
        ResourceDefinition entityTagDefinition = new EntityTagResourceDefinition();
        QueryExpression queryExpression = create(request, entityTagDefinition);
        String guid = request.getProperty("id");
        return new AtlasEntityTagQuery(queryExpression, entityTagDefinition, guid, request);
    }

    /**
     * Parse the request into a QueryExpression.
     * Instance requests are converted to an id equality query; collection requests
     * use the raw query string. An empty/missing query yields an always-true expression.
     *
     * @param request            user request
     * @param resourceDefinition definition of the resource being queried
     * @return parsed query expression
     * @throws InvalidQueryException if lucene can't parse the query string
     */
    private QueryExpression create(Request request, ResourceDefinition resourceDefinition) throws InvalidQueryException {
        String queryString;
        if (request.getCardinality() == Request.Cardinality.INSTANCE) {
            String idPropertyName = resourceDefinition.getIdPropertyName();
            queryString = String.format("%s:%s", idPropertyName, request.<String>getProperty(idPropertyName));
        } else {
            queryString = request.getQueryString();
        }

        QueryExpression queryExpression;
        if (queryString != null && !queryString.isEmpty()) {
            // KeywordAnalyzer keeps terms intact; wildcard/case settings match catalog semantics
            QueryParser queryParser = new QueryParser(Version.LUCENE_48, "name", new KeywordAnalyzer());
            queryParser.setLowercaseExpandedTerms(false);
            queryParser.setAllowLeadingWildcard(true);
            Query query;
            try {
                query = queryParser.parse((String) escape(queryString));
            } catch (ParseException e) {
                throw new InvalidQueryException(e.getMessage());
            }
            LOG.info("LuceneQuery: {}", query);
            queryExpression = create(query, resourceDefinition);
        } else {
            queryExpression = new AlwaysQueryExpression();
        }
        // add query properties to request so that they are returned
        request.addAdditionalSelectProperties(queryExpression.getProperties());
        return queryExpression;
    }

    /**
     * Create an expression for a parsed lucene query via the registered create function.
     *
     * @param query              parsed lucene query
     * @param resourceDefinition definition of the resource being queried
     * @return matching query expression
     * @throws CatalogRuntimeException if the query type has no registered create function
     */
    @SuppressWarnings("unchecked")
    protected <T extends Query> QueryExpression create(T query, ResourceDefinition resourceDefinition) {
        if (! expressionCreateFunctions.containsKey(query.getClass())) {
            throw new CatalogRuntimeException("Query type currently not supported: " + query.getClass(), 400);
        }
        //todo: fix generic typing
        ExpressionCreateFunction expressionCreateFunction = expressionCreateFunctions.get(query.getClass());
        return expressionCreateFunction.createExpression(query, resourceDefinition);
    }

    // "escapes" characters as necessary for lucene parser
    //todo: currently '/' characters are blindly being replaced but this will not allow regex queries to be used
    protected static Object escape(Object val) {
        if (val instanceof String) {
            return ((String)val).replaceAll("/", PATH_SEP_TOKEN);
        } else {
            return val;
        }
    }

    /**
     * Creates an expression for a specific lucene query type, wrapping it in a
     * ProjectionQueryExpression when the expression targets a projection.
     */
    private abstract static class ExpressionCreateFunction<T extends Query> {
        QueryExpression createExpression(T query, ResourceDefinition resourceDefinition) {
            QueryExpression expression = create(query, resourceDefinition);
            return expression.isProjectionExpression() ?
                    new ProjectionQueryExpression(expression, resourceDefinition) :
                    expression;
        }

        protected abstract QueryExpression create(T query, ResourceDefinition resourceDefinition);
    }

    // registers one create function per supported lucene query type;
    // unregistered types are rejected in create(T, ResourceDefinition)
    private void registerExpressionCreateFunctions() {
        expressionCreateFunctions.put(WildcardQuery.class, new ExpressionCreateFunction<WildcardQuery>() {
            @Override
            public QueryExpression create(WildcardQuery query, ResourceDefinition definition) {
                return new WildcardQueryExpression(query, definition);
            }
        });

        expressionCreateFunctions.put(PrefixQuery.class, new ExpressionCreateFunction<PrefixQuery>() {
            @Override
            public QueryExpression create(PrefixQuery query, ResourceDefinition definition) {
                return new PrefixQueryExpression(query, definition);
            }
        });

        expressionCreateFunctions.put(TermQuery.class, new ExpressionCreateFunction<TermQuery>() {
            @Override
            public QueryExpression create(TermQuery query, ResourceDefinition definition) {
                return new TermQueryExpression(query, definition);
            }
        });

        expressionCreateFunctions.put(TermRangeQuery.class, new ExpressionCreateFunction<TermRangeQuery>() {
            @Override
            public QueryExpression create(TermRangeQuery query, ResourceDefinition definition) {
                return new TermRangeQueryExpression(query, definition);
            }
        });

        expressionCreateFunctions.put(RegexQuery.class, new ExpressionCreateFunction<RegexQuery>() {
            @Override
            public QueryExpression create(RegexQuery query, ResourceDefinition definition) {
                return new RegexQueryExpression(query, definition);
            }
        });

        // BooleanQuery expressions recurse into this factory for their sub-clauses
        expressionCreateFunctions.put(BooleanQuery.class, new ExpressionCreateFunction<BooleanQuery>() {
            @Override
            public QueryExpression create(BooleanQuery query, ResourceDefinition definition) {
                return new BooleanQueryExpression(query, definition, QueryFactory.this);
            }
        });
    }
}
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
* <p/>
* http://www.apache.org/licenses/LICENSE-2.0
* <p/>
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.atlas.catalog.query;
import org.apache.atlas.catalog.definition.ResourceDefinition;
import org.apache.lucene.sandbox.queries.regex.RegexQuery;
import java.util.regex.Pattern;
/**
 * Query expression which evaluates a property against a regular expression.
 */
public class RegexQueryExpression extends BaseQueryExpression {
    // the expected value is fixed at construction, so compile the pattern once
    // instead of re-compiling it for every evaluated property value; this also
    // surfaces an invalid pattern at construction time rather than per-evaluation
    private final Pattern pattern;

    /**
     * Constructor.
     *
     * @param query              lucene regex query
     * @param resourceDefinition definition of the resource being queried
     */
    public RegexQueryExpression(RegexQuery query, ResourceDefinition resourceDefinition) {
        super(query.getField(), query.getTerm().text(), resourceDefinition);
        this.pattern = Pattern.compile(getExpectedValue());
    }

    /**
     * Evaluate whether the string form of the value fully matches the expression.
     *
     * @param value property value being evaluated; null never matches
     * @return true if the non-null value matches the regular expression
     */
    @Override
    public boolean evaluate(Object value) {
        return value != null && pattern.matcher(String.valueOf(value)).matches();
    }
}
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
* <p/>
* http://www.apache.org/licenses/LICENSE-2.0
* <p/>
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.atlas.catalog.query;
import org.apache.atlas.catalog.definition.ResourceDefinition;
import org.apache.lucene.search.TermQuery;
import java.util.Collection;
/**
 * Query expression which evaluates whether a property equals a value.
 */
public class TermQueryExpression extends BaseQueryExpression {

    /**
     * Constructor.
     *
     * @param query              lucene term query
     * @param resourceDefinition definition of the resource being queried
     */
    public TermQueryExpression(TermQuery query, ResourceDefinition resourceDefinition) {
        super(query.getTerm().field(), query.getTerm().text(), resourceDefinition);
    }

    /**
     * Evaluate equality of the value against the expected term.
     * Null values match only the literal term "null"; collection values match
     * when they contain the expected term.
     *
     * @param value property value being evaluated
     * @return true if the value equals (or contains) the expected term
     */
    @Override
    public boolean evaluate(Object value) {
        String expected = getExpectedValue();

        if (value == null) {
            return expected.equals("null");
        }

        //todo: refactor; we shouldn't need to use instanceof/cast here
        if (value instanceof Collection) {
            return ((Collection) value).contains(expected);
        }

        // escape so '/' characters compare equal to the parsed query token
        return expected.equals(QueryFactory.escape(String.valueOf(value)));
    }

    public String getExpectedValue() {
        return m_expectedValue;
    }
}
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
* <p/>
* http://www.apache.org/licenses/LICENSE-2.0
* <p/>
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.atlas.catalog.query;
import org.apache.atlas.catalog.definition.ResourceDefinition;
import org.apache.lucene.search.TermRangeQuery;
import org.apache.lucene.util.BytesRef;
/**
 * Query expression which evaluates whether a property value is within a range.
 */
//todo: for month and year which are expressed via a single digit, must ensure that
//todo: a leading '0' is provided. For example, "2016-1-5" must be converted to "2016-01-05".
//todo: Month and day values aren't currently validated.
public class TermRangeQueryExpression extends BaseQueryExpression {
    private final BytesRef m_lowerTerm;
    private final BytesRef m_upperTerm;
    private final boolean m_lowerInclusive;
    private final boolean m_upperInclusive;

    /**
     * Constructor.
     *
     * @param query              lucene term range query
     * @param resourceDefinition definition of the resource being queried
     */
    public TermRangeQueryExpression(TermRangeQuery query, ResourceDefinition resourceDefinition) {
        super(query.getField(), null, resourceDefinition);
        m_lowerTerm = query.getLowerTerm();
        m_upperTerm = query.getUpperTerm();
        m_lowerInclusive = query.includesLower();
        m_upperInclusive = query.includesUpper();
    }

    /**
     * Evaluate whether the string form of the value falls within the range.
     *
     * @param value property value being evaluated
     * @return true if the value satisfies both the lower and upper bound
     */
    @Override
    public boolean evaluate(Object value) {
        BytesRef valueBytes = new BytesRef(String.valueOf(value));
        return compareLowerBound(valueBytes) && compareUpperBound(valueBytes);
    }

    // BUG FIX: the inclusive/exclusive comparisons were inverted — an inclusive
    // bound used a strict comparison (excluding the bound itself) while an
    // exclusive bound allowed equality. Inclusive now permits equality.
    private boolean compareLowerBound(BytesRef valueBytes) {
        return m_lowerTerm == null || (m_lowerInclusive ? valueBytes.compareTo(m_lowerTerm) >= 0 :
                valueBytes.compareTo(m_lowerTerm) > 0);
    }

    private boolean compareUpperBound(BytesRef valueBytes) {
        return m_upperTerm == null || (m_upperInclusive ? valueBytes.compareTo(m_upperTerm) <= 0 :
                valueBytes.compareTo(m_upperTerm) < 0);
    }
}
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
* <p/>
* http://www.apache.org/licenses/LICENSE-2.0
* <p/>
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.atlas.catalog.query;
import org.apache.atlas.catalog.definition.ResourceDefinition;
import org.apache.lucene.search.WildcardQuery;
import java.util.regex.Pattern;
/**
 * Query expression which evaluates values with wildcards.
 * This differs from PrefixQueryExpression which handles expressions which end with a wildcard.
 */
public class WildcardQueryExpression extends BaseQueryExpression {

    /**
     * Constructor.
     *
     * @param query              lucene wildcard query
     * @param resourceDefinition definition of the resource being queried
     */
    public WildcardQueryExpression(WildcardQuery query, ResourceDefinition resourceDefinition) {
        super(query.getTerm().field(), query.getTerm().text(), resourceDefinition);
    }

    /**
     * Evaluate whether the string form of the value matches the wildcard term.
     * Wildcard syntax is translated to a regex: '*' becomes ".*" and '?' becomes '.'.
     *
     * @param value property value being evaluated; null never matches
     * @return true if the non-null value matches the wildcard expression
     */
    @Override
    public boolean evaluate(Object value) {
        if (value == null) {
            return false;
        }
        // BUG FIX: previously only '*' and '?' were translated, so any other regex
        // metacharacter in the term ('+', '(', '[', '.', ...) was interpreted as
        // regex syntax. Literal characters are now quoted so they match themselves.
        String expected = getExpectedValue();
        StringBuilder regex = new StringBuilder();
        for (int i = 0; i < expected.length(); i++) {
            char c = expected.charAt(i);
            if (c == '*') {
                regex.append(".*");
            } else if (c == '?') {
                regex.append('.');
            } else {
                regex.append(Pattern.quote(String.valueOf(c)));
            }
        }
        return Pattern.compile(regex.toString()).matcher(String.valueOf(value)).matches();
    }
}
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
* <p/>
* http://www.apache.org/licenses/LICENSE-2.0
* <p/>
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.atlas.catalog;
import org.testng.annotations.Test;
import java.util.*;
import static org.testng.Assert.assertEquals;
import static org.testng.Assert.assertNull;
import static org.testng.Assert.assertTrue;
/**
 * Unit tests for CollectionRequest.
 */
public class CollectionRequestTest {
    @Test
    public void testNoProperties() {
        Request request = new CollectionRequest(null, "name:foo*");

        assertEquals(Request.Cardinality.COLLECTION, request.getCardinality());
        assertTrue(request.getQueryProperties().isEmpty());
        assertNull(request.getProperty("foo"));
        assertTrue(request.getAdditionalSelectProperties().isEmpty());
    }

    @Test
    public void testWithProperties() {
        Map<String, Object> queryProperties = new HashMap<>();
        queryProperties.put("foo", "fooValue");
        queryProperties.put("someBoolean", true);

        Request request = new CollectionRequest(queryProperties, "name:foo*");

        assertEquals(Request.Cardinality.COLLECTION, request.getCardinality());
        assertEquals(queryProperties, request.getQueryProperties());
        assertEquals("fooValue", request.getProperty("foo"));
        assertTrue(request.<Boolean>getProperty("someBoolean"));
        assertNull(request.getProperty("other"));
        assertTrue(request.getAdditionalSelectProperties().isEmpty());
    }

    @Test
    public void testSelectProperties() {
        Request request = new CollectionRequest(null, "name:foo*");

        request.addAdditionalSelectProperties(Arrays.asList("foo", "bar"));

        Collection<String> selectProperties = request.getAdditionalSelectProperties();
        assertEquals(2, selectProperties.size());
        assertTrue(selectProperties.contains("foo"));
        assertTrue(selectProperties.contains("bar"));
    }
}
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
* <p/>
* http://www.apache.org/licenses/LICENSE-2.0
* <p/>
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.atlas.catalog;
import org.testng.annotations.Test;
import java.util.Calendar;
import java.util.GregorianCalendar;
import static org.testng.Assert.assertEquals;
/**
 * Unit tests for DefaultDateFormatter.
 */
public class DefaultDateFormatterTest {
    @Test
    public void test() {
        // GregorianCalendar months are 0-based, so month 0 is January
        long timestamp = new GregorianCalendar(2016, 0, 20, 5, 10, 15).getTimeInMillis();

        DefaultDateFormatter dateFormatter = new DefaultDateFormatter();

        assertEquals("2016-01-20:05:10:15", dateFormatter.format(timestamp));
    }
}
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
* <p/>
* http://www.apache.org/licenses/LICENSE-2.0
* <p/>
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.atlas.catalog;
import org.apache.atlas.typesystem.types.AttributeInfo;
import org.apache.atlas.typesystem.types.FieldMapping;
import org.apache.atlas.typesystem.types.HierarchicalType;
import org.testng.annotations.Test;
import java.util.HashMap;
import java.util.Map;
import static org.easymock.EasyMock.*;
import static org.testng.Assert.assertEquals;
/**
 * Unit tests for DefaultPropertyMapper.
 * Exercises clean-name/qualified-name translation with and without explicitly
 * specified mappings, using a mocked HierarchicalType for type metadata.
 */
public class DefaultPropertyMapperTest {
    @Test
    public void testToCleanName_defaultMappings() {
        String typeName = "testType";
        HierarchicalType dataType = createNiceMock(HierarchicalType.class);

        // currently only use key in map
        Map<String, AttributeInfo> fields = new HashMap<>();
        fields.put("foo", null);
        fields.put("prop", null);

        // can't mock FieldMapping due to direct access to final instance var 'fields'
        FieldMapping fieldMapping = new FieldMapping(fields, null, null, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0);

        // mock expectations
        expect(dataType.fieldMapping()).andReturn(fieldMapping).anyTimes();
        replay(dataType);

        PropertyMapper propertyMapper = new TestDefaultPropertyMapper(dataType);
        // known field with a prefix is stripped to its clean name
        assertEquals(propertyMapper.toCleanName("Prefix.prop", typeName), "prop");
        // known field without a prefix is returned unchanged
        assertEquals(propertyMapper.toCleanName("foo", typeName), "foo");
        // unknown fields pass through untouched, prefixed or not
        assertEquals(propertyMapper.toCleanName("other", typeName), "other");
        assertEquals(propertyMapper.toCleanName("Prefix.other", typeName), "Prefix.other");

        verify(dataType);
    }

    @Test
    public void testToQualifiedName_defaultMappings() throws Exception {
        String typeName = "testType";
        HierarchicalType dataType = createNiceMock(HierarchicalType.class);

        // currently only use key in map
        Map<String, AttributeInfo> fields = new HashMap<>();
        fields.put("foo", null);
        fields.put("prop", null);

        // can't mock FieldMapping due to direct access to final instance var 'fields'
        FieldMapping fieldMapping = new FieldMapping(fields, null, null, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0);

        // mock expectations: qualified names are resolved via the data type
        expect(dataType.fieldMapping()).andReturn(fieldMapping).anyTimes();
        expect(dataType.getQualifiedName("foo")).andReturn("foo");
        expect(dataType.getQualifiedName("prop")).andReturn("Prefix.prop");
        replay(dataType);

        PropertyMapper propertyMapper = new TestDefaultPropertyMapper(dataType);
        // known fields resolve through the mocked type's qualified names
        assertEquals(propertyMapper.toFullyQualifiedName("foo", typeName), "foo");
        assertEquals(propertyMapper.toFullyQualifiedName("prop", typeName), "Prefix.prop");
        // unknown fields pass through untouched
        assertEquals(propertyMapper.toFullyQualifiedName("other", typeName), "other");
        assertEquals(propertyMapper.toFullyQualifiedName("Prefix.other", typeName), "Prefix.other");

        verify(dataType);
    }

    @Test
    public void testToCleanName_specifiedMappings() {
        String typeName = "testType";
        HierarchicalType dataType = createNiceMock(HierarchicalType.class);

        // currently only use key in map
        Map<String, AttributeInfo> fields = new HashMap<>();
        fields.put("foo", null);
        fields.put("prop", null);

        // can't mock FieldMapping due to direct access to final instance var 'fields'
        FieldMapping fieldMapping = new FieldMapping(fields, null, null, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0);

        // mock expectations
        expect(dataType.fieldMapping()).andReturn(fieldMapping).anyTimes();
        replay(dataType);

        // explicit bidirectional mapping: property_1 <-> prop1
        Map<String, String> cleanToQualifiedMap = new HashMap<>();
        cleanToQualifiedMap.put("prop1", "property_1");
        Map<String, String> qualifiedToCleanMap = new HashMap<>();
        qualifiedToCleanMap.put("property_1", "prop1");

        PropertyMapper propertyMapper = new TestDefaultPropertyMapper(
                typeName, qualifiedToCleanMap, cleanToQualifiedMap, dataType);
        // explicit mapping takes effect
        assertEquals(propertyMapper.toCleanName("property_1", typeName), "prop1");
        // default behavior is preserved for fields not in the explicit map
        assertEquals(propertyMapper.toCleanName("Prefix.prop", typeName), "prop");
        assertEquals(propertyMapper.toCleanName("foo", typeName), "foo");
        assertEquals(propertyMapper.toCleanName("other", typeName), "other");
        assertEquals(propertyMapper.toCleanName("Prefix.other", typeName), "Prefix.other");

        verify(dataType);
    }

    @Test
    public void testToQualifiedName_specifiedMappings() throws Exception {
        String typeName = "testType";
        HierarchicalType dataType = createNiceMock(HierarchicalType.class);

        // currently only use key in map
        Map<String, AttributeInfo> fields = new HashMap<>();
        fields.put("foo", null);
        fields.put("prop", null);

        // can't mock FieldMapping due to direct access to final instance var 'fields'
        FieldMapping fieldMapping = new FieldMapping(fields, null, null, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0);

        // mock expectations
        expect(dataType.fieldMapping()).andReturn(fieldMapping).anyTimes();
        expect(dataType.getQualifiedName("foo")).andReturn("foo");
        expect(dataType.getQualifiedName("prop")).andReturn("Prefix.prop");
        replay(dataType);

        // explicit bidirectional mapping: property_1 <-> prop1
        Map<String, String> cleanToQualifiedMap = new HashMap<>();
        cleanToQualifiedMap.put("prop1", "property_1");
        Map<String, String> qualifiedToCleanMap = new HashMap<>();
        qualifiedToCleanMap.put("property_1", "prop1");

        PropertyMapper propertyMapper = new TestDefaultPropertyMapper(
                typeName, qualifiedToCleanMap, cleanToQualifiedMap, dataType);
        // explicit mapping takes effect
        assertEquals(propertyMapper.toFullyQualifiedName("prop1", typeName), "property_1");
        // default behavior is preserved for fields not in the explicit map
        assertEquals(propertyMapper.toFullyQualifiedName("foo", typeName), "foo");
        assertEquals(propertyMapper.toFullyQualifiedName("prop", typeName), "Prefix.prop");
        assertEquals(propertyMapper.toFullyQualifiedName("other", typeName), "other");
        assertEquals(propertyMapper.toFullyQualifiedName("Prefix.other", typeName), "Prefix.other");

        verify(dataType);
    }

    /**
     * Test subclass which overrides data type creation to return the mocked type,
     * avoiding lookups against the real type system.
     */
    private static class TestDefaultPropertyMapper extends DefaultPropertyMapper {
        private HierarchicalType dataType;

        public TestDefaultPropertyMapper(HierarchicalType dataType) {
            super();
            this.dataType = dataType;
        }

        public TestDefaultPropertyMapper(String type,
                                         Map<String, String> qualifiedToCleanMap,
                                         Map<String, String> cleanToQualifiedMap,
                                         HierarchicalType dataType) {
            super(qualifiedToCleanMap, cleanToQualifiedMap);
            this.dataType = dataType;
        }

        @Override
        protected HierarchicalType createDataType(String type) {
            return dataType;
        }
    }
}
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
* <p/>
* http://www.apache.org/licenses/LICENSE-2.0
* <p/>
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.atlas.catalog;
import org.apache.atlas.catalog.exception.ResourceNotFoundException;
import org.apache.atlas.catalog.query.AtlasQuery;
import org.apache.atlas.catalog.query.QueryFactory;
import org.easymock.Capture;
import org.testng.annotations.Test;
import java.util.*;
import static org.easymock.EasyMock.*;
import static org.testng.Assert.assertEquals;
import static org.testng.Assert.assertNull;
import static org.testng.Assert.assertTrue;
/**
* Unit Tests for EntityResourceProvider.
*/
public class EntityResourceProviderTest {
    /**
     * getResourceById() should run an entity query built from the user
     * request and return the single matching property map.
     */
    @Test
    public void testGetResource() throws Exception {
        AtlasTypeSystem typeSystem = createStrictMock(AtlasTypeSystem.class);
        QueryFactory queryFactory = createStrictMock(QueryFactory.class);
        AtlasQuery query = createStrictMock(AtlasQuery.class);
        Capture<Request> requestCapture = newCapture();

        Collection<Map<String, Object>> queryResult = new ArrayList<>();
        Map<String, Object> queryResultRow = new HashMap<>();
        queryResult.add(queryResultRow);
        queryResultRow.put("id", "1");
        queryResultRow.put("creation_time", "04/20/2016");

        // mock expectations
        expect(queryFactory.createEntityQuery(capture(requestCapture))).andReturn(query);
        expect(query.execute()).andReturn(queryResult);
        replay(typeSystem, queryFactory, query);

        EntityResourceProvider provider = new EntityResourceProvider(typeSystem);
        provider.setQueryFactory(queryFactory);

        Map<String, Object> requestProperties = new HashMap<>();
        requestProperties.put("id", "1");
        Request userRequest = new InstanceRequest(requestProperties);

        Result result = provider.getResourceById(userRequest);
        assertEquals(1, result.getPropertyMaps().size());
        assertEquals(queryResultRow, result.getPropertyMaps().iterator().next());

        // the request handed to the query factory should carry the user's
        // query properties and no query string / extra select properties
        Request request = requestCapture.getValue();
        assertNull(request.getQueryString());
        assertEquals(0, request.getAdditionalSelectProperties().size());
        assertEquals(requestProperties, request.getQueryProperties());

        verify(typeSystem, queryFactory, query);
    }

    /**
     * An empty query result for an instance request should surface as a
     * ResourceNotFoundException.
     */
    @Test(expectedExceptions = ResourceNotFoundException.class)
    public void testGetResource_404() throws Exception {
        AtlasTypeSystem typeSystem = createStrictMock(AtlasTypeSystem.class);
        QueryFactory queryFactory = createStrictMock(QueryFactory.class);
        AtlasQuery query = createStrictMock(AtlasQuery.class);
        Capture<Request> requestCapture = newCapture();

        // empty response should result in a ResourceNotFoundException
        Collection<Map<String, Object>> emptyResponse = new ArrayList<>();

        // mock expectations
        expect(queryFactory.createEntityQuery(capture(requestCapture))).andReturn(query);
        expect(query.execute()).andReturn(emptyResponse);
        replay(typeSystem, queryFactory, query);

        EntityResourceProvider provider = new EntityResourceProvider(typeSystem);
        provider.setQueryFactory(queryFactory);

        Map<String, Object> requestProperties = new HashMap<>();
        requestProperties.put("id", "1");
        Request request = new InstanceRequest(requestProperties);

        // expected to throw; note that any statement placed after this call
        // would be unreachable, so no verify() is done here
        provider.getResourceById(request);
    }

    /**
     * getResources() should run a collection query and return every row
     * produced by it.
     */
    @Test
    public void testGetResources() throws Exception {
        AtlasTypeSystem typeSystem = createStrictMock(AtlasTypeSystem.class);
        QueryFactory queryFactory = createStrictMock(QueryFactory.class);
        AtlasQuery query = createStrictMock(AtlasQuery.class);
        Capture<Request> requestCapture = newCapture();

        Collection<Map<String, Object>> queryResult = new ArrayList<>();
        Map<String, Object> queryResultRow1 = new HashMap<>();
        queryResult.add(queryResultRow1);
        // fixed fixture key: was misspelled "mame", which was inconsistent
        // with the "name:entity*" query string used below
        queryResultRow1.put("name", "entity1");
        queryResultRow1.put("description", "test entity description");
        queryResultRow1.put("creation_time", "04/20/2016");

        Map<String, Object> queryResultRow2 = new HashMap<>();
        queryResult.add(queryResultRow2);
        queryResultRow2.put("name", "entity2");
        queryResultRow2.put("description", "test entity description 2");
        queryResultRow2.put("creation_time", "04/21/2016");

        // mock expectations
        expect(queryFactory.createEntityQuery(capture(requestCapture))).andReturn(query);
        expect(query.execute()).andReturn(queryResult);
        replay(typeSystem, queryFactory, query);

        EntityResourceProvider provider = new EntityResourceProvider(typeSystem);
        provider.setQueryFactory(queryFactory);

        Request userRequest = new CollectionRequest(Collections.<String, Object>emptyMap(), "name:entity*");

        Result result = provider.getResources(userRequest);
        assertEquals(2, result.getPropertyMaps().size());
        assertTrue(result.getPropertyMaps().contains(queryResultRow1));
        assertTrue(result.getPropertyMaps().contains(queryResultRow2));

        // collection requests propagate the query string, not properties
        Request request = requestCapture.getValue();
        assertEquals("name:entity*", request.getQueryString());
        assertEquals(0, request.getAdditionalSelectProperties().size());
        assertEquals(0, request.getQueryProperties().size());

        verify(typeSystem, queryFactory, query);
    }

    /**
     * Unlike instance requests, an empty result for a collection request is
     * not an error: an empty Result is returned.
     */
    @Test
    public void testGetResources_noResults() throws Exception {
        AtlasTypeSystem typeSystem = createStrictMock(AtlasTypeSystem.class);
        QueryFactory queryFactory = createStrictMock(QueryFactory.class);
        AtlasQuery query = createStrictMock(AtlasQuery.class);
        Capture<Request> requestCapture = newCapture();

        // empty result shouldn't result in exception for collection query
        Collection<Map<String, Object>> queryResult = new ArrayList<>();

        // mock expectations
        expect(queryFactory.createEntityQuery(capture(requestCapture))).andReturn(query);
        expect(query.execute()).andReturn(queryResult);
        replay(typeSystem, queryFactory, query);

        EntityResourceProvider provider = new EntityResourceProvider(typeSystem);
        provider.setQueryFactory(queryFactory);

        Request userRequest = new CollectionRequest(Collections.<String, Object>emptyMap(), "name:entity*");

        Result result = provider.getResources(userRequest);
        assertEquals(0, result.getPropertyMaps().size());

        Request request = requestCapture.getValue();
        assertEquals("name:entity*", request.getQueryString());
        assertEquals(0, request.getAdditionalSelectProperties().size());
        assertEquals(0, request.getQueryProperties().size());

        verify(typeSystem, queryFactory, query);
    }

    /**
     * Entities can't be created through this provider; createResource()
     * must reject the call.
     */
    @Test(expectedExceptions = UnsupportedOperationException.class)
    public void testCreateResource() throws Exception {
        AtlasTypeSystem typeSystem = createStrictMock(AtlasTypeSystem.class);
        QueryFactory queryFactory = createStrictMock(QueryFactory.class);
        AtlasQuery query = createStrictMock(AtlasQuery.class);

        // mock expectations: none — the provider must fail before using them
        replay(typeSystem, queryFactory, query);

        Map<String, Object> requestProperties = new HashMap<>();
        requestProperties.put("id", "1");
        Request userRequest = new InstanceRequest(requestProperties);

        EntityResourceProvider provider = new EntityResourceProvider(typeSystem);
        provider.setQueryFactory(queryFactory);

        provider.createResource(userRequest);
    }

    /**
     * Bulk creation is likewise unsupported.
     */
    @Test(expectedExceptions = UnsupportedOperationException.class)
    public void testCreateResources() throws Exception {
        AtlasTypeSystem typeSystem = createStrictMock(AtlasTypeSystem.class);
        QueryFactory queryFactory = createStrictMock(QueryFactory.class);
        AtlasQuery query = createStrictMock(AtlasQuery.class);

        // mock expectations: none — the provider must fail before using them
        replay(typeSystem, queryFactory, query);

        Map<String, Object> requestProperties = new HashMap<>();
        requestProperties.put("id", "1");
        Request userRequest = new InstanceRequest(requestProperties);

        EntityResourceProvider provider = new EntityResourceProvider(typeSystem);
        provider.setQueryFactory(queryFactory);

        provider.createResources(userRequest);
    }
}
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
* <p/>
* http://www.apache.org/licenses/LICENSE-2.0
* <p/>
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.atlas.catalog;
import org.apache.atlas.catalog.exception.CatalogException;
import org.apache.atlas.catalog.exception.InvalidPayloadException;
import org.apache.atlas.catalog.exception.ResourceAlreadyExistsException;
import org.apache.atlas.catalog.exception.ResourceNotFoundException;
import org.apache.atlas.catalog.query.AtlasQuery;
import org.apache.atlas.catalog.query.QueryFactory;
import org.easymock.Capture;
import org.testng.annotations.Test;
import java.util.*;
import static org.easymock.EasyMock.*;
import static org.testng.Assert.assertEquals;
import static org.testng.Assert.assertNull;
import static org.testng.Assert.assertTrue;
/**
* Unit tests for EntityTagResourceProvider.
*/
public class EntityTagResourceProviderTest {
    /**
     * getResourceById() should run an entity-tag query built from the user
     * request and return the single matching tag property map.
     */
    @Test
    public void testGetResource() throws Exception {
        AtlasTypeSystem typeSystem = createStrictMock(AtlasTypeSystem.class);
        QueryFactory queryFactory = createStrictMock(QueryFactory.class);
        AtlasQuery query = createStrictMock(AtlasQuery.class);
        Capture<Request> requestCapture = newCapture();

        Collection<Map<String, Object>> queryResult = new ArrayList<>();
        Map<String, Object> queryResultRow = new HashMap<>();
        queryResult.add(queryResultRow);
        queryResultRow.put("name", "taxonomyName.termName");
        queryResultRow.put("description", "test term description");

        // mock expectations
        expect(queryFactory.createEntityTagQuery(capture(requestCapture))).andReturn(query);
        expect(query.execute()).andReturn(queryResult);
        replay(typeSystem, queryFactory, query);

        EntityTagResourceProvider provider = new EntityTagResourceProvider(typeSystem);
        provider.setQueryFactory(queryFactory);

        Map<String, Object> requestProperties = new HashMap<>();
        requestProperties.put("name", "taxonomyName.termName");
        requestProperties.put("id", "1");
        Request userRequest = new InstanceRequest(requestProperties);

        Result result = provider.getResourceById(userRequest);
        assertEquals(1, result.getPropertyMaps().size());
        assertEquals(queryResultRow, result.getPropertyMaps().iterator().next());

        // captured request should carry both user properties and no query string
        Request request = requestCapture.getValue();
        assertNull(request.getQueryString());
        assertEquals(0, request.getAdditionalSelectProperties().size());
        assertEquals(2, request.getQueryProperties().size());
        assertEquals("taxonomyName.termName", request.getQueryProperties().get("name"));
        assertEquals(Request.Cardinality.INSTANCE, request.getCardinality());

        verify(typeSystem, queryFactory, query);
    }

    /**
     * An empty query result for an instance request should surface as a
     * ResourceNotFoundException.
     */
    @Test(expectedExceptions = ResourceNotFoundException.class)
    public void testGetResource_404() throws Exception {
        AtlasTypeSystem typeSystem = createStrictMock(AtlasTypeSystem.class);
        QueryFactory queryFactory = createStrictMock(QueryFactory.class);
        AtlasQuery query = createStrictMock(AtlasQuery.class);
        Capture<Request> requestCapture = newCapture();

        // empty response should result in a ResourceNotFoundException
        Collection<Map<String, Object>> emptyResponse = new ArrayList<>();

        // mock expectations
        expect(queryFactory.createEntityTagQuery(capture(requestCapture))).andReturn(query);
        expect(query.execute()).andReturn(emptyResponse);
        replay(typeSystem, queryFactory, query);

        EntityTagResourceProvider provider = new EntityTagResourceProvider(typeSystem);
        provider.setQueryFactory(queryFactory);

        Map<String, Object> requestProperties = new HashMap<>();
        requestProperties.put("name", "taxonomyName.termName");
        requestProperties.put("id", "1");
        Request request = new InstanceRequest(requestProperties);

        // expected to throw, so nothing after this call would be reachable
        provider.getResourceById(request);
    }

    /**
     * getResources() should run a collection query and return every
     * matching tag row.
     */
    @Test
    public void testGetResources() throws Exception {
        AtlasTypeSystem typeSystem = createStrictMock(AtlasTypeSystem.class);
        QueryFactory queryFactory = createStrictMock(QueryFactory.class);
        AtlasQuery query = createStrictMock(AtlasQuery.class);
        Capture<Request> requestCapture = newCapture();

        Collection<Map<String, Object>> queryResult = new ArrayList<>();
        Map<String, Object> queryResultRow1 = new HashMap<>();
        queryResult.add(queryResultRow1);
        queryResultRow1.put("name", "testTaxonomy.termName");
        queryResultRow1.put("description", "test term description");

        Map<String, Object> queryResultRow2 = new HashMap<>();
        queryResult.add(queryResultRow2);
        queryResultRow2.put("name", "testTaxonomy.termName2");
        queryResultRow2.put("description", "test term 2 description");

        // mock expectations
        expect(queryFactory.createEntityTagQuery(capture(requestCapture))).andReturn(query);
        expect(query.execute()).andReturn(queryResult);
        replay(typeSystem, queryFactory, query);

        EntityTagResourceProvider provider = new EntityTagResourceProvider(typeSystem);
        provider.setQueryFactory(queryFactory);

        Map<String, Object> requestProperties = new HashMap<>();
        requestProperties.put("id", "1");
        Request userRequest = new CollectionRequest(requestProperties, "name:testTaxonomy.*");

        // invoke test method
        Result result = provider.getResources(userRequest);

        assertEquals(2, result.getPropertyMaps().size());
        assertTrue(result.getPropertyMaps().contains(queryResultRow1));
        assertTrue(result.getPropertyMaps().contains(queryResultRow2));

        // collection request should forward the query string unchanged
        Request request = requestCapture.getValue();
        assertEquals("name:testTaxonomy.*", request.getQueryString());
        assertEquals(0, request.getAdditionalSelectProperties().size());

        verify(typeSystem, queryFactory, query);
    }

    /**
     * Unlike instance requests, an empty result for a collection request is
     * not an error: an empty Result is returned.
     */
    @Test
    public void testGetResources_noResults() throws Exception {
        AtlasTypeSystem typeSystem = createStrictMock(AtlasTypeSystem.class);
        QueryFactory queryFactory = createStrictMock(QueryFactory.class);
        AtlasQuery query = createStrictMock(AtlasQuery.class);
        Capture<Request> requestCapture = newCapture();

        // empty query result — no exception expected for collection queries
        Collection<Map<String, Object>> queryResult = new ArrayList<>();

        // mock expectations
        expect(queryFactory.createEntityTagQuery(capture(requestCapture))).andReturn(query);
        expect(query.execute()).andReturn(queryResult);
        replay(typeSystem, queryFactory, query);

        EntityTagResourceProvider provider = new EntityTagResourceProvider(typeSystem);
        provider.setQueryFactory(queryFactory);

        Map<String, Object> requestProperties = new HashMap<>();
        requestProperties.put("id", "1");
        Request userRequest = new CollectionRequest(requestProperties, "name:testTaxonomy.*");

        // invoke test method
        Result result = provider.getResources(userRequest);
        assertEquals(0, result.getPropertyMaps().size());

        Request request = requestCapture.getValue();
        assertEquals("name:testTaxonomy.*", request.getQueryString());
        assertEquals(0, request.getAdditionalSelectProperties().size());

        verify(typeSystem, queryFactory, query);
    }

    /**
     * createResource() must reject a request that does not contain the
     * mandatory 'name' property.
     */
    @Test(expectedExceptions = InvalidPayloadException.class)
    public void testCreateResource_invalidRequest__noName() throws Exception {
        AtlasTypeSystem typeSystem = createStrictMock(AtlasTypeSystem.class);
        QueryFactory queryFactory = createStrictMock(QueryFactory.class);
        AtlasQuery query = createStrictMock(AtlasQuery.class);
        replay(typeSystem, queryFactory, query);

        Map<String, Object> requestProperties = new HashMap<>();
        // missing 'name' property should result in InvalidPayloadException
        requestProperties.put("description", "description");
        Request userRequest = new InstanceRequest(requestProperties);

        EntityTagResourceProvider provider = new EntityTagResourceProvider(typeSystem);
        provider.setQueryFactory(queryFactory);

        provider.createResource(userRequest);
    }

    /**
     * Happy-path tag creation: the provider looks up the backing term,
     * checks it is available as a tag, then creates a trait instance for
     * the target entity.
     */
    @Test
    public void testCreateResource() throws Exception {
        AtlasTypeSystem typeSystem = createStrictMock(AtlasTypeSystem.class);
        QueryFactory queryFactory = createStrictMock(QueryFactory.class);
        AtlasQuery query = createStrictMock(AtlasQuery.class);
        ResourceProvider termResourceProvider = createStrictMock(TermResourceProvider.class);
        Capture<Request> termRequestCapture = newCapture();

        Collection<Map<String, Object>> termQueryResult = new ArrayList<>();
        Map<String, Object> termQueryResultRow = new HashMap<>();
        termQueryResult.add(termQueryResultRow);
        termQueryResultRow.put("name", "testTaxonomy.termName");
        termQueryResultRow.put("type", "testTaxonomy.termName");
        termQueryResultRow.put("available_as_tag", true);
        termQueryResultRow.put("description", "term description");
        Result termResult = new Result(termQueryResult);

        // mock expectations
        expect(termResourceProvider.getResourceById(capture(termRequestCapture))).andReturn(termResult);

        // trait creation expected for entity "11-22-33" with the term's
        // name and description (not its 'type'/'available_as_tag' fields)
        Map<String, Object> tagProperties = new HashMap<>();
        tagProperties.put("name", "testTaxonomy.termName");
        tagProperties.put("description", "term description");
        typeSystem.createTraitInstance("11-22-33", "testTaxonomy.termName", tagProperties);

        replay(typeSystem, queryFactory, query, termResourceProvider);

        EntityTagResourceProvider provider = new TestEntityTagResourceProvider(typeSystem, termResourceProvider);
        provider.setQueryFactory(queryFactory);

        Map<String, Object> requestProperties = new HashMap<>();
        requestProperties.put("name", "testTaxonomy.termName");
        requestProperties.put("id", "11-22-33");
        Request userRequest = new InstanceRequest(requestProperties);

        provider.createResource(userRequest);

        // the term lookup must be an instance request addressed by termPath
        // and must ask for the 'type' select property
        Request termRequest = termRequestCapture.getValue();
        Map<String, Object> termRequestProps = termRequest.getQueryProperties();
        assertEquals(1, termRequestProps.size());
        TermPath termPath = (TermPath) termRequestProps.get("termPath");
        assertEquals("testTaxonomy.termName", termPath.getFullyQualifiedName());
        assertEquals(1, termRequest.getAdditionalSelectProperties().size());
        assertEquals("type", termRequest.getAdditionalSelectProperties().iterator().next());
        assertNull(termRequest.getQueryString());

        verify(typeSystem, queryFactory, query, termResourceProvider);
    }

    /**
     * A term flagged as not available for tagging must be rejected.
     */
    @Test(expectedExceptions = CatalogException.class)
    public void testCreateResource_invalidRequest__termNotAvailableForTagging() throws Exception {
        AtlasTypeSystem typeSystem = createStrictMock(AtlasTypeSystem.class);
        QueryFactory queryFactory = createStrictMock(QueryFactory.class);
        AtlasQuery query = createStrictMock(AtlasQuery.class);
        ResourceProvider termResourceProvider = createStrictMock(TermResourceProvider.class);
        Capture<Request> termRequestCapture = newCapture();

        Collection<Map<String, Object>> termQueryResult = new ArrayList<>();
        Map<String, Object> termQueryResultRow = new HashMap<>();
        termQueryResult.add(termQueryResultRow);
        termQueryResultRow.put("name", "testTaxonomy.termName");
        termQueryResultRow.put("type", "testTaxonomy.termName");
        // false value for 'available_as_tag' should result in an exception
        termQueryResultRow.put("available_as_tag", false);
        termQueryResultRow.put("description", "term description");
        Result termResult = new Result(termQueryResult);

        // mock expectations — note: no createTraitInstance() expected
        expect(termResourceProvider.getResourceById(capture(termRequestCapture))).andReturn(termResult);
        replay(typeSystem, queryFactory, query, termResourceProvider);

        EntityTagResourceProvider provider = new TestEntityTagResourceProvider(typeSystem, termResourceProvider);
        provider.setQueryFactory(queryFactory);

        Map<String, Object> requestProperties = new HashMap<>();
        requestProperties.put("name", "testTaxonomy.termName");
        requestProperties.put("id", "11-22-33");
        Request userRequest = new InstanceRequest(requestProperties);

        provider.createResource(userRequest);
    }

    /**
     * A ResourceAlreadyExistsException raised by the type system while
     * creating the trait instance must propagate to the caller.
     */
    @Test(expectedExceptions = ResourceAlreadyExistsException.class)
    public void testCreateResource_invalidRequest__alreadyExists() throws Exception {
        AtlasTypeSystem typeSystem = createStrictMock(AtlasTypeSystem.class);
        QueryFactory queryFactory = createStrictMock(QueryFactory.class);
        AtlasQuery query = createStrictMock(AtlasQuery.class);
        ResourceProvider termResourceProvider = createStrictMock(TermResourceProvider.class);
        Capture<Request> termRequestCapture = newCapture();

        Collection<Map<String, Object>> termQueryResult = new ArrayList<>();
        Map<String, Object> termQueryResultRow = new HashMap<>();
        termQueryResult.add(termQueryResultRow);
        termQueryResultRow.put("name", "testTaxonomy.termName");
        termQueryResultRow.put("type", "testTaxonomy.termName");
        termQueryResultRow.put("available_as_tag", true);
        termQueryResultRow.put("description", "term description");
        Result termResult = new Result(termQueryResult);

        // mock expectations
        expect(termResourceProvider.getResourceById(capture(termRequestCapture))).andReturn(termResult);

        Map<String, Object> tagProperties = new HashMap<>();
        tagProperties.put("name", "testTaxonomy.termName");
        tagProperties.put("description", "term description");
        typeSystem.createTraitInstance("11-22-33", "testTaxonomy.termName", tagProperties);
        // simulate the tag already being attached to the entity
        expectLastCall().andThrow(new ResourceAlreadyExistsException(""));

        replay(typeSystem, queryFactory, query, termResourceProvider);

        EntityTagResourceProvider provider = new TestEntityTagResourceProvider(typeSystem, termResourceProvider);
        provider.setQueryFactory(queryFactory);

        Map<String, Object> requestProperties = new HashMap<>();
        requestProperties.put("name", "testTaxonomy.termName");
        requestProperties.put("id", "11-22-33");
        Request userRequest = new InstanceRequest(requestProperties);

        provider.createResource(userRequest);
    }

    /**
     * Bulk creation: every tag in the request is applied to every entity
     * matched by the entity query, and an href is returned per pair.
     * Non-strict mocks are used because the per-entity/per-tag call order
     * is not part of the contract.
     */
    @Test
    public void testCreateResources() throws Exception {
        AtlasTypeSystem typeSystem = createMock(AtlasTypeSystem.class);
        QueryFactory queryFactory = createStrictMock(QueryFactory.class);
        AtlasQuery entityQuery = createMock(AtlasQuery.class);
        ResourceProvider termResourceProvider = createMock(TermResourceProvider.class);
        Capture<Request> entityRequestCapture = newCapture();
        Capture<Request> termRequestCapture1 = newCapture();
        Capture<Request> termRequestCapture2 = newCapture();

        // two entities matched by the entity query
        Collection<Map<String, Object>> entityQueryResult = new ArrayList<>();
        Map<String, Object> entityQueryResultRow = new HashMap<>();
        entityQueryResultRow.put("id", "1");
        entityQueryResult.add(entityQueryResultRow);

        Map<String, Object> entityQueryResultRow2 = new HashMap<>();
        entityQueryResultRow2.put("id", "2");
        entityQueryResult.add(entityQueryResultRow2);

        // two taggable terms returned by the term provider
        Collection<Map<String, Object>> termQueryResult1 = new ArrayList<>();
        Map<String, Object> termQueryResultRow1 = new HashMap<>();
        termQueryResult1.add(termQueryResultRow1);
        termQueryResultRow1.put("name", "testTaxonomy.termName1");
        termQueryResultRow1.put("type", "testTaxonomy.termName1");
        termQueryResultRow1.put("available_as_tag", true);
        termQueryResultRow1.put("description", "term description");
        Result termResult1 = new Result(termQueryResult1);

        Collection<Map<String, Object>> termQueryResult2 = new ArrayList<>();
        Map<String, Object> termQueryResultRow2 = new HashMap<>();
        termQueryResult2.add(termQueryResultRow2);
        termQueryResultRow2.put("name", "testTaxonomy.termName2");
        termQueryResultRow2.put("type", "testTaxonomy.termName2");
        termQueryResultRow2.put("available_as_tag", true);
        termQueryResultRow2.put("description", "term 2 description");
        Result termResult2 = new Result(termQueryResult2);

        // mock expectations
        expect(queryFactory.createEntityQuery(capture(entityRequestCapture))).andReturn(entityQuery);
        expect(entityQuery.execute()).andReturn(entityQueryResult);
        expect(termResourceProvider.getResourceById(capture(termRequestCapture1))).andReturn(termResult1);
        expect(termResourceProvider.getResourceById(capture(termRequestCapture2))).andReturn(termResult2);

        Map<String, Object> tagProperties1 = new HashMap<>();
        tagProperties1.put("name", "testTaxonomy.termName1");
        tagProperties1.put("description", "term description");
        // each tag is associated with each entity
        typeSystem.createTraitInstance("1", "testTaxonomy.termName1", tagProperties1);
        typeSystem.createTraitInstance("2", "testTaxonomy.termName1", tagProperties1);

        Map<String, Object> tagProperties2 = new HashMap<>();
        tagProperties2.put("name", "testTaxonomy.termName2");
        tagProperties2.put("description", "term 2 description");
        // each tag is associated with each entity
        typeSystem.createTraitInstance("1", "testTaxonomy.termName2", tagProperties2);
        typeSystem.createTraitInstance("2", "testTaxonomy.termName2", tagProperties2);

        replay(typeSystem, queryFactory, entityQuery, termResourceProvider);
        // end mock expectations

        EntityTagResourceProvider provider = new TestEntityTagResourceProvider(typeSystem, termResourceProvider);
        provider.setQueryFactory(queryFactory);

        Map<String, Object> requestProps = new HashMap<>();
        Collection<Map<String, String>> tagMaps = new ArrayList<>();
        requestProps.put("tags", tagMaps);
        Map<String, String> tagMap1 = new HashMap<>();
        tagMap1.put("name", "testTaxonomy.termName1");
        tagMaps.add(tagMap1);
        Map<String, String> tagMap2 = new HashMap<>();
        tagMap2.put("name", "testTaxonomy.termName2");
        tagMaps.add(tagMap2);

        Request userRequest = new CollectionRequest(requestProps, "name:foo*");

        // invoke method being tested
        Collection<String> createResult = provider.createResources(userRequest);

        // 2 entities x 2 tags = 4 created resources, one href each
        assertEquals(4, createResult.size());
        assertTrue(createResult.contains("v1/entities/1/tags/testTaxonomy.termName1"));
        assertTrue(createResult.contains("v1/entities/1/tags/testTaxonomy.termName2"));
        assertTrue(createResult.contains("v1/entities/2/tags/testTaxonomy.termName1"));
        assertTrue(createResult.contains("v1/entities/2/tags/testTaxonomy.termName2"));

        Request entityRequest = entityRequestCapture.getValue();
        assertEquals("name:foo*", entityRequest.getQueryString());
        assertEquals(Request.Cardinality.COLLECTION, entityRequest.getCardinality());

        Request termRequest1 = termRequestCapture1.getValue();
        assertNull(termRequest1.getQueryString());
        assertEquals(Request.Cardinality.INSTANCE, termRequest1.getCardinality());
        Map<String, Object> termRequestProps = termRequest1.getQueryProperties();
        assertEquals(1, termRequestProps.size());
        TermPath termPath = (TermPath) termRequestProps.get("termPath");
        assertEquals("testTaxonomy.termName1", termPath.getFullyQualifiedName());

        Request termRequest2 = termRequestCapture2.getValue();
        assertNull(termRequest2.getQueryString());
        assertEquals(Request.Cardinality.INSTANCE, termRequest2.getCardinality());
        Map<String, Object> termRequestProps2 = termRequest2.getQueryProperties();
        assertEquals(1, termRequestProps2.size());
        TermPath termPath2 = (TermPath) termRequestProps2.get("termPath");
        assertEquals("testTaxonomy.termName2", termPath2.getFullyQualifiedName());

        verify(typeSystem, queryFactory, entityQuery, termResourceProvider);
    }

    /**
     * deleteResourceById() should delegate straight to
     * AtlasTypeSystem.deleteTag() with the entity id and tag name.
     */
    @Test
    public void testDeleteResourceById() throws Exception {
        AtlasTypeSystem typeSystem = createStrictMock(AtlasTypeSystem.class);

        // mock expectations
        typeSystem.deleteTag("1", "taxonomyName.termName");
        replay(typeSystem);

        Map<String, Object> requestProperties = new HashMap<>();
        requestProperties.put("name", "taxonomyName.termName");
        requestProperties.put("id", "1");
        Request userRequest = new InstanceRequest(requestProperties);

        // instantiate EntityTagResourceProvider and invoke method being tested
        EntityTagResourceProvider provider = new EntityTagResourceProvider(typeSystem);
        provider.setQueryFactory(null);
        provider.deleteResourceById(userRequest);

        verify(typeSystem);
    }

    //todo: test behavior of createResources in case of partial success after behavior is defined

    /**
     * Test subclass that injects a mock term provider in place of the one
     * the production class would lazily construct.
     */
    private static class TestEntityTagResourceProvider extends EntityTagResourceProvider {
        private ResourceProvider testTermResourceProvider;

        public TestEntityTagResourceProvider(AtlasTypeSystem typeSystem, ResourceProvider termResourceProvider) {
            super(typeSystem);
            testTermResourceProvider = termResourceProvider;
        }

        @Override
        protected synchronized ResourceProvider getTermResourceProvider() {
            return testTermResourceProvider;
        }
    }
}
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
* <p/>
* http://www.apache.org/licenses/LICENSE-2.0
* <p/>
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.atlas.catalog;
import org.testng.annotations.Test;
import java.util.ArrayList;
import java.util.Collection;
import java.util.HashMap;
import java.util.Map;
import static org.testng.Assert.assertEquals;
import static org.testng.Assert.assertNull;
import static org.testng.Assert.assertTrue;
/**
* Unit tests for InstanceRequest.
*/
public class InstanceRequestTest {
    /**
     * An InstanceRequest should report INSTANCE cardinality, expose the
     * supplied properties, and return null for unknown property names.
     */
    @Test
    public void testRequestProperties() {
        Map<String, Object> props = new HashMap<>();
        props.put("foo", "fooValue");
        props.put("someBoolean", true);

        Request req = new InstanceRequest(props);

        assertEquals(Request.Cardinality.INSTANCE, req.getCardinality());
        assertEquals(props, req.getQueryProperties());
        assertEquals("fooValue", req.getProperty("foo"));
        assertTrue(req.<Boolean>getProperty("someBoolean"));
        // unknown property lookups yield null rather than throwing
        assertNull(req.getProperty("other"));
        // no additional select properties until some are registered
        assertTrue(req.getAdditionalSelectProperties().isEmpty());
    }

    /**
     * Additional select properties registered on the request should all be
     * visible through the accessor.
     */
    @Test
    public void testSelectProperties() {
        Map<String, Object> props = new HashMap<>();
        props.put("foo", "fooValue");
        props.put("someBoolean", true);
        Request req = new InstanceRequest(props);

        Collection<String> extraSelects = new ArrayList<>();
        extraSelects.add("prop1");
        extraSelects.add("prop2");
        req.addAdditionalSelectProperties(extraSelects);

        Collection<String> registered = req.getAdditionalSelectProperties();
        assertEquals(2, registered.size());
        assertTrue(registered.contains("prop1"));
        assertTrue(registered.contains("prop2"));
    }
}
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
* <p/>
* http://www.apache.org/licenses/LICENSE-2.0
* <p/>
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.atlas.catalog;
import org.testng.annotations.Test;
import javax.ws.rs.core.UriInfo;
import java.net.URI;
import java.util.*;
import static org.easymock.EasyMock.createStrictMock;
import static org.easymock.EasyMock.expect;
import static org.easymock.EasyMock.replay;
import static org.testng.Assert.assertEquals;
/**
* Unit tests for JsonSerializer.
*/
public class JsonSerializerTest {
@Test
public void testSerialize() throws Exception {
    // base URI used to expand relative href values in the output
    UriInfo uriInfo = createStrictMock(UriInfo.class);
    expect(uriInfo.getBaseUri()).andReturn(new URI("http://test.com:8080/"));
    replay(uriInfo);

    ResourceComparator resourceComparator = new ResourceComparator();
    Collection<Map<String, Object>> rows = new ArrayList<>();

    // first row: scalar, list, and nested-map properties plus an href
    Map<String, Object> row1 = new TreeMap<>(resourceComparator);
    rows.add(row1);
    row1.put("prop1", "property 1 value");
    row1.put("booleanProp", true);
    row1.put("numberProp", 100);
    row1.put("href", "v1/testResources/foo");

    ArrayList<String> simpleList = new ArrayList<>();
    simpleList.add("one");
    simpleList.add("two");
    row1.put("listProp", simpleList);

    Map<String, Object> nestedMap = new TreeMap<>(resourceComparator);
    nestedMap.put("mapProp1", "mapProp1Value");
    ArrayList<String> nestedList = new ArrayList<>();
    nestedList.add("mapListOne");
    nestedList.add("mapListTwo");
    nestedMap.put("mapListProp", nestedList);
    nestedMap.put("href", "v1/testResources/foobar");
    row1.put("mapProp", nestedMap);

    // second row: a null property and a list containing a map
    Map<String, Object> row2 = new TreeMap<>(resourceComparator);
    rows.add(row2);
    row2.put("nullProp", null);
    row2.put("href", "v1/testResources/bar");
    ArrayList<Map<String, Object>> mapList = new ArrayList<>();
    mapList.add(Collections.<String, Object>singletonMap("listMapProp", "listMapPropValue"));
    row2.put("listProp", mapList);

    String resultJson = new JsonSerializer().serialize(new Result(rows), uriInfo);
    assertEquals(resultJson, EXPECTED_JSON);
}
private static final String EXPECTED_JSON =
"[\n" +
" {\n" +
" \"href\": \"http://test.com:8080/v1/testResources/foo\",\n" +
" \"booleanProp\": true,\n" +
" \"numberProp\": 100,\n" +
" \"prop1\": \"property 1 value\",\n" +
" \"listProp\": [\n" +
" \"one\",\n" +
" \"two\"\n" +
" ],\n" +
" \"mapProp\": {\n" +
" \"href\": \"http://test.com:8080/v1/testResources/foobar\",\n" +
" \"mapProp1\": \"mapProp1Value\",\n" +
" \"mapListProp\": [\n" +
" \"mapListOne\",\n" +
" \"mapListTwo\"\n" +
" ]\n" +
" }\n" +
" },\n" +
" {\n" +
" \"href\": \"http://test.com:8080/v1/testResources/bar\",\n" +
" \"nullProp\": null,\n" +
" \"listProp\": [\n" +
" {\n" +
" \"listMapProp\": \"listMapPropValue\"\n" +
" }\n" +
" ]\n" +
" }\n" +
"]";
}
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
* <p/>
* http://www.apache.org/licenses/LICENSE-2.0
* <p/>
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.atlas.catalog;
import org.testng.annotations.Test;
import java.util.*;
import static org.testng.Assert.assertEquals;
/**
* Unit tests for ResourceComparator.
*/
/**
 * Unit tests for ResourceComparator.
 *
 * Ensures the comparator orders well-known resource properties
 * (href, name, id, description, type) first, in a fixed order,
 * followed by all remaining keys alphabetically.
 */
public class ResourceComparatorTest {
    @Test
    public void testCompare() {
        // Insert keys in a scrambled order; the backing TreeMap orders them
        // solely via ResourceComparator.
        Map<String, Object> map = new TreeMap<>(new ResourceComparator());
        map.put("a", "zzzzz");
        map.put("name", 1);
        map.put("z", "fdsfdsds");
        map.put("d", new ArrayList<>());
        map.put("id", 1);
        map.put("e", false);
        map.put("c", 1);
        map.put("href", "dfdfgdf");
        map.put("b", new HashMap<>());
        map.put("description", 1);
        map.put("f", 20);
        map.put("type", 1);

        // Well-known properties first in fixed order, then alphabetical.
        List<String> expectedOrder = Arrays.asList(
                "href", "name", "id", "description", "type",
                "a", "b", "c", "d", "e", "f", "z");
        assertEquals(new ArrayList<>(map.keySet()), expectedOrder);
    }
}
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
* <p/>
* http://www.apache.org/licenses/LICENSE-2.0
* <p/>
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.atlas.catalog;
import org.apache.atlas.catalog.definition.ResourceDefinition;
import org.apache.atlas.catalog.definition.TaxonomyResourceDefinition;
import org.apache.atlas.catalog.exception.InvalidPayloadException;
import org.apache.atlas.catalog.exception.ResourceAlreadyExistsException;
import org.apache.atlas.catalog.exception.ResourceNotFoundException;
import org.apache.atlas.catalog.query.AtlasQuery;
import org.apache.atlas.catalog.query.QueryFactory;
import org.easymock.Capture;
import org.testng.annotations.Test;
import java.util.*;
import static org.easymock.EasyMock.*;
import static org.testng.Assert.assertEquals;
import static org.testng.Assert.assertNull;
import static org.testng.Assert.assertTrue;
/**
* Unit tests for TaxonomyResourceProvider.
*/
/**
 * Unit tests for TaxonomyResourceProvider.
 *
 * Each test wires strict EasyMock mocks for the type system and query
 * machinery, then exercises one CRUD operation of the provider and
 * verifies both the returned result and the Request the provider
 * constructed internally.
 */
public class TaxonomyResourceProviderTest {
    @Test
    public void testGetResourceById() throws Exception {
        AtlasTypeSystem typeSystem = createStrictMock(AtlasTypeSystem.class);
        QueryFactory queryFactory = createStrictMock(QueryFactory.class);
        AtlasQuery query = createStrictMock(AtlasQuery.class);
        Capture<Request> requestCapture = newCapture();

        // canned query result representing a single taxonomy instance
        Collection<Map<String, Object>> queryResult = new ArrayList<>();
        Map<String, Object> queryResultRow = new HashMap<>();
        queryResult.add(queryResultRow);
        queryResultRow.put("name", "taxonomyName");
        queryResultRow.put("description", "test taxonomy description");
        queryResultRow.put("creation_time", "04/20/2016");

        // mock expectations
        expect(queryFactory.createTaxonomyQuery(capture(requestCapture))).andReturn(query);
        expect(query.execute()).andReturn(queryResult);
        replay(typeSystem, queryFactory, query);

        TaxonomyResourceProvider provider = new TestTaxonomyResourceProvider(typeSystem);
        provider.setQueryFactory(queryFactory);

        Map<String, Object> requestProperties = new HashMap<>();
        requestProperties.put("name", "taxonomyName");
        Request userRequest = new InstanceRequest(requestProperties);

        Result result = provider.getResourceById(userRequest);
        assertEquals(1, result.getPropertyMaps().size());
        assertEquals(queryResultRow, result.getPropertyMaps().iterator().next());

        // the provider should pass the user's query properties straight through
        Request request = requestCapture.getValue();
        assertNull(request.getQueryString());
        assertEquals(0, request.getAdditionalSelectProperties().size());
        assertEquals(requestProperties, request.getQueryProperties());

        verify(typeSystem, queryFactory, query);
    }

    @Test
    public void testGetResourceById_notInitialized_createDefaultTaxonomy() throws Exception {
        AtlasTypeSystem typeSystem = createStrictMock(AtlasTypeSystem.class);
        QueryFactory queryFactory = createStrictMock(QueryFactory.class);
        AtlasQuery query = createStrictMock(AtlasQuery.class);
        Capture<Request> checkForAnyTaxonomiesCapture = newCapture();
        Capture<Request> createDefaultTaxonomyRequestCapture = newCapture();
        Capture<Request> requestCapture = newCapture();
        Capture<ResourceDefinition> resourceDefinitionCapture = newCapture();

        Collection<Map<String, Object>> queryResult = new ArrayList<>();
        Map<String, Object> queryResultRow = new HashMap<>();
        queryResult.add(queryResultRow);
        queryResultRow.put("name", "taxonomyName");
        queryResultRow.put("description", "test taxonomy description");
        queryResultRow.put("creation_time", "04/20/2016");

        // mock expectations: the initial existence check finds no taxonomies,
        // so the provider creates the default taxonomy before servicing the
        // user request
        expect(queryFactory.createTaxonomyQuery(capture(checkForAnyTaxonomiesCapture))).andReturn(query);
        expect(query.execute()).andReturn(Collections.<Map<String, Object>>emptySet());
        expect(typeSystem.createEntity(capture(resourceDefinitionCapture), capture(createDefaultTaxonomyRequestCapture))).andReturn("testGuid");
        expect(queryFactory.createTaxonomyQuery(capture(requestCapture))).andReturn(query);
        expect(query.execute()).andReturn(queryResult);
        replay(typeSystem, queryFactory, query);

        TestTaxonomyResourceProvider provider = new TestTaxonomyResourceProvider(typeSystem);
        provider.setInitialized(false);
        provider.setQueryFactory(queryFactory);

        Map<String, Object> requestProperties = new HashMap<>();
        requestProperties.put("name", "taxonomyName");
        Request userRequest = new InstanceRequest(requestProperties);

        Result result = provider.getResourceById(userRequest);
        assertEquals(1, result.getPropertyMaps().size());
        assertEquals(queryResultRow, result.getPropertyMaps().iterator().next());

        Request request = requestCapture.getValue();
        assertNull(request.getQueryString());
        assertEquals(0, request.getAdditionalSelectProperties().size());
        assertEquals(requestProperties, request.getQueryProperties());

        // the existence check carries no query string or properties
        Request checkForAnyTaxonomiesRequest = checkForAnyTaxonomiesCapture.getValue();
        assertNull(checkForAnyTaxonomiesRequest.getQueryString());
        assertEquals(checkForAnyTaxonomiesRequest.getAdditionalSelectProperties().size(), 0);
        assertEquals(checkForAnyTaxonomiesRequest.getQueryProperties().size(), 0);

        // the auto-created taxonomy uses the provider's default name/description
        Request createDefaultTaxonomyRequest = createDefaultTaxonomyRequestCapture.getValue();
        assertNull(createDefaultTaxonomyRequest.getQueryString());
        assertEquals(createDefaultTaxonomyRequest.getAdditionalSelectProperties().size(), 0);
        assertEquals(createDefaultTaxonomyRequest.getQueryProperties().size(), 2);
        assertEquals(createDefaultTaxonomyRequest.getQueryProperties().get("name"),
                TaxonomyResourceProvider.DEFAULT_TAXONOMY_NAME);
        assertEquals(createDefaultTaxonomyRequest.getQueryProperties().get("description"),
                TaxonomyResourceProvider.DEFAULT_TAXONOMY_DESCRIPTION);

        verify(typeSystem, queryFactory, query);
    }

    @Test
    public void testGetResourceById_notInitialized_taxonomyAlreadyExists() throws Exception {
        AtlasTypeSystem typeSystem = createStrictMock(AtlasTypeSystem.class);
        QueryFactory queryFactory = createStrictMock(QueryFactory.class);
        AtlasQuery query = createStrictMock(AtlasQuery.class);
        Capture<Request> checkForAnyTaxonomiesCapture = newCapture();
        Capture<Request> requestCapture = newCapture();

        Collection<Map<String, Object>> queryResult = new ArrayList<>();
        Map<String, Object> queryResultRow = new HashMap<>();
        queryResult.add(queryResultRow);
        queryResultRow.put("name", "taxonomyName");
        queryResultRow.put("description", "test taxonomy description");
        queryResultRow.put("creation_time", "04/20/2016");

        // mock expectations: the existence check finds a taxonomy, so no
        // default taxonomy is created (typeSystem.createEntity is never expected)
        expect(queryFactory.createTaxonomyQuery(capture(checkForAnyTaxonomiesCapture))).andReturn(query);
        expect(query.execute()).andReturn(queryResult);
        expect(queryFactory.createTaxonomyQuery(capture(requestCapture))).andReturn(query);
        expect(query.execute()).andReturn(queryResult);
        replay(typeSystem, queryFactory, query);

        TestTaxonomyResourceProvider provider = new TestTaxonomyResourceProvider(typeSystem);
        provider.setInitialized(false);
        provider.setQueryFactory(queryFactory);

        Map<String, Object> requestProperties = new HashMap<>();
        requestProperties.put("name", "taxonomyName");
        Request userRequest = new InstanceRequest(requestProperties);

        Result result = provider.getResourceById(userRequest);
        assertEquals(1, result.getPropertyMaps().size());
        assertEquals(queryResultRow, result.getPropertyMaps().iterator().next());

        Request request = requestCapture.getValue();
        assertNull(request.getQueryString());
        assertEquals(0, request.getAdditionalSelectProperties().size());
        assertEquals(requestProperties, request.getQueryProperties());

        verify(typeSystem, queryFactory, query);
    }

    @Test(expectedExceptions = ResourceNotFoundException.class)
    public void testGetResourceById_404() throws Exception {
        AtlasTypeSystem typeSystem = createStrictMock(AtlasTypeSystem.class);
        QueryFactory queryFactory = createStrictMock(QueryFactory.class);
        AtlasQuery query = createStrictMock(AtlasQuery.class);
        Capture<Request> requestCapture = newCapture();

        // empty response should result in a ResourceNotFoundException
        Collection<Map<String, Object>> emptyResponse = new ArrayList<>();

        // mock expectations
        expect(queryFactory.createTaxonomyQuery(capture(requestCapture))).andReturn(query);
        expect(query.execute()).andReturn(emptyResponse);
        replay(typeSystem, queryFactory, query);

        TaxonomyResourceProvider provider = new TestTaxonomyResourceProvider(typeSystem);
        provider.setQueryFactory(queryFactory);

        Map<String, Object> requestProperties = new HashMap<>();
        requestProperties.put("name", "taxonomyName");
        Request request = new InstanceRequest(requestProperties);

        provider.getResourceById(request);
        verify(typeSystem, queryFactory, query);
    }

    @Test
    public void testGetResources() throws Exception {
        AtlasTypeSystem typeSystem = createStrictMock(AtlasTypeSystem.class);
        QueryFactory queryFactory = createStrictMock(QueryFactory.class);
        AtlasQuery query = createStrictMock(AtlasQuery.class);
        Capture<Request> requestCapture = newCapture();

        // two canned taxonomy rows (key fixed: was previously misspelled "mame")
        Collection<Map<String, Object>> queryResult = new ArrayList<>();
        Map<String, Object> queryResultRow1 = new HashMap<>();
        queryResult.add(queryResultRow1);
        queryResultRow1.put("name", "taxonomyName1");
        queryResultRow1.put("description", "test taxonomy description");
        queryResultRow1.put("creation_time", "04/20/2016");

        Map<String, Object> queryResultRow2 = new HashMap<>();
        queryResult.add(queryResultRow2);
        queryResultRow2.put("name", "taxonomyName2");
        queryResultRow2.put("description", "test taxonomy description 2");
        queryResultRow2.put("creation_time", "04/21/2016");

        // mock expectations
        expect(queryFactory.createTaxonomyQuery(capture(requestCapture))).andReturn(query);
        expect(query.execute()).andReturn(queryResult);
        replay(typeSystem, queryFactory, query);

        TaxonomyResourceProvider provider = new TestTaxonomyResourceProvider(typeSystem);
        provider.setQueryFactory(queryFactory);

        Request userRequest = new CollectionRequest(Collections.<String, Object>emptyMap(), "name:taxonomy*");
        Result result = provider.getResources(userRequest);
        assertEquals(2, result.getPropertyMaps().size());
        assertTrue(result.getPropertyMaps().contains(queryResultRow1));
        assertTrue(result.getPropertyMaps().contains(queryResultRow2));

        // collection queries propagate the query string, not query properties
        Request request = requestCapture.getValue();
        assertEquals("name:taxonomy*", request.getQueryString());
        assertEquals(0, request.getAdditionalSelectProperties().size());
        assertEquals(0, request.getQueryProperties().size());

        verify(typeSystem, queryFactory, query);
    }

    @Test
    public void testGetResources_noResults() throws Exception {
        AtlasTypeSystem typeSystem = createStrictMock(AtlasTypeSystem.class);
        QueryFactory queryFactory = createStrictMock(QueryFactory.class);
        AtlasQuery query = createStrictMock(AtlasQuery.class);
        Capture<Request> requestCapture = newCapture();

        // empty result shouldn't result in exception for collection query
        Collection<Map<String, Object>> queryResult = new ArrayList<>();

        // mock expectations
        expect(queryFactory.createTaxonomyQuery(capture(requestCapture))).andReturn(query);
        expect(query.execute()).andReturn(queryResult);
        replay(typeSystem, queryFactory, query);

        TaxonomyResourceProvider provider = new TestTaxonomyResourceProvider(typeSystem);
        provider.setQueryFactory(queryFactory);

        Request userRequest = new CollectionRequest(Collections.<String, Object>emptyMap(), "name:taxonomy*");
        Result result = provider.getResources(userRequest);
        assertEquals(0, result.getPropertyMaps().size());

        Request request = requestCapture.getValue();
        assertEquals("name:taxonomy*", request.getQueryString());
        assertEquals(0, request.getAdditionalSelectProperties().size());
        assertEquals(0, request.getQueryProperties().size());

        verify(typeSystem, queryFactory, query);
    }

    @Test(expectedExceptions = InvalidPayloadException.class)
    public void testCreateResource_invalidRequest__noName() throws Exception {
        AtlasTypeSystem typeSystem = createStrictMock(AtlasTypeSystem.class);
        QueryFactory queryFactory = createStrictMock(QueryFactory.class);
        AtlasQuery query = createStrictMock(AtlasQuery.class);

        // mock expectations: nothing should be invoked before validation fails
        replay(typeSystem, queryFactory, query);

        // taxonomy create request must contain 'name' property
        Map<String, Object> requestProperties = new HashMap<>();
        requestProperties.put("description", "test");
        Request userRequest = new InstanceRequest(requestProperties);

        TaxonomyResourceProvider provider = new TestTaxonomyResourceProvider(typeSystem);
        provider.setQueryFactory(queryFactory);
        provider.createResource(userRequest);
    }

    @Test(expectedExceptions = ResourceAlreadyExistsException.class)
    public void testCreateResource_invalidRequest__alreadyExists() throws Exception {
        AtlasTypeSystem typeSystem = createStrictMock(AtlasTypeSystem.class);
        QueryFactory queryFactory = createStrictMock(QueryFactory.class);
        AtlasQuery query = createStrictMock(AtlasQuery.class);
        Capture<Request> requestCapture = newCapture();

        // query is executed to see if resource already exists
        // (key fixed: was previously misspelled "mame")
        Collection<Map<String, Object>> queryResult = new ArrayList<>();
        Map<String, Object> queryResultRow = new HashMap<>();
        queryResult.add(queryResultRow);
        queryResultRow.put("name", "taxonomyName");
        queryResultRow.put("description", "test taxonomy description");
        queryResultRow.put("creation_time", "04/20/2016");

        // mock expectations
        expect(queryFactory.createTaxonomyQuery(capture(requestCapture))).andReturn(query);
        // returning result for query should result in ResourceAlreadyExistsException
        expect(query.execute()).andReturn(queryResult);
        replay(typeSystem, queryFactory, query);

        // taxonomy create request must contain 'name' property
        Map<String, Object> requestProperties = new HashMap<>();
        requestProperties.put("name", "taxonomyName");
        Request userRequest = new InstanceRequest(requestProperties);

        TaxonomyResourceProvider provider = new TestTaxonomyResourceProvider(typeSystem);
        provider.setQueryFactory(queryFactory);
        provider.createResource(userRequest);
    }

    @Test
    public void testCreateResource() throws Exception {
        AtlasTypeSystem typeSystem = createStrictMock(AtlasTypeSystem.class);
        QueryFactory queryFactory = createStrictMock(QueryFactory.class);
        AtlasQuery query = createStrictMock(AtlasQuery.class);
        Capture<ResourceDefinition> resourceDefinitionCapture = newCapture();
        Capture<Request> requestCapture = newCapture();

        // empty response indicates that resource doesn't already exist
        Collection<Map<String, Object>> queryResult = new ArrayList<>();

        // mock expectations: requestCapture is intentionally reused, so after the
        // test it holds the request passed to createEntity (the last capture)
        expect(queryFactory.createTaxonomyQuery(capture(requestCapture))).andReturn(query);
        expect(query.execute()).andReturn(queryResult);
        expect(typeSystem.createEntity(capture(resourceDefinitionCapture), capture(requestCapture))).andReturn("testGuid");
        replay(typeSystem, queryFactory, query);

        Map<String, Object> requestProperties = new HashMap<>();
        requestProperties.put("name", "taxonomyName");
        Request userRequest = new InstanceRequest(requestProperties);

        TaxonomyResourceProvider provider = new TestTaxonomyResourceProvider(typeSystem);
        provider.setQueryFactory(queryFactory);
        provider.createResource(userRequest);

        assertEquals(new TaxonomyResourceDefinition().getTypeName(),
                resourceDefinitionCapture.getValue().getTypeName());

        Request request = requestCapture.getValue();
        assertNull(request.getQueryString());
        assertEquals(requestProperties, request.getQueryProperties());

        verify(typeSystem, queryFactory, query);
    }

    @Test(expectedExceptions = UnsupportedOperationException.class)
    public void testCreateResources() throws Exception {
        AtlasTypeSystem typeSystem = createStrictMock(AtlasTypeSystem.class);
        QueryFactory queryFactory = createStrictMock(QueryFactory.class);

        // mock expectations: bulk create is unsupported, so nothing is invoked
        replay(typeSystem, queryFactory);

        Map<String, Object> requestProperties = new HashMap<>();
        requestProperties.put("name", "taxonomyName");
        Request userRequest = new InstanceRequest(requestProperties);

        TaxonomyResourceProvider provider = new TestTaxonomyResourceProvider(typeSystem);
        provider.setQueryFactory(queryFactory);
        provider.createResources(userRequest);
    }

    @Test
    public void testDeleteResourceById() throws Exception {
        TermResourceProvider termResourceProvider = createStrictMock(TermResourceProvider.class);
        AtlasTypeSystem typeSystem = createStrictMock(AtlasTypeSystem.class);
        QueryFactory queryFactory = createStrictMock(QueryFactory.class);
        AtlasQuery query = createStrictMock(AtlasQuery.class);
        Capture<Request> getRequestCapture = newCapture();
        Capture<TermPath> termPathCapture = newCapture();
        Capture<ResourceDefinition> resourceDefinitionCapture = newCapture();
        Capture<Request> deleteRequestCapture = newCapture();

        Collection<Map<String, Object>> queryResult = new ArrayList<>();
        Map<String, Object> queryResultRow = new HashMap<>();
        queryResult.add(queryResultRow);
        queryResultRow.put("name", "testTaxonomy");
        queryResultRow.put("id", "111-222-333");

        // mock expectations: the taxonomy is looked up, its child terms are
        // deleted first, then the taxonomy entity itself is deleted
        expect(queryFactory.createTaxonomyQuery(capture(getRequestCapture))).andReturn(query);
        expect(query.execute()).andReturn(queryResult);
        termResourceProvider.deleteChildren(eq("111-222-333"), capture(termPathCapture));
        typeSystem.deleteEntity(capture(resourceDefinitionCapture), capture(deleteRequestCapture));
        replay(termResourceProvider, typeSystem, queryFactory, query);

        TaxonomyResourceProvider provider = new TestTaxonomyResourceProvider(typeSystem, termResourceProvider);
        provider.setQueryFactory(queryFactory);

        Map<String, Object> requestProperties = new HashMap<>();
        requestProperties.put("name", "testTaxonomy");
        Request userRequest = new InstanceRequest(requestProperties);

        // invoke method being tested
        provider.deleteResourceById(userRequest);

        // both the lookup and the delete should select the taxonomy id
        Request getRequest = getRequestCapture.getValue();
        assertNull(getRequest.getQueryString());
        assertEquals(getRequest.getAdditionalSelectProperties().size(), 1);
        assertTrue(getRequest.getAdditionalSelectProperties().contains("id"));
        assertEquals(getRequest.getQueryProperties().get("name"), "testTaxonomy");

        Request deleteRequest = deleteRequestCapture.getValue();
        assertNull(deleteRequest.getQueryString());
        assertEquals(deleteRequest.getAdditionalSelectProperties().size(), 1);
        assertTrue(deleteRequest.getAdditionalSelectProperties().contains("id"));
        assertEquals(deleteRequest.getQueryProperties().get("name"), "testTaxonomy");

        ResourceDefinition resourceDefinition = resourceDefinitionCapture.getValue();
        assertTrue(resourceDefinition instanceof TaxonomyResourceDefinition);

        verify(termResourceProvider, typeSystem, queryFactory, query);
    }

    @Test(expectedExceptions = ResourceNotFoundException.class)
    public void testDeleteResourceById_404() throws Exception {
        AtlasTypeSystem typeSystem = createStrictMock(AtlasTypeSystem.class);
        QueryFactory queryFactory = createStrictMock(QueryFactory.class);
        AtlasQuery query = createStrictMock(AtlasQuery.class);
        Capture<Request> getRequestCapture = newCapture();

        // mock expectations: the lookup itself fails with not-found
        expect(queryFactory.createTaxonomyQuery(capture(getRequestCapture))).andReturn(query);
        expect(query.execute()).andThrow(new ResourceNotFoundException("test msg"));
        replay(typeSystem, queryFactory, query);

        TaxonomyResourceProvider provider = new TestTaxonomyResourceProvider(typeSystem, null);
        provider.setQueryFactory(queryFactory);

        Map<String, Object> requestProperties = new HashMap<>();
        requestProperties.put("name", "badName");
        Request userRequest = new InstanceRequest(requestProperties);

        // invoke method being tested
        provider.deleteResourceById(userRequest);
    }

    @Test
    public void testUpdateResourceById() throws Exception {
        AtlasTypeSystem typeSystem = createStrictMock(AtlasTypeSystem.class);
        QueryFactory queryFactory = createStrictMock(QueryFactory.class);
        AtlasQuery query = createStrictMock(AtlasQuery.class);
        Capture<Request> taxonomyRequestCapture = newCapture();

        Map<String, Object> requestProperties = new HashMap<>();
        requestProperties.put("name", "testTaxonomy");
        Map<String, Object> requestUpdateProperties = new HashMap<>();
        requestUpdateProperties.put("description", "updatedValue");
        Request userRequest = new InstanceRequest(requestProperties, requestUpdateProperties);

        Collection<Map<String, Object>> queryResult = new ArrayList<>();
        Map<String, Object> queryResultRow = new HashMap<>();
        queryResult.add(queryResultRow);
        queryResultRow.put("name", "testTaxonomy");

        // mock expectations
        // taxonomy update
        expect(queryFactory.createTaxonomyQuery(capture(taxonomyRequestCapture))).andReturn(query);
        expect(query.execute(requestUpdateProperties)).andReturn(queryResult);
        replay(typeSystem, queryFactory, query);

        // instantiate resource provider and invoke method being tested
        TaxonomyResourceProvider provider = new TestTaxonomyResourceProvider(typeSystem);
        provider.setQueryFactory(queryFactory);
        provider.updateResourceById(userRequest);

        Request request = taxonomyRequestCapture.getValue();
        assertNull(request.getQueryString());
        assertEquals(request.getQueryProperties().size(), 1);
        assertEquals(request.getQueryProperties().get("name"), "testTaxonomy");
        assertEquals(request.getUpdateProperties().size(), 1);
        assertEquals(request.getUpdateProperties().get("description"), "updatedValue");

        verify(typeSystem, queryFactory, query);
    }

    @Test(expectedExceptions = InvalidPayloadException.class)
    public void testUpdateResourceById_attemptNameChange() throws Exception {
        AtlasTypeSystem typeSystem = createStrictMock(AtlasTypeSystem.class);
        QueryFactory queryFactory = createStrictMock(QueryFactory.class);
        AtlasQuery query = createStrictMock(AtlasQuery.class);
        Capture<Request> taxonomyRequestCapture = newCapture();

        Map<String, Object> requestProperties = new HashMap<>();
        requestProperties.put("name", "testTaxonomy");
        // renaming a taxonomy is not currently supported and must be rejected
        Map<String, Object> requestUpdateProperties = new HashMap<>();
        requestUpdateProperties.put("name", "notCurrentlySupported");
        Request userRequest = new InstanceRequest(requestProperties, requestUpdateProperties);

        Collection<Map<String, Object>> queryResult = new ArrayList<>();
        Map<String, Object> queryResultRow = new HashMap<>();
        queryResult.add(queryResultRow);
        queryResultRow.put("name", "testTaxonomy");

        // mock expectations
        // taxonomy update
        expect(queryFactory.createTaxonomyQuery(capture(taxonomyRequestCapture))).andReturn(query);
        expect(query.execute(requestUpdateProperties)).andReturn(queryResult);
        replay(typeSystem, queryFactory, query);

        // instantiate resource provider and invoke method being tested
        TaxonomyResourceProvider provider = new TestTaxonomyResourceProvider(typeSystem);
        provider.setQueryFactory(queryFactory);
        provider.updateResourceById(userRequest);

        verify(typeSystem, queryFactory, query);
    }

    @Test(expectedExceptions = ResourceNotFoundException.class)
    public void testUpdateResourceById_404() throws Exception {
        AtlasTypeSystem typeSystem = createStrictMock(AtlasTypeSystem.class);
        QueryFactory queryFactory = createStrictMock(QueryFactory.class);
        AtlasQuery query = createStrictMock(AtlasQuery.class);
        Capture<Request> taxonomyRequestCapture = newCapture();

        Map<String, Object> requestProperties = new HashMap<>();
        requestProperties.put("name", "testTaxonomy");
        Map<String, Object> requestUpdateProperties = new HashMap<>();
        requestUpdateProperties.put("description", "updated");
        Request userRequest = new InstanceRequest(requestProperties, requestUpdateProperties);

        // mock expectations
        // taxonomy update: no rows updated means the taxonomy doesn't exist
        expect(queryFactory.createTaxonomyQuery(capture(taxonomyRequestCapture))).andReturn(query);
        expect(query.execute(requestUpdateProperties)).andReturn(Collections.<Map<String, Object>>emptyList());
        replay(typeSystem, queryFactory, query);

        // instantiate resource provider and invoke method being tested
        TaxonomyResourceProvider provider = new TestTaxonomyResourceProvider(typeSystem);
        provider.setQueryFactory(queryFactory);
        provider.updateResourceById(userRequest);

        verify(typeSystem, queryFactory, query);
    }

    /**
     * Test subclass that lets tests control auto-initialization and supply
     * a mock TermResourceProvider instead of the real one.
     */
    private static class TestTaxonomyResourceProvider extends TaxonomyResourceProvider {
        private final TermResourceProvider termResourceProvider;
        // true by default so most tests skip the default-taxonomy bootstrap
        private boolean isInitialized = true;

        public TestTaxonomyResourceProvider(AtlasTypeSystem typeSystem) {
            this(typeSystem, null);
        }

        public TestTaxonomyResourceProvider(AtlasTypeSystem typeSystem, TermResourceProvider termResourceProvider) {
            super(typeSystem);
            this.termResourceProvider = termResourceProvider;
        }

        public void setInitialized(boolean isInitialized) {
            this.isInitialized = isInitialized;
        }

        @Override
        protected synchronized TermResourceProvider getTermResourceProvider() {
            return termResourceProvider;
        }

        @Override
        protected boolean autoInitializationChecked() {
            return isInitialized;
        }
    }
}
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
* <p/>
* http://www.apache.org/licenses/LICENSE-2.0
* <p/>
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.atlas.catalog;
import org.apache.atlas.catalog.definition.ResourceDefinition;
import org.apache.atlas.catalog.exception.InvalidPayloadException;
import org.apache.atlas.catalog.exception.ResourceAlreadyExistsException;
import org.apache.atlas.catalog.exception.ResourceNotFoundException;
import org.apache.atlas.catalog.query.AtlasQuery;
import org.apache.atlas.catalog.query.QueryFactory;
import org.easymock.Capture;
import org.easymock.EasyMock;
import org.testng.annotations.Test;
import java.util.*;
import static org.easymock.EasyMock.*;
import static org.testng.Assert.assertEquals;
import static org.testng.Assert.assertNull;
import static org.testng.Assert.assertTrue;
/**
* Unit tests for TermResourceProvider.
*/
public class TermResourceProviderTest {
@Test
public void testGetResourceById() throws Exception {
    // strict mocks for the type system and query machinery
    AtlasTypeSystem typeSystem = createStrictMock(AtlasTypeSystem.class);
    QueryFactory queryFactory = createStrictMock(QueryFactory.class);
    AtlasQuery query = createStrictMock(AtlasQuery.class);
    Capture<Request> requestCapture = newCapture();

    TermPath termPath = new TermPath("testTaxonomy", "termName");

    // single canned term row, including its nested hierarchy sub-map
    Map<String, Object> row = new HashMap<>();
    row.put("name", "testTaxonomy.termName");
    row.put("description", "test term description");
    row.put("creation_time", "04/20/2016");
    row.put("acceptable_use", "anything");
    row.put("available_as_tag", true);

    Map<String, Object> hierarchy = new HashMap<>();
    hierarchy.put("path", "/");
    hierarchy.put("short_name", "termName");
    hierarchy.put("taxonomy", "testTaxonomy");
    row.put("hierarchy", hierarchy);

    Collection<Map<String, Object>> rows = new ArrayList<>();
    rows.add(row);

    // mock expectations
    expect(queryFactory.createTermQuery(capture(requestCapture))).andReturn(query);
    expect(query.execute()).andReturn(rows);
    replay(typeSystem, queryFactory, query);

    TermResourceProvider provider = new TermResourceProvider(typeSystem);
    provider.setQueryFactory(queryFactory);

    Map<String, Object> properties = new HashMap<>();
    properties.put("termPath", termPath);
    Result result = provider.getResourceById(new InstanceRequest(properties));

    assertEquals(result.getPropertyMaps().size(), 1);
    assertEquals(result.getPropertyMaps().iterator().next(), row);

    // the provider adds the fully-qualified term name alongside the term path
    Request captured = requestCapture.getValue();
    assertNull(captured.getQueryString());
    assertEquals(captured.getAdditionalSelectProperties().size(), 0);
    assertEquals(captured.getQueryProperties().size(), 2);
    assertEquals(captured.getQueryProperties().get("termPath"), termPath);
    assertEquals(captured.getQueryProperties().get("name"), termPath.getFullyQualifiedName());

    verify(typeSystem, queryFactory, query);
}
@Test(expectedExceptions = ResourceNotFoundException.class)
public void testGetResourceById_404() throws Exception {
    // strict mocks: the provider should issue exactly one term query
    AtlasTypeSystem mockTypeSystem = createStrictMock(AtlasTypeSystem.class);
    QueryFactory mockQueryFactory = createStrictMock(QueryFactory.class);
    AtlasQuery mockQuery = createStrictMock(AtlasQuery.class);
    Capture<Request> capturedRequest = newCapture();
    // an empty query response must surface as a ResourceNotFoundException
    Collection<Map<String, Object>> noRows = new ArrayList<>();
    expect(mockQueryFactory.createTermQuery(capture(capturedRequest))).andReturn(mockQuery);
    expect(mockQuery.execute()).andReturn(noRows);
    replay(mockTypeSystem, mockQueryFactory, mockQuery);
    TermResourceProvider provider = new TermResourceProvider(mockTypeSystem);
    provider.setQueryFactory(mockQueryFactory);
    Map<String, Object> props = new HashMap<>();
    props.put("termPath", new TermPath("taxonomyName.badTermName"));
    // expected to throw; the annotation asserts the exception type
    provider.getResourceById(new InstanceRequest(props));
}
@Test
/**
 * Verifies that getResources() executes a term collection query and returns
 * every matching row, forwarding the user's query string unchanged.
 */
public void testGetResources() throws Exception {
    AtlasTypeSystem typeSystem = createStrictMock(AtlasTypeSystem.class);
    QueryFactory queryFactory = createStrictMock(QueryFactory.class);
    AtlasQuery query = createStrictMock(AtlasQuery.class);
    Capture<Request> requestCapture = newCapture();
    // null term name == collection-level path (all terms of the taxonomy)
    TermPath termPath = new TermPath("testTaxonomy", null);
    // canned two-row query result
    Collection<Map<String, Object>> queryResult = new ArrayList<>();
    Map<String, Object> queryResultRow1 = new HashMap<>();
    queryResult.add(queryResultRow1);
    queryResultRow1.put("name", "testTaxonomy.termName");
    queryResultRow1.put("description", "test term description");
    queryResultRow1.put("creation_time", "04/20/2016");
    queryResultRow1.put("acceptable_use", "anything");
    queryResultRow1.put("available_as_tag", true);
    Map<String, Object> hierarchyMap = new HashMap<>();
    queryResultRow1.put("hierarchy", hierarchyMap);
    hierarchyMap.put("path", "/");
    hierarchyMap.put("short_name", "termName");
    hierarchyMap.put("taxonomy", "testTaxonomy");
    Map<String, Object> queryResultRow2 = new HashMap<>();
    queryResult.add(queryResultRow2);
    queryResultRow2.put("name", "testTaxonomy.termName2");
    queryResultRow2.put("description", "test term 2 description");
    queryResultRow2.put("creation_time", "04/21/2016");
    queryResultRow2.put("acceptable_use", "anything");
    queryResultRow2.put("available_as_tag", true);
    Map<String, Object> hierarchyMap2 = new HashMap<>();
    queryResultRow2.put("hierarchy", hierarchyMap2);
    hierarchyMap2.put("path", "/");
    hierarchyMap2.put("short_name", "termName2");
    hierarchyMap2.put("taxonomy", "testTaxonomy");
    // mock expectations
    expect(queryFactory.createTermQuery(capture(requestCapture))).andReturn(query);
    expect(query.execute()).andReturn(queryResult);
    replay(typeSystem, queryFactory, query);
    TermResourceProvider provider = new TermResourceProvider(typeSystem);
    provider.setQueryFactory(queryFactory);
    Map<String, Object> requestProperties = new HashMap<>();
    requestProperties.put("termPath", termPath);
    Request userRequest = new CollectionRequest(requestProperties, "name:taxonomy*");
    // invoke test method
    Result result = provider.getResources(userRequest);
    // both rows should come back unchanged
    assertEquals(result.getPropertyMaps().size(), 2);
    assertTrue(result.getPropertyMaps().contains(queryResultRow1));
    assertTrue(result.getPropertyMaps().contains(queryResultRow2));
    // user query string must be forwarded verbatim to the term query
    Request request = requestCapture.getValue();
    assertEquals(request.getQueryString(), "name:taxonomy*");
    assertEquals(request.getAdditionalSelectProperties().size(), 0);
    assertEquals(request.getQueryProperties().size(), 1);
    verify(typeSystem, queryFactory, query);
}
@Test
public void testGetResources_noResults() throws Exception {
    AtlasTypeSystem mockTypeSystem = createStrictMock(AtlasTypeSystem.class);
    QueryFactory mockQueryFactory = createStrictMock(QueryFactory.class);
    AtlasQuery mockQuery = createStrictMock(AtlasQuery.class);
    Capture<Request> capturedRequest = newCapture();
    // unlike getResourceById(), a collection query with no rows is not an error
    Collection<Map<String, Object>> emptyRows = new ArrayList<>();
    expect(mockQueryFactory.createTermQuery(capture(capturedRequest))).andReturn(mockQuery);
    expect(mockQuery.execute()).andReturn(emptyRows);
    replay(mockTypeSystem, mockQueryFactory, mockQuery);
    TermResourceProvider provider = new TermResourceProvider(mockTypeSystem);
    provider.setQueryFactory(mockQueryFactory);
    Map<String, Object> props = new HashMap<>();
    props.put("termPath", new TermPath("testTaxonomy", "termName"));
    Request collectionRequest = new CollectionRequest(props, "name:taxonomy*");
    // invoke test method: expect an empty (non-null) result
    Result result = provider.getResources(collectionRequest);
    assertTrue(result.getPropertyMaps().isEmpty());
    // the user's query string and term path should be forwarded as-is
    Request issuedRequest = capturedRequest.getValue();
    assertEquals(issuedRequest.getQueryString(), "name:taxonomy*");
    assertEquals(issuedRequest.getAdditionalSelectProperties().size(), 0);
    assertEquals(issuedRequest.getQueryProperties().size(), 1);
    verify(mockTypeSystem, mockQueryFactory, mockQuery);
}
@Test(expectedExceptions = InvalidPayloadException.class)
public void testCreateResource_invalidRequest__noName() throws Exception {
    AtlasTypeSystem mockTypeSystem = createStrictMock(AtlasTypeSystem.class);
    QueryFactory mockQueryFactory = createStrictMock(QueryFactory.class);
    AtlasQuery mockQuery = createStrictMock(AtlasQuery.class);
    // no expectations recorded: validation must fail before any collaborator is used
    replay(mockTypeSystem, mockQueryFactory, mockQuery);
    // a term path without a term name is an invalid create payload
    TermPath namelessTermPath = new TermPath("testTaxonomy", null);
    Map<String, Object> props = new HashMap<>();
    props.put("termPath", namelessTermPath);
    TermResourceProvider provider = new TermResourceProvider(mockTypeSystem);
    provider.setQueryFactory(mockQueryFactory);
    // expected to throw InvalidPayloadException
    provider.createResource(new InstanceRequest(props));
}
@Test
/**
 * Verifies the create path: the taxonomy id is looked up via the taxonomy
 * resource provider, a trait type is created for the fully-qualified term name,
 * and a trait instance is attached to the taxonomy entity.
 */
public void testCreateResource() throws Exception {
    AtlasTypeSystem typeSystem = createStrictMock(AtlasTypeSystem.class);
    QueryFactory queryFactory = createStrictMock(QueryFactory.class);
    AtlasQuery query = createStrictMock(AtlasQuery.class);
    Capture<ResourceDefinition> resourceDefinitionCapture = newCapture();
    ResourceProvider taxonomyResourceProvider = createStrictMock(TaxonomyResourceProvider.class);
    Capture<Request> taxonomyRequestCapture = newCapture();
    // canned taxonomy lookup result providing the taxonomy entity id
    Collection<Map<String, Object>> taxonomyQueryResult = new ArrayList<>();
    Map<String, Object> taxonomyQueryResultRow = new HashMap<>();
    taxonomyQueryResult.add(taxonomyQueryResultRow);
    taxonomyQueryResultRow.put("name", "testTaxonomy");
    taxonomyQueryResultRow.put("id", "11-22-33");
    Result taxonomyResult = new Result(taxonomyQueryResult);
    Map<String, Object> expectedRequestProps = new HashMap<>();
    expectedRequestProps.put("name", "testTaxonomy.termName");
    // when not specified, the default value of 'true' should be set
    expectedRequestProps.put("available_as_tag", true);
    // mock expectations (strict: lookup, then trait type, then trait instance)
    expect(taxonomyResourceProvider.getResourceById(capture(taxonomyRequestCapture))).andReturn(taxonomyResult);
    typeSystem.createTraitType(capture(resourceDefinitionCapture), eq("testTaxonomy.termName"), EasyMock.<String>isNull());
    typeSystem.createTraitInstance("11-22-33", "testTaxonomy.termName", expectedRequestProps);
    replay(typeSystem, queryFactory, query, taxonomyResourceProvider);
    // test subclass injects the mocked taxonomy provider
    TermResourceProvider provider = new TestTermResourceProvider(typeSystem, taxonomyResourceProvider);
    provider.setQueryFactory(queryFactory);
    TermPath termPath = new TermPath("testTaxonomy", "termName");
    Map<String, Object> requestProperties = new HashMap<>();
    requestProperties.put("termPath", termPath);
    Request userRequest = new InstanceRequest(requestProperties);
    // invoke test method
    provider.createResource(userRequest);
    // taxonomy lookup should be by name, selecting only the id
    Request taxonomyRequest = taxonomyRequestCapture.getValue();
    Map<String, Object> taxonomyRequestProps = taxonomyRequest.getQueryProperties();
    assertEquals(taxonomyRequestProps.size(), 1);
    assertEquals(taxonomyRequestProps.get("name"), "testTaxonomy");
    assertEquals(taxonomyRequest.getAdditionalSelectProperties().size(), 1);
    assertEquals(taxonomyRequest.getAdditionalSelectProperties().iterator().next(), "id");
    assertNull(taxonomyRequest.getQueryString());
    // trait type should be created from the Term resource definition
    ResourceDefinition resourceDefinition = resourceDefinitionCapture.getValue();
    assertEquals(resourceDefinition.getTypeName(), "Term");
    verify(typeSystem, queryFactory, query, taxonomyResourceProvider);
}
@Test(expectedExceptions = ResourceAlreadyExistsException.class)
/**
 * Verifies that a ResourceAlreadyExistsException raised while creating the
 * trait type propagates out of createResource() unchanged.
 */
public void testCreateResource_invalidRequest__alreadyExists() throws Exception {
    AtlasTypeSystem typeSystem = createStrictMock(AtlasTypeSystem.class);
    QueryFactory queryFactory = createStrictMock(QueryFactory.class);
    AtlasQuery query = createStrictMock(AtlasQuery.class);
    Capture<ResourceDefinition> resourceDefinitionCapture = newCapture();
    ResourceProvider taxonomyResourceProvider = createStrictMock(TaxonomyResourceProvider.class);
    Capture<Request> taxonomyRequestCapture = newCapture();
    // canned taxonomy lookup result; the failure happens after this lookup
    Collection<Map<String, Object>> taxonomyQueryResult = new ArrayList<>();
    Map<String, Object> taxonomyQueryResultRow = new HashMap<>();
    taxonomyQueryResult.add(taxonomyQueryResultRow);
    taxonomyQueryResultRow.put("name", "testTaxonomy");
    taxonomyQueryResultRow.put("id", "11-22-33");
    Result taxonomyResult = new Result(taxonomyQueryResult);
    // mock expectations: trait type creation throws "already exists"
    expect(taxonomyResourceProvider.getResourceById(capture(taxonomyRequestCapture))).andReturn(taxonomyResult);
    typeSystem.createTraitType(capture(resourceDefinitionCapture), eq("testTaxonomy.termName"), EasyMock.<String>isNull());
    expectLastCall().andThrow(new ResourceAlreadyExistsException(""));
    replay(typeSystem, queryFactory, query, taxonomyResourceProvider);
    TermResourceProvider provider = new TestTermResourceProvider(typeSystem, taxonomyResourceProvider);
    provider.setQueryFactory(queryFactory);
    TermPath termPath = new TermPath("testTaxonomy", "termName");
    Map<String, Object> requestProperties = new HashMap<>();
    requestProperties.put("termPath", termPath);
    Request userRequest = new InstanceRequest(requestProperties);
    // expected to throw; the annotation asserts the exception type
    provider.createResource(userRequest);
}
@Test(expectedExceptions = UnsupportedOperationException.class)
public void testCreateResources() throws Exception {
    AtlasTypeSystem mockTypeSystem = createStrictMock(AtlasTypeSystem.class);
    QueryFactory mockQueryFactory = createStrictMock(QueryFactory.class);
    // no expectations recorded: bulk create is rejected before touching collaborators
    replay(mockTypeSystem, mockQueryFactory);
    Map<String, Object> props = new HashMap<>();
    props.put("termPath", new TermPath("testTaxonomy", "termName"));
    TermResourceProvider provider = new TermResourceProvider(mockTypeSystem);
    provider.setQueryFactory(mockQueryFactory);
    // bulk creation of terms is unsupported; expect UnsupportedOperationException
    provider.createResources(new InstanceRequest(props));
}
@Test
/**
 * Verifies cascading delete of a root term: child terms are enumerated first,
 * each child's entity tag associations are deleted, each child is removed from
 * the taxonomy, and finally the root term itself is deleted.
 */
public void testDeleteResourceById() throws Exception {
    ResourceProvider taxonomyResourceProvider = createStrictMock(ResourceProvider.class);
    ResourceProvider entityResourceProvider = createStrictMock(ResourceProvider.class);
    ResourceProvider entityTagResourceProvider = createStrictMock(ResourceProvider.class);
    AtlasTypeSystem typeSystem = createStrictMock(AtlasTypeSystem.class);
    QueryFactory queryFactory = createStrictMock(QueryFactory.class);
    AtlasQuery query = createStrictMock(AtlasQuery.class);
    Capture<Request> taxonomyRequestCapture = newCapture();
    Capture<Request> termRequestCapture = newCapture();
    // root term being deleted
    TermPath termPath = new TermPath("testTaxonomy.termName");
    // entity requests to get id's of entities tagged with terms
    Request entityRequest1 = new CollectionRequest(Collections.<String, Object>emptyMap(),
            "tags/name:testTaxonomy.termName.child1");
    Request entityRequest2 = new CollectionRequest(Collections.<String, Object>emptyMap(),
            "tags/name:testTaxonomy.termName.child2");
    Request entityRequest3 = new CollectionRequest(Collections.<String, Object>emptyMap(),
            "tags/name:testTaxonomy.termName");
    // entity tag requests to delete entity tags
    Map<String, Object> entityTagRequestMap1 = new HashMap<>();
    entityTagRequestMap1.put("id", "111");
    entityTagRequestMap1.put("name", "testTaxonomy.termName.child1");
    Request entityTagRequest1 = new InstanceRequest(entityTagRequestMap1);
    Map<String, Object> entityTagRequestMap2 = new HashMap<>();
    entityTagRequestMap2.put("id", "222");
    entityTagRequestMap2.put("name", "testTaxonomy.termName.child1");
    Request entityTagRequest2 = new InstanceRequest(entityTagRequestMap2);
    Map<String, Object> entityTagRequestMap3 = new HashMap<>();
    entityTagRequestMap3.put("id", "333");
    entityTagRequestMap3.put("name", "testTaxonomy.termName.child2");
    Request entityTagRequest3 = new InstanceRequest(entityTagRequestMap3);
    Map<String, Object> requestProperties = new HashMap<>();
    requestProperties.put("termPath", termPath);
    Request userRequest = new InstanceRequest(requestProperties);
    // canned query result proving the root term exists
    Collection<Map<String, Object>> queryResult = new ArrayList<>();
    Map<String, Object> queryResultRow = new HashMap<>();
    queryResult.add(queryResultRow);
    queryResultRow.put("name", "testTaxonomy.termName");
    queryResultRow.put("id", "111-222-333");
    // canned taxonomy lookup result providing the taxonomy entity id
    Collection<Map<String, Object>> taxonomyResultMaps = new ArrayList<>();
    Map<String, Object> taxonomyResultMap = new HashMap<>();
    taxonomyResultMap.put("name", "testTaxonomy");
    taxonomyResultMap.put("id", "12345");
    taxonomyResultMaps.add(taxonomyResultMap);
    Result taxonomyResult = new Result(taxonomyResultMaps);
    // two child terms that must be cascaded before the root is deleted
    Collection<Map<String, Object>> childResult = new ArrayList<>();
    Map<String, Object> childResultRow = new HashMap<>();
    childResult.add(childResultRow);
    childResultRow.put("name", "testTaxonomy.termName.child1");
    childResultRow.put("id", "1-1-1");
    Map<String, Object> childResultRow2 = new HashMap<>();
    childResult.add(childResultRow2);
    childResultRow2.put("name", "testTaxonomy.termName.child2");
    childResultRow2.put("id", "2-2-2");
    // entities tagged with child1 (two) and child2 (one)
    Collection<Map<String, Object>> entityResults1 = new ArrayList<>();
    Map<String, Object> entityResult1Map1 = new HashMap<>();
    entityResult1Map1.put("name", "entity1");
    entityResult1Map1.put("id", "111");
    entityResults1.add(entityResult1Map1);
    Map<String, Object> entityResult1Map2 = new HashMap<>();
    entityResult1Map2.put("name", "entity2");
    entityResult1Map2.put("id", "222");
    entityResults1.add(entityResult1Map2);
    Result entityResult1 = new Result(entityResults1);
    Collection<Map<String, Object>> entityResults2 = new ArrayList<>();
    Map<String, Object> entityResult2Map = new HashMap<>();
    entityResult2Map.put("name", "entity3");
    entityResult2Map.put("id", "333");
    entityResults2.add(entityResult2Map);
    Result entityResult2 = new Result(entityResults2);
    // mock expectations (strict: the cascade order below is asserted by verify())
    // ensure term exists
    expect(queryFactory.createTermQuery(userRequest)).andReturn(query);
    expect(query.execute()).andReturn(queryResult);
    // taxonomy query
    expect(taxonomyResourceProvider.getResourceById(capture(taxonomyRequestCapture))).andReturn(taxonomyResult);
    // get term children
    expect(queryFactory.createTermQuery(capture(termRequestCapture))).andReturn(query);
    expect(query.execute()).andReturn(childResult);
    // entities with child1 tag
    expect(entityResourceProvider.getResources(eq(entityRequest1))).andReturn(entityResult1);
    // typeSystem.deleteTag("111", "testTaxonomy.termName.child1");
    // typeSystem.deleteTag("222", "testTaxonomy.termName.child1");
    entityTagResourceProvider.deleteResourceById(entityTagRequest1);
    entityTagResourceProvider.deleteResourceById(entityTagRequest2);
    // delete child1 from taxonomy
    typeSystem.deleteTag("12345", "testTaxonomy.termName.child1");
    // entities with child2 tag
    expect(entityResourceProvider.getResources(eq(entityRequest2))).andReturn(entityResult2);
    //typeSystem.deleteTag("333", "testTaxonomy.termName.child2");
    entityTagResourceProvider.deleteResourceById(entityTagRequest3);
    // delete child2 from taxonomy
    typeSystem.deleteTag("12345", "testTaxonomy.termName.child2");
    // root term being deleted which has no associated tags
    expect(entityResourceProvider.getResources(eq(entityRequest3))).andReturn(
            new Result(Collections.<Map<String, Object>>emptyList()));
    // delete root term from taxonomy
    typeSystem.deleteTag("12345", "testTaxonomy.termName");
    replay(taxonomyResourceProvider, entityResourceProvider, entityTagResourceProvider, typeSystem, queryFactory, query);
    TermResourceProvider provider = new TestTermResourceProvider(
            typeSystem, taxonomyResourceProvider, entityResourceProvider, entityTagResourceProvider);
    provider.setQueryFactory(queryFactory);
    // invoke method being tested
    provider.deleteResourceById(userRequest);
    // taxonomy lookup should be by name, selecting only the id
    Request taxonomyRequest = taxonomyRequestCapture.getValue();
    assertEquals(taxonomyRequest.getQueryProperties().get("name"), "testTaxonomy");
    assertEquals(taxonomyRequest.getAdditionalSelectProperties().size(), 1);
    assertTrue(taxonomyRequest.getAdditionalSelectProperties().contains("id"));
    // child enumeration should use the root term path with a trailing '.'
    Request childTermRequest = termRequestCapture.getValue();
    assertEquals(childTermRequest.<TermPath>getProperty("termPath").getFullyQualifiedName(), "testTaxonomy.termName.");
    verify(taxonomyResourceProvider, entityResourceProvider, entityTagResourceProvider, typeSystem, queryFactory, query);
}
@Test
/**
 * Verifies that updateResourceById() applies the update properties to the term
 * itself and then propagates the same updates to all entity tags of that term.
 */
public void testUpdateResourceById() throws Exception {
    AtlasTypeSystem typeSystem = createStrictMock(AtlasTypeSystem.class);
    QueryFactory queryFactory = createStrictMock(QueryFactory.class);
    AtlasQuery query = createStrictMock(AtlasQuery.class);
    Capture<Request> termRequestCapture = newCapture();
    Capture<Request> tagRequestCapture = newCapture();
    TermPath termPath = new TermPath("testTaxonomy", "termName");
    Map<String, Object> requestProperties = new HashMap<>();
    requestProperties.put("termPath", termPath);
    Map<String, Object> requestUpdateProperties = new HashMap<>();
    requestUpdateProperties.put("description", "updatedValue");
    Request userRequest = new InstanceRequest(requestProperties, requestUpdateProperties);
    Collection<Map<String, Object>> queryResult = new ArrayList<>();
    Map<String, Object> queryResultRow = new HashMap<>();
    queryResult.add(queryResultRow);
    queryResultRow.put("name", "testTaxonomy.termName");
    // mock expectations
    // term update
    expect(queryFactory.createTermQuery(capture(termRequestCapture))).andReturn(query);
    expect(query.execute(requestUpdateProperties)).andReturn(queryResult);
    // tag updates
    expect(queryFactory.createEntityTagQuery(capture(tagRequestCapture))).andReturn(query);
    // query response isn't used so just returning null
    expect(query.execute(requestUpdateProperties)).andReturn(null);
    replay(typeSystem, queryFactory, query);
    TermResourceProvider provider = new TermResourceProvider(typeSystem);
    provider.setQueryFactory(queryFactory);
    // invoke test method
    provider.updateResourceById(userRequest);
    // term update request should carry the term path and derived name
    Request request = termRequestCapture.getValue();
    assertNull(request.getQueryString());
    assertTrue(request.getAdditionalSelectProperties().isEmpty());
    assertEquals(request.getQueryProperties().size(), 2);
    assertEquals(request.getQueryProperties().get("termPath"), termPath);
    assertEquals(request.getQueryProperties().get("name"), termPath.getFullyQualifiedName());
    // tag update should target all entities ("id" == "*") tagged with the term
    Request tagRequest = tagRequestCapture.getValue();
    assertEquals(tagRequest.getQueryString(), "name:testTaxonomy.termName");
    assertEquals(tagRequest.getQueryProperties().size(), 1);
    assertEquals(tagRequest.getQueryProperties().get("id"), "*");
    verify(typeSystem, queryFactory, query);
}
/**
 * Test subclass of TermResourceProvider that substitutes mock collaborators
 * (taxonomy, entity and entity-tag resource providers) for the ones the base
 * class would otherwise create.
 */
private static class TestTermResourceProvider extends TermResourceProvider {
    // collaborators injected by the tests; may be null when a test doesn't use them
    private final ResourceProvider testTaxonomyResourceProvider;
    private final ResourceProvider testEntityResourceProvider;
    private final ResourceProvider testEntityTagResourceProvider;

    /**
     * Convenience constructor for tests that only need the taxonomy provider;
     * entity and entity-tag providers are left null.
     */
    public TestTermResourceProvider(AtlasTypeSystem typeSystem,
                                    ResourceProvider taxonomyResourceProvider) {
        // chain to the full constructor instead of duplicating field assignments
        this(typeSystem, taxonomyResourceProvider, null, null);
    }

    /**
     * Full constructor allowing all three collaborating providers to be replaced.
     */
    public TestTermResourceProvider(AtlasTypeSystem typeSystem,
                                    ResourceProvider taxonomyResourceProvider,
                                    ResourceProvider entityResourceProvider,
                                    ResourceProvider entityTagResourceProvider) {
        super(typeSystem);
        testTaxonomyResourceProvider = taxonomyResourceProvider;
        testEntityResourceProvider = entityResourceProvider;
        testEntityTagResourceProvider = entityTagResourceProvider;
    }

    @Override
    protected synchronized ResourceProvider getTaxonomyResourceProvider() {
        return testTaxonomyResourceProvider;
    }

    @Override
    protected synchronized ResourceProvider getEntityResourceProvider() {
        return testEntityResourceProvider;
    }

    @Override
    protected synchronized ResourceProvider getEntityTagResourceProvider() {
        return testEntityTagResourceProvider;
    }
}
}
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
* <p/>
* http://www.apache.org/licenses/LICENSE-2.0
* <p/>
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.atlas.catalog;
import com.tinkerpop.blueprints.Vertex;
import org.apache.atlas.catalog.definition.ResourceDefinition;
import org.apache.atlas.repository.Constants;
import org.testng.annotations.Test;
import java.util.*;
import static org.easymock.EasyMock.*;
import static org.testng.Assert.*;
/**
* Unit tests for VertexWrapper.
*/
public class VertexWrapperTest {
    /** Wrapper should return the exact vertex it was constructed with. */
    @Test
    public void testGetVertex() {
        Vertex v = createStrictMock(Vertex.class);
        ResourceDefinition resourceDefinition = createStrictMock(ResourceDefinition.class);
        // just return null for these because they aren't used in this test
        expect(resourceDefinition.getPropertyMapper()).andReturn(null);
        expect(resourceDefinition.getPropertyValueFormatters()).andReturn(null);
        expect(v.<String>getProperty(Constants.ENTITY_TYPE_PROPERTY_KEY)).andReturn("testType");
        replay(v, resourceDefinition);

        VertexWrapper vWrapper = new VertexWrapper(v, resourceDefinition);
        assertEquals(vWrapper.getVertex(), v);
        verify(v, resourceDefinition);
    }

    /**
     * getProperty() should map the clean name to its fully-qualified form,
     * format the raw vertex value, and return null once the property is removed.
     */
    @SuppressWarnings("unchecked")
    @Test
    public void testGetProperty() {
        String testType = "testType";
        String propName = "propName";
        String qualifiedPropName = "Prefix.propName";
        String propValue = "val";
        String formattedValue = "value";

        Vertex v = createStrictMock(Vertex.class);
        PropertyMapper propertyMapper = createStrictMock(PropertyMapper.class);
        PropertyValueFormatter formatter = createStrictMock(PropertyValueFormatter.class);
        // mock expectations
        expect(v.<String>getProperty(Constants.ENTITY_TYPE_PROPERTY_KEY)).andReturn(testType);
        expect(propertyMapper.toFullyQualifiedName(propName, testType)).andReturn(qualifiedPropName);
        expect(v.getProperty(qualifiedPropName)).andReturn(propValue);
        expect(formatter.format(propValue)).andReturn((formattedValue));
        replay(v, propertyMapper, formatter);

        VertexWrapper vWrapper = new VertexWrapper(v, propertyMapper, Collections.singletonMap(propName, formatter));
        assertEquals(vWrapper.getProperty(propName), formattedValue);
        // now remove prop
        vWrapper.removeProperty(propName);
        assertNull(vWrapper.getProperty(propName));
        verify(v, propertyMapper, formatter);
    }

    /**
     * Same as testGetProperty but using the ResourceDefinition constructor,
     * which supplies the mapper and formatters indirectly.
     */
    @SuppressWarnings("unchecked")
    @Test
    public void testGetProperty2() {
        String testType = "testType";
        String propName = "propName";
        String qualifiedPropName = "Prefix.propName";
        String propValue = "val";
        String formattedValue = "value";

        Vertex v = createStrictMock(Vertex.class);
        ResourceDefinition resourceDefinition = createStrictMock(ResourceDefinition.class);
        PropertyMapper propertyMapper = createStrictMock(PropertyMapper.class);
        PropertyValueFormatter formatter = createStrictMock(PropertyValueFormatter.class);
        // mock expectations
        expect(v.<String>getProperty(Constants.ENTITY_TYPE_PROPERTY_KEY)).andReturn(testType);
        expect(resourceDefinition.getPropertyMapper()).andReturn(propertyMapper);
        expect(resourceDefinition.getPropertyValueFormatters()).andReturn(Collections.singletonMap(propName, formatter));
        expect(propertyMapper.toFullyQualifiedName(propName, testType)).andReturn(qualifiedPropName);
        expect(v.getProperty(qualifiedPropName)).andReturn(propValue);
        expect(formatter.format(propValue)).andReturn((formattedValue));
        replay(v, resourceDefinition, propertyMapper, formatter);

        VertexWrapper vWrapper = new VertexWrapper(v, resourceDefinition);
        assertEquals(vWrapper.getProperty(propName), formattedValue);
        // now remove prop
        vWrapper.removeProperty(propName);
        assertNull(vWrapper.getProperty(propName));
        verify(v, resourceDefinition, propertyMapper, formatter);
    }

    /** A removed property must not be fetched from the underlying vertex. */
    @Test
    public void testGetProperty_removed() {
        String testType = "testType";
        String propName = "propName";

        Vertex v = createStrictMock(Vertex.class);
        expect(v.<String>getProperty(Constants.ENTITY_TYPE_PROPERTY_KEY)).andReturn(testType);
        // vertex shouldn't be asked for the removed property
        replay(v);

        VertexWrapper vWrapper = new VertexWrapper(v, null, Collections.<String, PropertyValueFormatter>emptyMap());
        vWrapper.removeProperty(propName);
        assertNull(vWrapper.getProperty(propName));
        verify(v);
    }

    /** Property keys should be returned as clean names in natural order. */
    @Test
    public void testGetPropertyKeys() {
        String testType = "testType";
        // vertex returns unordered set
        Set<String> propertyKeys = new HashSet<>();
        propertyKeys.add("foobar");
        propertyKeys.add("Prefix.foo");
        propertyKeys.add("Prefix.bar");

        Vertex v = createStrictMock(Vertex.class);
        PropertyMapper propertyMapper = createMock(PropertyMapper.class);
        // mock expectations
        expect(v.<String>getProperty(Constants.ENTITY_TYPE_PROPERTY_KEY)).andReturn(testType);
        expect(v.getPropertyKeys()).andReturn(propertyKeys);
        expect(propertyMapper.toCleanName("Prefix.bar", testType)).andReturn("bar");
        expect(propertyMapper.toCleanName("Prefix.foo", testType)).andReturn("foo");
        expect(propertyMapper.toCleanName("foobar", testType)).andReturn("foobar");
        replay(v, propertyMapper);

        VertexWrapper vWrapper = new VertexWrapper(v, propertyMapper,
                Collections.<String, PropertyValueFormatter>emptyMap());
        Collection<String> resultKeys = vWrapper.getPropertyKeys();
        Iterator<String> propIterator = resultKeys.iterator();
        assertEquals(resultKeys.size(), 3);
        // natural ordering is applied in vertex wrapper
        assertEquals(propIterator.next(), "bar");
        assertEquals(propIterator.next(), "foo");
        assertEquals(propIterator.next(), "foobar");
        verify(v, propertyMapper);
    }

    /** Removed properties should be excluded from the returned key set. */
    @Test
    public void testGetPropertyKeys_removed() {
        String testType = "testType";
        Set<String> propertyKeys = new TreeSet<>();
        propertyKeys.add("Prefix.foo");
        propertyKeys.add("Prefix.bar");
        propertyKeys.add("foobar");

        Vertex v = createStrictMock(Vertex.class);
        PropertyMapper propertyMapper = createStrictMock(PropertyMapper.class);
        // mock expectations
        expect(v.<String>getProperty(Constants.ENTITY_TYPE_PROPERTY_KEY)).andReturn(testType);
        expect(v.getPropertyKeys()).andReturn(propertyKeys);
        // natural ordering provided by TreeSet
        expect(propertyMapper.toCleanName("Prefix.bar", testType)).andReturn("bar");
        expect(propertyMapper.toCleanName("Prefix.foo", testType)).andReturn("foo");
        expect(propertyMapper.toCleanName("foobar", testType)).andReturn("foobar");
        replay(v, propertyMapper);

        VertexWrapper vWrapper = new VertexWrapper(v, propertyMapper,
                Collections.<String, PropertyValueFormatter>emptyMap());
        // remove props
        vWrapper.removeProperty("foo");
        vWrapper.removeProperty("foobar");
        Collection<String> resultKeys = vWrapper.getPropertyKeys();
        assertEquals(resultKeys.size(), 1);
        assertTrue(resultKeys.contains("bar"));
        verify(v, propertyMapper);
    }

    /** Property map should contain clean-named keys with formatted values, sorted by key. */
    @Test
    public void testGetPropertyMap() {
        String testType = "testType";
        Set<String> propertyKeys = new HashSet<>();
        propertyKeys.add("Prefix.foo");
        propertyKeys.add("Prefix.bar");
        propertyKeys.add("foobar");

        Vertex v = createMock(Vertex.class);
        PropertyMapper propertyMapper = createMock(PropertyMapper.class);
        PropertyValueFormatter formatter = createMock(PropertyValueFormatter.class);
        Map<String, PropertyValueFormatter> valueFormatters = new HashMap<>();
        valueFormatters.put("foo", formatter);
        valueFormatters.put("bar", formatter);
        // mock expectations
        expect(v.<String>getProperty(Constants.ENTITY_TYPE_PROPERTY_KEY)).andReturn(testType);
        expect(v.getPropertyKeys()).andReturn(propertyKeys);
        expect(v.getProperty("Prefix.foo")).andReturn("Prefix.foo:Value");
        expect(v.getProperty("Prefix.bar")).andReturn("Prefix.bar:Value");
        expect(v.getProperty("foobar")).andReturn("foobarValue");
        expect(propertyMapper.toCleanName("Prefix.bar", testType)).andReturn("bar");
        expect(propertyMapper.toCleanName("Prefix.foo", testType)).andReturn("foo");
        expect(propertyMapper.toCleanName("foobar", testType)).andReturn("foobar");
        expect(formatter.format("Prefix.foo:Value")).andReturn("fooValue");
        expect(formatter.format("Prefix.bar:Value")).andReturn("barValue");
        replay(v, propertyMapper, formatter);

        VertexWrapper vWrapper = new VertexWrapper(v, propertyMapper, valueFormatters);
        Map<String, Object> resultMap = vWrapper.getPropertyMap();
        assertEquals(resultMap.size(), 3);
        Iterator<Map.Entry<String, Object>> iter = resultMap.entrySet().iterator();
        Map.Entry<String, Object> entry1 = iter.next();
        assertEquals(entry1.getKey(), "bar");
        assertEquals(entry1.getValue(), "barValue");
        Map.Entry<String, Object> entry2 = iter.next();
        assertEquals(entry2.getKey(), "foo");
        assertEquals(entry2.getValue(), "fooValue");
        Map.Entry<String, Object> entry3 = iter.next();
        assertEquals(entry3.getKey(), "foobar");
        assertEquals(entry3.getValue(), "foobarValue");
        verify(v, propertyMapper, formatter);
    }

    /** Removed properties should be excluded from the returned property map. */
    @Test
    public void testGetPropertyMap_removed() {
        String testType = "testType";
        Set<String> propertyKeys = new HashSet<>();
        propertyKeys.add("Prefix.foo");
        propertyKeys.add("Prefix.bar");
        propertyKeys.add("foobar");

        Vertex v = createMock(Vertex.class);
        PropertyMapper propertyMapper = createMock(PropertyMapper.class);
        PropertyValueFormatter formatter = createMock(PropertyValueFormatter.class);
        Map<String, PropertyValueFormatter> valueFormatters = new HashMap<>();
        valueFormatters.put("foo", formatter);
        valueFormatters.put("bar", formatter);
        // mock expectations: note "Prefix.foo" value is never fetched after removal
        expect(v.<String>getProperty(Constants.ENTITY_TYPE_PROPERTY_KEY)).andReturn(testType);
        expect(v.getPropertyKeys()).andReturn(propertyKeys);
        expect(v.getProperty("Prefix.bar")).andReturn("Prefix.bar:Value");
        expect(v.getProperty("foobar")).andReturn("foobarValue");
        expect(propertyMapper.toCleanName("Prefix.bar", testType)).andReturn("bar");
        expect(propertyMapper.toCleanName("Prefix.foo", testType)).andReturn("foo");
        expect(propertyMapper.toCleanName("foobar", testType)).andReturn("foobar");
        expect(formatter.format("Prefix.bar:Value")).andReturn("barValue");
        replay(v, propertyMapper, formatter);

        VertexWrapper vWrapper = new VertexWrapper(v, propertyMapper, valueFormatters);
        //remove "foo" property
        vWrapper.removeProperty("foo");
        Map<String, Object> resultMap = vWrapper.getPropertyMap();
        assertEquals(resultMap.size(), 2);
        Iterator<Map.Entry<String, Object>> iter = resultMap.entrySet().iterator();
        Map.Entry<String, Object> entry1 = iter.next();
        assertEquals(entry1.getKey(), "bar");
        assertEquals(entry1.getValue(), "barValue");
        Map.Entry<String, Object> entry2 = iter.next();
        assertEquals(entry2.getKey(), "foobar");
        assertEquals(entry2.getValue(), "foobarValue");
        verify(v, propertyMapper, formatter);
    }

    /** isPropertyRemoved() should report only properties explicitly removed. */
    @Test
    public void testIsPropertyRemoved() {
        String testType = "testType";
        Vertex v = createMock(Vertex.class);
        expect(v.<String>getProperty(Constants.ENTITY_TYPE_PROPERTY_KEY)).andReturn(testType);
        replay(v);

        VertexWrapper vWrapper = new VertexWrapper(v, null,
                Collections.<String, PropertyValueFormatter>emptyMap());
        vWrapper.removeProperty("foo");
        assertTrue(vWrapper.isPropertyRemoved("foo"));
        assertFalse(vWrapper.isPropertyRemoved("bar"));
        // BUGFIX: verify was missing, so the recorded expectation was never checked
        verify(v);
    }

    /** setProperty() should write through to the vertex using the qualified name. */
    @Test
    public void testSetProperty() {
        String testType = "testType";
        String cleanPropName = "prop1";
        String qualifiedPropName = "test.prop1";
        String propValue = "newValue";

        Vertex v = createStrictMock(Vertex.class);
        PropertyMapper propertyMapper = createStrictMock(PropertyMapper.class);
        expect(v.<String>getProperty(Constants.ENTITY_TYPE_PROPERTY_KEY)).andReturn(testType);
        expect(propertyMapper.toFullyQualifiedName(cleanPropName, testType)).andReturn(qualifiedPropName);
        v.setProperty(qualifiedPropName, propValue);
        replay(v, propertyMapper);

        VertexWrapper vWrapper = new VertexWrapper(
                v, propertyMapper, Collections.<String, PropertyValueFormatter>emptyMap());
        vWrapper.setProperty(cleanPropName, propValue);
        verify(v, propertyMapper);
    }
}
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
* <p/>
* http://www.apache.org/licenses/LICENSE-2.0
* <p/>
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.atlas.catalog.definition;
import org.apache.atlas.catalog.CollectionRequest;
import org.apache.atlas.catalog.InstanceRequest;
import org.apache.atlas.catalog.Request;
import org.apache.atlas.catalog.projection.Projection;
import org.apache.atlas.catalog.projection.Relation;
import org.testng.annotations.Test;
import java.util.Collections;
import java.util.HashMap;
import java.util.Map;
import static org.testng.Assert.assertEquals;
import static org.testng.Assert.assertNull;
import static org.testng.Assert.assertTrue;
/**
* Unit tests for EntityResourceDefinition.
*/
public class EntityResourceDefinitionTest {
    /** Entity resources are identified by the "id" (guid) property. */
    @Test
    public void testGetIdPropertyName() {
        ResourceDefinition entityDefinition = new EntityResourceDefinition();
        assertEquals(entityDefinition.getIdPropertyName(), "id");
    }
    /** The entity resource is not backed by a single type, so no type name is exposed. */
    @Test
    public void testGetTypeName() {
        ResourceDefinition entityDefinition = new EntityResourceDefinition();
        assertNull(entityDefinition.getTypeName());
    }
    /** The href is built from the entity id: v1/entities/{id}. */
    @Test
    public void testResolveHref() {
        Map<String, Object> resourceProps = new HashMap<>();
        resourceProps.put("id", "111-222-333");
        resourceProps.put("name", "foo");
        ResourceDefinition entityDefinition = new EntityResourceDefinition();
        String href = entityDefinition.resolveHref(resourceProps);
        assertEquals(href, "v1/entities/111-222-333");
    }
    // Because we don't currently support entity creation, this method is basically a no-op.
    @Test
    public void testValidate() throws Exception {
        Request request = new InstanceRequest(Collections.<String, Object>emptyMap());
        ResourceDefinition entityDefinition = new EntityResourceDefinition();
        entityDefinition.validateCreatePayload(request);
    }
    // Because we don't currently support entity creation, no properties are registered
    @Test
    public void testGetPropertyDefinitions() {
        ResourceDefinition entityDefinition = new EntityResourceDefinition();
        assertTrue(entityDefinition.getPropertyDefinitions().isEmpty());
    }
    /** Instance requests return the full, unfiltered property map. */
    @Test
    public void testFilterProperties_Instance() {
        Map<String, Object> resourceProps = new HashMap<>();
        resourceProps.put("id", "111-222-333");
        resourceProps.put("name", "nameVal");
        resourceProps.put("type", "someType");
        resourceProps.put("foo", "fooVal");
        resourceProps.put("bar", "barVal");
        resourceProps.put("fooBar", "fooBarVal");
        resourceProps.put("other", "otherVal");
        Request request = new InstanceRequest(resourceProps);
        ResourceDefinition entityDefinition = new EntityResourceDefinition();
        // no filtering should occur for entity instances
        assertEquals(entityDefinition.filterProperties(request, resourceProps), resourceProps);
    }
    /** Collection requests are filtered down to registered props plus additional select props. */
    @Test
    public void testFilterProperties_Collection() {
        Map<String, Object> resourceProps = new HashMap<>();
        resourceProps.put("id", "111-222-333");
        resourceProps.put("name", "nameVal");
        resourceProps.put("type", "someType");
        resourceProps.put("foo", "fooVal");
        resourceProps.put("bar", "barVal");
        resourceProps.put("fooBar", "fooBarVal");
        resourceProps.put("other", "otherVal");
        Request request = new CollectionRequest(resourceProps, "someProperty:someValue");
        request.addAdditionalSelectProperties(Collections.singleton("foo"));
        ResourceDefinition entityDefinition = new EntityResourceDefinition();
        // collection results should be trimmed to the registered collection
        // properties (name, id, type) plus the additional select property "foo"
        Map<String, Object> filteredProps = entityDefinition.filterProperties(request, resourceProps);
        assertEquals(filteredProps.size(), 4);
        // registered collection props
        assertTrue(filteredProps.containsKey("name"));
        assertTrue(filteredProps.containsKey("id"));
        assertTrue(filteredProps.containsKey("type"));
        // added prop
        assertTrue(filteredProps.containsKey("foo"));
    }
    /** Three projections are registered: tags, traits and default. */
    @Test
    public void testGetProjections() {
        ResourceDefinition entityDefinition = new EntityResourceDefinition();
        Map<String, Projection> projections = entityDefinition.getProjections();
        assertEquals(projections.size(), 3);
        assertTrue(projections.containsKey("tags"));
        assertTrue(projections.containsKey("traits"));
        assertTrue(projections.containsKey("default"));
    }
    /** Two relations are registered: tags and traits. */
    @Test
    public void testGetRelations() {
        ResourceDefinition entityDefinition = new EntityResourceDefinition();
        Map<String, Relation> relations = entityDefinition.getRelations();
        assertEquals(relations.size(), 2);
        assertTrue(relations.containsKey("tags"));
        assertTrue(relations.containsKey("traits"));
    }
}
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
* <p/>
* http://www.apache.org/licenses/LICENSE-2.0
* <p/>
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.atlas.catalog.definition;
import org.apache.atlas.catalog.CollectionRequest;
import org.apache.atlas.catalog.InstanceRequest;
import org.apache.atlas.catalog.Request;
import org.apache.atlas.catalog.exception.InvalidPayloadException;
import org.apache.atlas.catalog.projection.Projection;
import org.apache.atlas.catalog.projection.Relation;
import org.apache.atlas.typesystem.types.AttributeDefinition;
import org.testng.annotations.Test;
import java.util.*;
import static org.testng.Assert.assertEquals;
import static org.testng.Assert.assertNull;
import static org.testng.Assert.assertTrue;
/**
* Unit tests for EntityTagResourceDefinition.
*/
public class EntityTagResourceDefinitionTest {
    /** Entity tags are identified by their "name" property. */
    @Test
    public void testGetIdPropertyName() {
        ResourceDefinition entityTagDefinition = new EntityTagResourceDefinition();
        assertEquals(entityTagDefinition.getIdPropertyName(), "name");
    }
    /** Entity tags are traits, not a standalone type, so no type name is exposed. */
    @Test
    public void testGetTypeName() {
        ResourceDefinition entityTagDefinition = new EntityTagResourceDefinition();
        assertNull(entityTagDefinition.getTypeName());
    }
    /** The href combines the owning entity guid and the tag name. */
    @Test
    public void testResolveHref() {
        Map<String, Object> resourceProps = new HashMap<>();
        resourceProps.put("name", "taxonomy1.term1.term11");
        resourceProps.put(EntityTagResourceDefinition.ENTITY_GUID_PROPERTY, "11-22-33");
        ResourceDefinition entityTagDefinition = new EntityTagResourceDefinition();
        String href = entityTagDefinition.resolveHref(resourceProps);
        assertEquals(href, "v1/entities/11-22-33/tags/taxonomy1.term1.term11");
    }
    /** A payload containing only the (qualified) name is valid. */
    @Test
    public void testValidate() throws Exception {
        Map<String, Object> properties = new HashMap<>();
        properties.put("name", "taxonomy1.termName");
        Request request = new InstanceRequest(properties);
        ResourceDefinition entityTagDefinition = new EntityTagResourceDefinition();
        entityTagDefinition.validateCreatePayload(request);
    }
    /** "name" is mandatory; an empty payload must be rejected. */
    @Test(expectedExceptions = InvalidPayloadException.class)
    public void testValidate_missingName() throws Exception {
        Map<String, Object> properties = new HashMap<>();
        Request request = new InstanceRequest(properties);
        ResourceDefinition entityTagDefinition = new EntityTagResourceDefinition();
        entityTagDefinition.validateCreatePayload(request);
    }
    /** Properties not registered for create (e.g. "description") must be rejected. */
    @Test(expectedExceptions = InvalidPayloadException.class)
    public void testValidate_invalidProperty() throws Exception {
        Map<String, Object> properties = new HashMap<>();
        properties.put("name", "foo");
        properties.put("description", "desc");
        Request request = new InstanceRequest(properties);
        ResourceDefinition entityTagDefinition = new EntityTagResourceDefinition();
        entityTagDefinition.validateCreatePayload(request);
    }
    /** Only the "name" property is registered for entity tags. */
    @Test
    public void testGetPropertyDefinitions() {
        ResourceDefinition entityTagDefinition = new EntityTagResourceDefinition();
        Collection<AttributeDefinition> propertyDefinitions = entityTagDefinition.getPropertyDefinitions();
        assertEquals(propertyDefinitions.size(), 1);
        Set<String> defNames = new HashSet<>();
        for (AttributeDefinition def : propertyDefinitions) {
            defNames.add(def.name);
        }
        assertTrue(defNames.contains("name"));
    }
    /** Instance results keep name/description/creation_time plus added select props. */
    @Test
    public void testFilterProperties_Instance() {
        Map<String, Object> resourceProps = new HashMap<>();
        resourceProps.put("id", "111-222-333");
        resourceProps.put("name", "nameVal");
        resourceProps.put("type", "someType");
        resourceProps.put("foo", "fooVal");
        resourceProps.put("bar", "barVal");
        resourceProps.put("description", "desc");
        resourceProps.put("creation_time", "2016:10:10");
        resourceProps.put("acceptable_use", "something");
        resourceProps.put("available_as_tag", true);
        resourceProps.put("other", "otherVal");
        Request request = new InstanceRequest(resourceProps);
        request.addAdditionalSelectProperties(Collections.singleton("foo"));
        ResourceDefinition entityTagDefinition = new EntityTagResourceDefinition();
        Map<String, Object> filteredProperties = entityTagDefinition.filterProperties(request, resourceProps);
        assertEquals(filteredProperties.size(), 4);
        // properties retained for instance requests
        assertTrue(filteredProperties.containsKey("name"));
        assertTrue(filteredProperties.containsKey("description"));
        assertTrue(filteredProperties.containsKey("creation_time"));
        // added prop
        assertTrue(filteredProperties.containsKey("foo"));
    }
    /** Collection results are trimmed further: name/description plus added select props. */
    @Test
    public void testFilterProperties_Collection() {
        Map<String, Object> resourceProps = new HashMap<>();
        resourceProps.put("id", "111-222-333");
        resourceProps.put("name", "nameVal");
        resourceProps.put("type", "someType");
        resourceProps.put("foo", "fooVal");
        resourceProps.put("bar", "barVal");
        resourceProps.put("description", "desc");
        resourceProps.put("creation_time", "2016:10:10");
        resourceProps.put("acceptable_use", "something");
        resourceProps.put("available_as_tag", true);
        resourceProps.put("other", "otherVal");
        Request request = new CollectionRequest(resourceProps, "someProperty:someValue");
        request.addAdditionalSelectProperties(Collections.singleton("foo"));
        ResourceDefinition entityTagDefinition = new EntityTagResourceDefinition();
        Map<String, Object> filteredProps = entityTagDefinition.filterProperties(request, resourceProps);
        assertEquals(filteredProps.size(), 3);
        // registered collection props
        assertTrue(filteredProps.containsKey("name"));
        assertTrue(filteredProps.containsKey("description"));
        // added prop
        assertTrue(filteredProps.containsKey("foo"));
    }
    /** A single "terms" projection is registered. */
    @Test
    public void testGetProjections() {
        ResourceDefinition entityTagDefinition = new EntityTagResourceDefinition();
        Map<String, Projection> projections = entityTagDefinition.getProjections();
        assertEquals(projections.size(), 1);
        assertTrue(projections.containsKey("terms"));
    }
    /** Entity tags declare no relations. */
    @Test
    public void testGetRelations() {
        ResourceDefinition entityTagDefinition = new EntityTagResourceDefinition();
        Map<String, Relation> relations = entityTagDefinition.getRelations();
        assertTrue(relations.isEmpty());
    }
}
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
* <p/>
* http://www.apache.org/licenses/LICENSE-2.0
* <p/>
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.atlas.catalog.definition;
import org.apache.atlas.catalog.CollectionRequest;
import org.apache.atlas.catalog.InstanceRequest;
import org.apache.atlas.catalog.Request;
import org.apache.atlas.catalog.exception.InvalidPayloadException;
import org.apache.atlas.catalog.projection.Projection;
import org.apache.atlas.catalog.projection.Relation;
import org.apache.atlas.typesystem.types.AttributeDefinition;
import org.testng.annotations.Test;
import java.util.*;
import static org.testng.Assert.assertEquals;
import static org.testng.Assert.assertTrue;
/**
* Unit tests for TaxonomyResourceDefinition.
*/
public class TaxonomyResourceDefinitionTest {
    /** Taxonomies are addressed by their "name" property. */
    @Test
    public void testGetIdPropertyName() {
        assertEquals(newDefinition().getIdPropertyName(), "name");
    }
    /** The backing type for a taxonomy resource is "Taxonomy". */
    @Test
    public void testGetTypeName() {
        assertEquals(newDefinition().getTypeName(), "Taxonomy");
    }
    /** The href is built from the taxonomy name, not its id. */
    @Test
    public void testResolveHref() {
        Map<String, Object> props = new HashMap<>();
        props.put("id", "111-222-333");
        props.put("name", "foo");
        assertEquals(newDefinition().resolveHref(props), "v1/taxonomies/foo");
    }
    /** A payload containing name and description is valid. */
    @Test
    public void testValidate() throws Exception {
        Map<String, Object> payload = new HashMap<>();
        payload.put("name", "taxonomyName");
        payload.put("description", "foo");
        newDefinition().validateCreatePayload(new InstanceRequest(payload));
    }
    /** "name" is mandatory for create requests. */
    @Test(expectedExceptions = InvalidPayloadException.class)
    public void testValidate_missingName() throws Exception {
        Map<String, Object> payload = new HashMap<>();
        payload.put("description", "foo");
        newDefinition().validateCreatePayload(new InstanceRequest(payload));
    }
    /** Unregistered properties must be rejected on create. */
    @Test(expectedExceptions = InvalidPayloadException.class)
    public void testValidate_invalidProperty() throws Exception {
        Map<String, Object> payload = new HashMap<>();
        payload.put("name", "foo");
        payload.put("unknownProperty", "value");
        newDefinition().validateCreatePayload(new InstanceRequest(payload));
    }
    /** Three properties are registered, including "name" and "description". */
    @Test
    public void testGetPropertyDefinitions() {
        Collection<AttributeDefinition> propertyDefs = newDefinition().getPropertyDefinitions();
        assertEquals(propertyDefs.size(), 3);
        Set<String> names = new HashSet<>();
        for (AttributeDefinition def : propertyDefs) {
            names.add(def.name);
        }
        assertTrue(names.contains("name"));
        assertTrue(names.contains("description"));
    }
    /** Instance results keep name/description/creation_time plus added select props. */
    @Test
    public void testFilterProperties_Instance() {
        Map<String, Object> resourceProps = buildResourceProps();
        Request request = new InstanceRequest(resourceProps);
        request.addAdditionalSelectProperties(Collections.singleton("foo"));
        Map<String, Object> filtered = newDefinition().filterProperties(request, resourceProps);
        assertEquals(filtered.size(), 4);
        // registered props retained for instance requests
        assertTrue(filtered.containsKey("name"));
        assertTrue(filtered.containsKey("description"));
        assertTrue(filtered.containsKey("creation_time"));
        // explicitly requested prop
        assertTrue(filtered.containsKey("foo"));
    }
    /** Collection results are trimmed to name/description plus added select props. */
    @Test
    public void testFilterProperties_Collection() {
        Map<String, Object> resourceProps = buildResourceProps();
        Request request = new CollectionRequest(resourceProps, "someProperty:someValue");
        request.addAdditionalSelectProperties(Collections.singleton("foo"));
        Map<String, Object> filtered = newDefinition().filterProperties(request, resourceProps);
        assertEquals(filtered.size(), 3);
        // registered collection props
        assertTrue(filtered.containsKey("name"));
        assertTrue(filtered.containsKey("description"));
        // explicitly requested prop
        assertTrue(filtered.containsKey("foo"));
    }
    /** A single "terms" projection is registered. */
    @Test
    public void testGetProjections() {
        Map<String, Projection> projections = newDefinition().getProjections();
        assertEquals(projections.size(), 1);
        assertTrue(projections.containsKey("terms"));
    }
    /** Taxonomies declare no relations. */
    @Test
    public void testGetRelations() {
        assertTrue(newDefinition().getRelations().isEmpty());
    }

    /** Creates the definition under test. */
    private static ResourceDefinition newDefinition() {
        return new TaxonomyResourceDefinition();
    }

    /** Builds the resource property map shared by the filterProperties tests. */
    private static Map<String, Object> buildResourceProps() {
        Map<String, Object> props = new HashMap<>();
        props.put("id", "111-222-333");
        props.put("name", "nameVal");
        props.put("type", "someType");
        props.put("foo", "fooVal");
        props.put("bar", "barVal");
        props.put("description", "desc");
        props.put("creation_time", "2016:10:10");
        return props;
    }
}
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
* <p/>
* http://www.apache.org/licenses/LICENSE-2.0
* <p/>
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.atlas.catalog.definition;
import org.apache.atlas.catalog.CollectionRequest;
import org.apache.atlas.catalog.InstanceRequest;
import org.apache.atlas.catalog.Request;
import org.apache.atlas.catalog.exception.InvalidPayloadException;
import org.apache.atlas.catalog.projection.Projection;
import org.apache.atlas.catalog.projection.Relation;
import org.apache.atlas.typesystem.types.AttributeDefinition;
import org.testng.annotations.Test;
import java.util.*;
import static org.testng.Assert.assertEquals;
import static org.testng.Assert.assertTrue;
/**
* Unit tests for TermResourceDefinition.
*/
public class TermResourceDefinitionTest {
    /** Terms are addressed by their fully qualified "name" property. */
    @Test
    public void testGetIdPropertyName() {
        ResourceDefinition termDefinition = new TermResourceDefinition();
        assertEquals(termDefinition.getIdPropertyName(), "name");
    }
    /** The backing type for a term resource is "Term". */
    @Test
    public void testGetTypeName() {
        ResourceDefinition termDefinition = new TermResourceDefinition();
        assertEquals(termDefinition.getTypeName(), "Term");
    }
    /** A dotted term name expands into the nested taxonomies/terms href path. */
    @Test
    public void testResolveHref() {
        Map<String, Object> resourceProps = new HashMap<>();
        resourceProps.put("name", "taxonomy1.term1.term11");
        ResourceDefinition termDefinition = new TermResourceDefinition();
        String href = termDefinition.resolveHref(resourceProps);
        assertEquals(href, "v1/taxonomies/taxonomy1/terms/term1/terms/term11");
    }
    /** A payload with all registered properties is valid. */
    @Test
    public void testValidate() throws Exception {
        Map<String, Object> properties = new HashMap<>();
        properties.put("name", "taxonomy1.termName");
        properties.put("description", "foo");
        properties.put("available_as_tag", true);
        properties.put("acceptable_use", "something");
        Request request = new InstanceRequest(properties);
        ResourceDefinition termDefinition = new TermResourceDefinition();
        termDefinition.validateCreatePayload(request);
    }
    /** The qualified name alone is a valid create payload. */
    @Test
    public void testValidate_nameOnly() throws Exception {
        Map<String, Object> properties = new HashMap<>();
        properties.put("name", "taxonomy1.termName");
        Request request = new InstanceRequest(properties);
        ResourceDefinition termDefinition = new TermResourceDefinition();
        termDefinition.validateCreatePayload(request);
    }
    /** Term names must be qualified with a taxonomy prefix ("taxonomy.term"). */
    @Test(expectedExceptions = InvalidPayloadException.class)
    public void testValidate_invalidTermName() throws Exception {
        Map<String, Object> properties = new HashMap<>();
        properties.put("name", "NotQualifiedTermName");
        properties.put("description", "foo");
        properties.put("available_as_tag", true);
        Request request = new InstanceRequest(properties);
        ResourceDefinition termDefinition = new TermResourceDefinition();
        termDefinition.validateCreatePayload(request);
    }
    /** "name" is mandatory for create requests. */
    @Test(expectedExceptions = InvalidPayloadException.class)
    public void testValidate_missingName() throws Exception {
        Map<String, Object> properties = new HashMap<>();
        properties.put("description", "foo");
        Request request = new InstanceRequest(properties);
        ResourceDefinition termDefinition = new TermResourceDefinition();
        termDefinition.validateCreatePayload(request);
    }
    /** Unregistered properties must be rejected on create. */
    @Test(expectedExceptions = InvalidPayloadException.class)
    public void testValidate_invalidProperty() throws Exception {
        Map<String, Object> properties = new HashMap<>();
        properties.put("name", "foo");
        properties.put("unknownProperty", "value");
        Request request = new InstanceRequest(properties);
        ResourceDefinition termDefinition = new TermResourceDefinition();
        termDefinition.validateCreatePayload(request);
    }
    /** Four properties are registered: name, description, available_as_tag, acceptable_use. */
    @Test
    public void testGetPropertyDefinitions() {
        ResourceDefinition termDefinition = new TermResourceDefinition();
        Collection<AttributeDefinition> propertyDefinitions = termDefinition.getPropertyDefinitions();
        assertEquals(propertyDefinitions.size(), 4);
        Set<String> defNames = new HashSet<>();
        for (AttributeDefinition def : propertyDefinitions) {
            defNames.add(def.name);
        }
        assertTrue(defNames.contains("name"));
        assertTrue(defNames.contains("description"));
        assertTrue(defNames.contains("available_as_tag"));
        assertTrue(defNames.contains("acceptable_use"));
    }
    /** Instance results keep all registered props plus creation_time and added select props. */
    @Test
    public void testFilterProperties_Instance() {
        Map<String, Object> resourceProps = new HashMap<>();
        resourceProps.put("id", "111-222-333");
        resourceProps.put("name", "nameVal");
        resourceProps.put("type", "someType");
        resourceProps.put("foo", "fooVal");
        resourceProps.put("bar", "barVal");
        resourceProps.put("description", "desc");
        resourceProps.put("creation_time", "2016:10:10");
        resourceProps.put("acceptable_use", "something");
        resourceProps.put("available_as_tag", true);
        resourceProps.put("other", "otherVal");
        Request request = new InstanceRequest(resourceProps);
        request.addAdditionalSelectProperties(Collections.singleton("foo"));
        ResourceDefinition termDefinition = new TermResourceDefinition();
        Map<String, Object> filteredProperties = termDefinition.filterProperties(request, resourceProps);
        assertEquals(filteredProperties.size(), 6);
        // properties retained for instance requests
        assertTrue(filteredProperties.containsKey("name"));
        assertTrue(filteredProperties.containsKey("description"));
        assertTrue(filteredProperties.containsKey("available_as_tag"));
        assertTrue(filteredProperties.containsKey("acceptable_use"));
        assertTrue(filteredProperties.containsKey("creation_time"));
        // added prop
        assertTrue(filteredProperties.containsKey("foo"));
    }
    /** Collection results are trimmed to name/description plus added select props. */
    @Test
    public void testFilterProperties_Collection() {
        Map<String, Object> resourceProps = new HashMap<>();
        resourceProps.put("id", "111-222-333");
        resourceProps.put("name", "nameVal");
        resourceProps.put("type", "someType");
        resourceProps.put("foo", "fooVal");
        resourceProps.put("bar", "barVal");
        resourceProps.put("description", "desc");
        resourceProps.put("creation_time", "2016:10:10");
        resourceProps.put("acceptable_use", "something");
        resourceProps.put("available_as_tag", true);
        resourceProps.put("other", "otherVal");
        Request request = new CollectionRequest(resourceProps, "someProperty:someValue");
        request.addAdditionalSelectProperties(Collections.singleton("foo"));
        ResourceDefinition termDefinition = new TermResourceDefinition();
        Map<String, Object> filteredProps = termDefinition.filterProperties(request, resourceProps);
        assertEquals(filteredProps.size(), 3);
        // registered collection props
        assertTrue(filteredProps.containsKey("name"));
        assertTrue(filteredProps.containsKey("description"));
        // added prop
        assertTrue(filteredProps.containsKey("foo"));
    }
    /** Two projections are registered: terms and hierarchy. */
    @Test
    public void testGetProjections() {
        ResourceDefinition termDefinition = new TermResourceDefinition();
        Map<String, Projection> projections = termDefinition.getProjections();
        assertEquals(projections.size(), 2);
        assertTrue(projections.containsKey("terms"));
        assertTrue(projections.containsKey("hierarchy"));
    }
    /** Terms declare no relations. */
    @Test
    public void testGetRelations() {
        ResourceDefinition termDefinition = new TermResourceDefinition();
        Map<String, Relation> relations = termDefinition.getRelations();
        assertTrue(relations.isEmpty());
    }
}
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
* <p/>
* http://www.apache.org/licenses/LICENSE-2.0
* <p/>
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.atlas.catalog.projection;
import com.tinkerpop.blueprints.Vertex;
import org.apache.atlas.repository.Constants;
import org.apache.atlas.typesystem.persistence.Id;
import org.testng.annotations.Test;
import static org.easymock.EasyMock.createStrictMock;
import static org.easymock.EasyMock.expect;
import static org.easymock.EasyMock.replay;
import static org.testng.Assert.assertFalse;
import static org.testng.Assert.assertTrue;
/**
* Unit tests for TagRelation
*/
public class TagRelationTest {
    // Note: the original second test was named testIsDeleted_false although it
    // asserted isDeleted() == true for a DELETED vertex; both tests are renamed
    // to state the vertex state they exercise.

    /** A vertex in the ACTIVE state must not be reported as deleted. */
    @Test
    public void testIsDeleted_activeVertex() {
        Vertex v = createStrictMock(Vertex.class);
        expect(v.getProperty(Constants.STATE_PROPERTY_KEY)).andReturn(Id.EntityState.ACTIVE.name());
        replay(v);

        BaseRelation relation = new TagRelation();
        assertFalse(relation.isDeleted(v));
    }

    /** A vertex in the DELETED state must be reported as deleted. */
    @Test
    public void testIsDeleted_deletedVertex() {
        Vertex v = createStrictMock(Vertex.class);
        expect(v.getProperty(Constants.STATE_PROPERTY_KEY)).andReturn(Id.EntityState.DELETED.name());
        replay(v);

        BaseRelation relation = new TagRelation();
        assertTrue(relation.isDeleted(v));
    }
}
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
* <p/>
* http://www.apache.org/licenses/LICENSE-2.0
* <p/>
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.atlas.catalog.query;
import org.apache.atlas.catalog.VertexWrapper;
import org.testng.annotations.Test;
import static org.easymock.EasyMock.createStrictMock;
import static org.easymock.EasyMock.replay;
import static org.easymock.EasyMock.verify;
import static org.testng.Assert.assertFalse;
import static org.testng.Assert.assertNull;
import static org.testng.Assert.assertTrue;
/**
* Unit tests for AlwaysQueryExpression.
*/
public class AlwaysQueryExpressionTest {
    /** evaluate() unconditionally returns true for any vertex. */
    @Test
    public void testEvaluate() {
        VertexWrapper v = createStrictMock(VertexWrapper.class);
        replay(v);
        QueryExpression expression = new AlwaysQueryExpression();
        // always returns true
        assertTrue(expression.evaluate(v));
        verify(v);
    }
    /** With negation set, the always-true result is inverted to false. */
    @Test
    public void testEvaluate_negated() {
        VertexWrapper v = createStrictMock(VertexWrapper.class);
        replay(v);
        QueryExpression expression = new AlwaysQueryExpression();
        expression.setNegate();
        // negated, so the always-true expression evaluates to false
        assertFalse(expression.evaluate(v));
        assertTrue(expression.isNegate());
        verify(v);
    }
    /** The expression references no properties. */
    @Test
    public void testGetProperties() {
        VertexWrapper v = createStrictMock(VertexWrapper.class);
        replay(v);
        QueryExpression expression = new AlwaysQueryExpression();
        assertTrue(expression.getProperties().isEmpty());
        verify(v);
    }
    /** No pipe is needed for an expression that always matches. */
    @Test
    public void testAsPipe() {
        VertexWrapper v = createStrictMock(VertexWrapper.class);
        replay(v);
        QueryExpression expression = new AlwaysQueryExpression();
        assertNull(expression.asPipe());
        verify(v);
    }
}
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
* <p/>
* http://www.apache.org/licenses/LICENSE-2.0
* <p/>
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.atlas.catalog.query;
import com.thinkaurelius.titan.core.TitanGraph;
import com.tinkerpop.blueprints.Vertex;
import com.tinkerpop.gremlin.java.GremlinPipeline;
import com.tinkerpop.pipes.Pipe;
import org.apache.atlas.catalog.Request;
import org.apache.atlas.catalog.VertexWrapper;
import org.apache.atlas.catalog.definition.ResourceDefinition;
import org.apache.atlas.repository.Constants;
import org.apache.atlas.repository.graphdb.AtlasGraph;
import org.easymock.Capture;
import org.testng.annotations.Test;
import java.util.*;
import static org.easymock.EasyMock.*;
import static org.testng.Assert.assertEquals;
import static org.testng.Assert.assertTrue;
import static org.testng.Assert.fail;
/**
* Unit tests for AtlasEntityQuery.
*/
@SuppressWarnings("unchecked")
public class AtlasEntityQueryTest {
    //todo: add tests for instance query and getInitialPipeline()

    /**
     * execute() for a COLLECTION request: the query pipe, not-deleted pipe and
     * expression pipe are assembled onto the initial pipeline, the matched
     * vertices are wrapped, their property maps filtered by the resource
     * definition, an href is resolved, and the transaction is committed.
     *
     * NOTE: strict mocks — expectations below must be satisfied in the exact
     * order they are declared.
     */
    @Test
    public void testExecute_Collection() throws Exception {
        AtlasGraph graph = createStrictMock(AtlasGraph.class);
        QueryExpression expression = createStrictMock(QueryExpression.class);
        ResourceDefinition resourceDefinition = createStrictMock(ResourceDefinition.class);
        Request request = createStrictMock(Request.class);
        GremlinPipeline initialPipeline = createStrictMock(GremlinPipeline.class);
        Pipe queryPipe = createStrictMock(Pipe.class);
        Pipe expressionPipe = createStrictMock(Pipe.class);
        Pipe notDeletedPipe = createStrictMock(Pipe.class);
        GremlinPipeline rootPipeline = createStrictMock(GremlinPipeline.class);
        GremlinPipeline queryPipeline = createStrictMock(GremlinPipeline.class);
        GremlinPipeline expressionPipeline = createStrictMock(GremlinPipeline.class);
        GremlinPipeline notDeletedPipeline = createStrictMock(GremlinPipeline.class);
        Vertex vertex1 = createStrictMock(Vertex.class);
        VertexWrapper vertex1Wrapper = createStrictMock(VertexWrapper.class);

        // single matching vertex returned by the pipeline
        List<Vertex> results = new ArrayList<>();
        results.add(vertex1);

        // raw property map exposed by the wrapped vertex
        Map<String, Object> vertex1PropertyMap = new HashMap<>();
        vertex1PropertyMap.put("prop1", "prop1.value1");
        vertex1PropertyMap.put("prop2", "prop2.value1");

        // the resource definition is expected to strip "prop2" during filtering
        Map<String, Object> filteredVertex1PropertyMap = new HashMap<>();
        filteredVertex1PropertyMap.put("prop1", "prop1.value1");

        // mock expectations
        expect(initialPipeline.add(queryPipe)).andReturn(queryPipeline);
        expect(initialPipeline.add(notDeletedPipe)).andReturn(notDeletedPipeline);
        expect(initialPipeline.as("root")).andReturn(rootPipeline);
        expect(expression.asPipe()).andReturn(expressionPipe);
        expect(rootPipeline.add(expressionPipe)).andReturn(expressionPipeline);
        expect(expressionPipeline.back("root")).andReturn(rootPipeline);
        expect(rootPipeline.toList()).andReturn(results);
        // successful execution must commit the graph transaction
        graph.commit();
        expect(vertex1Wrapper.getPropertyMap()).andReturn(vertex1PropertyMap);
        expect(resourceDefinition.filterProperties(request, vertex1PropertyMap)).andReturn(filteredVertex1PropertyMap);
        expect(resourceDefinition.resolveHref(filteredVertex1PropertyMap)).andReturn("/foo/bar");
        expect(request.getCardinality()).andReturn(Request.Cardinality.COLLECTION);
        replay(graph, expression, resourceDefinition, request, initialPipeline, queryPipe, expressionPipe,
                notDeletedPipe, rootPipeline, queryPipeline, expressionPipeline, notDeletedPipeline,
                vertex1, vertex1Wrapper);
        // end mock expectations

        AtlasEntityQuery query = new TestAtlasEntityQuery(expression, resourceDefinition, request,
                initialPipeline, queryPipe, notDeletedPipe, graph, vertex1Wrapper);

        // invoke method being tested
        Collection<Map<String, Object>> queryResults = query.execute();

        assertEquals(queryResults.size(), 1);
        Map<String, Object> queryResultMap = queryResults.iterator().next();
        // filtered property plus the resolved "href" entry
        assertEquals(queryResultMap.size(), 2);
        assertEquals(queryResultMap.get("prop1"), "prop1.value1");
        assertEquals(queryResultMap.get("href"), "/foo/bar");

        verify(graph, expression, resourceDefinition, request, initialPipeline, queryPipe, expressionPipe,
                notDeletedPipe, rootPipeline, queryPipeline, expressionPipeline, notDeletedPipeline,
                vertex1, vertex1Wrapper);
    }

    /**
     * execute() must roll back the graph transaction (instead of committing)
     * and propagate the original RuntimeException when pipeline evaluation
     * fails.
     */
    @Test
    public void testExecute_Collection_rollbackOnException() throws Exception {
        AtlasGraph graph = createStrictMock(AtlasGraph.class);
        QueryExpression expression = createStrictMock(QueryExpression.class);
        ResourceDefinition resourceDefinition = createStrictMock(ResourceDefinition.class);
        Request request = createStrictMock(Request.class);
        GremlinPipeline initialPipeline = createStrictMock(GremlinPipeline.class);
        Pipe queryPipe = createStrictMock(Pipe.class);
        Pipe expressionPipe = createStrictMock(Pipe.class);
        Pipe notDeletedPipe = createStrictMock(Pipe.class);
        GremlinPipeline rootPipeline = createStrictMock(GremlinPipeline.class);
        GremlinPipeline queryPipeline = createStrictMock(GremlinPipeline.class);
        GremlinPipeline expressionPipeline = createStrictMock(GremlinPipeline.class);
        GremlinPipeline notDeletedPipeline = createStrictMock(GremlinPipeline.class);

        // mock expectations
        expect(initialPipeline.add(queryPipe)).andReturn(queryPipeline);
        expect(initialPipeline.add(notDeletedPipe)).andReturn(notDeletedPipeline);
        expect(initialPipeline.as("root")).andReturn(rootPipeline);
        expect(expression.asPipe()).andReturn(expressionPipe);
        expect(rootPipeline.add(expressionPipe)).andReturn(expressionPipeline);
        expect(expressionPipeline.back("root")).andReturn(rootPipeline);
        // simulate a failure during result materialization
        expect(rootPipeline.toList()).andThrow(new RuntimeException("something bad happened"));
        // failure path must roll back, never commit
        graph.rollback();
        replay(graph, expression, resourceDefinition, request, initialPipeline, queryPipe, expressionPipe,
                notDeletedPipe, rootPipeline, queryPipeline, expressionPipeline, notDeletedPipeline);
        // end mock expectations

        AtlasEntityQuery query = new TestAtlasEntityQuery(expression, resourceDefinition, request,
                initialPipeline, queryPipe, notDeletedPipe, graph, null);
        try {
            // invoke method being tested
            query.execute();
            fail("expected exception");
        } catch (RuntimeException e) {
            // the original exception must surface unchanged
            assertEquals(e.getMessage(), "something bad happened");
        }
        verify(graph, expression, resourceDefinition, request, initialPipeline, queryPipe, expressionPipe,
                notDeletedPipe, rootPipeline, queryPipeline, expressionPipeline, notDeletedPipeline);
    }

    /**
     * execute(updateProperties): each matched vertex receives the supplied
     * property values plus a refreshed modification timestamp, and the result
     * is then filtered/resolved exactly as in the read-only collection case.
     */
    @Test
    public void testExecute_Collection_update() throws Exception {
        AtlasGraph graph = createStrictMock(AtlasGraph.class);
        QueryExpression expression = createStrictMock(QueryExpression.class);
        ResourceDefinition resourceDefinition = createStrictMock(ResourceDefinition.class);
        Request request = createStrictMock(Request.class);
        GremlinPipeline initialPipeline = createStrictMock(GremlinPipeline.class);
        Pipe queryPipe = createStrictMock(Pipe.class);
        Pipe expressionPipe = createStrictMock(Pipe.class);
        Pipe notDeletedPipe = createStrictMock(Pipe.class);
        GremlinPipeline rootPipeline = createStrictMock(GremlinPipeline.class);
        GremlinPipeline queryPipeline = createStrictMock(GremlinPipeline.class);
        GremlinPipeline expressionPipeline = createStrictMock(GremlinPipeline.class);
        GremlinPipeline notDeletedPipeline = createStrictMock(GremlinPipeline.class);
        Vertex vertex1 = createStrictMock(Vertex.class);
        VertexWrapper vertex1Wrapper = createStrictMock(VertexWrapper.class);
        // captures the timestamp written so it can be bounds-checked below
        Capture<Long> modifiedTimestampCapture = newCapture();

        List<Vertex> results = new ArrayList<>();
        results.add(vertex1);

        Map<String, Object> vertex1PropertyMap = new HashMap<>();
        vertex1PropertyMap.put("prop1", "prop1.value1");
        vertex1PropertyMap.put("prop2", "prop2.value1");

        Map<String, Object> filteredVertex1PropertyMap = new HashMap<>();
        filteredVertex1PropertyMap.put("prop1", "prop1.value1");

        // properties the update request applies to each matched vertex
        Map<String, Object> updateProperties = new HashMap<>();
        updateProperties.put("prop3", "newValue");

        // mock expectations
        expect(initialPipeline.add(queryPipe)).andReturn(queryPipeline);
        expect(initialPipeline.add(notDeletedPipe)).andReturn(notDeletedPipeline);
        expect(initialPipeline.as("root")).andReturn(rootPipeline);
        expect(expression.asPipe()).andReturn(expressionPipe);
        expect(rootPipeline.add(expressionPipe)).andReturn(expressionPipeline);
        expect(expressionPipeline.back("root")).andReturn(rootPipeline);
        expect(rootPipeline.toList()).andReturn(results);
        graph.commit();
        // update path: new property written, then modification timestamp touched
        vertex1Wrapper.setProperty("prop3", "newValue");
        vertex1Wrapper.setProperty(eq(Constants.MODIFICATION_TIMESTAMP_PROPERTY_KEY), capture(modifiedTimestampCapture));
        expect(vertex1Wrapper.getPropertyMap()).andReturn(vertex1PropertyMap);
        expect(resourceDefinition.filterProperties(request, vertex1PropertyMap)).andReturn(filteredVertex1PropertyMap);
        expect(resourceDefinition.resolveHref(filteredVertex1PropertyMap)).andReturn("/foo/bar");
        expect(request.getCardinality()).andReturn(Request.Cardinality.COLLECTION);
        replay(graph, expression, resourceDefinition, request, initialPipeline, queryPipe, expressionPipe,
                notDeletedPipe, rootPipeline, queryPipeline, expressionPipeline, notDeletedPipeline,
                vertex1, vertex1Wrapper);
        // end mock expectations

        AtlasEntityQuery query = new TestAtlasEntityQuery(expression, resourceDefinition, request,
                initialPipeline, queryPipe, notDeletedPipe, graph, vertex1Wrapper);

        long startTime = System.currentTimeMillis();
        // invoke method being tested
        Collection<Map<String, Object>> queryResults = query.execute(updateProperties);
        long endTime = System.currentTimeMillis();

        assertEquals(queryResults.size(), 1);
        Map<String, Object> queryResultMap = queryResults.iterator().next();
        assertEquals(queryResultMap.size(), 2);
        assertEquals(queryResultMap.get("prop1"), "prop1.value1");
        assertEquals(queryResultMap.get("href"), "/foo/bar");
        // the written timestamp must fall within the execution window
        long modifiedTimestamp = modifiedTimestampCapture.getValue();
        assertTrue(modifiedTimestamp >= startTime && modifiedTimestamp <= endTime);

        verify(graph, expression, resourceDefinition, request, initialPipeline, queryPipe, expressionPipe,
                notDeletedPipe, rootPipeline, queryPipeline, expressionPipeline, notDeletedPipeline,
                vertex1, vertex1Wrapper);
    }

    /**
     * Test double that overrides every collaborator-producing hook of
     * AtlasEntityQuery so the pipeline, pipes, graph and vertex wrapper can
     * all be supplied as mocks.
     */
    private class TestAtlasEntityQuery extends AtlasEntityQuery {
        private final GremlinPipeline initialPipeline;
        private final Pipe queryPipe;
        private final Pipe notDeletedPipe;
        private final AtlasGraph graph;
        private final VertexWrapper vWrapper;

        public TestAtlasEntityQuery(QueryExpression queryExpression,
                                    ResourceDefinition resourceDefinition,
                                    Request request,
                                    GremlinPipeline initialPipeline,
                                    Pipe queryPipe,
                                    Pipe notDeletedPipe,
                                    AtlasGraph graph,
                                    VertexWrapper vWrapper) {
            super(queryExpression, resourceDefinition, request);
            this.initialPipeline = initialPipeline;
            this.queryPipe = queryPipe;
            this.notDeletedPipe = notDeletedPipe;
            this.graph = graph;
            this.vWrapper = vWrapper;
        }

        @Override
        protected GremlinPipeline getRootVertexPipeline() {
            return initialPipeline;
        }

        @Override
        protected Pipe getQueryPipe() {
            return queryPipe;
        }

        @Override
        protected Pipe getNotDeletedPipe() {
            return notDeletedPipe;
        }

        @Override
        protected AtlasGraph getGraph() {
            return graph;
        }

        @Override
        protected VertexWrapper wrapVertex(Vertex v) {
            // always hand back the mock wrapper regardless of the vertex
            return vWrapper;
        }
    }
}
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
* <p/>
* http://www.apache.org/licenses/LICENSE-2.0
* <p/>
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.atlas.catalog.query;
import org.apache.atlas.catalog.CollectionRequest;
import org.apache.atlas.catalog.InstanceRequest;
import org.apache.atlas.catalog.Request;
import org.apache.atlas.catalog.TermPath;
import org.apache.atlas.catalog.definition.EntityResourceDefinition;
import org.apache.atlas.catalog.definition.EntityTagResourceDefinition;
import org.testng.annotations.Test;
import java.util.Collections;
import java.util.HashMap;
import java.util.Map;
import static org.testng.Assert.assertEquals;
/**
* Unit tests for QueryFactory.
*/
public class QueryFactoryTest {

    /**
     * An instance request with a plain 'name' property should yield a taxonomy
     * query backed by a term expression on that property.
     */
    @Test
    public void testCreateTaxonomyQuery() throws Exception {
        Map<String, Object> properties = new HashMap<>();
        properties.put("name", "test_taxonomy");
        Request request = new InstanceRequest(properties);

        AtlasTaxonomyQuery query = (AtlasTaxonomyQuery) new QueryFactory().createTaxonomyQuery(request);

        QueryExpression expression = query.getQueryExpression();
        assertEquals(expression.getClass(), TermQueryExpression.class);
        assertEquals(expression.getField(), "name");
        assertEquals(expression.getExpectedValue(), "test_taxonomy");
        assertEquals(query.getRequest(), request);
        assertEquals(query.getResourceDefinition().getTypeName(), "Taxonomy");
    }

    /**
     * A term instance request (name + termPath) should yield a term query with
     * a term expression on the fully-qualified name.
     */
    @Test
    public void testCreateTermQuery() throws Exception {
        Map<String, Object> properties = new HashMap<>();
        properties.put("name", "test_taxonomy.term1");
        properties.put("termPath", new TermPath("test_taxonomy.term1"));
        Request request = new InstanceRequest(properties);

        AtlasTermQuery query = (AtlasTermQuery) new QueryFactory().createTermQuery(request);

        QueryExpression expression = query.getQueryExpression();
        assertEquals(expression.getClass(), TermQueryExpression.class);
        assertEquals(expression.getField(), "name");
        assertEquals(expression.getExpectedValue(), "test_taxonomy.term1");
        assertEquals(query.getRequest(), request);
        assertEquals(query.getResourceDefinition().getTypeName(), "Term");
    }

    /**
     * An entity instance request keyed on 'id' should yield an entity query
     * with a term expression on the id field.
     */
    @Test
    public void testCreateEntityQuery() throws Exception {
        Map<String, Object> properties = new HashMap<>();
        properties.put("id", "foo");
        Request request = new InstanceRequest(properties);

        AtlasEntityQuery query = (AtlasEntityQuery) new QueryFactory().createEntityQuery(request);

        QueryExpression expression = query.getQueryExpression();
        assertEquals(expression.getClass(), TermQueryExpression.class);
        assertEquals(expression.getField(), "id");
        assertEquals(expression.getExpectedValue(), "foo");
        assertEquals(query.getRequest(), request);
        assertEquals(query.getResourceDefinition().getClass(), EntityResourceDefinition.class);
    }

    /**
     * An entity-tag instance request (entity id + tag name) should yield an
     * entity-tag query that matches on the tag name.
     */
    @Test
    public void testCreateEntityTagQuery() throws Exception {
        Map<String, Object> properties = new HashMap<>();
        properties.put("id", "entity_id");
        properties.put("name", "test_taxonomy.term1");
        Request request = new InstanceRequest(properties);

        AtlasEntityTagQuery query = (AtlasEntityTagQuery) new QueryFactory().createEntityTagQuery(request);

        QueryExpression expression = query.getQueryExpression();
        assertEquals(expression.getClass(), TermQueryExpression.class);
        assertEquals(expression.getField(), "name");
        assertEquals(expression.getExpectedValue(), "test_taxonomy.term1");
        assertEquals(query.getRequest(), request);
        assertEquals(query.getResourceDefinition().getClass(), EntityTagResourceDefinition.class);
    }

    /**
     * "field:value" collection query strings parse to a term expression.
     */
    @Test
    public void testCollectionQuery_TermQuery() throws Exception {
        Request request = collectionRequest("name:test_taxonomy");

        AtlasTaxonomyQuery query = (AtlasTaxonomyQuery) new QueryFactory().createTaxonomyQuery(request);

        QueryExpression expression = query.getQueryExpression();
        assertEquals(expression.getClass(), TermQueryExpression.class);
        assertEquals(expression.getField(), "name");
        assertEquals(expression.getExpectedValue(), "test_taxonomy");
        assertEquals(query.getRequest(), request);
        assertEquals(query.getResourceDefinition().getTypeName(), "Taxonomy");
    }

    /**
     * A trailing-asterisk query string parses to a prefix expression whose
     * expected value is the text before the wildcard.
     */
    @Test
    public void testCollectionQuery_PrefixQuery() throws Exception {
        Request request = collectionRequest("name:t*");

        AtlasTaxonomyQuery query = (AtlasTaxonomyQuery) new QueryFactory().createTaxonomyQuery(request);

        QueryExpression expression = query.getQueryExpression();
        assertEquals(expression.getClass(), PrefixQueryExpression.class);
        assertEquals(expression.getField(), "name");
        assertEquals(expression.getExpectedValue(), "t");
        assertEquals(query.getRequest(), request);
        assertEquals(query.getResourceDefinition().getTypeName(), "Taxonomy");
    }

    /**
     * A "[lower TO upper]" query string parses to a term-range expression.
     */
    @Test
    public void testCollectionQuery_TermRangeQuery() throws Exception {
        Request request = collectionRequest("creation_time:[2013-01-01:07:29:00 TO 2017-01-02]");

        AtlasTaxonomyQuery query = (AtlasTaxonomyQuery) new QueryFactory().createTaxonomyQuery(request);

        QueryExpression expression = query.getQueryExpression();
        assertEquals(expression.getClass(), TermRangeQueryExpression.class);
        assertEquals(expression.getField(), "creation_time");
        assertEquals(query.getRequest(), request);
        assertEquals(query.getResourceDefinition().getTypeName(), "Taxonomy");
    }

    /**
     * A '?' single-character wildcard parses to a wildcard expression with the
     * pattern preserved verbatim.
     */
    @Test
    public void testCollectionQuery_WildcardQuery() throws Exception {
        Request request = collectionRequest("name:ta?onomy");

        AtlasTaxonomyQuery query = (AtlasTaxonomyQuery) new QueryFactory().createTaxonomyQuery(request);

        QueryExpression expression = query.getQueryExpression();
        assertEquals(expression.getClass(), WildcardQueryExpression.class);
        assertEquals(expression.getField(), "name");
        assertEquals(expression.getExpectedValue(), "ta?onomy");
        assertEquals(query.getRequest(), request);
        assertEquals(query.getResourceDefinition().getTypeName(), "Taxonomy");
    }

    /**
     * An OR of two clauses parses to a boolean expression.
     */
    @Test
    public void testCollectionQuery_BooleanQuery() throws Exception {
        Request request = collectionRequest("name:foo OR name:bar");

        AtlasTaxonomyQuery query = (AtlasTaxonomyQuery) new QueryFactory().createTaxonomyQuery(request);

        QueryExpression expression = query.getQueryExpression();
        assertEquals(expression.getClass(), BooleanQueryExpression.class);
        assertEquals(query.getRequest(), request);
        assertEquals(query.getResourceDefinition().getTypeName(), "Taxonomy");
    }

    /**
     * A relation-traversal query ("relation/field:value") parses to a
     * projection expression wrapping a term expression whose field is the
     * escaped traversal path.
     */
    @Test
    public void testCollectionQuery_ProjectionQuery() throws Exception {
        Request request = collectionRequest("relation/name:foo");

        AtlasTaxonomyQuery query = (AtlasTaxonomyQuery) new QueryFactory().createTaxonomyQuery(request);

        QueryExpression expression = query.getQueryExpression();
        assertEquals(expression.getClass(), ProjectionQueryExpression.class);
        ProjectionQueryExpression projectionExpression = (ProjectionQueryExpression) expression;
        QueryExpression underlyingExpression = projectionExpression.getUnderlyingExpression();
        assertEquals(underlyingExpression.getClass(), TermQueryExpression.class);
        assertEquals(underlyingExpression.getField(), QueryFactory.escape("relation/name"));
        assertEquals(underlyingExpression.getExpectedValue(), "foo");
        assertEquals(query.getRequest(), request);
        assertEquals(query.getResourceDefinition().getTypeName(), "Taxonomy");
    }

    // Builds a collection request for the given query string with no properties.
    private Request collectionRequest(String queryString) {
        Map<String, Object> noProperties = Collections.emptyMap();
        return new CollectionRequest(noProperties, queryString);
    }
}
......@@ -200,18 +200,12 @@ atlas.authorizer.impl=SIMPLE
#atlas.graph.storage.lock.retries=10
#atlas.graph.storage.cache.db-cache-time=120000
######### Business Catalog #########
atlas.taxonomy.default.name=Catalog
######### CSRF Configs #########
atlas.rest-csrf.enabled=true
atlas.rest-csrf.browser-useragents-regex=^Mozilla.*,^Opera.*,^Chrome.*
atlas.rest-csrf.methods-to-ignore=GET,OPTIONS,HEAD,TRACE
atlas.rest-csrf.custom-header=X-XSRF-HEADER
######### Enable Taxonomy #########
atlas.feature.taxonomy.enable=true
############ KNOX Configs ################
#atlas.sso.knox.browser.useragent=Mozilla,Chrome,Opera
#atlas.sso.knox.enabled=true
......
......@@ -2,8 +2,8 @@
##r-READ, w-WRITE, u-UPDATE, d-DELETE
##Policy_Name;;User_Name1:Operations_Allowed,User_Name2:Operations_Allowed;;Group_Name1:Operations_Allowed,Group_Name2:Operations_Allowed;;Resource_Type1:Resource_Name,Resource_Type2:Resource_Name
##
adminPolicy;;admin:rwud;;ROLE_ADMIN:rwud;;type:*,entity:*,operation:*,taxonomy:*,term:*,relationship:*
dataScientistPolicy;;;;DATA_SCIENTIST:r;;type:*,entity:*,taxonomy:*,term:*,relationship:*
dataStewardPolicy;;;;DATA_STEWARD:rwu;;type:*,entity:*,taxonomy:*,term:*,relationship:*
hadoopPolicy;;;;hadoop:rwud;;type:*,entity:*,operation:*,taxonomy:*,term:*,relationship:*
adminPolicy;;admin:rwud;;ROLE_ADMIN:rwud;;type:*,entity:*,operation:*,relationship:*
dataScientistPolicy;;;;DATA_SCIENTIST:r;;type:*,entity:*,relationship:*
dataStewardPolicy;;;;DATA_STEWARD:rwu;;type:*,entity:*,relationship:*
hadoopPolicy;;;;hadoop:rwud;;type:*,entity:*,operation:*,relationship:*
rangerTagSyncPolicy;;;;RANGER_TAG_SYNC:r;;type:*,entity:*
......@@ -665,7 +665,6 @@
<module>shaded/hbase-server-shaded</module>
<module>repository</module>
<module>authorization</module>
<module>catalog</module>
<module>dashboardv2</module>
<module>webapp</module>
<module>docs</module>
......@@ -1498,12 +1497,6 @@
<dependency>
<groupId>org.apache.atlas</groupId>
<artifactId>atlas-catalog</artifactId>
<version>${project.version}</version>
</dependency>
<dependency>
<groupId>org.apache.atlas</groupId>
<artifactId>hive-bridge-shim</artifactId>
<version>${project.version}</version>
</dependency>
......
......@@ -2,8 +2,8 @@
##r-READ, w-WRITE, u-UPDATE, d-DELETE
##Policy_Name;;User_Name1:Operations_Allowed,User_Name2:Operations_Allowed;;Group_Name1:Operations_Allowed,Group_Name2:Operations_Allowed;;Resource_Type1:Resource_Name,Resource_Type2:Resource_Name
##
adminPolicy;;admin:rwud;;ROLE_ADMIN:rwud;;type:*,entity:*,operation:*,taxonomy:*,term:*
dataScientistPolicy;;;;DATA_SCIENTIST:r;;type:*,entity:*,taxonomy:*,term:*
dataStewardPolicy;;;;DATA_STEWARD:rwu;;type:*,entity:*,taxonomy:*,term:*
hadoopPolicy;;;;hadoop:rwud;;type:*,entity:*,operation:*,taxonomy:*,term:*
adminPolicy;;admin:rwud;;ROLE_ADMIN:rwud;;type:*,entity:*,operation:*
dataScientistPolicy;;;;DATA_SCIENTIST:r;;type:*,entity:*
dataStewardPolicy;;;;DATA_STEWARD:rwu;;type:*,entity:*
hadoopPolicy;;;;hadoop:rwud;;type:*,entity:*,operation:*
rangerTagSyncPolicy;;;;RANGER_TAG_SYNC:r;;type:*,entity:*
......@@ -70,45 +70,6 @@
<log4j.configuration.url>file:/${project.build.directory}/../../distro/src/conf/atlas-log4j.xml</log4j.configuration.url>
</properties>
</profile>
<profile>
<id>titan1</id>
<!-- remove conflicting lucene/titan versions from catalog when using titan 1 -->
<dependencyManagement>
<dependencies>
<dependency>
<groupId>org.apache.atlas</groupId>
<artifactId>atlas-catalog</artifactId>
<version>${project.version}</version>
<exclusions>
<exclusion>
<groupId>com.thinkaurelius.titan</groupId>
<artifactId>titan-core</artifactId>
</exclusion>
<exclusion>
<groupId>org.apache.lucene</groupId>
<artifactId>lucene-core</artifactId>
</exclusion>
<exclusion>
<groupId>org.apache.lucene</groupId>
<artifactId>lucene-queryparser</artifactId>
</exclusion>
<exclusion>
<groupId>org.apache.lucene</groupId>
<artifactId>lucene-analyzers-common</artifactId>
</exclusion>
</exclusions>
</dependency>
</dependencies>
</dependencyManagement>
<dependencies>
<dependency>
<groupId>org.apache.atlas</groupId>
<artifactId>atlas-catalog</artifactId>
<scope>provided</scope>
</dependency>
</dependencies>
</profile>
</profiles>
<dependencies>
......@@ -172,11 +133,6 @@
<artifactId>hadoop-hdfs</artifactId>
</dependency>
<dependency>
<groupId>org.apache.atlas</groupId>
<artifactId>atlas-catalog</artifactId>
</dependency>
<!-- Zookeeper, curator -->
<dependency>
<groupId>org.apache.curator</groupId>
......
......@@ -101,7 +101,6 @@ public class AdminResource {
private static final String BROWSER_USER_AGENT_PARAM = "atlas.rest-csrf.browser-useragents-regex";
private static final String CUSTOM_METHODS_TO_IGNORE_PARAM = "atlas.rest-csrf.methods-to-ignore";
private static final String CUSTOM_HEADER_PARAM = "atlas.rest-csrf.custom-header";
private static final String isTaxonomyEnabled = "atlas.feature.taxonomy.enable";
private static final String isEntityUpdateAllowed = "atlas.entity.update.allowed";
private static final String isEntityCreateAllowed = "atlas.entity.create.allowed";
private static final String editableEntityTypes = "atlas.ui.editable.entity.types";
......@@ -242,12 +241,7 @@ public class AdminResource {
}
Response response;
Boolean enableTaxonomy = false;
try {
if(atlasProperties != null) {
enableTaxonomy = atlasProperties.getBoolean(isTaxonomyEnabled, false);
}
boolean isEntityUpdateAccessAllowed = false;
boolean isEntityCreateAccessAllowed = false;
Authentication auth = SecurityContextHolder.getContext().getAuthentication();
......@@ -272,7 +266,6 @@ public class AdminResource {
responseData.put(BROWSER_USER_AGENT_PARAM, AtlasCSRFPreventionFilter.BROWSER_USER_AGENTS_DEFAULT);
responseData.put(CUSTOM_METHODS_TO_IGNORE_PARAM, AtlasCSRFPreventionFilter.METHODS_TO_IGNORE_DEFAULT);
responseData.put(CUSTOM_HEADER_PARAM, AtlasCSRFPreventionFilter.HEADER_DEFAULT);
responseData.put(isTaxonomyEnabled, enableTaxonomy);
responseData.put(isEntityUpdateAllowed, isEntityUpdateAccessAllowed);
responseData.put(isEntityCreateAllowed, isEntityCreateAccessAllowed);
responseData.put(editableEntityTypes, getEditableEntityTypes(atlasProperties));
......
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
* <p/>
* http://www.apache.org/licenses/LICENSE-2.0
* <p/>
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.atlas.web.resources;
import com.google.gson.Gson;
import com.google.gson.JsonSyntaxException;
import org.apache.atlas.catalog.JsonSerializer;
import org.apache.atlas.catalog.Request;
import org.apache.atlas.catalog.ResourceProvider;
import org.apache.atlas.catalog.Result;
import org.apache.atlas.catalog.exception.CatalogException;
import org.apache.atlas.catalog.exception.CatalogRuntimeException;
import org.apache.atlas.catalog.exception.InvalidPayloadException;
import org.apache.atlas.catalog.exception.InvalidQueryException;
import org.apache.atlas.catalog.exception.ResourceNotFoundException;
import org.apache.atlas.repository.graph.AtlasGraphProvider;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import javax.ws.rs.core.Context;
import javax.ws.rs.core.UriInfo;
import javax.xml.bind.annotation.XmlRootElement;
import java.io.UnsupportedEncodingException;
import java.net.URLDecoder;
import java.util.Collection;
import java.util.Map;
/**
* Base class for all v1 API services.
*/
public abstract class BaseService {
    // Thread-safe, shared JSON parser for request payloads.
    private static final Gson gson = new Gson();

    // Per-subclass logger so log entries are attributed to the concrete service.
    private final Logger LOG = LoggerFactory.getLogger(getClass());

    private final static JsonSerializer serializer = new JsonSerializer();

    /**
     * Retrieve a single resource by id, translating unexpected runtime
     * failures into CatalogRuntimeException.
     *
     * @param provider provider to delegate the lookup to
     * @param request  request identifying the resource
     * @return the provider's result
     * @throws ResourceNotFoundException if no resource matches the request
     */
    protected Result getResource(ResourceProvider provider, Request request)
            throws ResourceNotFoundException {
        try {
            return provider.getResourceById(request);
        } catch (RuntimeException e) {
            throw wrapRuntimeException(e);
        }
    }

    /**
     * Retrieve a collection of resources matching the request.
     *
     * @param provider provider to delegate the query to
     * @param request  request containing the query
     * @return the provider's result
     * @throws ResourceNotFoundException if the queried resource doesn't exist
     * @throws InvalidQueryException     if the request's query can't be parsed
     */
    protected Result getResources(ResourceProvider provider, Request request)
            throws ResourceNotFoundException, InvalidQueryException {
        try {
            return provider.getResources(request);
        } catch (RuntimeException e) {
            // Fix: this base class serves every v1 resource type, so the log
            // message must not claim the failure is taxonomy-specific.
            LOG.error("Error while retrieving resources", e);
            throw wrapRuntimeException(e);
        }
    }

    /**
     * Create a single resource.
     *
     * @throws CatalogException if the provider rejects the request
     */
    protected void createResource(ResourceProvider provider, Request request) throws CatalogException {
        try {
            provider.createResource(request);
        } catch (RuntimeException e) {
            throw wrapRuntimeException(e);
        }
    }

    /**
     * Update a resource identified by the request.
     *
     * @throws CatalogException if the provider rejects the request
     */
    protected void updateResource(ResourceProvider provider, Request request) throws CatalogException {
        try {
            provider.updateResourceById(request);
        } catch (RuntimeException e) {
            throw wrapRuntimeException(e);
        }
    }

    /**
     * Delete a resource identified by the request.
     *
     * @throws CatalogException if the provider rejects the request
     */
    protected void deleteResource(ResourceProvider provider, Request request) throws CatalogException {
        try {
            provider.deleteResourceById(request);
        } catch (RuntimeException e) {
            throw wrapRuntimeException(e);
        }
    }

    /**
     * Create multiple resources in a single call.
     *
     * @return identifiers (hrefs) of the created resources
     * @throws CatalogException if the provider rejects the request
     */
    protected Collection<String> createResources(ResourceProvider provider, Request request) throws CatalogException {
        try {
            return provider.createResources(request);
        } catch (RuntimeException e) {
            throw wrapRuntimeException(e);
        }
    }

    /**
     * Extract the raw query string from the request URI.
     *
     * @param ui injected URI info for the current request
     * @return everything after the first '?', or null if there is no query string
     */
    protected String getQueryString(@Context UriInfo ui) {
        String uri = ui.getRequestUri().toASCIIString();
        int qsBegin = uri.indexOf('?');
        return (qsBegin == -1) ? null : uri.substring(qsBegin + 1);
    }

    /**
     * Parse a JSON request body into a map.
     * NOTE: the raw {@code Map} bound is kept for source compatibility with
     * existing subclasses; the parsed map is String-keyed JSON.
     *
     * @param body raw JSON request body
     * @return the parsed properties
     * @throws InvalidPayloadException if the body is not valid JSON
     */
    protected <T extends Map> T parsePayload(String body) throws InvalidPayloadException {
        try {
            return gson.<T>fromJson(body, Map.class);
        } catch (JsonSyntaxException e) {
            LOG.info("Unable to parse json in request body", e);
            throw new InvalidPayloadException("Request payload contains invalid JSON: " + e.getMessage());
        }
    }

    /**
     * URL-decode a string using UTF-8.
     *
     * @param s possibly-encoded string, may be null
     * @return the decoded string, or null if the input was null
     * @throws CatalogException if decoding fails (should never happen for UTF-8)
     */
    protected String decode(String s) throws CatalogException {
        try {
            return s == null ? null : URLDecoder.decode(s, "UTF-8");
        } catch (UnsupportedEncodingException e) {
            throw new CatalogException("Unable to decode URL: " + e.getMessage(), 500);
        }
    }

    /** @return the shared result serializer */
    protected JsonSerializer getSerializer() {
        return serializer;
    }

    // Wrap unexpected runtime failures; pass CatalogRuntimeException through untouched.
    private RuntimeException wrapRuntimeException(RuntimeException e) {
        return e instanceof CatalogRuntimeException ? e : new CatalogRuntimeException(e);
    }

    @XmlRootElement
    // the name of this class is used as the collection name in the returned json when returning a collection
    public static class Results {
        public String href;
        public int status;

        public Results() {
            // required by JAXB
        }

        public Results(String href, int status) {
            this.href = href;
            this.status = status;
        }
    }
}
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.atlas.web.resources;
import org.apache.atlas.catalog.exception.CatalogException;
import org.apache.atlas.web.util.Servlets;
import javax.ws.rs.core.Response;
import javax.ws.rs.ext.ExceptionMapper;
import javax.ws.rs.ext.Provider;
import javax.xml.bind.annotation.XmlRootElement;
/**
* Exception mapper for CatalogException.
*/
@Provider
public class CatalogExceptionMapper implements ExceptionMapper<CatalogException> {

    /**
     * Converts the exception into a JSON response carrying the exception's
     * own HTTP status code.
     */
    @Override
    public Response toResponse(CatalogException e) {
        ErrorBean body = new ErrorBean(e);
        return Response.status(e.getStatus())
                .entity(body)
                .type(Servlets.JSON_MEDIA_TYPE)
                .build();
    }

    /** JAXB bean serialized as the error response body. */
    @XmlRootElement
    public static class ErrorBean {
        public int status;
        public String message;

        public ErrorBean() {
            // required for JAXB
        }

        public ErrorBean(CatalogException exception) {
            this.status = exception.getStatus();
            this.message = exception.getMessage();
        }

        public int getStatus() {
            return status;
        }

        public String getMessage() {
            return message;
        }
    }
}
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.atlas.web.resources;
import org.apache.atlas.catalog.exception.CatalogRuntimeException;
import org.apache.atlas.web.util.Servlets;
import javax.ws.rs.core.Response;
import javax.ws.rs.ext.ExceptionMapper;
import javax.ws.rs.ext.Provider;
import javax.xml.bind.annotation.XmlRootElement;
import java.io.PrintWriter;
import java.io.StringWriter;
/**
 * Exception mapper for CatalogRuntimeException: converts unexpected runtime
 * failures into a JSON response carrying the status code, a descriptive
 * message and the full stack trace.
 */
@Provider
public class CatalogRuntimeExceptionMapper implements ExceptionMapper<CatalogRuntimeException> {
    @Override
    public Response toResponse(CatalogRuntimeException e) {
        return Response.status(e.getStatusCode()).entity(
                new ErrorBean(e)).type(Servlets.JSON_MEDIA_TYPE).build();
    }

    /** JAXB-serializable payload describing the unexpected error. */
    @XmlRootElement
    public static class ErrorBean {
        private static final String MSG_PREFIX = "An unexpected error has occurred. ";
        public int status;
        public String message;
        public String stackTrace;
        //todo: error code, developerMsg ...

        public ErrorBean() {
            // required for JAXB
        }

        public ErrorBean(CatalogRuntimeException ex) {
            this.status = 500;
            // BUG FIX: the original unconditionally invoked ex.getCause().toString(),
            // which threw a NullPointerException for exceptions created without a
            // cause. Guard the null case so the mapper itself can never fail.
            Throwable cause = ex.getCause();
            this.message = String.format("%s%s : %s", MSG_PREFIX, ex.toString(),
                    cause != null ? cause.toString() : "no cause");
            this.stackTrace = getStackTraceFromException(ex);
        }

        public int getStatus() {
            return status;
        }

        public String getMessage() {
            return message;
        }

        public String getStackTrace() {
            return stackTrace;
        }

        // Renders the full stack trace of the given exception into a String.
        private String getStackTraceFromException(RuntimeException e) {
            StringWriter sw = new StringWriter();
            e.printStackTrace(new PrintWriter(sw));
            return sw.toString();
        }
    }
}
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
* <p/>
* http://www.apache.org/licenses/LICENSE-2.0
* <p/>
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.atlas.web.resources;
import org.apache.atlas.catalog.BaseRequest;
import org.apache.atlas.catalog.CollectionRequest;
import org.apache.atlas.catalog.DefaultTypeSystem;
import org.apache.atlas.catalog.EntityResourceProvider;
import org.apache.atlas.catalog.EntityTagResourceProvider;
import org.apache.atlas.catalog.InstanceRequest;
import org.apache.atlas.catalog.Result;
import org.apache.atlas.catalog.exception.CatalogException;
import org.apache.atlas.exception.AtlasBaseException;
import org.apache.atlas.services.MetadataService;
import org.apache.atlas.store.AtlasTypeDefStore;
import org.apache.atlas.utils.AtlasPerfTracer;
import org.apache.atlas.web.util.Servlets;
import org.slf4j.Logger;
import org.springframework.stereotype.Service;
import javax.inject.Inject;
import javax.inject.Singleton;
import javax.ws.rs.DELETE;
import javax.ws.rs.GET;
import javax.ws.rs.POST;
import javax.ws.rs.Path;
import javax.ws.rs.PathParam;
import javax.ws.rs.Produces;
import javax.ws.rs.core.Context;
import javax.ws.rs.core.GenericEntity;
import javax.ws.rs.core.HttpHeaders;
import javax.ws.rs.core.Response;
import javax.ws.rs.core.UriInfo;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Collections;
import java.util.HashMap;
import java.util.Map;
/**
 * Service which handles API requests for v1 entity resources.
 * <p>
 * Delegates collection/instance reads, tag association and tag removal to the
 * catalog {@code EntityResourceProvider} and {@code EntityTagResourceProvider}.
 */
@Path("v1/entities")
@Singleton
@Service
public class EntityService extends BaseService {
    private static final Logger PERF_LOG = AtlasPerfTracer.getPerfLogger("rest.EntityService");

    private final EntityResourceProvider entityResourceProvider;
    private final EntityTagResourceProvider entityTagResourceProvider;

    @Inject
    public EntityService(MetadataService metadataService, AtlasTypeDefStore typeDefStore) throws AtlasBaseException {
        DefaultTypeSystem typeSystem = new DefaultTypeSystem(metadataService, typeDefStore);
        entityResourceProvider = new EntityResourceProvider(typeSystem);
        entityTagResourceProvider = new EntityTagResourceProvider(typeSystem);
    }

    /**
     * Returns all entities matching the (optional) query string in the request URI.
     */
    @GET
    @Produces(Servlets.JSON_MEDIA_TYPE)
    public Response getEntities(@Context HttpHeaders headers, @Context UriInfo ui) throws CatalogException {
        AtlasPerfTracer perf = null;
        try {
            if (AtlasPerfTracer.isPerfTraceEnabled(PERF_LOG)) {
                perf = AtlasPerfTracer.getPerfTracer(PERF_LOG, "EntityService.getEntities()");
            }
            String queryString = decode(getQueryString(ui));
            BaseRequest request = new CollectionRequest(Collections.<String, Object>emptyMap(), queryString);
            Result result = getResources(entityResourceProvider, request);
            return Response.status(Response.Status.OK).entity(getSerializer().serialize(result, ui)).build();
        } finally {
            AtlasPerfTracer.log(perf);
        }
    }

    /**
     * Returns the single entity identified by {@code entityId}.
     */
    @GET
    @Path("{entityId}")
    @Produces(Servlets.JSON_MEDIA_TYPE)
    public Response getEntity(@Context HttpHeaders headers,
                              @Context UriInfo ui,
                              @PathParam("entityId") String entityId) throws CatalogException {
        AtlasPerfTracer perf = null;
        try {
            if (AtlasPerfTracer.isPerfTraceEnabled(PERF_LOG)) {
                perf = AtlasPerfTracer.getPerfTracer(PERF_LOG, "EntityService.getEntity(" + entityId + ")");
            }
            BaseRequest request = new InstanceRequest(Collections.<String, Object>singletonMap("id", entityId));
            Result result = getResource(entityResourceProvider, request);
            return Response.status(Response.Status.OK).entity(getSerializer().serialize(result, ui)).build();
        } finally {
            AtlasPerfTracer.log(perf);
        }
    }

    /**
     * Returns a specific tag association for an entity.
     */
    @GET
    @Path("{entityId}/tags/{tag}")
    @Produces(Servlets.JSON_MEDIA_TYPE)
    public Response getEntityTag(@Context HttpHeaders headers,
                                 @Context UriInfo ui,
                                 @PathParam("entityId") String entityId,
                                 @PathParam("tag") String tagName) throws CatalogException {
        AtlasPerfTracer perf = null;
        try {
            if (AtlasPerfTracer.isPerfTraceEnabled(PERF_LOG)) {
                perf = AtlasPerfTracer.getPerfTracer(PERF_LOG, "EntityService.getEntityTag(" + entityId + ", " + tagName + ")");
            }
            Map<String, Object> properties = new HashMap<>();
            properties.put("id", entityId);
            properties.put("name", tagName);
            Result result = getResource(entityTagResourceProvider, new InstanceRequest(properties));
            return Response.status(Response.Status.OK).entity(getSerializer().serialize(result, ui)).build();
        } finally {
            AtlasPerfTracer.log(perf);
        }
    }

    /**
     * Returns all tags associated with the given entity, optionally filtered by
     * the request's query string.
     */
    @GET
    @Path("{entityId}/tags")
    @Produces(Servlets.JSON_MEDIA_TYPE)
    public Response getEntityTags(@Context HttpHeaders headers,
                                  @Context UriInfo ui,
                                  @PathParam("entityId") String entityGuid) throws CatalogException {
        AtlasPerfTracer perf = null;
        try {
            if (AtlasPerfTracer.isPerfTraceEnabled(PERF_LOG)) {
                perf = AtlasPerfTracer.getPerfTracer(PERF_LOG, "EntityService.getEntityTags(" + entityGuid + ")");
            }
            BaseRequest request = new CollectionRequest(Collections.<String, Object>singletonMap("id", entityGuid),
                    decode(getQueryString(ui)));
            Result result = getResources(entityTagResourceProvider, request);
            return Response.status(Response.Status.OK).entity(getSerializer().serialize(result, ui)).build();
        } finally {
            AtlasPerfTracer.log(perf);
        }
    }

    /**
     * Associates a single tag with a single entity; returns 201 on success.
     */
    @POST
    @Path("{entityId}/tags/{tag}")
    @Produces(Servlets.JSON_MEDIA_TYPE)
    public Response tagEntity(String body,
                              @Context HttpHeaders headers,
                              @Context UriInfo ui,
                              @PathParam("entityId") String entityId,
                              @PathParam("tag") String tagName) throws CatalogException {
        AtlasPerfTracer perf = null;
        try {
            if (AtlasPerfTracer.isPerfTraceEnabled(PERF_LOG)) {
                perf = AtlasPerfTracer.getPerfTracer(PERF_LOG, "EntityService.tagEntity(" + entityId + ", " + tagName + ")");
            }
            Map<String, Object> properties = new HashMap<>();
            properties.put("id", entityId);
            properties.put("name", tagName);
            createResource(entityTagResourceProvider, new InstanceRequest(properties));
            return Response.status(Response.Status.CREATED).entity(
                    new Results(ui.getRequestUri().toString(), 201)).build();
        } finally {
            AtlasPerfTracer.log(perf);
        }
    }

    /**
     * Bulk-associates tags with the entities selected by the query string.
     * The payload must contain exactly one property, "tags"; anything else is
     * rejected with a 400, since entity creation is not supported here.
     */
    @POST
    @Produces(Servlets.JSON_MEDIA_TYPE)
    public Response tagEntities(String body,
                                @Context HttpHeaders headers,
                                @Context UriInfo ui) throws CatalogException {
        AtlasPerfTracer perf = null;
        try {
            if (AtlasPerfTracer.isPerfTraceEnabled(PERF_LOG)) {
                perf = AtlasPerfTracer.getPerfTracer(PERF_LOG, "EntityService.tagEntities()");
            }
            Map<String, Object> properties = parsePayload(body);
            if (properties.get("tags") == null || properties.size() != 1) {
                throw new CatalogException(
                        "Invalid Request, no 'tags' property specified. Creation of entity resource not supported.", 400);
            }
            String queryString = decode(getQueryString(ui));
            Collection<String> createResults = createResources(
                    entityTagResourceProvider, new CollectionRequest(properties, queryString));
            Collection<Results> result = new ArrayList<>();
            for (String relativeUrl : createResults) {
                result.add(new Results(ui.getBaseUri().toString() + relativeUrl, 201));
            }
            return Response.status(Response.Status.CREATED).entity(
                    new GenericEntity<Collection<Results>>(result) {
                    }).build();
        } finally {
            AtlasPerfTracer.log(perf);
        }
    }

    /**
     * Removes a tag association from an entity.
     */
    @DELETE
    @Path("{entityId}/tags/{tag}")
    @Produces(Servlets.JSON_MEDIA_TYPE)
    public Response deleteEntityTag(@Context HttpHeaders headers,
                                    @Context UriInfo ui,
                                    @PathParam("entityId") String entityId,
                                    @PathParam("tag") String tagName) throws CatalogException {
        AtlasPerfTracer perf = null;
        try {
            if (AtlasPerfTracer.isPerfTraceEnabled(PERF_LOG)) {
                // CONSISTENCY FIX: include the path parameters in the perf-trace
                // message, as every other method in this class does.
                perf = AtlasPerfTracer.getPerfTracer(PERF_LOG, "EntityService.deleteEntityTag(" + entityId + ", " + tagName + ")");
            }
            Map<String, Object> properties = new HashMap<>();
            properties.put("id", entityId);
            properties.put("name", tagName);
            deleteResource(entityTagResourceProvider, new InstanceRequest(properties));
            return Response.status(Response.Status.OK).entity(
                    new Results(ui.getRequestUri().toString(), 200)).build();
        } finally {
            AtlasPerfTracer.log(perf);
        }
    }
}
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
* <p/>
* http://www.apache.org/licenses/LICENSE-2.0
* <p/>
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.atlas.web.resources;
import org.apache.atlas.catalog.*;
import org.apache.atlas.catalog.exception.CatalogException;
import org.apache.atlas.catalog.exception.InvalidPayloadException;
import org.apache.atlas.exception.AtlasBaseException;
import org.apache.atlas.services.MetadataService;
import org.apache.atlas.store.AtlasTypeDefStore;
import org.apache.atlas.utils.AtlasPerfTracer;
import org.apache.atlas.web.util.Servlets;
import org.slf4j.Logger;
import org.springframework.stereotype.Component;
import javax.inject.Inject;
import javax.ws.rs.DELETE;
import javax.ws.rs.GET;
import javax.ws.rs.POST;
import javax.ws.rs.PUT;
import javax.ws.rs.Path;
import javax.ws.rs.PathParam;
import javax.ws.rs.Produces;
import javax.ws.rs.core.Context;
import javax.ws.rs.core.HttpHeaders;
import javax.ws.rs.core.PathSegment;
import javax.ws.rs.core.Response;
import javax.ws.rs.core.UriInfo;
import java.util.Collections;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
/**
 * Service which handles API requests for taxonomy and term resources.
 * <p>
 * Taxonomies are addressed by name; terms are addressed by a dotted "term path"
 * (taxonomy.term.subterm...) that is reconstructed from the "/terms/" segments
 * of the request URI. All operations delegate to the injected resource providers.
 */
@Path("v1/taxonomies")
@Component
public class TaxonomyService extends BaseService {
    private static final Logger PERF_LOG = AtlasPerfTracer.getPerfLogger("rest.TaxonomyService");

    // Initialized via setter injection below, not in a constructor.
    private ResourceProvider taxonomyResourceProvider;
    private ResourceProvider termResourceProvider;

    /**
     * Injection point: builds the type system and both resource providers.
     * The create*ResourceProvider factory methods are protected so tests can
     * substitute mock providers.
     */
    @Inject
    public void setMetadataService(MetadataService metadataService, AtlasTypeDefStore typeDefStore) throws AtlasBaseException {
        DefaultTypeSystem typeSystem = new DefaultTypeSystem(metadataService, typeDefStore);
        taxonomyResourceProvider = createTaxonomyResourceProvider(typeSystem);
        termResourceProvider = createTermResourceProvider(typeSystem);
    }

    /**
     * Returns the single taxonomy with the given name.
     */
    @GET
    @Path("{taxonomyName}")
    @Produces(Servlets.JSON_MEDIA_TYPE)
    public Response getTaxonomy(@Context HttpHeaders headers,
                                @Context UriInfo ui,
                                @PathParam("taxonomyName") String taxonomyName) throws CatalogException {
        AtlasPerfTracer perf = null;
        try {
            if (AtlasPerfTracer.isPerfTraceEnabled(PERF_LOG)) {
                perf = AtlasPerfTracer.getPerfTracer(PERF_LOG, "TaxonomyService.getTaxonomy(" + taxonomyName + ")");
            }
            Map<String, Object> properties = new HashMap<>();
            properties.put("name", taxonomyName);
            Result result = getResource(taxonomyResourceProvider, new InstanceRequest(properties));
            return Response.status(Response.Status.OK).entity(getSerializer().serialize(result, ui)).build();
        } finally {
            AtlasPerfTracer.log(perf);
        }
    }

    /**
     * Returns all taxonomies matching the (optional) query string.
     */
    @GET
    @Produces(Servlets.JSON_MEDIA_TYPE)
    public Response getTaxonomies(@Context HttpHeaders headers, @Context UriInfo ui) throws CatalogException {
        AtlasPerfTracer perf = null;
        try {
            if (AtlasPerfTracer.isPerfTraceEnabled(PERF_LOG)) {
                perf = AtlasPerfTracer.getPerfTracer(PERF_LOG, "TaxonomyService.getTaxonomies()");
            }
            String queryString = decode(getQueryString(ui));
            Request request = new CollectionRequest(Collections.<String, Object>emptyMap(), queryString);
            Result result = getResources(taxonomyResourceProvider, request);
            return Response.status(Response.Status.OK).entity(getSerializer().serialize(result, ui)).build();
        } finally {
            AtlasPerfTracer.log(perf);
        }
    }

    /**
     * Creates a taxonomy; the body supplies additional properties (e.g. description).
     * Returns 201 with the request URI as the href.
     */
    @POST
    @Path("{taxonomyName}")
    @Produces(Servlets.JSON_MEDIA_TYPE)
    public Response createTaxonomy(String body,
                                   @Context HttpHeaders headers,
                                   @Context UriInfo ui,
                                   @PathParam("taxonomyName") String taxonomyName) throws CatalogException {
        AtlasPerfTracer perf = null;
        try {
            if (AtlasPerfTracer.isPerfTraceEnabled(PERF_LOG)) {
                perf = AtlasPerfTracer.getPerfTracer(PERF_LOG, "TaxonomyService.createTaxonomy(" + taxonomyName + ")");
            }
            Map<String, Object> properties = parsePayload(body);
            // path param wins over any "name" present in the payload
            properties.put("name", taxonomyName);
            createResource(taxonomyResourceProvider, new InstanceRequest(properties));
            return Response.status(Response.Status.CREATED).entity(
                    new Results(ui.getRequestUri().toString(), 201)).build();
        } finally {
            AtlasPerfTracer.log(perf);
        }
    }

    /**
     * Updates the named taxonomy with the properties in the request body.
     */
    @PUT
    @Path("{taxonomyName}")
    @Produces(Servlets.JSON_MEDIA_TYPE)
    public Response updateTaxonomy(String body,
                                   @Context HttpHeaders headers,
                                   @Context UriInfo ui,
                                   @PathParam("taxonomyName") String taxonomyName) throws CatalogException {
        AtlasPerfTracer perf = null;
        try {
            if (AtlasPerfTracer.isPerfTraceEnabled(PERF_LOG)) {
                perf = AtlasPerfTracer.getPerfTracer(PERF_LOG, "TaxonomyService.updateTaxonomy(" + taxonomyName + ")");
            }
            Map<String, Object> queryProperties = new HashMap<>();
            queryProperties.put("name", taxonomyName);
            Map<String, Object> updateProperties = parsePayload(body);
            updateResource(taxonomyResourceProvider, new InstanceRequest(queryProperties, updateProperties));
            return Response.status(Response.Status.OK).entity(
                    new Results(ui.getRequestUri().toString(), 200)).build();
        } finally {
            AtlasPerfTracer.log(perf);
        }
    }

    /**
     * Deletes the named taxonomy.
     */
    @DELETE
    @Path("{taxonomyName}")
    @Produces(Servlets.JSON_MEDIA_TYPE)
    public Response deleteTaxonomy(@Context HttpHeaders headers,
                                   @Context UriInfo ui,
                                   @PathParam("taxonomyName") String taxonomyName) throws CatalogException {
        AtlasPerfTracer perf = null;
        try {
            if (AtlasPerfTracer.isPerfTraceEnabled(PERF_LOG)) {
                perf = AtlasPerfTracer.getPerfTracer(PERF_LOG, "TaxonomyService.deleteTaxonomy(" + taxonomyName + ")");
            }
            Map<String, Object> properties = new HashMap<>();
            properties.put("name", taxonomyName);
            deleteResource(taxonomyResourceProvider, new InstanceRequest(properties));
            return Response.status(Response.Status.OK).entity(
                    new Results(ui.getRequestUri().toString(), 200)).build();
        } finally {
            AtlasPerfTracer.log(perf);
        }
    }

    /**
     * Returns a top-level term of the taxonomy.
     */
    @GET
    @Path("{taxonomyName}/terms/{termName}")
    @Produces(Servlets.JSON_MEDIA_TYPE)
    public Response getTaxonomyTerm(@Context HttpHeaders headers,
                                    @Context UriInfo ui,
                                    @PathParam("taxonomyName") String taxonomyName,
                                    @PathParam("termName") String termName) throws CatalogException {
        AtlasPerfTracer perf = null;
        try {
            if (AtlasPerfTracer.isPerfTraceEnabled(PERF_LOG)) {
                perf = AtlasPerfTracer.getPerfTracer(PERF_LOG, "TaxonomyService.getTaxonomyTerm(" + taxonomyName + ", " + termName + ")");
            }
            TermPath termPath = new TermPath(taxonomyName, termName);
            Map<String, Object> properties = new HashMap<>();
            properties.put("termPath", termPath);
            Result result = getResource(termResourceProvider, new InstanceRequest(properties));
            return Response.status(Response.Status.OK).entity(getSerializer().serialize(result, ui)).build();
        } finally {
            AtlasPerfTracer.log(perf);
        }
    }

    /**
     * Returns all top-level terms of the taxonomy (null term name = collection).
     */
    @GET
    @Path("{taxonomyName}/terms")
    @Produces(Servlets.JSON_MEDIA_TYPE)
    public Response getTaxonomyTerms(@Context HttpHeaders headers,
                                     @Context UriInfo ui,
                                     @PathParam("taxonomyName") String taxonomyName) throws CatalogException {
        AtlasPerfTracer perf = null;
        try {
            if (AtlasPerfTracer.isPerfTraceEnabled(PERF_LOG)) {
                perf = AtlasPerfTracer.getPerfTracer(PERF_LOG, "TaxonomyService.getTaxonomyTerms(" + taxonomyName + ")");
            }
            String queryString = decode(getQueryString(ui));
            TermPath termPath = new TermPath(taxonomyName, null);
            Request request = new CollectionRequest(
                    Collections.<String, Object>singletonMap("termPath", termPath), queryString);
            Result result = getResources(termResourceProvider, request);
            return Response.status(Response.Status.OK).entity(getSerializer().serialize(result, ui)).build();
        } finally {
            AtlasPerfTracer.log(perf);
        }
    }

    /**
     * Handles arbitrarily nested sub-term URIs. If the URI ends in "terms"
     * (or "terms/") it is a collection request for the children of the term;
     * otherwise it is an instance request for one specific sub-term.
     */
    @GET
    @Path("{taxonomyName}/terms/{rootTerm}/{remainder:.*}")
    @Produces(Servlets.JSON_MEDIA_TYPE)
    public Response getSubTerms(@Context HttpHeaders headers,
                                @Context UriInfo ui,
                                @PathParam("taxonomyName") String taxonomyName,
                                @PathParam("rootTerm") String rootTerm,
                                @PathParam("remainder") String remainder) throws CatalogException {
        AtlasPerfTracer perf = null;
        try {
            if (AtlasPerfTracer.isPerfTraceEnabled(PERF_LOG)) {
                perf = AtlasPerfTracer.getPerfTracer(PERF_LOG, "TaxonomyService.getSubTerms(" + taxonomyName + ", " + rootTerm + ", " + remainder + ")");
            }
            Result result;
            // Collapse "/terms/" URI separators into the dotted term-path form.
            // NOTE(review): the class "[.]*" matches only literal dots, so "$1"
            // is normally empty and each "terms" segment becomes a single "." —
            // presumably intentional; confirm against TermPath's expected format.
            String termName = String.format("%s%s", rootTerm,
                    remainder.replaceAll("/?terms/?([.]*)", "$1."));
            String queryString = decode(getQueryString(ui));
            TermPath termPath = new TermPath(taxonomyName, termName);
            Map<String, Object> properties = new HashMap<>();
            properties.put("termPath", termPath);
            List<PathSegment> pathSegments = ui.getPathSegments();
            int lastIndex = pathSegments.size() - 1;
            String lastSegment = pathSegments.get(lastIndex).getPath();
            // trailing "terms" (with or without a trailing slash) => list children
            if (lastSegment.equals("terms") || (lastSegment.isEmpty() && pathSegments.get(lastIndex - 1).getPath().equals("terms"))) {
                result = getResources(termResourceProvider, new CollectionRequest(properties, queryString));
            } else {
                result = getResource(termResourceProvider, new InstanceRequest(properties));
            }
            return Response.status(Response.Status.OK).entity(getSerializer().serialize(result, ui)).build();
        } finally {
            AtlasPerfTracer.log(perf);
        }
    }

    /**
     * Creates a top-level term. The term name may not contain '.', which is
     * reserved as the term-path delimiter.
     */
    @POST
    @Path("{taxonomyName}/terms/{termName}")
    @Produces(Servlets.JSON_MEDIA_TYPE)
    public Response createTerm(String body,
                               @Context HttpHeaders headers,
                               @Context UriInfo ui,
                               @PathParam("taxonomyName") String taxonomyName,
                               @PathParam("termName") String termName) throws CatalogException {
        AtlasPerfTracer perf = null;
        try {
            if (AtlasPerfTracer.isPerfTraceEnabled(PERF_LOG)) {
                perf = AtlasPerfTracer.getPerfTracer(PERF_LOG, "TaxonomyService.createTerm(" + taxonomyName + ", " + termName + ")");
            }
            Map<String, Object> properties = parsePayload(body);
            validateName(termName);
            properties.put("termPath", new TermPath(taxonomyName, termName));
            createResource(termResourceProvider, new InstanceRequest(properties));
            return Response.status(Response.Status.CREATED).entity(
                    new Results(ui.getRequestUri().toString(), 201)).build();
        } finally {
            AtlasPerfTracer.log(perf);
        }
    }

    /**
     * Creates a nested sub-term; only the final path token (the new term's own
     * name) is validated against the '.' restriction.
     */
    @POST
    @Path("{taxonomyName}/terms/{termName}/{remainder:.*}")
    @Produces(Servlets.JSON_MEDIA_TYPE)
    public Response createSubTerm(String body,
                                  @Context HttpHeaders headers,
                                  @Context UriInfo ui,
                                  @PathParam("taxonomyName") String taxonomyName,
                                  @PathParam("termName") String termName,
                                  @PathParam("remainder") String remainder) throws CatalogException {
        AtlasPerfTracer perf = null;
        try {
            if (AtlasPerfTracer.isPerfTraceEnabled(PERF_LOG)) {
                perf = AtlasPerfTracer.getPerfTracer(PERF_LOG, "TaxonomyService.createSubTerm(" + taxonomyName + ", " + termName + ", " + remainder + ")");
            }
            Map<String, Object> properties = parsePayload(body);
            String[] pathTokens = remainder.split("/");
            validateName(pathTokens[pathTokens.length - 1]);
            // same "/terms/" -> "." collapsing as getSubTerms
            properties.put("termPath", new TermPath(taxonomyName, String.format("%s%s", termName,
                    remainder.replaceAll("/?terms/?([.]*)", "$1."))));
            createResource(termResourceProvider, new InstanceRequest(properties));
            return Response.status(Response.Status.CREATED).entity(
                    new Results(ui.getRequestUri().toString(), 201)).build();
        } finally {
            AtlasPerfTracer.log(perf);
        }
    }

    /**
     * Updates a top-level term with the properties in the request body.
     */
    @PUT
    @Path("{taxonomyName}/terms/{termName}")
    @Produces(Servlets.JSON_MEDIA_TYPE)
    public Response updateTerm(String body,
                               @Context HttpHeaders headers,
                               @Context UriInfo ui,
                               @PathParam("taxonomyName") String taxonomyName,
                               @PathParam("termName") String termName) throws CatalogException {
        AtlasPerfTracer perf = null;
        try {
            if (AtlasPerfTracer.isPerfTraceEnabled(PERF_LOG)) {
                perf = AtlasPerfTracer.getPerfTracer(PERF_LOG, "TaxonomyService.updateTerm(" + taxonomyName + ", " + termName + ")");
            }
            Map<String, Object> queryProperties = new HashMap<>();
            queryProperties.put("termPath", new TermPath(taxonomyName, termName));
            Map<String, Object> updateProperties = parsePayload(body);
            updateResource(termResourceProvider, new InstanceRequest(queryProperties, updateProperties));
            return Response.status(Response.Status.OK).entity(
                    new Results(ui.getRequestUri().toString(), 200)).build();
        } finally {
            AtlasPerfTracer.log(perf);
        }
    }

    /**
     * Updates a nested sub-term addressed by the remainder path.
     */
    @PUT
    @Path("{taxonomyName}/terms/{termName}/{remainder:.*}")
    @Produces(Servlets.JSON_MEDIA_TYPE)
    public Response updateSubTerm(String body,
                                  @Context HttpHeaders headers,
                                  @Context UriInfo ui,
                                  @PathParam("taxonomyName") String taxonomyName,
                                  @PathParam("termName") String termName,
                                  @PathParam("remainder") String remainder) throws CatalogException {
        AtlasPerfTracer perf = null;
        try {
            if (AtlasPerfTracer.isPerfTraceEnabled(PERF_LOG)) {
                perf = AtlasPerfTracer.getPerfTracer(PERF_LOG, "TaxonomyService.updateSubTerm(" + taxonomyName + ", " + termName + ", " + remainder + ")");
            }
            Map<String, Object> queryProperties = new HashMap<>();
            queryProperties.put("termPath", new TermPath(taxonomyName, String.format("%s%s", termName,
                    remainder.replaceAll("/?terms/?([.]*)", "$1."))));
            Map<String, Object> updateProperties = parsePayload(body);
            updateResource(termResourceProvider, new InstanceRequest(queryProperties, updateProperties));
            return Response.status(Response.Status.OK).entity(
                    new Results(ui.getRequestUri().toString(), 200)).build();
        } finally {
            AtlasPerfTracer.log(perf);
        }
    }

    /**
     * Deletes a top-level term.
     */
    @DELETE
    @Path("{taxonomyName}/terms/{termName}")
    @Produces(Servlets.JSON_MEDIA_TYPE)
    public Response deleteTerm(@Context HttpHeaders headers,
                               @Context UriInfo ui,
                               @PathParam("taxonomyName") String taxonomyName,
                               @PathParam("termName") String termName) throws CatalogException {
        AtlasPerfTracer perf = null;
        try {
            if (AtlasPerfTracer.isPerfTraceEnabled(PERF_LOG)) {
                perf = AtlasPerfTracer.getPerfTracer(PERF_LOG, "TaxonomyService.deleteTerm(" + taxonomyName + ", " + termName + ")");
            }
            Map<String, Object> properties = new HashMap<>();
            properties.put("termPath", new TermPath(taxonomyName, termName));
            deleteResource(termResourceProvider, new InstanceRequest(properties));
            return Response.status(Response.Status.OK).entity(
                    new Results(ui.getRequestUri().toString(), 200)).build();
        } finally {
            AtlasPerfTracer.log(perf);
        }
    }

    /**
     * Deletes a nested sub-term addressed by the remainder path.
     */
    @DELETE
    @Path("{taxonomyName}/terms/{termName}/{remainder:.*}")
    @Produces(Servlets.JSON_MEDIA_TYPE)
    public Response deleteSubTerm(@Context HttpHeaders headers,
                                  @Context UriInfo ui,
                                  @PathParam("taxonomyName") String taxonomyName,
                                  @PathParam("termName") String termName,
                                  @PathParam("remainder") String remainder) throws CatalogException {
        AtlasPerfTracer perf = null;
        try {
            if (AtlasPerfTracer.isPerfTraceEnabled(PERF_LOG)) {
                perf = AtlasPerfTracer.getPerfTracer(PERF_LOG, "TaxonomyService.deleteSubTerm(" + taxonomyName + ", " + termName + ", " + remainder + ")");
            }
            Map<String, Object> properties = new HashMap<>();
            properties.put("termPath", new TermPath(taxonomyName, String.format("%s%s", termName,
                    remainder.replaceAll("/?terms/?([.]*)", "$1."))));
            deleteResource(termResourceProvider, new InstanceRequest(properties));
            return Response.status(Response.Status.OK).entity(
                    new Results(ui.getRequestUri().toString(), 200)).build();
        } finally {
            AtlasPerfTracer.log(perf);
        }
    }

    // Factory hook so tests can inject a mock taxonomy provider.
    protected ResourceProvider createTaxonomyResourceProvider(AtlasTypeSystem typeSystem) {
        return new TaxonomyResourceProvider(typeSystem);
    }

    // Factory hook so tests can inject a mock term provider.
    protected ResourceProvider createTermResourceProvider(AtlasTypeSystem typeSystem) {
        return new TermResourceProvider(typeSystem);
    }

    // '.' is the term-path delimiter, so it is forbidden inside a term name.
    private void validateName(String name) throws InvalidPayloadException {
        if (name.contains(".")) {
            throw new InvalidPayloadException("The \"name\" property may not contain the character '.'");
        }
    }
}
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
* <p/>
* http://www.apache.org/licenses/LICENSE-2.0
* <p/>
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.atlas.web.resources;
import static org.easymock.EasyMock.capture;
import static org.easymock.EasyMock.createNiceMock;
import static org.easymock.EasyMock.createStrictMock;
import static org.easymock.EasyMock.expect;
import static org.easymock.EasyMock.newCapture;
import static org.easymock.EasyMock.replay;
import static org.easymock.EasyMock.verify;
import static org.testng.Assert.assertEquals;
import static org.testng.Assert.assertNull;
import static org.testng.Assert.assertTrue;
import java.net.URI;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collection;
import java.util.HashMap;
import java.util.Map;
import javax.ws.rs.core.PathSegment;
import javax.ws.rs.core.Response;
import javax.ws.rs.core.UriInfo;
import org.apache.atlas.AtlasException;
import org.apache.atlas.catalog.AtlasTypeSystem;
import org.apache.atlas.catalog.JsonSerializer;
import org.apache.atlas.catalog.Request;
import org.apache.atlas.catalog.ResourceProvider;
import org.apache.atlas.catalog.Result;
import org.apache.atlas.catalog.TaxonomyResourceProvider;
import org.apache.atlas.catalog.TermPath;
import org.apache.atlas.exception.AtlasBaseException;
import org.apache.atlas.services.MetadataService;
import org.apache.atlas.store.AtlasTypeDefStore;
import org.easymock.Capture;
import org.testng.annotations.Test;
/**
* Unit tests for TaxonomyService.
*/
public class TaxonomyServiceTest {
@Test
public void testGetTaxonomy() throws Exception {
    // Verifies that getTaxonomy() builds an instance request keyed by "name"
    // and returns the serializer's output with HTTP 200.
    // Strict mocks: expectation order below is part of the test contract.
    String taxonomyName = "testTaxonomy";
    MetadataService metadataService = createStrictMock(MetadataService.class);
    AtlasTypeDefStore typeDefStore = createStrictMock(AtlasTypeDefStore.class);
    ResourceProvider taxonomyResourceProvider = createStrictMock(ResourceProvider.class);
    ResourceProvider termResourceProvider = createStrictMock(ResourceProvider.class);
    UriInfo uriInfo = createNiceMock(UriInfo.class);
    JsonSerializer serializer = createStrictMock(JsonSerializer.class);
    Capture<Request> requestCapture = newCapture();

    // canned provider result containing a single taxonomy
    Collection<Map<String, Object>> resultPropertyMaps = new ArrayList<>();
    Map<String, Object> propertyMap = new HashMap<>();
    propertyMap.put("name", "testTaxonomy");
    resultPropertyMaps.add(propertyMap);
    Result result = new Result(resultPropertyMaps);

    // set mock expectations
    expect(taxonomyResourceProvider.getResourceById(capture(requestCapture))).andReturn(result);
    expect(metadataService.getTypeDefinition(TaxonomyResourceProvider.TAXONOMY_TERM_TYPE)).andReturn(TaxonomyResourceProvider.TAXONOMY_TERM_TYPE + "-definition");
    expect(serializer.serialize(result, uriInfo)).andReturn("Taxonomy Get Response");
    replay(uriInfo, metadataService, taxonomyResourceProvider, termResourceProvider, serializer);

    TestTaxonomyService service = new TestTaxonomyService(
            metadataService, typeDefStore, taxonomyResourceProvider, termResourceProvider, serializer);
    Response response = service.getTaxonomy(null, uriInfo, taxonomyName);

    // the provider must have been queried with exactly {name: taxonomyName}
    Request request = requestCapture.getValue();
    Map<String, Object> requestProperties = request.getQueryProperties();
    assertEquals(requestProperties.size(), 1);
    assertEquals(requestProperties.get("name"), taxonomyName);
    assertEquals(response.getStatus(), 200);
    assertEquals(response.getEntity(), "Taxonomy Get Response");
    // NOTE(review): metadataService is replayed but not verified here — its
    // expectations are not checked; confirm whether that is intentional.
    verify(uriInfo, taxonomyResourceProvider, termResourceProvider, serializer);
}
@Test
public void testGetTaxonomies() throws Exception {
    // Verifies that getTaxonomies() forwards the URI query string as a
    // collection request with no query properties.
    // Strict mocks: expectation order below is part of the test contract.
    MetadataService metadataService = createStrictMock(MetadataService.class);
    AtlasTypeDefStore typeDefStore = createStrictMock(AtlasTypeDefStore.class);
    ResourceProvider taxonomyResourceProvider = createStrictMock(ResourceProvider.class);
    ResourceProvider termResourceProvider = createStrictMock(ResourceProvider.class);
    UriInfo uriInfo = createNiceMock(UriInfo.class);
    URI uri = new URI("http://localhost:21000/api/atlas/v1/taxonomies?name:testTaxonomy");
    JsonSerializer serializer = createStrictMock(JsonSerializer.class);
    Capture<Request> requestCapture = newCapture();

    // canned provider result containing a single taxonomy
    Collection<Map<String, Object>> resultPropertyMaps = new ArrayList<>();
    Map<String, Object> propertyMap = new HashMap<>();
    propertyMap.put("name", "testTaxonomy");
    resultPropertyMaps.add(propertyMap);
    Result result = new Result(resultPropertyMaps);

    // set mock expectations
    expect(uriInfo.getRequestUri()).andReturn(uri);
    expect(taxonomyResourceProvider.getResources(capture(requestCapture))).andReturn(result);
    expect(serializer.serialize(result, uriInfo)).andReturn("Taxonomy Get Response");
    expect(metadataService.getTypeDefinition(TaxonomyResourceProvider.TAXONOMY_TERM_TYPE)).andReturn(TaxonomyResourceProvider.TAXONOMY_TERM_TYPE + "-definition");
    replay(uriInfo, metadataService, taxonomyResourceProvider, termResourceProvider, serializer);

    // instantiate service and invoke method being tested
    TestTaxonomyService service = new TestTaxonomyService(
            metadataService, typeDefStore, taxonomyResourceProvider, termResourceProvider, serializer);
    Response response = service.getTaxonomies(null, uriInfo);

    // the query string (everything after '?') is passed through verbatim
    Request request = requestCapture.getValue();
    assertTrue(request.getQueryProperties().isEmpty());
    assertEquals(request.getQueryString(), "name:testTaxonomy");
    assertEquals(response.getStatus(), 200);
    assertEquals(response.getEntity(), "Taxonomy Get Response");
    verify(uriInfo, taxonomyResourceProvider, termResourceProvider, serializer);
}
@Test
public void testCreateTaxonomy() throws Exception {
    // Verifies that createTaxonomy() merges the payload with the path-derived
    // "name" property and replies 201 with the request URI as href.
    // Strict mocks: expectation order below is part of the test contract.
    MetadataService metadataService = createStrictMock(MetadataService.class);
    AtlasTypeDefStore typeDefStore = createStrictMock(AtlasTypeDefStore.class);
    ResourceProvider taxonomyResourceProvider = createStrictMock(ResourceProvider.class);
    ResourceProvider termResourceProvider = createStrictMock(ResourceProvider.class);
    UriInfo uriInfo = createNiceMock(UriInfo.class);
    URI uri = new URI("http://localhost:21000/api/atlas/v1/taxonomies/testTaxonomy");
    Capture<Request> requestCapture = newCapture();
    String body = "{ \"description\" : \"test description\" } ";

    // set mock expectations
    expect(uriInfo.getRequestUri()).andReturn(uri);
    expect(metadataService.getTypeDefinition(TaxonomyResourceProvider.TAXONOMY_TERM_TYPE)).andReturn(TaxonomyResourceProvider.TAXONOMY_TERM_TYPE + "-definition");
    taxonomyResourceProvider.createResource(capture(requestCapture));
    replay(uriInfo, metadataService, taxonomyResourceProvider, termResourceProvider);

    // instantiate service and invoke method being tested
    TestTaxonomyService service = new TestTaxonomyService(
            metadataService, typeDefStore, taxonomyResourceProvider, termResourceProvider, null);
    Response response = service.createTaxonomy(body, null, uriInfo, "testTaxonomy");

    // request carries both the payload property and the path-derived name
    Request request = requestCapture.getValue();
    assertEquals(request.getQueryProperties().size(), 2);
    assertEquals(request.getQueryProperties().get("name"), "testTaxonomy");
    assertEquals(request.getQueryProperties().get("description"), "test description");
    assertNull(request.getQueryString());
    assertEquals(response.getStatus(), 201);
    BaseService.Results createResults = (BaseService.Results) response.getEntity();
    assertEquals(createResults.href, "http://localhost:21000/api/atlas/v1/taxonomies/testTaxonomy");
    assertEquals(createResults.status, 201);
    verify(uriInfo, taxonomyResourceProvider, termResourceProvider);
}
@Test
public void testDeleteTaxonomy() throws Exception {
    // strict mocks verify that collaborators are invoked in the recorded order
    MetadataService metadataService = createStrictMock(MetadataService.class);
    AtlasTypeDefStore typeDefStore = createStrictMock(AtlasTypeDefStore.class);
    ResourceProvider taxonomyProvider = createStrictMock(ResourceProvider.class);
    ResourceProvider termProvider = createStrictMock(ResourceProvider.class);
    UriInfo uriInfo = createNiceMock(UriInfo.class);

    URI requestUri = new URI("http://localhost:21000/api/atlas/v1/taxonomies/testTaxonomy");
    Capture<Request> capturedRequest = newCapture();

    // record expectations
    expect(uriInfo.getRequestUri()).andReturn(requestUri);
    taxonomyProvider.deleteResourceById(capture(capturedRequest));
    expect(metadataService.getTypeDefinition(TaxonomyResourceProvider.TAXONOMY_TERM_TYPE))
            .andReturn(TaxonomyResourceProvider.TAXONOMY_TERM_TYPE + "-definition");
    replay(uriInfo, metadataService, taxonomyProvider, termProvider);

    // exercise the method under test
    TestTaxonomyService service = new TestTaxonomyService(
            metadataService, typeDefStore, taxonomyProvider, termProvider, null);
    Response response = service.deleteTaxonomy(null, uriInfo, "testTaxonomy");

    // the delete request should identify the taxonomy by name only
    Request providerRequest = capturedRequest.getValue();
    assertEquals(providerRequest.getQueryProperties().size(), 1);
    assertEquals(providerRequest.getQueryProperties().get("name"), "testTaxonomy");
    assertNull(providerRequest.getQueryString());

    // a successful delete returns 200 with an href of the removed resource
    assertEquals(response.getStatus(), 200);
    BaseService.Results deleteResults = (BaseService.Results) response.getEntity();
    assertEquals(deleteResults.href, "http://localhost:21000/api/atlas/v1/taxonomies/testTaxonomy");
    assertEquals(deleteResults.status, 200);

    verify(uriInfo, taxonomyProvider, termProvider);
}
@Test
public void testGetTaxonomyTerm() throws Exception {
String taxonomyName = "testTaxonomy";
// NOTE: the term name is already qualified with the taxonomy name here;
// the assertion below shows the service prefixes the taxonomy name again.
String termName = "testTaxonomy.termName";
MetadataService metadataService = createStrictMock(MetadataService.class);
AtlasTypeDefStore typeDefStore = createStrictMock(AtlasTypeDefStore.class);
ResourceProvider taxonomyResourceProvider = createStrictMock(ResourceProvider.class);
ResourceProvider termResourceProvider = createStrictMock(ResourceProvider.class);
UriInfo uriInfo = createNiceMock(UriInfo.class);
JsonSerializer serializer = createStrictMock(JsonSerializer.class);
Capture<Request> requestCapture = newCapture();
// canned provider result: a single term with just a "name" property
Collection<Map<String, Object>> resultPropertyMaps = new ArrayList<>();
Map<String, Object> propertyMap = new HashMap<>();
propertyMap.put("name", "testTaxonomy.termName");
resultPropertyMaps.add(propertyMap);
Result result = new Result(resultPropertyMaps);
// set mock expectations (strict mocks: order of recording matters)
expect(termResourceProvider.getResourceById(capture(requestCapture))).andReturn(result);
expect(serializer.serialize(result, uriInfo)).andReturn("Taxonomy Term Get Response");
expect(metadataService.getTypeDefinition(TaxonomyResourceProvider.TAXONOMY_TERM_TYPE)).andReturn(TaxonomyResourceProvider.TAXONOMY_TERM_TYPE + "-definition");
replay(uriInfo, metadataService, taxonomyResourceProvider, termResourceProvider, serializer);
// instantiate service and invoke method being tested
TestTaxonomyService service = new TestTaxonomyService(
metadataService, typeDefStore, taxonomyResourceProvider, termResourceProvider, serializer);
Response response = service.getTaxonomyTerm(null, uriInfo, taxonomyName, termName);
// the provider request should carry only a termPath property
Request request = requestCapture.getValue();
Map<String, Object> requestProperties = request.getQueryProperties();
assertEquals(requestProperties.size(), 1);
TermPath termPath = (TermPath) request.getQueryProperties().get("termPath");
// fully-qualified name = taxonomyName + "." + termName (termName was pre-qualified)
assertEquals(termPath.getFullyQualifiedName(), "testTaxonomy.testTaxonomy.termName");
assertEquals(response.getStatus(), 200);
assertEquals(response.getEntity(), "Taxonomy Term Get Response");
verify(uriInfo, taxonomyResourceProvider, termResourceProvider, serializer);
}
@Test
public void testGetTaxonomyTerms() throws Exception {
    // strict mocks verify that collaborators are invoked in the recorded order
    MetadataService metadataService = createStrictMock(MetadataService.class);
    AtlasTypeDefStore typeDefStore = createStrictMock(AtlasTypeDefStore.class);
    ResourceProvider taxonomyProvider = createStrictMock(ResourceProvider.class);
    ResourceProvider termProvider = createStrictMock(ResourceProvider.class);
    UriInfo uriInfo = createNiceMock(UriInfo.class);
    URI requestUri = new URI("http://localhost:21000/api/atlas/v1/taxonomies/testTaxonomy/terms?name:testTaxonomy.testTerm");
    JsonSerializer serializer = createStrictMock(JsonSerializer.class);
    Capture<Request> capturedRequest = newCapture();

    // canned provider result: one term with only a "name" property
    Map<String, Object> termProperties = new HashMap<>();
    termProperties.put("name", "testTaxonomy.testTerm");
    Collection<Map<String, Object>> resultRows = new ArrayList<>();
    resultRows.add(termProperties);
    Result result = new Result(resultRows);

    // record expectations
    expect(uriInfo.getRequestUri()).andReturn(requestUri);
    expect(termProvider.getResources(capture(capturedRequest))).andReturn(result);
    expect(serializer.serialize(result, uriInfo)).andReturn("Taxonomy Term Get Response");
    expect(metadataService.getTypeDefinition(TaxonomyResourceProvider.TAXONOMY_TERM_TYPE))
            .andReturn(TaxonomyResourceProvider.TAXONOMY_TERM_TYPE + "-definition");
    replay(uriInfo, metadataService, taxonomyProvider, termProvider, serializer);

    // exercise the method under test
    TestTaxonomyService service = new TestTaxonomyService(
            metadataService, typeDefStore, taxonomyProvider, termProvider, serializer);
    Response response = service.getTaxonomyTerms(null, uriInfo, "testTaxonomy");

    // the provider request carries a termPath rooted at the taxonomy, plus the raw query string
    Request providerRequest = capturedRequest.getValue();
    assertEquals(providerRequest.getQueryProperties().size(), 1);
    TermPath termPath = (TermPath) providerRequest.getQueryProperties().get("termPath");
    assertEquals(termPath.getFullyQualifiedName(), "testTaxonomy");
    assertEquals(providerRequest.getQueryString(), "name:testTaxonomy.testTerm");

    assertEquals(response.getStatus(), 200);
    assertEquals(response.getEntity(), "Taxonomy Term Get Response");

    verify(uriInfo, taxonomyProvider, termProvider, serializer);
}
@Test
public void testGetSubTerms_instance() throws Exception {
MetadataService metadataService = createStrictMock(MetadataService.class);
AtlasTypeDefStore typeDefStore = createStrictMock(AtlasTypeDefStore.class);
ResourceProvider taxonomyResourceProvider = createStrictMock(ResourceProvider.class);
ResourceProvider termResourceProvider = createStrictMock(ResourceProvider.class);
UriInfo uriInfo = createNiceMock(UriInfo.class);
URI uri = new URI("http://localhost:21000/api/atlas/v1/taxonomies/testTaxonomy/terms/testTerm/terms/testTerm2");
JsonSerializer serializer = createStrictMock(JsonSerializer.class);
// path segments mock the request path; only the last segment is consulted
PathSegment segment1 = createNiceMock(PathSegment.class);
PathSegment segment2 = createNiceMock(PathSegment.class);
PathSegment segment3 = createNiceMock(PathSegment.class);
Capture<Request> requestCapture = newCapture();
// canned provider result: the single nested term with only a "name" property
Collection<Map<String, Object>> resultPropertyMaps = new ArrayList<>();
Map<String, Object> propertyMap = new HashMap<>();
propertyMap.put("name", "testTaxonomy.testTerm.testTerm2");
resultPropertyMaps.add(propertyMap);
Result result = new Result(resultPropertyMaps);
// set mock expectations (strict mocks: order of recording matters)
expect(uriInfo.getRequestUri()).andReturn(uri);
expect(uriInfo.getPathSegments()).andReturn(Arrays.asList(segment1, segment2, segment3));
// last segment is a term name, not "terms" => instance (getResourceById) path
expect(segment3.getPath()).andReturn("testTerm2");
expect(termResourceProvider.getResourceById(capture(requestCapture))).andReturn(result);
expect(serializer.serialize(result, uriInfo)).andReturn("Taxonomy Term Get Response");
expect(metadataService.getTypeDefinition(TaxonomyResourceProvider.TAXONOMY_TERM_TYPE)).andReturn(TaxonomyResourceProvider.TAXONOMY_TERM_TYPE + "-definition");
replay(uriInfo, metadataService, taxonomyResourceProvider, termResourceProvider, serializer,
segment1, segment2, segment3);
// instantiate service and invoke method being tested
TestTaxonomyService service = new TestTaxonomyService(
metadataService, typeDefStore, taxonomyResourceProvider, termResourceProvider, serializer);
Response response = service.getSubTerms(null, uriInfo, "testTaxonomy", "testTerm", "/terms/testTerm2");
// the provider request should carry only the fully-qualified termPath
Request request = requestCapture.getValue();
assertEquals(request.getQueryProperties().size(), 1);
TermPath termPath = (TermPath) request.getQueryProperties().get("termPath");
assertEquals(termPath.getFullyQualifiedName(), "testTaxonomy.testTerm.testTerm2");
assertNull(request.getQueryString());
assertEquals(response.getStatus(), 200);
assertEquals(response.getEntity(), "Taxonomy Term Get Response");
verify(uriInfo, taxonomyResourceProvider, termResourceProvider, serializer,
segment1, segment2, segment3);
}
@Test
public void testGetSubTerms_collection() throws Exception {
MetadataService metadataService = createStrictMock(MetadataService.class);
AtlasTypeDefStore typeDefStore = createStrictMock(AtlasTypeDefStore.class);
ResourceProvider taxonomyResourceProvider = createStrictMock(ResourceProvider.class);
ResourceProvider termResourceProvider = createStrictMock(ResourceProvider.class);
UriInfo uriInfo = createNiceMock(UriInfo.class);
URI uri = new URI("http://localhost:21000/api/atlas/v1/taxonomies/testTaxonomy/terms/testTerm/terms/testTerm2/terms?name:testTaxonomy.testTerm.testTerm2.testTerm3");
JsonSerializer serializer = createStrictMock(JsonSerializer.class);
// would actually be more segments but at this time only the last segment is used
PathSegment segment1 = createNiceMock(PathSegment.class);
PathSegment segment2 = createNiceMock(PathSegment.class);
PathSegment segment3 = createNiceMock(PathSegment.class);
Capture<Request> requestCapture = newCapture();
// canned provider result: one matching sub-term with only a "name" property
Collection<Map<String, Object>> resultPropertyMaps = new ArrayList<>();
Map<String, Object> propertyMap = new HashMap<>();
propertyMap.put("name", "testTaxonomy.testTerm.testTerm2.testTerm3");
resultPropertyMaps.add(propertyMap);
Result result = new Result(resultPropertyMaps);
// set mock expectations (strict mocks: order of recording matters)
expect(uriInfo.getRequestUri()).andReturn(uri);
expect(uriInfo.getPathSegments()).andReturn(Arrays.asList(segment1, segment2, segment3));
// last segment is "terms" => collection (getResources) path
expect(segment3.getPath()).andReturn("terms");
expect(termResourceProvider.getResources(capture(requestCapture))).andReturn(result);
expect(serializer.serialize(result, uriInfo)).andReturn("Taxonomy Term Get Response");
expect(metadataService.getTypeDefinition(TaxonomyResourceProvider.TAXONOMY_TERM_TYPE)).andReturn(TaxonomyResourceProvider.TAXONOMY_TERM_TYPE + "-definition");
replay(uriInfo, metadataService, taxonomyResourceProvider, termResourceProvider, serializer,
segment1, segment2, segment3);
// instantiate service and invoke method being tested
TestTaxonomyService service = new TestTaxonomyService(
metadataService, typeDefStore, taxonomyResourceProvider, termResourceProvider, serializer);
Response response = service.getSubTerms(null, uriInfo, "testTaxonomy", "testTerm", "/terms/testTerm2/terms");
Request request = requestCapture.getValue();
assertEquals(request.getQueryProperties().size(), 1);
TermPath termPath = (TermPath) request.getQueryProperties().get("termPath");
// a collection request produces a parent path ending in "." (children of testTerm2)
assertEquals(termPath.getFullyQualifiedName(), "testTaxonomy.testTerm.testTerm2.");
assertEquals(request.getQueryString(), "name:testTaxonomy.testTerm.testTerm2.testTerm3");
assertEquals(response.getStatus(), 200);
assertEquals(response.getEntity(), "Taxonomy Term Get Response");
verify(uriInfo, taxonomyResourceProvider, termResourceProvider, serializer,
segment1, segment2, segment3);
}
@Test
public void testCreateTerm() throws Exception {
    String taxonomyName = "testTaxonomy";
    String termName = "testTerm";
    // strict mocks verify that collaborators are invoked in the recorded order
    MetadataService metadataService = createStrictMock(MetadataService.class);
    AtlasTypeDefStore typeDefStore = createStrictMock(AtlasTypeDefStore.class);
    ResourceProvider taxonomyProvider = createStrictMock(ResourceProvider.class);
    ResourceProvider termProvider = createStrictMock(ResourceProvider.class);
    UriInfo uriInfo = createNiceMock(UriInfo.class);

    URI requestUri = new URI("http://localhost:21000/api/atlas/v1/taxonomies/testTaxonomy/terms/testTerm");
    Capture<Request> capturedRequest = newCapture();
    String body = "{ \"description\" : \"test description\" } ";

    // record expectations
    expect(uriInfo.getRequestUri()).andReturn(requestUri);
    expect(metadataService.getTypeDefinition(TaxonomyResourceProvider.TAXONOMY_TERM_TYPE))
            .andReturn(TaxonomyResourceProvider.TAXONOMY_TERM_TYPE + "-definition");
    termProvider.createResource(capture(capturedRequest));
    replay(uriInfo, metadataService, taxonomyProvider, termProvider);

    // exercise the method under test
    TestTaxonomyService service = new TestTaxonomyService(
            metadataService, typeDefStore, taxonomyProvider, termProvider, null);
    Response response = service.createTerm(body, null, uriInfo, taxonomyName, termName);

    // the provider should receive the parsed description and a fully-qualified termPath
    Request providerRequest = capturedRequest.getValue();
    assertEquals(providerRequest.getQueryProperties().size(), 2);
    assertEquals(providerRequest.getQueryProperties().get("description"), "test description");
    TermPath termPath = (TermPath) providerRequest.getQueryProperties().get("termPath");
    assertEquals(termPath.getFullyQualifiedName(), "testTaxonomy.testTerm");
    assertNull(providerRequest.getQueryString());

    // a successful create returns 201 with an href pointing at the new term
    assertEquals(response.getStatus(), 201);
    BaseService.Results createResults = (BaseService.Results) response.getEntity();
    assertEquals(createResults.href, "http://localhost:21000/api/atlas/v1/taxonomies/testTaxonomy/terms/testTerm");
    assertEquals(createResults.status, 201);

    verify(uriInfo, taxonomyProvider, termProvider);
}
@Test
public void testCreateSubTerm() throws Exception {
String taxonomyName = "testTaxonomy";
String termName = "testTerm";
MetadataService metadataService = createStrictMock(MetadataService.class);
AtlasTypeDefStore typeDefStore = createStrictMock(AtlasTypeDefStore.class);
ResourceProvider taxonomyResourceProvider = createStrictMock(ResourceProvider.class);
ResourceProvider termResourceProvider = createStrictMock(ResourceProvider.class);
UriInfo uriInfo = createNiceMock(UriInfo.class);
URI uri = new URI("http://localhost:21000/api/atlas/v1/taxonomies/testTaxonomy/terms/testTerm/terms/testTerm2");
Capture<Request> requestCapture = newCapture();
String body = "{ \"description\" : \"test description\" } ";
// set mock expectations (strict mocks: order of recording matters)
expect(uriInfo.getRequestUri()).andReturn(uri);
expect(metadataService.getTypeDefinition(TaxonomyResourceProvider.TAXONOMY_TERM_TYPE)).andReturn(TaxonomyResourceProvider.TAXONOMY_TERM_TYPE + "-definition");
termResourceProvider.createResource(capture(requestCapture));
replay(uriInfo, metadataService, taxonomyResourceProvider, termResourceProvider);
// instantiate service and invoke method being tested; the remaining
// term hierarchy is passed as the trailing path "/terms/testTerm2"
TestTaxonomyService service = new TestTaxonomyService(
metadataService, typeDefStore, taxonomyResourceProvider, termResourceProvider, null);
Response response = service.createSubTerm(body, null, uriInfo, taxonomyName, termName, "/terms/testTerm2");
// the provider should receive the parsed description plus the fully-qualified termPath
Request request = requestCapture.getValue();
assertEquals(request.getQueryProperties().size(), 2);
assertEquals(request.getQueryProperties().get("description"), "test description");
TermPath termPath = (TermPath) request.getQueryProperties().get("termPath");
assertEquals(termPath.getFullyQualifiedName(), "testTaxonomy.testTerm.testTerm2");
assertNull(request.getQueryString());
// a successful create returns 201 with an href pointing at the new sub-term
assertEquals(response.getStatus(), 201);
BaseService.Results createResults = (BaseService.Results) response.getEntity();
assertEquals(createResults.href, "http://localhost:21000/api/atlas/v1/taxonomies/testTaxonomy/terms/testTerm/terms/testTerm2");
assertEquals(createResults.status, 201);
verify(uriInfo, taxonomyResourceProvider, termResourceProvider);
}
@Test
public void testDeleteTerm() throws Exception {
    // strict mocks verify that collaborators are invoked in the recorded order
    MetadataService metadataService = createStrictMock(MetadataService.class);
    AtlasTypeDefStore typeDefStore = createStrictMock(AtlasTypeDefStore.class);
    ResourceProvider taxonomyProvider = createStrictMock(ResourceProvider.class);
    ResourceProvider termProvider = createStrictMock(ResourceProvider.class);
    UriInfo uriInfo = createNiceMock(UriInfo.class);

    URI requestUri = new URI("http://localhost:21000/api/atlas/v1/taxonomies/testTaxonomy/terms/testTerm");
    Capture<Request> capturedRequest = newCapture();

    // record expectations
    expect(uriInfo.getRequestUri()).andReturn(requestUri);
    termProvider.deleteResourceById(capture(capturedRequest));
    expect(metadataService.getTypeDefinition(TaxonomyResourceProvider.TAXONOMY_TERM_TYPE))
            .andReturn(TaxonomyResourceProvider.TAXONOMY_TERM_TYPE + "-definition");
    replay(uriInfo, metadataService, taxonomyProvider, termProvider);

    // exercise the method under test
    TestTaxonomyService service = new TestTaxonomyService(
            metadataService, typeDefStore, taxonomyProvider, termProvider, null);
    Response response = service.deleteTerm(null, uriInfo, "testTaxonomy", "testTerm");

    // the delete request should carry only the fully-qualified termPath
    Request providerRequest = capturedRequest.getValue();
    assertEquals(providerRequest.getQueryProperties().size(), 1);
    TermPath termPath = (TermPath) providerRequest.getQueryProperties().get("termPath");
    assertEquals(termPath.getFullyQualifiedName(), "testTaxonomy.testTerm");
    assertNull(providerRequest.getQueryString());

    // a successful delete returns 200 with an href of the removed term
    assertEquals(response.getStatus(), 200);
    BaseService.Results deleteResults = (BaseService.Results) response.getEntity();
    assertEquals(deleteResults.href, "http://localhost:21000/api/atlas/v1/taxonomies/testTaxonomy/terms/testTerm");
    assertEquals(deleteResults.status, 200);

    verify(uriInfo, taxonomyProvider, termProvider);
}
@Test
public void testDeleteSubTerm() throws Exception {
MetadataService metadataService = createStrictMock(MetadataService.class);
AtlasTypeDefStore typeDefStore = createStrictMock(AtlasTypeDefStore.class);
ResourceProvider taxonomyResourceProvider = createStrictMock(ResourceProvider.class);
ResourceProvider termResourceProvider = createStrictMock(ResourceProvider.class);
UriInfo uriInfo = createNiceMock(UriInfo.class);
URI uri = new URI("http://localhost:21000/api/atlas/v1/taxonomies/testTaxonomy/terms/testTerm/terms/testTerm2");
Capture<Request> requestCapture = newCapture();
// set mock expectations (strict mocks: order of recording matters)
expect(uriInfo.getRequestUri()).andReturn(uri);
termResourceProvider.deleteResourceById(capture(requestCapture));
expect(metadataService.getTypeDefinition(TaxonomyResourceProvider.TAXONOMY_TERM_TYPE)).andReturn(TaxonomyResourceProvider.TAXONOMY_TERM_TYPE + "-definition");
replay(uriInfo, metadataService, taxonomyResourceProvider, termResourceProvider);
// instantiate service and invoke method being tested; note the trailing
// path here has no leading slash, unlike createSubTerm above
TestTaxonomyService service = new TestTaxonomyService(
metadataService, typeDefStore, taxonomyResourceProvider, termResourceProvider, null);
Response response = service.deleteSubTerm(null, uriInfo, "testTaxonomy", "testTerm", "terms/testTerm2");
// the delete request should carry only the fully-qualified termPath
Request request = requestCapture.getValue();
assertEquals(request.getQueryProperties().size(), 1);
TermPath termPath = (TermPath) request.getQueryProperties().get("termPath");
assertEquals(termPath.getFullyQualifiedName(), "testTaxonomy.testTerm.testTerm2");
assertNull(request.getQueryString());
// a successful delete returns 200 with an href of the removed sub-term
assertEquals(response.getStatus(), 200);
BaseService.Results createResults = (BaseService.Results) response.getEntity();
assertEquals(createResults.href, "http://localhost:21000/api/atlas/v1/taxonomies/testTaxonomy/terms/testTerm/terms/testTerm2");
assertEquals(createResults.status, 200);
verify(uriInfo, taxonomyResourceProvider, termResourceProvider);
}
/**
 * Test double for TaxonomyService that injects mock resource providers and a
 * mock serializer instead of building real ones from the type system.
 *
 * Note: the unused field {@code transactionInitialized} from the original
 * version has been removed — it was never read or written after declaration.
 */
private static class TestTaxonomyService extends TaxonomyService {
    private final ResourceProvider testTaxonomyResourceProvider;
    private final ResourceProvider testTermResourceProvider;
    private final JsonSerializer testSerializer;

    /**
     * @param metadataService   mock metadata service handed to the base class
     * @param typeDefStore      mock type-def store handed to the base class
     * @param taxonomyProvider  provider returned by createTaxonomyResourceProvider()
     * @param termResourceProvider provider returned by createTermResourceProvider()
     * @param serializer        serializer returned by getSerializer() (may be null
     *                          for tests that never serialize a result)
     */
    public TestTaxonomyService(MetadataService metadataService,
                               AtlasTypeDefStore typeDefStore,
                               ResourceProvider taxonomyProvider,
                               ResourceProvider termResourceProvider,
                               JsonSerializer serializer) throws AtlasBaseException {
        testTaxonomyResourceProvider = taxonomyProvider;
        testTermResourceProvider = termResourceProvider;
        testSerializer = serializer;
        // triggers the overridden factory methods below in the base class
        setMetadataService(metadataService, typeDefStore);
    }

    @Override
    protected ResourceProvider createTaxonomyResourceProvider(AtlasTypeSystem typeSystem) {
        return testTaxonomyResourceProvider;
    }

    @Override
    protected ResourceProvider createTermResourceProvider(AtlasTypeSystem typeSystem) {
        return testTermResourceProvider;
    }

    @Override
    protected JsonSerializer getSerializer() {
        return testSerializer;
    }
}
}
\ No newline at end of file
......@@ -186,7 +186,7 @@ public class BaseSecurityTest {
public static void setUpPolicyStore(String tmpDir) throws Exception {
StringBuilder policyStr = new StringBuilder(1024);
policyStr.append("adminPolicy;;admin:rwud;;ROLE_ADMIN:rwud;;type:*,entity:*,operation:*\n");
policyStr.append("dataStewardPolicy;;testuser:rwud;;DATA_STEWARD:rwu;;type:*,entity:*,taxonomy:*,term:*\n");
policyStr.append("dataStewardPolicy;;testuser:rwud;;DATA_STEWARD:rwu;;type:*,entity:*\n");
File policyFile = new File(tmpDir, "policy-store.txt");
FileUtils.write(policyFile, policyStr.toString());
......
Markdown is supported
0% or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or to comment