Commit aaf2971a by Hemanth Yamijala

ATLAS-491 Business Catalog / Taxonomy (jspeidel via yhemanth)

parent b65dd91c
<?xml version="1.0" encoding="UTF-8"?>
<!--
~ Licensed to the Apache Software Foundation (ASF) under one
~ or more contributor license agreements. See the NOTICE file
~ distributed with this work for additional information
~ regarding copyright ownership. The ASF licenses this file
~ to you under the Apache License, Version 2.0 (the
~ "License"); you may not use this file except in compliance
~ with the License. You may obtain a copy of the License at
~
~ http://www.apache.org/licenses/LICENSE-2.0
~
~ Unless required by applicable law or agreed to in writing, software
~ distributed under the License is distributed on an "AS IS" BASIS,
~ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
~ See the License for the specific language governing permissions and
~ limitations under the License.
-->
<project xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
xmlns="http://maven.apache.org/POM/4.0.0"
xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/maven-v4_0_0.xsd">
<modelVersion>4.0.0</modelVersion>
<parent>
<groupId>org.apache.atlas</groupId>
<artifactId>apache-atlas</artifactId>
<version>0.7-incubating-SNAPSHOT</version>
</parent>
<artifactId>atlas-catalog</artifactId>
<description>Apache Atlas Business Catalog Module</description>
<name>Apache Atlas Business Catalog</name>
<packaging>jar</packaging>
<dependencies>
<dependency>
<groupId>org.apache.atlas</groupId>
<artifactId>atlas-repository</artifactId>
</dependency>
<dependency>
<groupId>org.apache.atlas</groupId>
<artifactId>atlas-typesystem</artifactId>
</dependency>
<dependency>
<groupId>org.apache.atlas</groupId>
<artifactId>atlas-server-api</artifactId>
</dependency>
<dependency>
<groupId>joda-time</groupId>
<artifactId>joda-time</artifactId>
</dependency>
<dependency>
<groupId>com.google.inject</groupId>
<artifactId>guice</artifactId>
</dependency>
<dependency>
<groupId>com.google.inject.extensions</groupId>
<artifactId>guice-throwingproviders</artifactId>
</dependency>
<dependency>
<groupId>com.google.inject.extensions</groupId>
<artifactId>guice-multibindings</artifactId>
</dependency>
<dependency>
<groupId>com.googlecode.json-simple</groupId>
<artifactId>json-simple</artifactId>
</dependency>
<dependency>
<groupId>com.tinkerpop.blueprints</groupId>
<artifactId>blueprints-core</artifactId>
</dependency>
<dependency>
<groupId>com.tinkerpop.gremlin</groupId>
<artifactId>gremlin-java</artifactId>
</dependency>
<!-- testing -->
<dependency>
<groupId>org.easymock</groupId>
<artifactId>easymock</artifactId>
<version>3.4</version>
<scope>test</scope>
</dependency>
<dependency>
<groupId>org.testng</groupId>
<artifactId>testng</artifactId>
</dependency>
<dependency>
<groupId>org.mockito</groupId>
<artifactId>mockito-all</artifactId>
</dependency>
</dependencies>
<build>
<plugins>
<plugin>
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-jar-plugin</artifactId>
<version>2.4</version>
<configuration>
<excludes>
<exclude>**/log4j.xml</exclude>
</excludes>
</configuration>
</plugin>
</plugins>
</build>
</project>
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
* <p/>
* http://www.apache.org/licenses/LICENSE-2.0
* <p/>
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.atlas.catalog;
import org.apache.atlas.catalog.definition.ResourceDefinition;
import org.apache.atlas.catalog.exception.ResourceAlreadyExistsException;
import java.util.Map;
/**
* Abstraction for Atlas Type System.
*/
public interface AtlasTypeSystem {
/**
* Create a class type in the Atlas Type System.
*
* @param resourceDefinition resource definition for the type being created
* @param name type name
* @param description description of the type being created
*
* @throws ResourceAlreadyExistsException if the type already exists
*/
void createClassType(ResourceDefinition resourceDefinition, String name, String description)
throws ResourceAlreadyExistsException;
/**
* Create an entity in the Atlas Type System for the provided request and resource definition.
* If the type associated with the entity doesn't already exist, it is created.
*
* @param definition the definition of the resource for which we are creating the entity
* @param request the user request
*
* @throws ResourceAlreadyExistsException if the entity already exists
*/
void createEntity(ResourceDefinition definition, Request request)
throws ResourceAlreadyExistsException;
/**
* Create a trait type in the Atlas Type System.
*
* @param resourceDefinition resource definition for trait type being created
* @param name type name
* @param description description of the type being created
*
* @throws ResourceAlreadyExistsException if type already exists
*/
void createTraitType(ResourceDefinition resourceDefinition, String name, String description)
throws ResourceAlreadyExistsException;
/**
* Create a trait instance in the Atlas Type System and associate it with the entity identified by the provided guid.
*
* @param guid id of the entity which will be associated with the trait instance
* @param typeName type name of the trait
* @param properties property map used to populate the trait instance
*
* @throws ResourceAlreadyExistsException if trait instance is already associated with the entity
*/
void createTraitInstance(String guid, String typeName, Map<String, Object> properties)
throws ResourceAlreadyExistsException;
}
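/**
 * Usage sketch (editorial addition, not part of this commit): illustrates the intended
 * call sequence of AtlasTypeSystem when tagging an entity with a term, mirroring what
 * EntityTagResourceProvider and TermResourceProvider do later in this commit. The term
 * name, entity guid, and the supplied AtlasTypeSystem/ResourceDefinition instances are
 * assumptions for illustration only.
 */
package org.apache.atlas.catalog.examples;
import org.apache.atlas.catalog.AtlasTypeSystem;
import org.apache.atlas.catalog.definition.ResourceDefinition;
import org.apache.atlas.catalog.exception.ResourceAlreadyExistsException;
import java.util.HashMap;
import java.util.Map;
public class AtlasTypeSystemUsageSketch {
    public static void tagEntity(AtlasTypeSystem typeSystem,
                                 ResourceDefinition termDefinition,
                                 String entityGuid) throws ResourceAlreadyExistsException {
        // create the trait type for the term; throws if the type already exists
        typeSystem.createTraitType(termDefinition, "Catalog.PII", "PII term");
        // attach a trait instance of that type to the entity identified by guid
        Map<String, Object> traitProperties = new HashMap<>();
        traitProperties.put("name", "Catalog.PII");
        traitProperties.put("description", "PII term");
        typeSystem.createTraitInstance(entityGuid, "Catalog.PII", traitProperties);
    }
}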
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
* <p/>
* http://www.apache.org/licenses/LICENSE-2.0
* <p/>
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.atlas.catalog;
import java.util.Collection;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Map;
/**
* Base user API request.
*/
public abstract class BaseRequest implements Request {
private final Map<String, Object> properties = new HashMap<>();
private final String queryString;
private final Collection<String> additionalSelectProperties = new HashSet<>();
protected BaseRequest(Map<String, Object> properties, String queryString) {
if (properties != null) {
this.properties.putAll(properties);
}
this.queryString = queryString;
}
public Map<String, Object> getProperties() {
return properties;
}
public <T> T getProperty(String name) {
return (T)properties.get(name);
}
public String getQueryString() {
return queryString;
}
@Override
public void addAdditionalSelectProperties(Collection<String> resultProperties) {
additionalSelectProperties.addAll(resultProperties);
}
@Override
public Collection<String> getAdditionalSelectProperties() {
return additionalSelectProperties;
}
}
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
* <p/>
* http://www.apache.org/licenses/LICENSE-2.0
* <p/>
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.atlas.catalog;
import org.apache.atlas.catalog.definition.ResourceDefinition;
import org.apache.atlas.catalog.query.QueryFactory;
import java.util.*;
/**
* Base class for resource providers.
*/
public abstract class BaseResourceProvider implements ResourceProvider {
protected AtlasTypeSystem typeSystem;
protected QueryFactory queryFactory = new QueryFactory();
protected BaseResourceProvider(AtlasTypeSystem typeSystem) {
this.typeSystem = typeSystem;
}
protected void setQueryFactory(QueryFactory factory) {
queryFactory = factory;
}
}
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
* <p/>
* http://www.apache.org/licenses/LICENSE-2.0
* <p/>
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.atlas.catalog;
import java.util.Map;
/**
* A request for a collection resource.
*/
public class CollectionRequest extends BaseRequest {
public CollectionRequest(Map<String, Object> properties, String queryString) {
super(properties, queryString);
}
@Override
public Cardinality getCardinality() {
return Cardinality.COLLECTION;
}
}
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
* <p/>
* http://www.apache.org/licenses/LICENSE-2.0
* <p/>
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.atlas.catalog;
import java.text.SimpleDateFormat;
import java.util.Calendar;
import java.util.GregorianCalendar;
/**
* Format a date field which is represented as a long.
*/
public class DefaultDateFormatter implements PropertyValueFormatter<Long, String> {
//todo: obtain format from atlas proper
public static final SimpleDateFormat DATE_FORMAT = new SimpleDateFormat("yyyy-MM-dd:HH:mm:ss");
@Override
public String format(Long l) {
Calendar calendar = new GregorianCalendar();
calendar.setTimeInMillis(l);
return DATE_FORMAT.format(calendar.getTime());
}
}
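/**
 * Usage sketch (editorial addition, not part of this commit): formats an epoch-millis
 * timestamp with DefaultDateFormatter. The sample value is arbitrary.
 */
package org.apache.atlas.catalog.examples;
import org.apache.atlas.catalog.DefaultDateFormatter;
public class DefaultDateFormatterSketch {
    public static void main(String[] args) {
        DefaultDateFormatter formatter = new DefaultDateFormatter();
        // graph vertices store creation/modification times as long millis
        String formatted = formatter.format(System.currentTimeMillis());
        System.out.println(formatted); // pattern yyyy-MM-dd:HH:mm:ss
    }
}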
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
* <p/>
* http://www.apache.org/licenses/LICENSE-2.0
* <p/>
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.atlas.catalog;
import org.apache.atlas.AtlasException;
import org.apache.atlas.catalog.exception.CatalogRuntimeException;
import org.apache.atlas.repository.Constants;
import org.apache.atlas.typesystem.types.FieldMapping;
import org.apache.atlas.typesystem.types.HierarchicalType;
import org.apache.atlas.typesystem.types.TypeSystem;
import java.util.Collections;
import java.util.HashMap;
import java.util.Map;
/**
* Default property mapper which translates property names between the name exposed in the API and the internal fully qualified name.
*/
public class DefaultPropertyMapper implements PropertyMapper {
//todo: abstract HierarchicalType
private Map<String, HierarchicalType> typeInstances = new HashMap<>();
private final Map<String, String> m_qualifiedToCleanMap = new HashMap<>();
private final Map<String, String> m_cleanToQualifiedMap = new HashMap<>();
public DefaultPropertyMapper() {
this(Collections.<String, String>emptyMap(), Collections.<String, String>emptyMap());
}
public DefaultPropertyMapper(Map<String, String> qualifiedToCleanMap,
Map<String, String> cleanToQualifiedMap) {
setDefaultMappings();
m_qualifiedToCleanMap.putAll(qualifiedToCleanMap);
m_cleanToQualifiedMap.putAll(cleanToQualifiedMap);
}
@Override
public String toCleanName(String propName, String type) {
HierarchicalType dataType = getDataType(type);
String replacement = m_qualifiedToCleanMap.get(propName);
if (replacement == null && dataType != null) {
FieldMapping fieldMap = dataType.fieldMapping();
if (! fieldMap.fields.containsKey(propName) && propName.contains(".")) {
String cleanName = propName.substring(propName.lastIndexOf('.') + 1);
if (fieldMap.fields.containsKey(cleanName)) {
replacement = cleanName;
}
}
}
if (replacement == null) {
replacement = propName;
}
return replacement;
}
@Override
public String toFullyQualifiedName(String propName, String type) {
HierarchicalType dataType = getDataType(type);
String replacement = m_cleanToQualifiedMap.get(propName);
if (replacement == null && dataType != null) {
FieldMapping fieldMap = dataType.fieldMapping();
if (fieldMap.fields.containsKey(propName)) {
try {
replacement = dataType.getQualifiedName(propName);
} catch (AtlasException e) {
throw new CatalogRuntimeException(String.format(
"Unable to resolve fully qualified property name for type '%s': %s", type, e), e);
}
}
}
if (replacement == null) {
replacement = propName;
}
return replacement;
}
//todo: abstract this via AtlasTypeSystem
protected synchronized HierarchicalType getDataType(String type) {
HierarchicalType dataType = typeInstances.get(type);
//todo: are there still cases where type can be null?
if (dataType == null) {
dataType = createDataType(type);
typeInstances.put(type, dataType);
}
return dataType;
}
protected HierarchicalType createDataType(String type) {
try {
return TypeSystem.getInstance().getDataType(HierarchicalType.class, type);
} catch (AtlasException e) {
throw new CatalogRuntimeException("Unable to get type instance from type system for type: " + type, e);
}
}
private void setDefaultMappings() {
//todo: these are all internal "__*" properties
//todo: should be able to ask type system for the "clean" name for these
m_qualifiedToCleanMap.put(Constants.GUID_PROPERTY_KEY, "id");
m_cleanToQualifiedMap.put("id", Constants.GUID_PROPERTY_KEY);
m_qualifiedToCleanMap.put(Constants.TIMESTAMP_PROPERTY_KEY, "creation_time");
m_cleanToQualifiedMap.put("creation_time", Constants.TIMESTAMP_PROPERTY_KEY);
m_qualifiedToCleanMap.put(Constants.MODIFICATION_TIMESTAMP_PROPERTY_KEY, "modified_time");
m_cleanToQualifiedMap.put("modified_time", Constants.MODIFICATION_TIMESTAMP_PROPERTY_KEY);
m_qualifiedToCleanMap.put(Constants.ENTITY_TYPE_PROPERTY_KEY, "type");
m_cleanToQualifiedMap.put("type", Constants.ENTITY_TYPE_PROPERTY_KEY);
m_qualifiedToCleanMap.put(Constants.VERSION_PROPERTY_KEY, "version");
m_cleanToQualifiedMap.put("version", Constants.VERSION_PROPERTY_KEY);
m_qualifiedToCleanMap.put(Constants.TRAIT_NAMES_PROPERTY_KEY, "trait_names");
m_cleanToQualifiedMap.put("trait_names", Constants.TRAIT_NAMES_PROPERTY_KEY);
m_qualifiedToCleanMap.put(Constants.SUPER_TYPES_PROPERTY_KEY, "super_types");
m_cleanToQualifiedMap.put("super_types", Constants.SUPER_TYPES_PROPERTY_KEY);
m_qualifiedToCleanMap.put(Constants.STATE_PROPERTY_KEY, "state");
m_cleanToQualifiedMap.put("state", Constants.STATE_PROPERTY_KEY);
}
}
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
* <p/>
* http://www.apache.org/licenses/LICENSE-2.0
* <p/>
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.atlas.catalog;
import org.apache.atlas.AtlasException;
import org.apache.atlas.catalog.definition.ResourceDefinition;
import org.apache.atlas.catalog.exception.CatalogRuntimeException;
import org.apache.atlas.catalog.exception.ResourceAlreadyExistsException;
import org.apache.atlas.services.MetadataService;
import org.apache.atlas.typesystem.ITypedReferenceableInstance;
import org.apache.atlas.typesystem.Referenceable;
import org.apache.atlas.typesystem.Struct;
import org.apache.atlas.typesystem.exception.EntityExistsException;
import org.apache.atlas.typesystem.exception.TypeExistsException;
import org.apache.atlas.typesystem.json.TypesSerialization;
import org.apache.atlas.typesystem.types.*;
import java.util.Collection;
import java.util.Collections;
import java.util.Map;
/**
* Default implementation.
*/
public class DefaultTypeSystem implements AtlasTypeSystem {
private final MetadataService metadataService;
/**
* Constructor.
*
* @param metadataService atlas metadata service
*/
public DefaultTypeSystem(MetadataService metadataService) {
this.metadataService = metadataService;
}
@Override
public void createEntity(ResourceDefinition definition, Request request) throws ResourceAlreadyExistsException {
String typeName = definition.getTypeName();
try {
createClassType(definition, typeName, typeName + " Definition");
} catch (ResourceAlreadyExistsException e) {
// ok if type already exists
}
try {
Referenceable entity = new Referenceable(typeName, request.getProperties());
ITypedReferenceableInstance typedInstance = metadataService.getTypedReferenceableInstance(entity);
metadataService.createEntities(Collections.singletonList(typedInstance).toArray(new ITypedReferenceableInstance[1]));
} catch (EntityExistsException e) {
throw new ResourceAlreadyExistsException(
"Attempted to create an entity which already exists: " + request.getProperties());
} catch (AtlasException e) {
throw new CatalogRuntimeException("An expected exception occurred creating an entity: " + e, e);
}
}
@Override
public void createClassType(ResourceDefinition resourceDefinition, String name, String description)
throws ResourceAlreadyExistsException {
createType(resourceDefinition.getPropertyDefinitions(), ClassType.class, name, description, false);
}
@Override
public void createTraitType(ResourceDefinition resourceDefinition, String name, String description)
throws ResourceAlreadyExistsException {
createType(resourceDefinition.getPropertyDefinitions(), TraitType.class, name, description, true);
}
public void createTraitInstance(String guid, String typeName, Map<String, Object> properties)
throws ResourceAlreadyExistsException {
try {
// not using the constructor with properties argument because it is marked 'InterfaceAudience.Private'
Struct struct = new Struct(typeName);
for (Map.Entry<String, Object> propEntry : properties.entrySet()) {
struct.set(propEntry.getKey(), propEntry.getValue());
}
metadataService.addTrait(guid, metadataService.createTraitInstance(struct));
} catch (IllegalArgumentException e) {
//todo: unfortunately, IllegalArgumentException can be thrown for other reasons
if (e.getMessage().contains("is already defined for entity")) {
throw new ResourceAlreadyExistsException(
String.format("Tag '%s' already associated with the entity", typeName));
} else {
throw e;
}
} catch (AtlasException e) {
throw new CatalogRuntimeException(String.format(
"Unable to create trait instance '%s' in type system: %s", typeName, e), e);
}
}
private <T extends HierarchicalType> void createType(Collection<AttributeDefinition> attributes,
Class<T> type,
String name,
String description,
boolean isTrait)
throws ResourceAlreadyExistsException {
try {
HierarchicalTypeDefinition<T> definition = new HierarchicalTypeDefinition<>(type, name, description, null,
attributes.toArray(new AttributeDefinition[attributes.size()]));
metadataService.createType(TypesSerialization.toJson(definition, isTrait));
} catch (TypeExistsException e) {
throw new ResourceAlreadyExistsException(String.format("Type '%s' already exists", name));
} catch (AtlasException e) {
throw new CatalogRuntimeException(String.format(
"Unable to create type '%s' in type system: %s", name, e), e);
}
}
}
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
* <p/>
* http://www.apache.org/licenses/LICENSE-2.0
* <p/>
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.atlas.catalog;
import org.apache.atlas.catalog.definition.EntityResourceDefinition;
import org.apache.atlas.catalog.definition.ResourceDefinition;
import org.apache.atlas.catalog.exception.*;
import org.apache.atlas.catalog.query.AtlasQuery;
import java.util.*;
/**
* Provider for entity resources.
*/
public class EntityResourceProvider extends BaseResourceProvider implements ResourceProvider {
private final static ResourceDefinition resourceDefinition = new EntityResourceDefinition();
public EntityResourceProvider(AtlasTypeSystem typeSystem) {
super(typeSystem);
}
@Override
public Result getResourceById(Request request) throws ResourceNotFoundException {
AtlasQuery atlasQuery;
try {
atlasQuery = queryFactory.createEntityQuery(request);
} catch (InvalidQueryException e) {
throw new CatalogRuntimeException("Unable to compile internal Entity query: " + e, e);
}
Collection<Map<String, Object>> results = atlasQuery.execute();
if (results.isEmpty()) {
throw new ResourceNotFoundException(String.format("Entity '%s' not found.",
request.getProperty(resourceDefinition.getIdPropertyName())));
}
return new Result(results);
}
@Override
public Result getResources(Request request) throws InvalidQueryException, ResourceNotFoundException {
AtlasQuery atlasQuery = queryFactory.createEntityQuery(request);
return new Result(atlasQuery.execute());
}
@Override
public void createResource(Request request)
throws InvalidPayloadException, ResourceAlreadyExistsException, ResourceNotFoundException {
// creation of entities is currently unsupported
throw new UnsupportedOperationException("Creation of entities is not currently supported");
}
@Override
public Collection<String> createResources(Request request) throws InvalidQueryException, ResourceNotFoundException {
throw new UnsupportedOperationException("Creation of entities is not currently supported");
}
}
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
* <p/>
* http://www.apache.org/licenses/LICENSE-2.0
* <p/>
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.atlas.catalog;
import org.apache.atlas.catalog.definition.EntityTagResourceDefinition;
import org.apache.atlas.catalog.definition.ResourceDefinition;
import org.apache.atlas.catalog.exception.*;
import org.apache.atlas.catalog.query.AtlasQuery;
import java.util.*;
/**
* Provider for entity tag resources.
*/
public class EntityTagResourceProvider extends BaseResourceProvider implements ResourceProvider {
private final static ResourceDefinition resourceDefinition = new EntityTagResourceDefinition();
private TermResourceProvider termResourceProvider;
public EntityTagResourceProvider(AtlasTypeSystem typeSystem) {
super(typeSystem);
}
@Override
public Result getResourceById(Request request) throws ResourceNotFoundException {
AtlasQuery atlasQuery;
try {
atlasQuery = queryFactory.createEntityTagQuery(request);
} catch (InvalidQueryException e) {
throw new CatalogRuntimeException("Unable to compile internal Entity Tag query: " + e, e);
}
Collection<Map<String, Object>> results = atlasQuery.execute();
if (results.isEmpty()) {
throw new ResourceNotFoundException(String.format("Tag '%s' not found.",
request.getProperty(resourceDefinition.getIdPropertyName())));
}
return new Result(results);
}
@Override
public Result getResources(Request request) throws InvalidQueryException, ResourceNotFoundException {
AtlasQuery atlasQuery = queryFactory.createEntityTagQuery(request);
return new Result(atlasQuery.execute());
}
@Override
public void createResource(Request request)
throws InvalidPayloadException, ResourceAlreadyExistsException, ResourceNotFoundException {
String entityId = String.valueOf(request.getProperties().remove("id"));
resourceDefinition.validate(request);
Result termResult = getTermQueryResult(request.<String>getProperty("name"));
Map<String, Object> termProperties = termResult.getPropertyMaps().iterator().next();
//todo: use constant for property name
if (String.valueOf(termProperties.get("available_as_tag")).equals("false")) {
throw new InvalidPayloadException(
"Attempted to tag an entity with a term which is not available to be tagged");
}
tagEntities(Collections.singleton(entityId), termProperties);
}
//todo: response for the mixed case where some subset of creations fail
@Override
public Collection<String> createResources(Request request)
throws InvalidQueryException, ResourceNotFoundException, ResourceAlreadyExistsException {
Collection<String> relativeUrls = new ArrayList<>();
AtlasQuery atlasQuery = queryFactory.createEntityQuery(request);
Collection<String> guids = new ArrayList<>();
for (Map<String, Object> entityMap: atlasQuery.execute()) {
guids.add(String.valueOf(entityMap.get("id")));
}
Collection<Map<String, String>> tagMaps = request.getProperty("tags");
for (Map<String, String> tagMap : tagMaps) {
Result termResult = getTermQueryResult(tagMap.get("name"));
relativeUrls.addAll(tagEntities(guids, termResult.getPropertyMaps().iterator().next()));
}
return relativeUrls;
}
private Result getTermQueryResult(String termName) throws ResourceNotFoundException {
Request tagRequest = new InstanceRequest(
Collections.<String, Object>singletonMap("termPath", new TermPath(termName)));
tagRequest.addAdditionalSelectProperties(Collections.singleton("type"));
return getTermResourceProvider().getResourceById(tagRequest);
}
private Collection<String> tagEntities(Collection<String> entityGuids, Map<String, Object> termProperties)
throws ResourceAlreadyExistsException {
Collection<String> relativeUrls = new ArrayList<>();
for (String guid : entityGuids) {
//createTermEdge(entity, Collections.singleton(termVertex));
// copy term properties from trait associated with taxonomy to be set
// on trait associated with new entity (basically clone at time of tag event)
//todo: any changes to 'singleton' trait won't be reflected in new trait
//todo: iterate over properties in term definition instead of hard coding here
Map<String, Object> properties = new HashMap<>();
String termName = String.valueOf(termProperties.get("name"));
properties.put("name", termName);
properties.put("description", termProperties.get("description"));
typeSystem.createTraitInstance(guid, termName, properties);
//todo: *** shouldn't know anything about href structure in this class ***
relativeUrls.add(String.format("v1/entities/%s/tags/%s", guid, termName));
}
return relativeUrls;
}
protected synchronized ResourceProvider getTermResourceProvider() {
if (termResourceProvider == null) {
termResourceProvider = new TermResourceProvider(typeSystem);
}
return termResourceProvider;
}
}
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
* <p/>
* http://www.apache.org/licenses/LICENSE-2.0
* <p/>
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.atlas.catalog;
import java.util.Map;
/**
* A request for an instance resource.
*/
public class InstanceRequest extends BaseRequest {
public InstanceRequest(Map<String, Object> properties) {
super(properties, null);
}
@Override
public Cardinality getCardinality() {
return Cardinality.INSTANCE;
}
}
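/**
 * Usage sketch (editorial addition, not part of this commit): shows how the two Request
 * implementations are constructed. The property names and query string are illustrative.
 */
package org.apache.atlas.catalog.examples;
import org.apache.atlas.catalog.CollectionRequest;
import org.apache.atlas.catalog.InstanceRequest;
import org.apache.atlas.catalog.Request;
import java.util.Collections;
public class RequestSketch {
    public static void main(String[] args) {
        // instance request: identifies a single resource by its id properties, no query string
        Request instance = new InstanceRequest(
                Collections.<String, Object>singletonMap("name", "Catalog"));
        // collection request: carries an optional query string for filtering
        Request collection = new CollectionRequest(
                Collections.<String, Object>emptyMap(), "name:Catalog*");
        System.out.println(instance.getCardinality());   // INSTANCE
        System.out.println(collection.getCardinality()); // COLLECTION
    }
}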
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
* <p/>
* http://www.apache.org/licenses/LICENSE-2.0
* <p/>
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.atlas.catalog;
import com.google.gson.stream.JsonWriter;
import org.apache.atlas.catalog.exception.CatalogRuntimeException;
import javax.ws.rs.core.UriInfo;
import java.io.IOException;
import java.io.StringWriter;
import java.io.Writer;
import java.util.Collection;
import java.util.LinkedHashMap;
import java.util.Map;
/**
* JSON serializer.
*/
public class JsonSerializer {
public String serialize(Result result, UriInfo ui) {
Writer json = new StringWriter();
JsonWriter writer = new JsonWriter(json);
writer.setIndent(" ");
try {
writeValue(writer, result.getPropertyMaps(), ui.getBaseUri().toASCIIString());
} catch (IOException e) {
throw new CatalogRuntimeException("Unable to write JSON response.", e);
}
return json.toString();
}
private void writeValue(JsonWriter writer, Object value, String baseUrl) throws IOException {
if (value == null) {
writer.nullValue();
} else if (value instanceof Map) {
writer.beginObject();
LinkedHashMap<String, Object> nonScalarMap = new LinkedHashMap<>();
for (Map.Entry<String, Object> entry : ((Map<String, Object>) value).entrySet()) {
String key = entry.getKey();
Object val = entry.getValue();
if (val == null || ! (val instanceof Collection || val instanceof Map)) {
//todo: use a token in value instead of prop name
if (key.equals("href")) {
val = baseUrl + String.valueOf(val);
}
writer.name(key);
writeValue(writer, val, baseUrl);
} else {
nonScalarMap.put(key, val);
}
}
for (Map.Entry<String, Object> entry : nonScalarMap.entrySet()) {
writer.name(entry.getKey());
writeValue(writer, entry.getValue(), baseUrl);
}
writer.endObject();
} else if (value instanceof Collection) {
writer.beginArray();
for (Object o : (Collection) value) {
writeValue(writer, o, baseUrl);
}
writer.endArray();
} else if (value instanceof Number) {
writer.value((Number) value);
} else if (value instanceof Boolean) {
writer.value((Boolean) value);
} else {
// everything else is String
writer.value(String.valueOf(value));
}
}
}
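/**
 * Usage sketch (editorial addition, not part of this commit): serializes a Result with
 * JsonSerializer. UriInfo is mocked with EasyMock (a declared test dependency) since no
 * JAX-RS container is available here; the sample property map and base URI are illustrative.
 */
package org.apache.atlas.catalog.examples;
import org.apache.atlas.catalog.JsonSerializer;
import org.apache.atlas.catalog.Result;
import org.easymock.EasyMock;
import javax.ws.rs.core.UriInfo;
import java.net.URI;
import java.util.Collections;
import java.util.LinkedHashMap;
import java.util.Map;
public class JsonSerializerSketch {
    public static void main(String[] args) {
        UriInfo uriInfo = EasyMock.createMock(UriInfo.class);
        EasyMock.expect(uriInfo.getBaseUri()).andReturn(URI.create("http://localhost:21000/api/atlas/"));
        EasyMock.replay(uriInfo);
        Map<String, Object> props = new LinkedHashMap<>();
        props.put("href", "v1/taxonomies/Catalog");   // relative href gets prefixed with the base URI
        props.put("name", "Catalog");
        Result result = new Result(Collections.<Map<String, Object>>singletonList(props));
        System.out.println(new JsonSerializer().serialize(result, uriInfo));
    }
}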
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
* <p/>
* http://www.apache.org/licenses/LICENSE-2.0
* <p/>
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.atlas.catalog;
/**
* Translates property names between the name exposed in the API and the internal fully qualified name.
*/
public interface PropertyMapper {
/**
* Translate a qualified name to a clean name.
*
* @param propName property name to translate
* @param type resource type
*
* @return clean property name
*/
String toCleanName(String propName, String type);
/**
* Translate a clean name to a fully qualified name.
*
* @param propName property name to translate
* @param type resource type
*
* @return fully qualified property name
*/
String toFullyQualifiedName(String propName, String type);
}
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
* <p/>
* http://www.apache.org/licenses/LICENSE-2.0
* <p/>
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.atlas.catalog;
/**
* A rule for translating a property value.
*/
public interface PropertyValueFormatter<T, V> {
/**
* Format a property value.
*
* @param value property value to format
*
* @return formatted property value
*/
V format(T value);
}
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
* <p/>
* http://www.apache.org/licenses/LICENSE-2.0
* <p/>
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.atlas.catalog;
import java.util.Collection;
import java.util.Map;
/**
* Represents a user request.
*/
public interface Request {
/**
* Request cardinality enum.
*/
enum Cardinality {INSTANCE, COLLECTION}
/**
* Get request properties.
*
* @return request property map
*/
Map<String, Object> getProperties();
/**
* Get the value of a specified property.
*
* @param name property name
* @param <T> value type
*
* @return value for the requested property or null if property not in map
*/
<T> T getProperty(String name);
/**
* Get the query string.
*
* @return the user specified query string or null
*/
String getQueryString();
/**
* Get the cardinality of the request.
*
* @return the request cardinality
*/
Cardinality getCardinality();
/**
* Add additional property names which should be returned in the result.
*
* @param resultProperties collection of property names
*/
void addAdditionalSelectProperties(Collection<String> resultProperties);
/**
* Get any additional property names which should be included in the result.
*
* @return collection of added property names or an empty collection
*/
Collection<String> getAdditionalSelectProperties();
}
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
* <p/>
* http://www.apache.org/licenses/LICENSE-2.0
* <p/>
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.atlas.catalog;
import java.util.ArrayList;
import java.util.Comparator;
import java.util.List;
/**
* Provides key ordering for resource property maps.
* Ordering can be defined explicitly for specific properties,
* otherwise natural ordering is used.
*/
public class ResourceComparator implements Comparator<String> {
private static List<String> ordering = new ArrayList<>();
@Override
public int compare(String s1, String s2) {
if (s1.equals(s2)) {
return 0;
}
int s1Order = ordering.indexOf(s1);
int s2Order = ordering.indexOf(s2);
if (s1Order == -1 && s2Order == -1) {
return s1.compareTo(s2);
}
if (s1Order != -1 && s2Order != -1) {
return s1Order - s2Order;
}
return s1Order == -1 ? 1 : -1;
}
//todo: each resource definition can provide its own ordering list
static {
ordering.add("href");
ordering.add("name");
ordering.add("id");
ordering.add("description");
ordering.add("type");
}
}
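/**
 * Usage sketch (editorial addition, not part of this commit): shows the key ordering a
 * TreeMap built with ResourceComparator produces, matching how VertexWrapper orders
 * result property maps later in this commit. The sample properties are illustrative.
 */
package org.apache.atlas.catalog.examples;
import org.apache.atlas.catalog.ResourceComparator;
import java.util.Map;
import java.util.TreeMap;
public class ResourceComparatorSketch {
    public static void main(String[] args) {
        Map<String, Object> props = new TreeMap<>(new ResourceComparator());
        props.put("creation_time", 0L);
        props.put("description", "sample");
        props.put("name", "Catalog");
        props.put("href", "v1/taxonomies/Catalog");
        // explicitly ordered keys first (href, name, ...), remaining keys in natural order
        System.out.println(props.keySet()); // [href, name, description, creation_time]
    }
}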
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
* <p/>
* http://www.apache.org/licenses/LICENSE-2.0
* <p/>
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.atlas.catalog;
import org.apache.atlas.catalog.exception.*;
import java.util.Collection;
/**
* Provider for a resource type.
*/
public interface ResourceProvider {
/**
* Get a resource by primary key.
*
* @param request request instance which contains the required id properties and no query string
* @return result containing the requested resource; never null
*
* @throws ResourceNotFoundException if the requested resource isn't found
*/
Result getResourceById(Request request) throws ResourceNotFoundException;
/**
* Get all resources which match the provider query.
*
* @param request request instance which will include a query string and possibly properties
* @return result containing collection of matching resources. If no resources match
* a result is returned with no resources
*
* @throws InvalidQueryException if the user query contains invalid syntax
* @throws ResourceNotFoundException if a parent resource of the requested resource doesn't exist
*/
Result getResources(Request request) throws InvalidQueryException, ResourceNotFoundException;
/**
* Create a single resource.
*
* @param request request instance containing the contents of the resource to create
*
* @throws InvalidPayloadException if the payload or any other part of the user request is invalid
* @throws ResourceAlreadyExistsException if the resource already exists
* @throws ResourceNotFoundException if a parent of the resource to create doesn't exist
*/
void createResource(Request request)
throws InvalidPayloadException, ResourceAlreadyExistsException, ResourceNotFoundException;
//todo: define the behavior for partial success
/**
* Create multiple resources.
*
* @param request request instance containing the contents of 1..n resources
* @return collection of relative urls for the created resources
*
* @throws InvalidPayloadException if the payload or any other part of the user request is invalid
* @throws ResourceAlreadyExistsException if the resource already exists
* @throws ResourceNotFoundException if a parent of the resource to create doesn't exist
*/
Collection<String> createResources(Request request) throws CatalogException;
}
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
* <p/>
* http://www.apache.org/licenses/LICENSE-2.0
* <p/>
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.atlas.catalog;
import java.util.Collection;
import java.util.Map;
/**
* Resource provider result.
*/
public class Result {
/**
* collection of property maps
*/
private Collection<Map<String, Object>> propertyMaps;
/**
* Constructor.
*
* @param propertyMaps collection of property maps
*/
public Result(Collection<Map<String, Object>> propertyMaps) {
this.propertyMaps = propertyMaps;
}
/**
* Obtain the result property maps.
*
* @return result property maps
*/
public Collection<Map<String, Object>> getPropertyMaps() {
return propertyMaps;
}
}
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
* <p/>
* http://www.apache.org/licenses/LICENSE-2.0
* <p/>
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.atlas.catalog;
import org.apache.atlas.catalog.definition.ResourceDefinition;
import org.apache.atlas.catalog.definition.TaxonomyResourceDefinition;
import org.apache.atlas.catalog.exception.*;
import org.apache.atlas.catalog.query.AtlasQuery;
import java.util.*;
/**
* Provider for taxonomy resources.
*/
public class TaxonomyResourceProvider extends BaseResourceProvider implements ResourceProvider {
private static final ResourceDefinition resourceDefinition = new TaxonomyResourceDefinition();
public TaxonomyResourceProvider(AtlasTypeSystem typeSystem) {
super(typeSystem);
}
@Override
public Result getResourceById(Request request) throws ResourceNotFoundException {
AtlasQuery atlasQuery;
try {
atlasQuery = queryFactory.createTaxonomyQuery(request);
} catch (InvalidQueryException e) {
throw new CatalogRuntimeException("Unable to compile internal Taxonomy query: " + e, e);
}
Collection<Map<String, Object>> results = atlasQuery.execute();
if (results.isEmpty()) {
throw new ResourceNotFoundException(String.format("Taxonomy '%s' not found.",
request.getProperty(resourceDefinition.getIdPropertyName())));
}
return new Result(results);
}
@Override
public Result getResources(Request request) throws InvalidQueryException, ResourceNotFoundException {
AtlasQuery atlasQuery = queryFactory.createTaxonomyQuery(request);
return new Result(atlasQuery.execute());
}
@Override
public synchronized void createResource(Request request)
throws InvalidPayloadException, ResourceAlreadyExistsException {
resourceDefinition.validate(request);
ensureTaxonomyDoesntExist(request);
typeSystem.createEntity(resourceDefinition, request);
}
@Override
public Collection<String> createResources(Request request) throws InvalidQueryException, ResourceNotFoundException {
throw new UnsupportedOperationException("Creating multiple Taxonomies in a request is not currently supported");
}
private void ensureTaxonomyDoesntExist(Request request) throws ResourceAlreadyExistsException {
try {
getResourceById(request);
throw new ResourceAlreadyExistsException(String.format("Taxonomy '%s' already exists.",
request.getProperty("name")));
} catch (ResourceNotFoundException e) {
// expected case
}
}
}
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
* <p/>
* http://www.apache.org/licenses/LICENSE-2.0
* <p/>
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.atlas.catalog;
/**
* Term path information.
*/
//todo: split between Term and TermPath
public class TermPath {
private final String m_taxonomy;
private final String m_fqn;
private final String m_name;
private final String[] m_paths;
public TermPath(String fullyQualifiedName) {
m_fqn = fullyQualifiedName;
//todo: validation
int idx = fullyQualifiedName.indexOf('.');
if (idx != -1) {
m_taxonomy = fullyQualifiedName.substring(0, idx);
m_name = fullyQualifiedName.substring(idx + 1);
m_paths = m_name.split("\\.");
} else {
m_taxonomy = fullyQualifiedName;
m_name = null;
m_paths = new String[0];
}
}
public TermPath(String taxonomyName, String termName) {
m_taxonomy = taxonomyName;
m_name = termName != null && termName.isEmpty() ? null : termName;
if (m_name != null) {
m_fqn = String.format("%s.%s", taxonomyName, termName);
m_paths = termName.split("\\.");
} else {
m_fqn = taxonomyName;
m_paths = new String[0];
}
}
/**
* Get the absolute term name which is in the form of TAXONOMY_NAME.TERM_NAME
*
* @return absolute term name which includes the taxonomy name
*/
public String getFullyQualifiedName() {
return m_fqn;
}
/**
* Get the term name. This differs from the absolute name in that it doesn't
* include the taxonomy name.
*
* @return the term name
*/
public String getName() {
return m_name;
}
/**
* Get the short name for the term which doesn't include any taxonomy or parent information.
* @return term short name
*/
public String getShortName() {
return m_paths[m_paths.length - 1];
}
public String getPath() {
if (m_name == null) {
return "/";
} else {
int idx = m_fqn.indexOf('.');
int lastIdx = m_fqn.lastIndexOf('.');
return idx == lastIdx ? "/" :
m_fqn.substring(idx, lastIdx).replaceAll("\\.", "/");
}
}
public TermPath getParent() {
//todo: if this is the root path, throw exception
return new TermPath(m_taxonomy, m_name.substring(0, m_name.lastIndexOf('.')));
}
public String getTaxonomyName() {
return m_taxonomy;
}
public String[] getPathSegments() {
return m_paths;
}
}
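/**
 * Usage sketch (editorial addition, not part of this commit): demonstrates how TermPath
 * decomposes a fully qualified term name. The taxonomy and term names are illustrative.
 */
package org.apache.atlas.catalog.examples;
import org.apache.atlas.catalog.TermPath;
public class TermPathSketch {
    public static void main(String[] args) {
        TermPath termPath = new TermPath("Catalog.Finance.Assets");
        System.out.println(termPath.getTaxonomyName());       // Catalog
        System.out.println(termPath.getName());                // Finance.Assets
        System.out.println(termPath.getShortName());           // Assets
        System.out.println(termPath.getPath());                // /Finance
        System.out.println(termPath.getParent().getFullyQualifiedName()); // Catalog.Finance
    }
}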
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
* <p/>
* http://www.apache.org/licenses/LICENSE-2.0
* <p/>
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.atlas.catalog;
import org.apache.atlas.catalog.definition.ResourceDefinition;
import org.apache.atlas.catalog.definition.TermResourceDefinition;
import org.apache.atlas.catalog.exception.*;
import org.apache.atlas.catalog.query.AtlasQuery;
import java.util.*;
/**
* Provider for Term resources.
*/
public class TermResourceProvider extends BaseResourceProvider implements ResourceProvider {
private final static ResourceDefinition resourceDefinition = new TermResourceDefinition();
private TaxonomyResourceProvider taxonomyResourceProvider;
public TermResourceProvider(AtlasTypeSystem typeSystem) {
super(typeSystem);
}
@Override
public Result getResourceById(Request request) throws ResourceNotFoundException {
//todo: shouldn't need to add this here
request.getProperties().put("name", request.<TermPath>getProperty("termPath").getFullyQualifiedName());
AtlasQuery atlasQuery;
try {
atlasQuery = queryFactory.createTermQuery(request);
} catch (InvalidQueryException e) {
throw new CatalogRuntimeException("Unable to compile internal Term query: " + e, e);
}
Collection<Map<String, Object>> results = atlasQuery.execute();
if (results.isEmpty()) {
throw new ResourceNotFoundException(String.format("Term '%s' not found.",
request.<TermPath>getProperty("termPath").getFullyQualifiedName()));
}
return new Result(results);
}
@Override
public Result getResources(Request request)
throws InvalidQueryException, ResourceNotFoundException {
TermPath termPath = request.getProperty("termPath");
String queryString = doQueryStringConversions(termPath, request.getQueryString());
Request queryRequest = new CollectionRequest(request.getProperties(), queryString);
AtlasQuery atlasQuery = queryFactory.createTermQuery(queryRequest);
Collection<Map<String, Object>> result = atlasQuery.execute();
return new Result(result);
}
@Override
public void createResource(Request request)
throws InvalidPayloadException, ResourceAlreadyExistsException, ResourceNotFoundException {
TermPath termPath = (TermPath) request.getProperties().remove("termPath");
String qualifiedTermName = termPath.getFullyQualifiedName();
request.getProperties().put("name", qualifiedTermName);
resourceDefinition.validate(request);
// get taxonomy
Request taxonomyRequest = new InstanceRequest(
Collections.<String, Object>singletonMap("name", termPath.getTaxonomyName()));
taxonomyRequest.addAdditionalSelectProperties(Collections.singleton("id"));
Result taxonomyResult = getTaxonomyResourceProvider().getResourceById(taxonomyRequest);
Map<String, Object> taxonomyPropertyMap = taxonomyResult.getPropertyMaps().iterator().next();
// ensure that parent exists if not a root level term
if (! termPath.getPath().equals("/")) {
Map<String, Object> parentProperties = new HashMap<>(request.getProperties());
parentProperties.put("termPath", termPath.getParent());
getResourceById(new InstanceRequest(parentProperties));
}
typeSystem.createTraitType(resourceDefinition, qualifiedTermName,
request.<String>getProperty("description"));
typeSystem.createTraitInstance(String.valueOf(taxonomyPropertyMap.get("id")),
qualifiedTermName, request.getProperties());
}
@Override
public Collection<String> createResources(Request request) throws InvalidQueryException, ResourceNotFoundException {
throw new UnsupportedOperationException("Creating multiple Terms in a request is not currently supported");
}
//todo: add generic support for pre-query modification of expected value
//todo: similar path parsing code is used in several places in this class
private String doQueryStringConversions(TermPath termPath, String queryStr) throws InvalidQueryException {
String hierarchyPathProp = "hierarchy/path";
// replace "."
if (queryStr != null && queryStr.contains(String.format("%s:.", hierarchyPathProp))) {
//todo: regular expression replacement
queryStr = queryStr.replaceAll(String.format("%s:.", hierarchyPathProp),
String.format("%s:%s", hierarchyPathProp, termPath.getPath()));
}
return queryStr;
}
protected synchronized ResourceProvider getTaxonomyResourceProvider() {
if (taxonomyResourceProvider == null) {
taxonomyResourceProvider = new TaxonomyResourceProvider(typeSystem);
}
return taxonomyResourceProvider;
}
}
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
* <p/>
* http://www.apache.org/licenses/LICENSE-2.0
* <p/>
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.atlas.catalog;
import com.tinkerpop.blueprints.Vertex;
import org.apache.atlas.catalog.definition.EntityTagResourceDefinition;
/**
* Wrapper for term vertices.
*/
public class TermVertexWrapper extends VertexWrapper {
public TermVertexWrapper(Vertex v) {
super(v, new EntityTagResourceDefinition());
}
}
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
* <p/>
* http://www.apache.org/licenses/LICENSE-2.0
* <p/>
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.atlas.catalog;
import com.tinkerpop.blueprints.Vertex;
import org.apache.atlas.catalog.definition.ResourceDefinition;
import org.apache.atlas.repository.Constants;
import java.util.*;
/**
* Wrapper for vertices which provides additional information.
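* <p>
* Illustrative usage (the vertex and resource definition are assumed to be obtained elsewhere):
* <pre>
*   VertexWrapper wrapper = new VertexWrapper(vertex, resourceDefinition);
*   String name = wrapper.getProperty("name");               // clean name, mapped and formatted
*   Map&lt;String, Object&gt; properties = wrapper.getPropertyMap();
* </pre>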
*/
public class VertexWrapper {
private final Vertex vertex;
private final String vertexType;
private final Set<String> removedProperties = new HashSet<>();
private final PropertyMapper propertyMapper;
private final Map<String, PropertyValueFormatter> propertyValueFormatters;
protected ResourceComparator resourceComparator = new ResourceComparator();
public VertexWrapper(Vertex v, ResourceDefinition resourceDefinition) {
this(v, resourceDefinition.getPropertyMapper(), resourceDefinition.getPropertyValueFormatters());
}
public VertexWrapper(Vertex v,
PropertyMapper mapper,
Map<String, PropertyValueFormatter> formatters) {
vertex = v;
vertexType = getVertexType(v);
propertyMapper = mapper;
propertyValueFormatters = formatters;
}
public Vertex getVertex() {
return vertex;
}
public <T> T getProperty(String name) {
T val;
if (removedProperties.contains(name)) {
val = null;
} else {
val = vertex.getProperty(propertyMapper.toFullyQualifiedName(name, vertexType));
if (propertyValueFormatters.containsKey(name)) {
//todo: fix typing of property mapper
val = (T) propertyValueFormatters.get(name).format(val);
}
}
return val;
}
public Collection<String> getPropertyKeys() {
Collection<String> propertyKeys = new TreeSet<>(resourceComparator);
for (String p : vertex.getPropertyKeys()) {
String cleanName = propertyMapper.toCleanName(p, vertexType);
if (! removedProperties.contains(cleanName)) {
propertyKeys.add(cleanName);
}
}
return propertyKeys;
}
public Map<String, Object> getPropertyMap() {
Map<String, Object> props = new TreeMap<>(resourceComparator);
for (String p : vertex.getPropertyKeys()) {
String cleanName = propertyMapper.toCleanName(p, vertexType);
if (! removedProperties.contains(cleanName)) {
Object val = vertex.getProperty(p);
if (propertyValueFormatters.containsKey(cleanName)) {
val = propertyValueFormatters.get(cleanName).format(val);
}
props.put(cleanName, val);
}
}
return props;
}
public void removeProperty(String name) {
removedProperties.add(name);
}
public boolean isPropertyRemoved(String name) {
return removedProperties.contains(name);
}
@Override
public String toString() {
return String.format("VertexWrapper[name=%s]", getProperty("name"));
}
private String getVertexType(Vertex v) {
return v.getProperty(Constants.ENTITY_TYPE_PROPERTY_KEY);
}
}
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
* <p/>
* http://www.apache.org/licenses/LICENSE-2.0
* <p/>
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.atlas.catalog.definition;
import org.apache.atlas.AtlasException;
import org.apache.atlas.catalog.*;
import org.apache.atlas.catalog.exception.CatalogRuntimeException;
import org.apache.atlas.catalog.exception.InvalidPayloadException;
import org.apache.atlas.catalog.projection.Projection;
import org.apache.atlas.catalog.projection.Relation;
import org.apache.atlas.typesystem.types.AttributeDefinition;
import org.apache.atlas.typesystem.types.AttributeInfo;
import org.apache.atlas.typesystem.types.Multiplicity;
import org.apache.atlas.typesystem.types.TypeSystem;
import java.util.*;
/**
* Base class for resource definitions.
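* <p>
* A minimal subclass sketch, mirroring how the concrete definitions in this package register their
* metadata (the subclass name is illustrative only):
* <pre>
*   public MyResourceDefinition() {
*       registerProperty(TypesUtil.createRequiredAttrDef("name", DataTypes.STRING_TYPE));
*       instanceProperties.add("name");
*       collectionProperties.add("name");
*   }
* </pre>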
*/
public abstract class BaseResourceDefinition implements ResourceDefinition {
protected static final TypeSystem typeSystem = TypeSystem.getInstance();
protected final Set<String> instanceProperties = new HashSet<>();
protected final Set<String> collectionProperties = new HashSet<>();
protected Map<String, AttributeDefinition> propertyDefs = new HashMap<>();
protected Map<String, AttributeInfo> properties = new HashMap<>();
protected final Map<String, Projection> projections = new HashMap<>();
protected final Map<String, Relation> relations = new HashMap<>();
protected final PropertyMapper propertyMapper;
protected final Map<String, PropertyValueFormatter> propertyValueFormatters = new HashMap<>();
public BaseResourceDefinition() {
DefaultDateFormatter defaultDateFormatter = new DefaultDateFormatter();
registerPropertyValueFormatter("creation_time", defaultDateFormatter);
registerPropertyValueFormatter("modified_time", defaultDateFormatter);
this.propertyMapper = createPropertyMapper();
}
@Override
public void validate(Request request) throws InvalidPayloadException {
Collection<String> propKeys = new HashSet<>(request.getProperties().keySet());
Collection<String> missingProperties = new HashSet<>();
for (AttributeInfo property : properties.values()) {
String name = property.name;
if (property.multiplicity == Multiplicity.REQUIRED) {
if (request.getProperty(name) == null) {
missingProperties.add(name);
}
}
propKeys.remove(name);
}
if (! missingProperties.isEmpty() || ! propKeys.isEmpty()) {
throw new InvalidPayloadException(missingProperties, propKeys);
}
//todo: property type validation
}
@Override
public Collection<AttributeDefinition> getPropertyDefinitions() {
return propertyDefs.values();
}
@Override
public Map<String, Object> filterProperties(Request request, Map<String, Object> propertyMap) {
Request.Cardinality cardinality = request.getCardinality();
Collection<String> requestProperties = request.getAdditionalSelectProperties();
Iterator<Map.Entry<String, Object>> propIter = propertyMap.entrySet().iterator();
while(propIter.hasNext()) {
Map.Entry<String, Object> propEntry = propIter.next();
String prop = propEntry.getKey();
if (! requestProperties.contains(prop)) {
if (cardinality == Request.Cardinality.COLLECTION) {
if (! collectionProperties.contains(prop)) {
propIter.remove();
}
} else {
if (! instanceProperties.isEmpty() && ! instanceProperties.contains(prop)) {
propIter.remove();
}
}
}
}
return propertyMap;
}
@Override
public Map<String, Projection> getProjections() {
return projections;
}
@Override
public Map<String, Relation> getRelations() {
return relations;
}
@Override
public synchronized PropertyMapper getPropertyMapper() {
return propertyMapper;
}
@Override
public Map<String, PropertyValueFormatter> getPropertyValueFormatters() {
return propertyValueFormatters;
}
protected void registerProperty(AttributeDefinition propertyDefinition) {
try {
propertyDefs.put(propertyDefinition.name, propertyDefinition);
properties.put(propertyDefinition.name, new AttributeInfo(typeSystem, propertyDefinition, null));
} catch (AtlasException e) {
throw new CatalogRuntimeException("Unable to create attribute: " + propertyDefinition.name, e);
}
}
protected void registerPropertyValueFormatter(String property, PropertyValueFormatter valueFormatter) {
propertyValueFormatters.put(property, valueFormatter);
}
/**
* Create a new property mapper instance.
* Should be overridden in children where the default implementation isn't sufficient.
*
* @return a new property mapper instance
*/
protected PropertyMapper createPropertyMapper() {
return new DefaultPropertyMapper();
}
}
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
* <p/>
* http://www.apache.org/licenses/LICENSE-2.0
* <p/>
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.atlas.catalog.definition;
import com.tinkerpop.pipes.PipeFunction;
import com.tinkerpop.pipes.transform.TransformFunctionPipe;
import org.apache.atlas.catalog.Request;
import org.apache.atlas.catalog.exception.InvalidPayloadException;
import org.apache.atlas.catalog.projection.*;
import java.util.Arrays;
import java.util.Collection;
import java.util.Collections;
import java.util.Map;
/**
* Entity resource definition.
*/
public class EntityResourceDefinition extends BaseResourceDefinition {
public EntityResourceDefinition() {
collectionProperties.add("name");
collectionProperties.add("id");
collectionProperties.add("type");
RelationProjection tagProjection = getTagProjection();
projections.put("tags", tagProjection);
RelationProjection traitProjection = getTraitProjection();
projections.put("traits", traitProjection);
projections.put("default", getDefaultRelationProjection());
relations.put(tagProjection.getName(), tagProjection.getRelation());
relations.put(traitProjection.getName(), traitProjection.getRelation());
}
@Override
public String getIdPropertyName() {
return "id";
}
// not meaningful for entities
@Override
public String getTypeName() {
return null;
}
@Override
public void validate(Request request) throws InvalidPayloadException {
// no op for entities as we don't currently create entities and
// each entity type is different
}
@Override
public String resolveHref(Map<String, Object> properties) {
Object id = properties.get("id");
return id == null ? null : String.format("v1/entities/%s", id);
}
private RelationProjection getTagProjection() {
Relation traitRelation = new TagRelation();
RelationProjection tagProjection = new RelationProjection("tags", Collections.singleton("name"),
traitRelation, Projection.Cardinality.MULTIPLE);
tagProjection.addPipe(new TransformFunctionPipe<>(
new PipeFunction<Collection<ProjectionResult>, Collection<ProjectionResult>>() {
@Override
public Collection<ProjectionResult> compute(Collection<ProjectionResult> results) {
for (ProjectionResult result : results) {
for (Map<String, Object> properties : result.getPropertyMaps()) {
properties.put("href", String.format("v1/entities/%s/tags/%s",
result.getStartingVertex().getProperty("id"), properties.get("name")));
}
}
return results;
}
}));
return tagProjection;
}
private RelationProjection getTraitProjection() {
return new RelationProjection("traits", Collections.<String>emptySet(),
new TraitRelation(), Projection.Cardinality.MULTIPLE);
}
private RelationProjection getDefaultRelationProjection() {
Relation genericRelation = new GenericRelation(this);
RelationProjection relationProjection = new RelationProjection(
"relations",
Arrays.asList("type", "id", "name"),
genericRelation, Projection.Cardinality.MULTIPLE);
relationProjection.addPipe(new TransformFunctionPipe<>(
new PipeFunction<Collection<ProjectionResult>, Collection<ProjectionResult>>() {
@Override
public Collection<ProjectionResult> compute(Collection<ProjectionResult> results) {
for (ProjectionResult result : results) {
for (Map<String, Object> properties : result.getPropertyMaps()) {
properties.put("href", String.format("v1/entities/%s", properties.get("id")));
}
}
return results;
}
}));
return relationProjection;
}
}
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
* <p/>
* http://www.apache.org/licenses/LICENSE-2.0
* <p/>
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.atlas.catalog.definition;
import com.tinkerpop.pipes.PipeFunction;
import com.tinkerpop.pipes.transform.TransformFunctionPipe;
import org.apache.atlas.catalog.DefaultPropertyMapper;
import org.apache.atlas.catalog.PropertyMapper;
import org.apache.atlas.catalog.ResourceComparator;
import org.apache.atlas.catalog.VertexWrapper;
import org.apache.atlas.catalog.projection.Projection;
import org.apache.atlas.catalog.projection.ProjectionResult;
import org.apache.atlas.repository.Constants;
import org.apache.atlas.typesystem.types.DataTypes;
import org.apache.atlas.typesystem.types.utils.TypesUtil;
import java.util.*;
/**
* Entity Tag resource definition.
*/
public class EntityTagResourceDefinition extends BaseResourceDefinition {
public static final String ENTITY_GUID_PROPERTY = "entity-guid";
public EntityTagResourceDefinition() {
registerProperty(TypesUtil.createRequiredAttrDef("name", DataTypes.STRING_TYPE));
instanceProperties.add("name");
instanceProperties.add("description");
instanceProperties.add("creation_time");
collectionProperties.add("name");
collectionProperties.add("description");
projections.put("terms", getTermProjection());
}
@Override
public String getIdPropertyName() {
return "name";
}
// not meaningful for entity tags
@Override
public String getTypeName() {
return null;
}
@Override
public String resolveHref(Map<String, Object> properties) {
return String.format("v1/entities/%s/tags/%s", properties.get(ENTITY_GUID_PROPERTY), properties.get("name"));
}
private Projection getTermProjection() {
return new Projection("term", Projection.Cardinality.SINGLE,
new TransformFunctionPipe<>(new PipeFunction<VertexWrapper, Collection<ProjectionResult>>() {
@Override
public Collection<ProjectionResult> compute(VertexWrapper start) {
Map<String, Object> map = new TreeMap<>(new ResourceComparator());
StringBuilder sb = new StringBuilder();
sb.append("v1/taxonomies/");
String fullyQualifiedName = start.getVertex().getProperty(Constants.ENTITY_TYPE_PROPERTY_KEY);
String[] paths = fullyQualifiedName.split("\\.");
// first path segment is the taxonomy
sb.append(paths[0]);
for (int i = 1; i < paths.length; ++i) {
String path = paths[i];
if (path != null && !path.isEmpty()) {
sb.append("/terms/");
sb.append(path);
}
}
map.put("href", sb.toString());
return Collections.singleton(new ProjectionResult("term", start,
Collections.singleton(map)));
}
}));
}
@Override
protected PropertyMapper createPropertyMapper() {
return new DefaultPropertyMapper(Collections.singletonMap(Constants.ENTITY_TYPE_PROPERTY_KEY, "name"),
Collections.singletonMap("name", Constants.ENTITY_TYPE_PROPERTY_KEY));
}
}
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
* <p/>
* http://www.apache.org/licenses/LICENSE-2.0
* <p/>
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.atlas.catalog.definition;
import org.apache.atlas.catalog.PropertyMapper;
import org.apache.atlas.catalog.PropertyValueFormatter;
import org.apache.atlas.catalog.Request;
import org.apache.atlas.catalog.exception.InvalidPayloadException;
import org.apache.atlas.catalog.projection.Projection;
import org.apache.atlas.catalog.projection.Relation;
import org.apache.atlas.typesystem.types.AttributeDefinition;
import java.util.Collection;
import java.util.Map;
/**
* Resource definition.
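* <p>
* Illustrative usage by a resource provider (variable names are assumptions, not part of this API):
* <pre>
*   resourceDefinition.validate(request);
*   Map&lt;String, Object&gt; filtered = resourceDefinition.filterProperties(request, propertyMap);
*   String href = resourceDefinition.resolveHref(filtered);   // may be null for some resources
* </pre>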
*/
public interface ResourceDefinition {
/**
* The type name of the resource.
*
* @return the resource's type name
*/
String getTypeName();
/**
* Validate a user request.
*
* @param request user request
*
* @throws InvalidPayloadException if the request payload is invalid in any way
*/
void validate(Request request) throws InvalidPayloadException;
/**
* Get the name of the resource's id property.
*
* @return the id property name
*/
String getIdPropertyName();
/**
* Get the property definitions for the resource.
*
* @return resource property definitions
*/
//todo: abstract usage of AttributeDefinition
Collection<AttributeDefinition> getPropertyDefinitions();
/**
* Filter out properties which shouldn't be returned in the result.
* The passed in map is directly modified as well as returned.
*
* @param request user request
* @param propertyMap property map to filter
*
* @return the filtered property map
*/
Map<String, Object> filterProperties(Request request, Map<String, Object> propertyMap);
/**
* Generate an href for the resource from the provided resource property map.
*
* @param properties resource property map
*
* @return a URL to be used as an href property value for the resource
*/
String resolveHref(Map<String, Object> properties);
/**
* Get map of resource projections.
*
* @return map of resource projections
*/
Map<String, Projection> getProjections();
/**
* Get map of resource relations.
*
* @return map of resource relations
*/
Map<String, Relation> getRelations();
/**
* Get the property mapper associated with the resource.
*
* @return associated property mapper
*/
PropertyMapper getPropertyMapper();
/**
* Get the registered property value formatters.
* @return map of property name to property value formatter
*/
Map<String, PropertyValueFormatter> getPropertyValueFormatters();
}
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
* <p/>
* http://www.apache.org/licenses/LICENSE-2.0
* <p/>
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.atlas.catalog.definition;
import com.tinkerpop.pipes.PipeFunction;
import com.tinkerpop.pipes.transform.TransformFunctionPipe;
import org.apache.atlas.catalog.Request;
import org.apache.atlas.catalog.VertexWrapper;
import org.apache.atlas.catalog.exception.InvalidPayloadException;
import org.apache.atlas.catalog.projection.Projection;
import org.apache.atlas.catalog.projection.ProjectionResult;
import org.apache.atlas.typesystem.types.DataTypes;
import org.apache.atlas.typesystem.types.utils.TypesUtil;
import java.util.*;
/**
* Taxonomy resource definition.
*/
public class TaxonomyResourceDefinition extends BaseResourceDefinition {
public TaxonomyResourceDefinition() {
registerProperty(TypesUtil.createUniqueRequiredAttrDef("name", DataTypes.STRING_TYPE));
registerProperty(TypesUtil.createOptionalAttrDef("description", DataTypes.STRING_TYPE));
//todo: combine with above registrations
instanceProperties.add("name");
instanceProperties.add("description");
instanceProperties.add("creation_time");
collectionProperties.add("name");
collectionProperties.add("description");
projections.put("terms", getTermsProjection());
}
@Override
public void validate(Request request) throws InvalidPayloadException {
super.validate(request);
if (String.valueOf(request.getProperties().get("name")).contains(".")) {
throw new InvalidPayloadException("The \"name\" property may not contain the character '.'");
}
}
@Override
public String getTypeName() {
return "Taxonomy";
}
@Override
public String getIdPropertyName() {
return "name";
}
@Override
public String resolveHref(Map<String, Object> properties) {
return String.format("v1/taxonomies/%s", properties.get("name"));
}
private Projection getTermsProjection() {
final String termsProjectionName = "terms";
return new Projection(termsProjectionName, Projection.Cardinality.SINGLE,
new TransformFunctionPipe<>(new PipeFunction<VertexWrapper, Collection<ProjectionResult>>() {
private String baseHref = "v1/taxonomies/";
@Override
public Collection<ProjectionResult> compute(VertexWrapper v) {
Map<String, Object> map = new HashMap<>();
map.put("href", baseHref + v.getProperty("name") + "/terms");
return Collections.singleton(new ProjectionResult(termsProjectionName, v,
Collections.singleton(map)));
}
}));
}
}
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
* <p/>
* http://www.apache.org/licenses/LICENSE-2.0
* <p/>
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.atlas.catalog.definition;
import com.tinkerpop.pipes.PipeFunction;
import com.tinkerpop.pipes.transform.TransformFunctionPipe;
import org.apache.atlas.catalog.Request;
import org.apache.atlas.catalog.ResourceComparator;
import org.apache.atlas.catalog.TermPath;
import org.apache.atlas.catalog.VertexWrapper;
import org.apache.atlas.catalog.exception.InvalidPayloadException;
import org.apache.atlas.catalog.projection.Projection;
import org.apache.atlas.catalog.projection.ProjectionResult;
import org.apache.atlas.repository.Constants;
import org.apache.atlas.typesystem.types.*;
import org.apache.atlas.typesystem.types.utils.TypesUtil;
import java.util.*;
/**
* Term resource definition.
*/
public class TermResourceDefinition extends BaseResourceDefinition {
public TermResourceDefinition() {
registerProperty(TypesUtil.createRequiredAttrDef("name", DataTypes.STRING_TYPE));
registerProperty(TypesUtil.createOptionalAttrDef("description", DataTypes.STRING_TYPE));
registerProperty(TypesUtil.createOptionalAttrDef("available_as_tag", DataTypes.BOOLEAN_TYPE));
registerProperty(TypesUtil.createOptionalAttrDef("acceptable_use", DataTypes.STRING_TYPE));
instanceProperties.add("name");
instanceProperties.add("description");
instanceProperties.add("creation_time");
instanceProperties.add("available_as_tag");
instanceProperties.add("acceptable_use");
collectionProperties.add("name");
collectionProperties.add("description");
projections.put("terms", getSubTermProjection());
projections.put("hierarchy", getHierarchyProjection());
}
@Override
public void validate(Request request) throws InvalidPayloadException {
super.validate(request);
String name = request.getProperty("name");
// name will be in the fully qualified form: taxonomyName.termName
if (! name.contains(".")) {
throw new InvalidPayloadException("Term name must be in the form 'taxonomyName.termName.subTermName'");
}
if (! request.getProperties().containsKey("available_as_tag")) {
request.getProperties().put("available_as_tag", true);
}
}
@Override
public String getTypeName() {
return "Term";
}
@Override
public String getIdPropertyName() {
return "name";
}
//todo
@Override
public String resolveHref(Map<String, Object> properties) {
StringBuilder sb = new StringBuilder();
sb.append("v1/taxonomies/");
TermPath termPath = new TermPath(String.valueOf(properties.get("name")));
String[] paths = termPath.getPathSegments();
sb.append(termPath.getTaxonomyName());
for (String path : paths) {
//todo: shouldn't need to check for null or empty after TermPath addition
if (path != null && !path.isEmpty()) {
sb.append("/terms/");
sb.append(path);
}
}
return sb.toString();
}
private Projection getHierarchyProjection() {
final String projectionName = "hierarchy";
return new Projection(projectionName, Projection.Cardinality.SINGLE,
new TransformFunctionPipe<>(new PipeFunction<VertexWrapper, Collection<ProjectionResult>>() {
@Override
public Collection<ProjectionResult> compute(VertexWrapper start) {
Map<String, Object> map = new TreeMap<>(new ResourceComparator());
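// the vertex type name is the term's fully qualified name (e.g. "taxonomyName.termName"); TermPath decomposes it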
TermPath termPath = new TermPath(start.getVertex().<String>getProperty(
Constants.ENTITY_TYPE_PROPERTY_KEY));
map.put("path", termPath.getPath());
map.put("short_name", termPath.getShortName());
map.put("taxonomy", termPath.getTaxonomyName());
return Collections.singleton(new ProjectionResult(projectionName, start,
Collections.singleton(map)));
}
}));
}
private Projection getSubTermProjection() {
//todo: combine with other term projections
final String termsProjectionName = "terms";
return new Projection(termsProjectionName, Projection.Cardinality.SINGLE,
new TransformFunctionPipe<>(new PipeFunction<VertexWrapper, Collection<ProjectionResult>>() {
@Override
public Collection<ProjectionResult> compute(VertexWrapper start) {
Map<String, Object> map = new TreeMap<>(new ResourceComparator());
StringBuilder sb = new StringBuilder();
sb.append("v1/taxonomies/");
TermPath termPath = new TermPath(start.getVertex().<String>getProperty(Constants.ENTITY_TYPE_PROPERTY_KEY));
String[] paths = termPath.getPathSegments();
sb.append(termPath.getTaxonomyName());
for (String path : paths) {
//todo: shouldn't need to check for null or empty after TermPath addition
if (path != null && !path.isEmpty()) {
sb.append("/terms/");
sb.append(path);
}
}
sb.append("/terms");
map.put("href", sb.toString());
return Collections.singleton(new ProjectionResult(termsProjectionName, start,
Collections.singleton(map)));
}
}));
}
}
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
* <p/>
* http://www.apache.org/licenses/LICENSE-2.0
* <p/>
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.atlas.catalog.exception;
/**
* Base checked catalog exception.
*/
public class CatalogException extends Exception {
private int status;
public CatalogException(String message, int status) {
super(message);
this.status = status;
}
public int getStatus() {
return status;
}
}
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
* <p/>
* http://www.apache.org/licenses/LICENSE-2.0
* <p/>
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.atlas.catalog.exception;
/**
* Base runtime catalog exception.
*/
public class CatalogRuntimeException extends RuntimeException {
private int statusCode = 500;
public CatalogRuntimeException(Exception e) {
super("", e);
}
public CatalogRuntimeException(String message, Exception e) {
super(message, e);
}
public CatalogRuntimeException(String message, int statusCode) {
super(message);
this.statusCode = statusCode;
}
public int getStatusCode() {
return statusCode;
}
}
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
* <p/>
* http://www.apache.org/licenses/LICENSE-2.0
* <p/>
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.atlas.catalog.exception;
import java.util.Collection;
/**
* Exception used for invalid API payloads.
*/
public class InvalidPayloadException extends CatalogException {
private static final String baseMsg = "Invalid Request.";
private static final String missingMsg = " The following required properties are missing: %s.";
private static final String unknownMsg = " The following properties are not supported: %s.";
public InvalidPayloadException(Collection<String> missingProperties, Collection<String> unknownProperties) {
super(baseMsg + (!missingProperties.isEmpty() ? String.format(missingMsg, missingProperties): "") +
(!unknownProperties.isEmpty() ? String.format(unknownMsg, unknownProperties): ""), 400);
}
public InvalidPayloadException(String msg) {
super(msg, 400);
}
}
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
* <p/>
* http://www.apache.org/licenses/LICENSE-2.0
* <p/>
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.atlas.catalog.exception;
/**
* Exception for invalid user query.
*/
public class InvalidQueryException extends CatalogException {
public InvalidQueryException(String message) {
super("Unable to parse query: " + message, 400);
}
}
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
* <p/>
* http://www.apache.org/licenses/LICENSE-2.0
* <p/>
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.atlas.catalog.exception;
/**
* Exception used when an attempt is made to create a resource which already exists.
*/
public class ResourceAlreadyExistsException extends CatalogException {
public ResourceAlreadyExistsException(String message) {
super(message, 409);
}
}
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
* <p/>
* http://www.apache.org/licenses/LICENSE-2.0
* <p/>
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.atlas.catalog.exception;
/**
* Exception used when an explicitly requested resource doesn't exist.
*/
public class ResourceNotFoundException extends CatalogException {
public ResourceNotFoundException(String message) {
super(message, 404);
}
}
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
* <p/>
* http://www.apache.org/licenses/LICENSE-2.0
* <p/>
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.atlas.catalog.projection;
import com.tinkerpop.blueprints.Direction;
import com.tinkerpop.blueprints.Edge;
import com.tinkerpop.blueprints.Vertex;
import com.tinkerpop.pipes.Pipe;
import org.apache.atlas.catalog.VertexWrapper;
import org.apache.atlas.catalog.definition.ResourceDefinition;
import org.apache.atlas.repository.Constants;
import java.util.ArrayList;
import java.util.Collection;
import java.util.HashMap;
import java.util.Map;
/**
* Represents a generic relation, discovered by following a vertex's outgoing edges and grouping
* the adjacent vertices by relation name.
*/
public class GenericRelation implements Relation {
private final ResourceDefinition resourceDefinition;
public GenericRelation(ResourceDefinition resourceDefinition) {
this.resourceDefinition = resourceDefinition;
}
@Override
public Collection<RelationSet> traverse(VertexWrapper vWrapper) {
Collection<RelationSet> relations = new ArrayList<>();
Vertex v = vWrapper.getVertex();
String vertexType = v.getProperty(Constants.ENTITY_TYPE_PROPERTY_KEY);
Map<String, Collection<VertexWrapper>> vertexMap = new HashMap<>();
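// outgoing edges whose label starts with "<internal prefix><vertexType>." represent relations;
// the label suffix is the relation name, used to group the adjacent vertices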
for (Edge e : v.getEdges(Direction.OUT)) {
String edgeLabel = e.getLabel();
String edgePrefix = String.format("%s%s.", Constants.INTERNAL_PROPERTY_KEY_PREFIX, vertexType);
if (edgeLabel.startsWith(edgePrefix)) {
Vertex adjacentVertex = e.getVertex(Direction.IN);
VertexWrapper relationVertex = new VertexWrapper(adjacentVertex, resourceDefinition);
String relationName = edgeLabel.substring(edgePrefix.length());
Collection<VertexWrapper> vertices = vertexMap.get(relationName);
if (vertices == null) {
vertices = new ArrayList<>();
vertexMap.put(relationName, vertices);
}
vertices.add(relationVertex);
}
}
for (Map.Entry<String, Collection<VertexWrapper>> entry : vertexMap.entrySet()) {
relations.add(new RelationSet(entry.getKey(), entry.getValue()));
}
return relations;
}
@Override
public Pipe asPipe() {
return null;
}
@Override
public ResourceDefinition getResourceDefinition() {
return resourceDefinition;
}
}
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
* <p/>
* http://www.apache.org/licenses/LICENSE-2.0
* <p/>
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.atlas.catalog.projection;
import com.tinkerpop.pipes.Pipe;
import com.tinkerpop.pipes.util.Pipeline;
import org.apache.atlas.catalog.VertexWrapper;
import java.util.Collection;
import java.util.Collections;
/**
* Projection representation.
* Used to project properties onto a resource from another source.
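* <p>
* Illustrative usage (the pipe and vertex wrapper are assumed to be created elsewhere):
* <pre>
*   Projection hrefProjection = new Projection("href", Projection.Cardinality.SINGLE, pipe);
*   Collection&lt;ProjectionResult&gt; results = hrefProjection.values(vertexWrapper);
* </pre>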
*/
public class Projection {
public enum Cardinality {SINGLE, MULTIPLE}
private final String m_name;
private final Cardinality m_cardinality;
protected Pipeline<VertexWrapper, Collection<ProjectionResult>> m_pipeline = new Pipeline<>();
public Projection(String name, Cardinality cardinality) {
m_name = name;
m_cardinality = cardinality;
}
public Projection(String name, Cardinality cardinality, Pipe<VertexWrapper, Collection<ProjectionResult>> pipe) {
m_name = name;
m_cardinality = cardinality;
m_pipeline.addPipe(pipe);
}
public Collection<ProjectionResult> values(VertexWrapper start) {
m_pipeline.setStarts(Collections.singleton(start));
return m_pipeline.iterator().next();
}
public void addPipe(Pipe<Collection<ProjectionResult>, Collection<ProjectionResult>> p) {
m_pipeline.addPipe(p);
}
public String getName() {
return m_name;
}
public Cardinality getCardinality() {
return m_cardinality;
}
}
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
* <p/>
* http://www.apache.org/licenses/LICENSE-2.0
* <p/>
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.atlas.catalog.projection;
import org.apache.atlas.catalog.VertexWrapper;
import java.util.Collection;
import java.util.Map;
/**
* Result of a projection.
*/
public class ProjectionResult {
private final VertexWrapper m_startVertex;
private final String m_name;
private final Collection<Map<String, Object>> m_propertyMaps;
public ProjectionResult(String name, VertexWrapper startingVertex, Collection<Map<String, Object>> propertyMaps) {
m_name = name;
m_startVertex = startingVertex;
m_propertyMaps = propertyMaps;
}
public String getName() {
return m_name;
}
public VertexWrapper getStartingVertex() {
return m_startVertex;
}
public Collection<Map<String, Object>> getPropertyMaps() {
return m_propertyMaps;
}
}
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
* <p/>
* http://www.apache.org/licenses/LICENSE-2.0
* <p/>
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.atlas.catalog.projection;
import com.tinkerpop.pipes.Pipe;
import org.apache.atlas.catalog.VertexWrapper;
import org.apache.atlas.catalog.definition.ResourceDefinition;
import java.util.Collection;
/**
* Represents the relationship from one vertex to another via an edge.
*/
public interface Relation {
/**
* Traverse the relation.
*
* @param vWrapper vertex to start traversal from
*
* @return results of the traversal
*/
Collection<RelationSet> traverse(VertexWrapper vWrapper);
/**
* Get the pipe representation of the traversal.
*
* @return pipe representation
*/
Pipe asPipe();
/**
* Get the associated resource definition.
*
* @return associated resource definition
*/
ResourceDefinition getResourceDefinition();
}
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
* <p/>
* http://www.apache.org/licenses/LICENSE-2.0
* <p/>
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.atlas.catalog.projection;
import com.tinkerpop.pipes.PipeFunction;
import com.tinkerpop.pipes.transform.TransformFunctionPipe;
import org.apache.atlas.catalog.ResourceComparator;
import org.apache.atlas.catalog.VertexWrapper;
import java.util.*;
/**
* Projection based on a relation.
*/
public class RelationProjection extends Projection {
private Relation relation;
public RelationProjection(String name, final Collection<String> fields, final Relation relation, Cardinality cardinality) {
super(name, cardinality, new TransformFunctionPipe<>(
new PipeFunction<VertexWrapper, Collection<ProjectionResult>>() {
@Override
public Collection<ProjectionResult> compute(VertexWrapper start) {
Collection<ProjectionResult> projectionResults = new ArrayList<>();
for (RelationSet relationSet : relation.traverse(start)) {
Collection<Map<String, Object>> propertyMaps = new ArrayList<>();
for (VertexWrapper vWrapper : relationSet.getVertices()) {
Map<String, Object> propertyMap = new TreeMap<>(new ResourceComparator());
propertyMaps.add(propertyMap);
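// an empty field set projects all of the vertex's properties; otherwise only the requested fields are projected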
if (fields.isEmpty()) {
for (String property : vWrapper.getPropertyKeys()) {
propertyMap.put(property, vWrapper.<String>getProperty(property));
}
} else {
for (String property : fields) {
propertyMap.put(property, vWrapper.<String>getProperty(property));
}
}
}
projectionResults.add(new ProjectionResult(relationSet.getName(), start, propertyMaps));
}
return projectionResults;
}
}));
this.relation = relation;
}
public Relation getRelation() {
return relation;
}
}
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
* <p/>
* http://www.apache.org/licenses/LICENSE-2.0
* <p/>
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.atlas.catalog.projection;
import org.apache.atlas.catalog.VertexWrapper;
import java.util.*;
/**
* Encapsulates the response of a relation traversal.
*/
public class RelationSet {
private final String m_name;
private final Collection<VertexWrapper> m_vertices;
public RelationSet(String name, Collection<VertexWrapper> vertices) {
m_name = name;
m_vertices = vertices;
}
public String getName() {
return m_name;
}
public Collection<VertexWrapper> getVertices() {
return Collections.unmodifiableCollection(m_vertices);
}
}
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
* <p/>
* http://www.apache.org/licenses/LICENSE-2.0
* <p/>
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.atlas.catalog.projection;
import com.tinkerpop.blueprints.Direction;
import com.tinkerpop.blueprints.Edge;
import com.tinkerpop.blueprints.Vertex;
import com.tinkerpop.pipes.Pipe;
import com.tinkerpop.pipes.PipeFunction;
import com.tinkerpop.pipes.filter.FilterFunctionPipe;
import org.apache.atlas.catalog.TermVertexWrapper;
import org.apache.atlas.catalog.VertexWrapper;
import org.apache.atlas.catalog.definition.EntityTagResourceDefinition;
import org.apache.atlas.catalog.definition.ResourceDefinition;
import org.apache.atlas.repository.Constants;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Collections;
/**
* Relation for adjacent Tag vertices.
*/
public class TagRelation implements Relation {
private static final ResourceDefinition resourceDefinition = new EntityTagResourceDefinition();
@Override
public Collection<RelationSet> traverse(VertexWrapper vWrapper) {
Vertex v = vWrapper.getVertex();
Collection<VertexWrapper> vertices = new ArrayList<>();
for (Edge e : v.getEdges(Direction.OUT)) {
if (e.getLabel().startsWith(v.<String>getProperty(Constants.ENTITY_TYPE_PROPERTY_KEY))) {
VertexWrapper trait = new TermVertexWrapper(e.getVertex(Direction.IN));
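// trait vertices created from terms carry "available_as_tag"; only those are returned as tags
// (plain traits are handled by TraitRelation)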
if (trait.getPropertyKeys().contains("available_as_tag")) {
vertices.add(trait);
}
}
}
return Collections.singletonList(new RelationSet("tags", vertices));
}
@Override
public Pipe asPipe() {
return new FilterFunctionPipe<>(new PipeFunction<Edge, Boolean>() {
@Override
public Boolean compute(Edge edge) {
String name = edge.getVertex(Direction.OUT).getProperty(Constants.ENTITY_TYPE_PROPERTY_KEY);
VertexWrapper v = new TermVertexWrapper(edge.getVertex(Direction.IN));
return edge.getLabel().startsWith(name) && v.getPropertyKeys().contains("available_as_tag");
}
});
}
@Override
public ResourceDefinition getResourceDefinition() {
return resourceDefinition;
}
}
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
* <p/>
* http://www.apache.org/licenses/LICENSE-2.0
* <p/>
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.atlas.catalog.projection;
import com.tinkerpop.blueprints.Direction;
import com.tinkerpop.blueprints.Edge;
import com.tinkerpop.blueprints.Vertex;
import com.tinkerpop.pipes.Pipe;
import com.tinkerpop.pipes.PipeFunction;
import com.tinkerpop.pipes.filter.FilterFunctionPipe;
import org.apache.atlas.catalog.TermVertexWrapper;
import org.apache.atlas.catalog.VertexWrapper;
import org.apache.atlas.catalog.definition.EntityTagResourceDefinition;
import org.apache.atlas.catalog.definition.ResourceDefinition;
import org.apache.atlas.repository.Constants;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Collections;
/**
* Trait specific relation.
*/
//todo: combine with TagRelation
public class TraitRelation implements Relation {
//todo: for now using entity tag resource definition
private static final ResourceDefinition resourceDefinition = new EntityTagResourceDefinition();
@Override
public Collection<RelationSet> traverse(VertexWrapper vWrapper) {
Vertex v = vWrapper.getVertex();
Collection<VertexWrapper> vertices = new ArrayList<>();
for (Edge e : v.getEdges(Direction.OUT)) {
if (e.getLabel().startsWith(v.<String>getProperty(Constants.ENTITY_TYPE_PROPERTY_KEY))) {
VertexWrapper trait = new TermVertexWrapper(e.getVertex(Direction.IN));
if (! trait.getPropertyKeys().contains("available_as_tag")) {
vertices.add(trait);
}
}
}
return Collections.singletonList(new RelationSet("traits", vertices));
}
@Override
public Pipe asPipe() {
return new FilterFunctionPipe<>(new PipeFunction<Edge, Boolean>() {
@Override
public Boolean compute(Edge edge) {
String type = edge.getVertex(Direction.OUT).getProperty(Constants.ENTITY_TYPE_PROPERTY_KEY);
VertexWrapper v = new TermVertexWrapper(edge.getVertex(Direction.IN));
return edge.getLabel().startsWith(type) && ! v.getPropertyKeys().contains("available_as_tag");
}
});
}
@Override
public ResourceDefinition getResourceDefinition() {
return resourceDefinition;
}
}
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
* <p/>
* http://www.apache.org/licenses/LICENSE-2.0
* <p/>
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.atlas.catalog.query;
import com.tinkerpop.pipes.Pipe;
import org.apache.atlas.catalog.VertexWrapper;
/**
* Query expression which always returns true.
*/
public class AlwaysQueryExpression extends BaseQueryExpression {
protected AlwaysQueryExpression() {
super(null, null, null);
}
@Override
public Pipe asPipe() {
return null;
}
@Override
public boolean evaluate(VertexWrapper vWrapper) {
return ! negate;
}
@Override
public boolean evaluate(Object value) {
return true;
}
}
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
* <p/>
* http://www.apache.org/licenses/LICENSE-2.0
* <p/>
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.atlas.catalog.query;
import com.tinkerpop.gremlin.java.GremlinPipeline;
import org.apache.atlas.catalog.Request;
import org.apache.atlas.catalog.definition.ResourceDefinition;
import org.apache.atlas.repository.Constants;
/**
* Entity resource query.
*/
public class AtlasEntityQuery extends BaseQuery {
public AtlasEntityQuery(QueryExpression queryExpression, ResourceDefinition resourceDefinition, Request request) {
super(queryExpression, resourceDefinition, request);
}
@Override
protected GremlinPipeline getInitialPipeline() {
//todo: the property 'entityText' isn't currently indexed
//todo: we could use Constants.ENTITY_TYPE_PROPERTY_KEY initially but trait instances also contain this property
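// start from all vertices carrying the full-text property (i.e. entities) and exclude Taxonomy instances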
return new GremlinPipeline(getGraph()).V().has(Constants.ENTITY_TEXT_PROPERTY_KEY).
hasNot(Constants.ENTITY_TYPE_PROPERTY_KEY, "Taxonomy");
}
}
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
* <p/>
* http://www.apache.org/licenses/LICENSE-2.0
* <p/>
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.atlas.catalog.query;
import com.tinkerpop.blueprints.Direction;
import com.tinkerpop.blueprints.Edge;
import com.tinkerpop.gremlin.java.GremlinPipeline;
import com.tinkerpop.pipes.PipeFunction;
import com.tinkerpop.pipes.filter.FilterFunctionPipe;
import org.apache.atlas.catalog.Request;
import org.apache.atlas.catalog.TermVertexWrapper;
import org.apache.atlas.catalog.VertexWrapper;
import org.apache.atlas.catalog.definition.EntityTagResourceDefinition;
import org.apache.atlas.catalog.definition.ResourceDefinition;
import org.apache.atlas.repository.Constants;
import java.util.HashMap;
import java.util.Map;
/**
* Entity Tag resource query.
*/
public class AtlasEntityTagQuery extends BaseQuery {
private final String guid;
public AtlasEntityTagQuery(QueryExpression queryExpression, ResourceDefinition resourceDefinition, String guid, Request request) {
super(queryExpression, resourceDefinition, request);
this.guid = guid;
}
@Override
protected GremlinPipeline getInitialPipeline() {
GremlinPipeline p = new GremlinPipeline(getGraph()).V().has(Constants.GUID_PROPERTY_KEY, guid).outE();
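// start from the entity vertex identified by the guid and follow its outgoing trait edges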
//todo: this is basically the same pipeline used in TagRelation.asPipe()
p.add(new FilterFunctionPipe<>(new PipeFunction<Edge, Boolean>() {
@Override
public Boolean compute(Edge edge) {
String type = edge.getVertex(Direction.OUT).getProperty(Constants.ENTITY_TYPE_PROPERTY_KEY);
VertexWrapper v = new TermVertexWrapper(edge.getVertex(Direction.IN));
return edge.getLabel().startsWith(type) && v.getPropertyKeys().contains("available_as_tag");
}
}));
return p.inV();
}
//todo: duplication of effort with resource definition
@Override
protected void addHref(Map<String, Object> propertyMap) {
Map<String, Object> map = new HashMap<>(propertyMap);
map.put(EntityTagResourceDefinition.ENTITY_GUID_PROPERTY, guid);
String href = resourceDefinition.resolveHref(map);
if (href != null) {
propertyMap.put("href", href);
}
}
}
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
* <p/>
* http://www.apache.org/licenses/LICENSE-2.0
* <p/>
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.atlas.catalog.query;
import org.apache.atlas.catalog.exception.ResourceNotFoundException;
import java.util.Collection;
import java.util.Map;
/**
* Query functionality.
*/
public interface AtlasQuery {
/**
* Execute the query.
*
* @return collection of property maps, one per matching resource
* @throws ResourceNotFoundException if an explicitly specified resource doesn't exist
*/
Collection<Map<String, Object>> execute() throws ResourceNotFoundException;
}
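// A minimal usage sketch (the helper class, method name and query string "name:*" are
// illustrative assumptions): callers obtain a concrete AtlasQuery from QueryFactory and
// iterate the returned property maps.
class AtlasQueryUsageSketch {
    static Collection<Map<String, Object>> findTaxonomies() throws Exception {
        org.apache.atlas.catalog.Request request =
                new org.apache.atlas.catalog.CollectionRequest(null, "name:*");
        AtlasQuery query = new QueryFactory().createTaxonomyQuery(request);
        // each returned map contains the filtered properties (plus 'href') of one matching resource
        return query.execute();
    }
}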
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
* <p/>
* http://www.apache.org/licenses/LICENSE-2.0
* <p/>
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.atlas.catalog.query;
import com.tinkerpop.gremlin.java.GremlinPipeline;
import org.apache.atlas.catalog.Request;
import org.apache.atlas.catalog.definition.ResourceDefinition;
/**
* Taxonomy resource query.
*/
public class AtlasTaxonomyQuery extends BaseQuery {
public AtlasTaxonomyQuery(QueryExpression queryExpression, ResourceDefinition resourceDefinition, Request request) {
super(queryExpression, resourceDefinition, request);
}
@Override
protected GremlinPipeline getInitialPipeline() {
return new GremlinPipeline(getGraph()).V().has("__typeName", "Taxonomy");
}
}
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
* <p/>
* http://www.apache.org/licenses/LICENSE-2.0
* <p/>
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.atlas.catalog.query;
import com.thinkaurelius.titan.core.attribute.Text;
import com.tinkerpop.gremlin.java.GremlinPipeline;
import org.apache.atlas.catalog.Request;
import org.apache.atlas.catalog.TermPath;
import org.apache.atlas.catalog.definition.ResourceDefinition;
import org.apache.atlas.repository.Constants;
/**
* Term resource query.
*/
public class AtlasTermQuery extends BaseQuery {
private final TermPath termPath;
public AtlasTermQuery(QueryExpression queryExpression, ResourceDefinition resourceDefinition, TermPath termPath, Request request) {
super(queryExpression, resourceDefinition, request);
this.termPath = termPath;
}
@Override
protected GremlinPipeline getInitialPipeline() {
return new GremlinPipeline(getGraph()).V().has("Taxonomy.name", termPath.getTaxonomyName()).out().
has(Constants.ENTITY_TYPE_PROPERTY_KEY, Text.PREFIX, termPath.getFullyQualifiedName());
}
}
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
* <p/>
* http://www.apache.org/licenses/LICENSE-2.0
* <p/>
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.atlas.catalog.query;
import com.thinkaurelius.titan.core.TitanGraph;
import com.tinkerpop.blueprints.Vertex;
import com.tinkerpop.gremlin.java.GremlinPipeline;
import com.tinkerpop.pipes.Pipe;
import org.apache.atlas.catalog.Request;
import org.apache.atlas.catalog.VertexWrapper;
import org.apache.atlas.catalog.definition.ResourceDefinition;
import org.apache.atlas.catalog.exception.ResourceNotFoundException;
import org.apache.atlas.catalog.projection.Projection;
import org.apache.atlas.catalog.projection.ProjectionResult;
import org.apache.atlas.repository.graph.TitanGraphProvider;
import java.util.*;
/**
* Base Query implementation.
*/
public abstract class BaseQuery implements AtlasQuery {
protected final QueryExpression queryExpression;
protected final ResourceDefinition resourceDefinition;
protected final Request request;
public BaseQuery(QueryExpression queryExpression, ResourceDefinition resourceDefinition, Request request) {
this.queryExpression = queryExpression;
this.resourceDefinition = resourceDefinition;
this.request = request;
}
public Collection<Map<String, Object>> execute() throws ResourceNotFoundException {
Collection<Map<String, Object>> resultMaps = new ArrayList<>();
for (Vertex vertex : executeQuery()) {
resultMaps.add(processPropertyMap(new VertexWrapper(vertex, resourceDefinition)));
}
return resultMaps;
}
private List<Vertex> executeQuery() {
GremlinPipeline pipeline = getInitialPipeline().as("root");
Pipe adapterPipe = queryExpression.asPipe();
//todo: AlwaysQueryAdapter returns null for pipe
//todo: Is there a no-op pipe that I could add that wouldn't negatively affect performance?
return adapterPipe == null ?
pipeline.toList() :
pipeline.add(adapterPipe).back("root").toList();
}
protected abstract GremlinPipeline getInitialPipeline();
// todo: consider getting
protected Map<String, Object> processPropertyMap(VertexWrapper vertex) {
Map<String, Object> propertyMap = vertex.getPropertyMap();
resourceDefinition.filterProperties(request, propertyMap);
addHref(propertyMap);
return request.getCardinality() == Request.Cardinality.INSTANCE ?
applyProjections(vertex, propertyMap) :
propertyMap;
}
protected void addHref(Map<String, Object> propertyMap) {
String href = resourceDefinition.resolveHref(propertyMap);
if (href != null) {
propertyMap.put("href", href);
}
}
private Map<String, Object> applyProjections(VertexWrapper vertex, Map<String, Object> propertyMap) {
for (Projection p : resourceDefinition.getProjections().values()) {
for (ProjectionResult projectionResult : p.values(vertex)) {
if (p.getCardinality() == Projection.Cardinality.MULTIPLE) {
propertyMap.put(projectionResult.getName(), projectionResult.getPropertyMaps());
} else {
for (Map<String, Object> projectionMap : projectionResult.getPropertyMaps()) {
propertyMap.put(projectionResult.getName(), projectionMap);
}
}
}
}
return propertyMap;
}
protected QueryExpression getQueryExpression() {
return queryExpression;
}
protected ResourceDefinition getResourceDefinition() {
return resourceDefinition;
}
protected Request getRequest() {
return request;
}
//todo: abstract
// Underlying method is synchronized and caches the graph in a static field
protected TitanGraph getGraph() {
return TitanGraphProvider.getGraphInstance();
}
}
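// A minimal sketch of a concrete subclass (the type name "ExampleType" is an assumption):
// a subclass only seeds the pipeline with the vertices for its resource type; filtering,
// projections and href resolution are inherited from BaseQuery.
class ExampleTypeQuery extends BaseQuery {
    public ExampleTypeQuery(QueryExpression queryExpression, ResourceDefinition resourceDefinition, Request request) {
        super(queryExpression, resourceDefinition, request);
    }
    @Override
    protected GremlinPipeline getInitialPipeline() {
        // start from all vertices whose type property equals the resource type name
        return new GremlinPipeline(getGraph()).V()
                .has(org.apache.atlas.repository.Constants.ENTITY_TYPE_PROPERTY_KEY, "ExampleType");
    }
}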
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
* <p/>
* http://www.apache.org/licenses/LICENSE-2.0
* <p/>
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.atlas.catalog.query;
import com.tinkerpop.blueprints.Vertex;
import com.tinkerpop.pipes.Pipe;
import com.tinkerpop.pipes.PipeFunction;
import com.tinkerpop.pipes.filter.FilterFunctionPipe;
import org.apache.atlas.catalog.VertexWrapper;
import org.apache.atlas.catalog.definition.ResourceDefinition;
import java.util.Collection;
import java.util.HashSet;
/**
* Base query expression class.
*/
public abstract class BaseQueryExpression implements QueryExpression {
protected String m_field;
protected final String m_expectedValue;
protected final ResourceDefinition resourceDefinition;
protected boolean negate = false;
protected Collection<String> properties = new HashSet<>();
protected BaseQueryExpression(String field, String expectedValue, ResourceDefinition resourceDefinition) {
m_field = field;
if (field != null) {
properties.add(field);
}
m_expectedValue = expectedValue;
this.resourceDefinition = resourceDefinition;
}
@Override
public boolean evaluate(VertexWrapper vWrapper) {
return negate ^ evaluate(vWrapper.getProperty(m_field));
}
@Override
public Collection<String> getProperties() {
return properties;
}
@Override
public boolean evaluate(Object value) {
// subclasses which don't override evaluate(VertexWrapper) should implement this
return false;
}
//todo: use 'has' instead of closure where possible for performance
public Pipe asPipe() {
return new FilterFunctionPipe(new PipeFunction<Vertex, Boolean>() {
@Override
public Boolean compute(Vertex vertex) {
return evaluate(new VertexWrapper(vertex, resourceDefinition));
}
});
}
@Override
public String getField() {
return m_field;
}
@Override
public String getExpectedValue() {
return m_expectedValue;
}
@Override
public void setField(String field) {
m_field = field;
}
@Override
public void setNegate() {
this.negate = true;
}
@Override
public boolean isNegate() {
return negate;
}
@Override
public boolean isProjectionExpression() {
return getField() != null && getField().contains(QueryFactory.PATH_SEP_TOKEN);
}
}
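// A minimal sketch of a concrete expression (the field "name" and value "example" are
// assumptions): only evaluate(Object) needs to be supplied; vertex evaluation, negation
// and asPipe() are inherited from BaseQueryExpression.
class ExampleEqualsExpression extends BaseQueryExpression {
    protected ExampleEqualsExpression(ResourceDefinition resourceDefinition) {
        super("name", "example", resourceDefinition);
    }
    @Override
    public boolean evaluate(Object value) {
        // true only for an exact string match against the expected value
        return getExpectedValue().equals(String.valueOf(value));
    }
}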
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
* <p/>
* http://www.apache.org/licenses/LICENSE-2.0
* <p/>
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.atlas.catalog.query;
import com.tinkerpop.pipes.Pipe;
import com.tinkerpop.pipes.filter.AndFilterPipe;
import com.tinkerpop.pipes.filter.OrFilterPipe;
import org.apache.atlas.catalog.definition.ResourceDefinition;
import org.apache.lucene.search.BooleanClause;
import org.apache.lucene.search.BooleanQuery;
import java.util.*;
/**
* Expression where operands are other expressions and operator is logical AND or OR
*/
public class BooleanQueryExpression extends BaseQueryExpression {
private final BooleanClause[] clauses;
private final QueryFactory queryFactory;
public BooleanQueryExpression(BooleanQuery query, ResourceDefinition resourceDefinition, QueryFactory queryFactory) {
super(null, null, resourceDefinition);
clauses = query.getClauses();
this.queryFactory = queryFactory;
}
@Override
public Pipe asPipe() {
Map<BooleanClause.Occur, Collection<BooleanClause>> groupedClauses = groupClauses();
Pipe andPipe = null;
Collection<Pipe> andPipes = processAndClauses(groupedClauses);
andPipes.addAll(processNotClauses(groupedClauses));
if (! andPipes.isEmpty()) {
andPipe = new AndFilterPipe(andPipes.toArray(new Pipe[andPipes.size()]));
}
Collection<Pipe> orPipes = processOrClauses(groupedClauses);
if (! orPipes.isEmpty()) {
if (andPipe != null) {
orPipes.add(andPipe);
}
return new OrFilterPipe(orPipes.toArray(new Pipe[orPipes.size()]));
} else {
return andPipe;
}
}
private Map<BooleanClause.Occur, Collection<BooleanClause>> groupClauses() {
Map<BooleanClause.Occur, Collection<BooleanClause>> groupedClauses = new HashMap<>();
for (BooleanClause clause : clauses) {
BooleanClause.Occur occur = resolveClauseOccur(clause);
Collection<BooleanClause> clauseGrouping = groupedClauses.get(occur);
if (clauseGrouping == null) {
clauseGrouping = new ArrayList<>();
groupedClauses.put(occur, clauseGrouping);
}
clauseGrouping.add(clause);
}
return groupedClauses;
}
private BooleanClause.Occur resolveClauseOccur(BooleanClause clause) {
BooleanClause.Occur occur = clause.getOccur();
if (negate) {
switch (occur) {
case SHOULD:
occur = BooleanClause.Occur.MUST_NOT;
break;
case MUST:
occur = BooleanClause.Occur.SHOULD;
break;
case MUST_NOT:
occur = BooleanClause.Occur.SHOULD;
break;
}
}
return occur;
}
private Collection<Pipe> processAndClauses(Map<BooleanClause.Occur, Collection<BooleanClause>> groupedClauses) {
Collection<BooleanClause> andClauses = groupedClauses.get(BooleanClause.Occur.MUST);
Collection<Pipe> andPipes = new ArrayList<>();
if (andClauses != null) {
for (BooleanClause andClause : andClauses) {
QueryExpression queryExpression = queryFactory.create(andClause.getQuery(), resourceDefinition);
properties.addAll(queryExpression.getProperties());
andPipes.add(queryExpression.asPipe());
}
}
return andPipes;
}
private Collection<Pipe> processOrClauses(Map<BooleanClause.Occur, Collection<BooleanClause>> groupedClauses) {
Collection<BooleanClause> shouldClauses = groupedClauses.get(BooleanClause.Occur.SHOULD);
Collection<Pipe> orPipes = new ArrayList<>();
if (shouldClauses != null) {
for (BooleanClause shouldClause : shouldClauses) {
QueryExpression queryExpression = queryFactory.create(shouldClause.getQuery(), resourceDefinition);
// don't negate expression if we negated MUST_NOT -> SHOULD
if (negate && shouldClause.getOccur() != BooleanClause.Occur.MUST_NOT) {
queryExpression.setNegate();
}
properties.addAll(queryExpression.getProperties());
orPipes.add(queryExpression.asPipe());
}
}
return orPipes;
}
private Collection<Pipe> processNotClauses(Map<BooleanClause.Occur, Collection<BooleanClause>> groupedClauses) {
Collection<BooleanClause> notClauses = groupedClauses.get(BooleanClause.Occur.MUST_NOT);
Collection<Pipe> notPipes = new ArrayList<>();
if (notClauses != null) {
for (BooleanClause notClause : notClauses) {
QueryExpression queryExpression = queryFactory.create(notClause.getQuery(), resourceDefinition);
queryExpression.setNegate();
properties.addAll(queryExpression.getProperties());
notPipes.add(queryExpression.asPipe());
}
}
return notPipes;
}
}
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
* <p/>
* http://www.apache.org/licenses/LICENSE-2.0
* <p/>
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.atlas.catalog.query;
import org.apache.atlas.catalog.definition.ResourceDefinition;
import org.apache.lucene.search.PrefixQuery;
/**
* Expression that evaluates whether a property starts with a prefix.
*/
public class PrefixQueryExpression extends BaseQueryExpression {
// query 'f*' results in a PrefixQuery
public PrefixQueryExpression(PrefixQuery query, ResourceDefinition resourceDefinition) {
super(query.getPrefix().field(), query.getPrefix().text(), resourceDefinition);
}
@Override
public boolean evaluate(Object value) {
return value != null && String.valueOf(value).startsWith(getExpectedValue());
}
}
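// An illustrative check of the prefix semantics (field, prefix and values are assumptions;
// a null ResourceDefinition suffices because evaluate(Object) does not use it):
class PrefixQueryExpressionSketch {
    static void demo() {
        PrefixQueryExpression expression = new PrefixQueryExpression(
                new PrefixQuery(new org.apache.lucene.index.Term("name", "fo")), null);
        boolean matches = expression.evaluate("foo");   // true: "foo" starts with "fo"
        boolean noMatch = expression.evaluate("bar");   // false
    }
}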
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
* <p/>
* http://www.apache.org/licenses/LICENSE-2.0
* <p/>
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.atlas.catalog.query;
import com.thinkaurelius.titan.core.attribute.Text;
import com.tinkerpop.gremlin.java.GremlinPipeline;
import com.tinkerpop.pipes.Pipe;
import com.tinkerpop.pipes.PipeFunction;
import com.tinkerpop.pipes.filter.FilterFunctionPipe;
import org.apache.atlas.catalog.VertexWrapper;
import org.apache.atlas.catalog.definition.ResourceDefinition;
import org.apache.atlas.catalog.projection.ProjectionResult;
import org.apache.atlas.catalog.projection.Relation;
import java.util.*;
/**
* Query expression wrapper which handles projection queries.
*/
public class ProjectionQueryExpression extends BaseQueryExpression {
private final QueryExpression underlyingExpression;
private final ResourceDefinition resourceDefinition;
private final String[] fieldSegments;
protected ProjectionQueryExpression(QueryExpression underlyingExpression, ResourceDefinition resourceDefinition) {
super(underlyingExpression.getField(), underlyingExpression.getExpectedValue(), resourceDefinition);
this.underlyingExpression = underlyingExpression;
this.resourceDefinition = resourceDefinition;
this.fieldSegments = getField().split(QueryFactory.PATH_SEP_TOKEN);
}
@Override
public Pipe asPipe() {
//todo: encapsulate all of this path logic including path sep escaping and normalizing
final int sepIdx = getField().indexOf(QueryFactory.PATH_SEP_TOKEN);
final String edgeToken = getField().substring(0, sepIdx);
GremlinPipeline pipeline = new GremlinPipeline();
Relation relation = resourceDefinition.getRelations().get(fieldSegments[0]);
if (relation != null) {
pipeline = pipeline.outE();
pipeline.add(relation.asPipe()).inV();
} else {
if (resourceDefinition.getProjections().get(fieldSegments[0]) != null) {
return super.asPipe();
} else {
//todo: default Relation implementation
pipeline = pipeline.outE().has("label", Text.REGEX, String.format(".*\\.%s", edgeToken)).inV();
}
}
//todo: set resource definition from relation on underlying expression where appropriate
String childFieldName = getField().substring(sepIdx + QueryFactory.PATH_SEP_TOKEN.length());
underlyingExpression.setField(childFieldName);
Pipe childPipe;
if (childFieldName.contains(QueryFactory.PATH_SEP_TOKEN)) {
childPipe = new ProjectionQueryExpression(underlyingExpression, resourceDefinition).asPipe();
} else {
childPipe = underlyingExpression.asPipe();
}
pipeline.add(childPipe);
return negate ? new FilterFunctionPipe(new ExcludePipeFunction(pipeline)) : pipeline;
}
@Override
public boolean evaluate(VertexWrapper vWrapper) {
boolean result = false;
Iterator<ProjectionResult> projectionIterator = resourceDefinition.getProjections().
get(fieldSegments[0]).values(vWrapper).iterator();
while (! result && projectionIterator.hasNext()) {
ProjectionResult projectionResult = projectionIterator.next();
for (Map<String, Object> propertyMap : projectionResult.getPropertyMaps()) {
Object val = propertyMap.get(fieldSegments[1]);
if (val != null && underlyingExpression.evaluate(QueryFactory.escape(val))) {
result = true;
break;
}
}
}
return negate ^ result;
}
private static class ExcludePipeFunction implements PipeFunction<Object, Boolean> {
private final GremlinPipeline excludePipeline;
public ExcludePipeFunction(GremlinPipeline excludePipeline) {
this.excludePipeline = excludePipeline;
}
@Override
public Boolean compute(Object vertices) {
GremlinPipeline p = new GremlinPipeline(Collections.singleton(vertices));
p.add(excludePipeline);
return p.gather().toList().isEmpty();
}
}
protected QueryExpression getUnderlyingExpression() {
return underlyingExpression;
}
}
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
* <p/>
* http://www.apache.org/licenses/LICENSE-2.0
* <p/>
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.atlas.catalog.query;
import com.tinkerpop.pipes.Pipe;
import org.apache.atlas.catalog.VertexWrapper;
import java.util.Collection;
/**
* Represents a query expression.
*/
public interface QueryExpression {
/**
* Evaluate the expression based on properties of the provided vertex.
*
* @param vWrapper vertex wrapper that expression is applied to
* @return result of expression evaluation
*/
boolean evaluate(VertexWrapper vWrapper);
/**
* Evaluate the expression based on the provided value.
*
* @param value value used to evaluate expression
* @return true if the value satisfies the expression, false otherwise
*/
boolean evaluate(Object value);
/**
* Get the complete set of properties which are contained in the expression.
*
* @return collection of expression properties
*/
Collection<String> getProperties();
/**
* Get the pipe representation of the expression.
*
* @return pipe representation
*/
Pipe asPipe();
/**
* Negate the expression.
*/
void setNegate();
/**
* Get the negate status of the expression.
*
* @return true if the expression is negated, false otherwise
*/
boolean isNegate();
/**
* Determine whether the expression is being applied to a projection.
*
* @return true if expression is being applied to a projection, false otherwise
*/
boolean isProjectionExpression();
/**
* Get the field name used in the expression.
*
* @return expression field name or null if there is no field name
*/
String getField();
/**
* Set the expression's field name.
*
* @param fieldName field name
*/
void setField(String fieldName);
/**
* Get the expected value for the expression.
*
* @return expected value or null if there is no expected value
*/
String getExpectedValue();
}
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
* <p/>
* http://www.apache.org/licenses/LICENSE-2.0
* <p/>
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.atlas.catalog.query;
import org.apache.atlas.catalog.Request;
import org.apache.atlas.catalog.TermPath;
import org.apache.atlas.catalog.definition.*;
import org.apache.atlas.catalog.exception.CatalogRuntimeException;
import org.apache.atlas.catalog.exception.InvalidQueryException;
import org.apache.lucene.analysis.core.KeywordAnalyzer;
import org.apache.lucene.queryparser.classic.ParseException;
import org.apache.lucene.queryparser.classic.QueryParser;
import org.apache.lucene.sandbox.queries.regex.RegexQuery;
import org.apache.lucene.search.*;
import org.apache.lucene.util.Version;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.util.HashMap;
import java.util.Map;
/**
* Factory used to create AtlasQuery and QueryExpression instances.
*/
public class QueryFactory {
private static final Logger LOG = LoggerFactory.getLogger(QueryFactory.class);
public static final String PATH_SEP_TOKEN = "__slash__";
private final Map<Class<? extends Query>, ExpressionCreateFunction<? extends Query>>
expressionCreateFunctions = new HashMap<>();
public QueryFactory() {
registerExpressionCreateFunctions();
}
public AtlasQuery createTaxonomyQuery(Request request) throws InvalidQueryException {
ResourceDefinition taxonomyDefinition = new TaxonomyResourceDefinition();
QueryExpression queryExpression = create(request, taxonomyDefinition);
return new AtlasTaxonomyQuery(queryExpression, taxonomyDefinition, request);
}
public AtlasQuery createTermQuery(Request request) throws InvalidQueryException {
ResourceDefinition termDefinition = new TermResourceDefinition();
QueryExpression queryExpression = create(request, termDefinition);
TermPath termPath = request.getProperty("termPath");
return new AtlasTermQuery(queryExpression, termDefinition, termPath, request);
}
public AtlasQuery createEntityQuery(Request request) throws InvalidQueryException {
ResourceDefinition entityDefinition = new EntityResourceDefinition();
QueryExpression queryExpression = create(request, entityDefinition);
return new AtlasEntityQuery(queryExpression, entityDefinition, request);
}
public AtlasQuery createEntityTagQuery(Request request) throws InvalidQueryException {
ResourceDefinition entityTagDefinition = new EntityTagResourceDefinition();
QueryExpression queryExpression = create(request, entityTagDefinition);
String guid = request.getProperty("id");
return new AtlasEntityTagQuery(queryExpression, entityTagDefinition, guid, request);
}
private QueryExpression create(Request request, ResourceDefinition resourceDefinition) throws InvalidQueryException {
String queryString;
if (request.getCardinality() == Request.Cardinality.INSTANCE) {
String idPropertyName = resourceDefinition.getIdPropertyName();
queryString = String.format("%s:%s", idPropertyName, request.<String>getProperty(idPropertyName));
} else {
queryString = request.getQueryString();
}
QueryExpression queryExpression;
if (queryString != null && !queryString.isEmpty()) {
QueryParser queryParser = new QueryParser(Version.LUCENE_48, "name", new KeywordAnalyzer());
queryParser.setLowercaseExpandedTerms(false);
Query query;
try {
query = queryParser.parse((String) escape(queryString));
} catch (ParseException e) {
throw new InvalidQueryException(e.getMessage());
}
LOG.info("LuceneQuery: " + query);
queryExpression = create(query, resourceDefinition);
} else {
queryExpression = new AlwaysQueryExpression();
}
// add query properties to request so that they are returned
request.addAdditionalSelectProperties(queryExpression.getProperties());
return queryExpression;
}
@SuppressWarnings("unchecked")
protected <T extends Query> QueryExpression create(T query, ResourceDefinition resourceDefinition) {
if (! expressionCreateFunctions.containsKey(query.getClass())) {
throw new CatalogRuntimeException("Query type currently not supported: " + query.getClass(), 400);
}
//todo: fix generic typing
ExpressionCreateFunction expressionCreateFunction = expressionCreateFunctions.get(query.getClass());
return expressionCreateFunction.createExpression(query, resourceDefinition);
}
// "escapes" characters as necessary for lucene parser
//todo: currently '/' characters are blindly being replaced but this will not allow regex queries to be used
protected static Object escape(Object val) {
if (val instanceof String) {
return ((String)val).replaceAll("/", PATH_SEP_TOKEN);
} else {
return val;
}
}
private abstract static class ExpressionCreateFunction<T extends Query> {
QueryExpression createExpression(T query, ResourceDefinition resourceDefinition) {
QueryExpression expression = create(query, resourceDefinition);
return expression.isProjectionExpression() ?
new ProjectionQueryExpression(expression, resourceDefinition) :
expression;
}
protected abstract QueryExpression create(T query, ResourceDefinition resourceDefinition);
}
private void registerExpressionCreateFunctions() {
expressionCreateFunctions.put(WildcardQuery.class, new ExpressionCreateFunction<WildcardQuery>() {
@Override
public QueryExpression create(WildcardQuery query, ResourceDefinition definition) {
return new WildcardQueryExpression(query, definition);
}
});
expressionCreateFunctions.put(PrefixQuery.class, new ExpressionCreateFunction<PrefixQuery>() {
@Override
public QueryExpression create(PrefixQuery query, ResourceDefinition definition) {
return new PrefixQueryExpression(query, definition);
}
});
expressionCreateFunctions.put(TermQuery.class, new ExpressionCreateFunction<TermQuery>() {
@Override
public QueryExpression create(TermQuery query, ResourceDefinition definition) {
return new TermQueryExpression(query, definition);
}
});
expressionCreateFunctions.put(TermRangeQuery.class, new ExpressionCreateFunction<TermRangeQuery>() {
@Override
public QueryExpression create(TermRangeQuery query, ResourceDefinition definition) {
return new TermRangeQueryExpression(query, definition);
}
});
expressionCreateFunctions.put(RegexQuery.class, new ExpressionCreateFunction<RegexQuery>() {
@Override
public QueryExpression create(RegexQuery query, ResourceDefinition definition) {
return new RegexQueryExpression(query, definition);
}
});
expressionCreateFunctions.put(BooleanQuery.class, new ExpressionCreateFunction<BooleanQuery>() {
@Override
public QueryExpression create(BooleanQuery query, ResourceDefinition definition) {
return new BooleanQueryExpression(query, definition, QueryFactory.this);
}
});
}
}
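// A minimal end-to-end sketch (the query string is an assumption): a collection request
// carries a Lucene-style query string which the factory parses into a QueryExpression
// tree before wrapping it in the concrete AtlasQuery for the resource type.
class QueryFactoryUsageSketch {
    static AtlasQuery buildEntityQuery() throws InvalidQueryException {
        Request collectionRequest =
                new org.apache.atlas.catalog.CollectionRequest(null, "name:entity* AND description:test*");
        return new QueryFactory().createEntityQuery(collectionRequest);
    }
}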
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
* <p/>
* http://www.apache.org/licenses/LICENSE-2.0
* <p/>
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.atlas.catalog.query;
import org.apache.atlas.catalog.definition.ResourceDefinition;
import org.apache.lucene.sandbox.queries.regex.RegexQuery;
import java.util.regex.Pattern;
/**
* Query expression which evaluates a property against a regular expression.
*/
public class RegexQueryExpression extends BaseQueryExpression {
public RegexQueryExpression(RegexQuery query, ResourceDefinition resourceDefinition) {
super(query.getField(), query.getTerm().text(), resourceDefinition);
}
@Override
public boolean evaluate(Object value) {
Pattern p = Pattern.compile(getExpectedValue());
return value != null && p.matcher(String.valueOf(value)).matches();
}
}
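// An illustrative regex check (field and pattern are assumptions): the term text is compiled
// directly as a java.util.regex pattern and must match the entire value.
class RegexQueryExpressionSketch {
    static void demo() {
        RegexQueryExpression expression = new RegexQueryExpression(
                new RegexQuery(new org.apache.lucene.index.Term("name", "entity[0-9]+")), null);
        boolean matches = expression.evaluate("entity42");  // true: whole value matches the pattern
        boolean noMatch = expression.evaluate("entity");    // false: no trailing digits
    }
}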
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
* <p/>
* http://www.apache.org/licenses/LICENSE-2.0
* <p/>
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.atlas.catalog.query;
import org.apache.atlas.catalog.definition.ResourceDefinition;
import org.apache.lucene.search.TermQuery;
import java.util.Collection;
/**
* Query expression which evaluates whether a property equals a value.
*/
public class TermQueryExpression extends BaseQueryExpression {
public TermQueryExpression(TermQuery query, ResourceDefinition resourceDefinition) {
super(query.getTerm().field(), query.getTerm().text(), resourceDefinition);
}
@Override
public boolean evaluate(Object value) {
String expectedValue = getExpectedValue();
if (value == null) {
return expectedValue.equals("null");
//todo: refactor; we shouldn't need to use instanceof/cast here
} else if (value instanceof Collection) {
return ((Collection)value).contains(expectedValue);
} else {
return expectedValue.equals(QueryFactory.escape(String.valueOf(value)));
}
}
public String getExpectedValue() {
return m_expectedValue;
}
}
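// An illustrative check of the equality semantics (field and values are assumptions):
// multi-valued properties match when the collection contains the expected value, and the
// literal string "null" is required to match a missing property.
class TermQueryExpressionSketch {
    static void demo() {
        TermQueryExpression expression = new TermQueryExpression(
                new TermQuery(new org.apache.lucene.index.Term("name", "foo")), null);
        boolean scalarMatch = expression.evaluate("foo");                                      // true
        boolean collectionMatch = expression.evaluate(java.util.Arrays.asList("foo", "bar"));  // true
        boolean missingValue = expression.evaluate(null);                                      // false: "foo" != "null"
    }
}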
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
* <p/>
* http://www.apache.org/licenses/LICENSE-2.0
* <p/>
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.atlas.catalog.query;
import org.apache.atlas.catalog.definition.ResourceDefinition;
import org.apache.lucene.search.TermRangeQuery;
import org.apache.lucene.util.BytesRef;
/**
* Query expression which evaluates whether a property value is within a range.
*/
//todo: for month and year which are expressed via a single digit, must ensure that
//todo: a leading '0' is provided. For example, "2016-1-5" must be converted to "2016-01-05".
//todo: Month and day values aren't currently validated.
public class TermRangeQueryExpression extends BaseQueryExpression {
private final BytesRef m_lowerTerm;
private final BytesRef m_upperTerm;
private final boolean m_lowerInclusive;
private final boolean m_upperInclusive;
public TermRangeQueryExpression(TermRangeQuery query, ResourceDefinition resourceDefinition) {
super(query.getField(), null, resourceDefinition);
m_lowerTerm = query.getLowerTerm();
m_upperTerm = query.getUpperTerm();
m_lowerInclusive = query.includesLower();
m_upperInclusive = query.includesUpper();
}
@Override
public boolean evaluate(Object value) {
BytesRef valueBytes = new BytesRef(String.valueOf(value));
return compareLowerBound(valueBytes) && compareUpperBound(valueBytes);
}
// an inclusive bound admits values equal to the bound; an exclusive bound excludes them
private boolean compareLowerBound(BytesRef valueBytes) {
return m_lowerTerm == null || (m_lowerInclusive ? valueBytes.compareTo(m_lowerTerm) >= 0 :
valueBytes.compareTo(m_lowerTerm) > 0);
}
private boolean compareUpperBound(BytesRef valueBytes) {
return m_upperTerm == null || (m_upperInclusive ? valueBytes.compareTo(m_upperTerm) <= 0 :
valueBytes.compareTo(m_upperTerm) < 0);
}
}
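// An illustrative range check (field and bounds are assumptions): values are compared as
// UTF-8 byte sequences, which is why the todo above requires zero-padded month and day
// values such as "2016-01-05".
class TermRangeQueryExpressionSketch {
    static void demo() {
        TermRangeQueryExpression expression = new TermRangeQueryExpression(
                TermRangeQuery.newStringRange("creation_time", "2016-01-01", "2016-12-31", true, true), null);
        boolean inRange = expression.evaluate("2016-06-15");   // true: between the inclusive bounds
        boolean tooEarly = expression.evaluate("2015-12-31");  // false: below the lower bound
    }
}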
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
* <p/>
* http://www.apache.org/licenses/LICENSE-2.0
* <p/>
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.atlas.catalog.query;
import org.apache.atlas.catalog.definition.ResourceDefinition;
import org.apache.lucene.search.WildcardQuery;
import java.util.regex.Pattern;
/**
* Query expression which evaluates values with wildcards.
* This differs from PrefixQueryExpression, which handles expressions that end with a wildcard.
*/
public class WildcardQueryExpression extends BaseQueryExpression {
public WildcardQueryExpression(WildcardQuery query, ResourceDefinition resourceDefinition) {
super(query.getTerm().field(), query.getTerm().text(), resourceDefinition);
}
@Override
public boolean evaluate(Object value) {
// replace '*' with ".*"
// replace '?' with '.'
String regex = getExpectedValue().replaceAll("\\*", ".*").replaceAll("\\?", ".");
return Pattern.compile(regex).matcher(String.valueOf(value)).matches();
}
}
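// An illustrative check of the wildcard translation (field and pattern are assumptions):
// '*' becomes ".*" and '?' becomes '.' before the regex match is applied.
class WildcardQueryExpressionSketch {
    static void demo() {
        WildcardQueryExpression expression = new WildcardQueryExpression(
                new WildcardQuery(new org.apache.lucene.index.Term("name", "ent?ty*")), null);
        boolean matches = expression.evaluate("entity1");     // true: matches regex "ent.ty.*"
        boolean noMatch = expression.evaluate("department");  // false
    }
}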
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
* <p/>
* http://www.apache.org/licenses/LICENSE-2.0
* <p/>
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.atlas.catalog;
import org.testng.annotations.Test;
import java.util.*;
import static org.testng.Assert.assertEquals;
import static org.testng.Assert.assertNull;
import static org.testng.Assert.assertTrue;
/**
* Unit tests for CollectionRequest.
*/
public class CollectionRequestTest {
@Test
public void testNoProperties() {
String query = "name:foo*";
Request request = new CollectionRequest(null, query);
assertEquals(Request.Cardinality.COLLECTION, request.getCardinality());
assertTrue(request.getProperties().isEmpty());
assertNull(request.getProperty("foo"));
assertTrue(request.getAdditionalSelectProperties().isEmpty());
}
@Test
public void testWithProperties() {
String query = "name:foo*";
Map<String, Object> properties = new HashMap<>();
properties.put("foo", "fooValue");
properties.put("someBoolean", true);
Request request = new CollectionRequest(properties, query);
assertEquals(Request.Cardinality.COLLECTION, request.getCardinality());
assertEquals(properties, request.getProperties());
assertEquals("fooValue", request.getProperty("foo"));
assertTrue(request.<Boolean>getProperty("someBoolean"));
assertNull(request.getProperty("other"));
assertTrue(request.getAdditionalSelectProperties().isEmpty());
}
@Test
public void testSelectProperties() {
String query = "name:foo*";
Request request = new CollectionRequest(null, query);
Collection<String> additionalSelectProps = new ArrayList<>();
additionalSelectProps.add("foo");
additionalSelectProps.add("bar");
request.addAdditionalSelectProperties(additionalSelectProps);
Collection<String> requestAdditionalSelectProps = request.getAdditionalSelectProperties();
assertEquals(2, requestAdditionalSelectProps.size());
assertTrue(requestAdditionalSelectProps.contains("foo"));
assertTrue(requestAdditionalSelectProps.contains("bar"));
}
}
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
* <p/>
* http://www.apache.org/licenses/LICENSE-2.0
* <p/>
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.atlas.catalog;
import org.testng.annotations.Test;
import java.util.Calendar;
import java.util.GregorianCalendar;
import static org.testng.Assert.assertEquals;
/**
* Unit tests for DefaultDateFormatter.
*/
public class DefaultDateFormatterTest {
@Test
public void test() {
Calendar calendar = new GregorianCalendar(2016, 0, 20, 5, 10, 15);
long millis = calendar.getTimeInMillis();
DefaultDateFormatter dateFormatter = new DefaultDateFormatter();
// GregorianCalendar months are 0-based, so month 0 formats as "01" (January)
assertEquals("2016-01-20:05:10:15", dateFormatter.format(millis));
}
}
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
* <p/>
* http://www.apache.org/licenses/LICENSE-2.0
* <p/>
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.atlas.catalog;
import org.apache.atlas.typesystem.types.AttributeInfo;
import org.apache.atlas.typesystem.types.FieldMapping;
import org.apache.atlas.typesystem.types.HierarchicalType;
import org.testng.annotations.Test;
import java.util.HashMap;
import java.util.Map;
import static org.easymock.EasyMock.*;
import static org.testng.Assert.assertEquals;
/**
* Unit tests for DefaultPropertyMapper.
*/
public class DefaultPropertyMapperTest {
@Test
public void testToCleanName_defaultMappings() {
String typeName = "testType";
HierarchicalType dataType = createNiceMock(HierarchicalType.class);
// currently only use key in map
Map<String, AttributeInfo> fields = new HashMap<>();
fields.put("foo", null);
fields.put("prop", null);
// can't mock FieldMapping due to direct access to final instance var 'fields'
FieldMapping fieldMapping = new FieldMapping(fields, null, null, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0);
// mock expectations
expect(dataType.fieldMapping()).andReturn(fieldMapping).anyTimes();
replay(dataType);
PropertyMapper propertyMapper = new TestDefaultPropertyMapper(dataType);
assertEquals(propertyMapper.toCleanName("Prefix.prop", typeName), "prop");
assertEquals(propertyMapper.toCleanName("foo", typeName), "foo");
assertEquals(propertyMapper.toCleanName("other", typeName), "other");
assertEquals(propertyMapper.toCleanName("Prefix.other", typeName), "Prefix.other");
verify(dataType);
}
@Test
public void testToQualifiedName_defaultMappings() throws Exception {
String typeName = "testType";
HierarchicalType dataType = createNiceMock(HierarchicalType.class);
// currently only use key in map
Map<String, AttributeInfo> fields = new HashMap<>();
fields.put("foo", null);
fields.put("prop", null);
// can't mock FieldMapping due to direct access to final instance var 'fields'
FieldMapping fieldMapping = new FieldMapping(fields, null, null, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0);
// mock expectations
expect(dataType.fieldMapping()).andReturn(fieldMapping).anyTimes();
expect(dataType.getQualifiedName("foo")).andReturn("foo");
expect(dataType.getQualifiedName("prop")).andReturn("Prefix.prop");
replay(dataType);
PropertyMapper propertyMapper = new TestDefaultPropertyMapper(dataType);
assertEquals(propertyMapper.toFullyQualifiedName("foo", typeName), "foo");
assertEquals(propertyMapper.toFullyQualifiedName("prop", typeName), "Prefix.prop");
assertEquals(propertyMapper.toFullyQualifiedName("other", typeName), "other");
assertEquals(propertyMapper.toFullyQualifiedName("Prefix.other", typeName), "Prefix.other");
verify(dataType);
}
@Test
public void testToCleanName_specifiedMappings() {
String typeName = "testType";
HierarchicalType dataType = createNiceMock(HierarchicalType.class);
// currently only use key in map
Map<String, AttributeInfo> fields = new HashMap<>();
fields.put("foo", null);
fields.put("prop", null);
// can't mock FieldMapping due to direct access to final instance var 'fields'
FieldMapping fieldMapping = new FieldMapping(fields, null, null, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0);
// mock expectations
expect(dataType.fieldMapping()).andReturn(fieldMapping).anyTimes();
replay(dataType);
Map<String, String> cleanToQualifiedMap = new HashMap<>();
cleanToQualifiedMap.put("prop1", "property_1");
Map<String, String> qualifiedToCleanMap = new HashMap<>();
qualifiedToCleanMap.put("property_1", "prop1");
PropertyMapper propertyMapper = new TestDefaultPropertyMapper(
typeName, qualifiedToCleanMap, cleanToQualifiedMap, dataType);
assertEquals(propertyMapper.toCleanName("property_1", typeName), "prop1");
assertEquals(propertyMapper.toCleanName("Prefix.prop", typeName), "prop");
assertEquals(propertyMapper.toCleanName("foo", typeName), "foo");
assertEquals(propertyMapper.toCleanName("other", typeName), "other");
assertEquals(propertyMapper.toCleanName("Prefix.other", typeName), "Prefix.other");
verify(dataType);
}
@Test
public void testToQualifiedName_specifiedMappings() throws Exception {
String typeName = "testType";
HierarchicalType dataType = createNiceMock(HierarchicalType.class);
// currently only use key in map
Map<String, AttributeInfo> fields = new HashMap<>();
fields.put("foo", null);
fields.put("prop", null);
// can't mock FieldMapping due to direct access to final instance var 'fields'
FieldMapping fieldMapping = new FieldMapping(fields, null, null, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0);
// mock expectations
expect(dataType.fieldMapping()).andReturn(fieldMapping).anyTimes();
expect(dataType.getQualifiedName("foo")).andReturn("foo");
expect(dataType.getQualifiedName("prop")).andReturn("Prefix.prop");
replay(dataType);
Map<String, String> cleanToQualifiedMap = new HashMap<>();
cleanToQualifiedMap.put("prop1", "property_1");
Map<String, String> qualifiedToCleanMap = new HashMap<>();
qualifiedToCleanMap.put("property_1", "prop1");
PropertyMapper propertyMapper = new TestDefaultPropertyMapper(
typeName, qualifiedToCleanMap, cleanToQualifiedMap, dataType);
assertEquals(propertyMapper.toFullyQualifiedName("prop1", typeName), "property_1");
assertEquals(propertyMapper.toFullyQualifiedName("foo", typeName), "foo");
assertEquals(propertyMapper.toFullyQualifiedName("prop", typeName), "Prefix.prop");
assertEquals(propertyMapper.toFullyQualifiedName("other", typeName), "other");
assertEquals(propertyMapper.toFullyQualifiedName("Prefix.other", typeName), "Prefix.other");
verify(dataType);
}
private static class TestDefaultPropertyMapper extends DefaultPropertyMapper {
private HierarchicalType dataType;
public TestDefaultPropertyMapper(HierarchicalType dataType) {
super();
this.dataType = dataType;
}
public TestDefaultPropertyMapper(String type,
Map<String, String> qualifiedToCleanMap,
Map<String, String> cleanToQualifiedMap,
HierarchicalType dataType) {
super(qualifiedToCleanMap, cleanToQualifiedMap);
this.dataType = dataType;
}
@Override
protected HierarchicalType createDataType(String type) {
return dataType;
}
}
}
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
* <p/>
* http://www.apache.org/licenses/LICENSE-2.0
* <p/>
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.atlas.catalog;
import org.apache.atlas.catalog.exception.ResourceNotFoundException;
import org.apache.atlas.catalog.query.AtlasQuery;
import org.apache.atlas.catalog.query.QueryFactory;
import org.easymock.Capture;
import org.testng.annotations.Test;
import java.util.*;
import static org.easymock.EasyMock.*;
import static org.testng.Assert.assertEquals;
import static org.testng.Assert.assertNull;
import static org.testng.Assert.assertTrue;
/**
* Unit Tests for EntityResourceProvider.
*/
public class EntityResourceProviderTest {
@Test
public void testGetResource() throws Exception {
AtlasTypeSystem typeSystem = createStrictMock(AtlasTypeSystem.class);
QueryFactory queryFactory = createStrictMock(QueryFactory.class);
AtlasQuery query = createStrictMock(AtlasQuery.class);
Capture<Request> requestCapture = newCapture();
Collection<Map<String, Object>> queryResult = new ArrayList<>();
Map<String, Object> queryResultRow = new HashMap<>();
queryResult.add(queryResultRow);
queryResultRow.put("id", "1");
queryResultRow.put("creation_time", "04/20/2016");
// mock expectations
expect(queryFactory.createEntityQuery(capture(requestCapture))).andReturn(query);
expect(query.execute()).andReturn(queryResult);
replay(typeSystem, queryFactory, query);
EntityResourceProvider provider = new EntityResourceProvider(typeSystem);
provider.setQueryFactory(queryFactory);
Map<String, Object> requestProperties = new HashMap<>();
requestProperties.put("id", "1");
Request userRequest = new InstanceRequest(requestProperties);
Result result = provider.getResourceById(userRequest);
assertEquals(1, result.getPropertyMaps().size());
assertEquals(queryResultRow, result.getPropertyMaps().iterator().next());
Request request = requestCapture.getValue();
assertNull(request.getQueryString());
assertEquals(0, request.getAdditionalSelectProperties().size());
assertEquals(requestProperties, request.getProperties());
verify(typeSystem, queryFactory, query);
}
@Test(expectedExceptions = ResourceNotFoundException.class)
public void testGetResource_404() throws Exception {
AtlasTypeSystem typeSystem = createStrictMock(AtlasTypeSystem.class);
QueryFactory queryFactory = createStrictMock(QueryFactory.class);
AtlasQuery query = createStrictMock(AtlasQuery.class);
Capture<Request> requestCapture = newCapture();
// empty response should result in a ResourceNotFoundException
Collection<Map<String, Object>> emptyResponse = new ArrayList<>();
// mock expectations
expect(queryFactory.createEntityQuery(capture(requestCapture))).andReturn(query);
expect(query.execute()).andReturn(emptyResponse);
replay(typeSystem, queryFactory, query);
EntityResourceProvider provider = new EntityResourceProvider(typeSystem);
provider.setQueryFactory(queryFactory);
Map<String, Object> requestProperties = new HashMap<>();
requestProperties.put("id", "1");
Request request = new InstanceRequest(requestProperties);
provider.getResourceById(request);
verify(typeSystem, queryFactory, query);
}
@Test
public void testGetResources() throws Exception {
AtlasTypeSystem typeSystem = createStrictMock(AtlasTypeSystem.class);
QueryFactory queryFactory = createStrictMock(QueryFactory.class);
AtlasQuery query = createStrictMock(AtlasQuery.class);
Capture<Request> requestCapture = newCapture();
Collection<Map<String, Object>> queryResult = new ArrayList<>();
Map<String, Object> queryResultRow1 = new HashMap<>();
queryResult.add(queryResultRow1);
queryResultRow1.put("mame", "entity1");
queryResultRow1.put("description", "test entity description");
queryResultRow1.put("creation_time", "04/20/2016");
Map<String, Object> queryResultRow2 = new HashMap<>();
queryResult.add(queryResultRow2);
queryResultRow2.put("mame", "entity2");
queryResultRow2.put("description", "test entity description 2");
queryResultRow2.put("creation_time", "04/21/2016");
// mock expectations
expect(queryFactory.createEntityQuery(capture(requestCapture))).andReturn(query);
expect(query.execute()).andReturn(queryResult);
replay(typeSystem, queryFactory, query);
EntityResourceProvider provider = new EntityResourceProvider(typeSystem);
provider.setQueryFactory(queryFactory);
Request userRequest = new CollectionRequest(Collections.<String, Object>emptyMap(), "name:entity*");
Result result = provider.getResources(userRequest);
assertEquals(2, result.getPropertyMaps().size());
assertTrue(result.getPropertyMaps().contains(queryResultRow1));
assertTrue(result.getPropertyMaps().contains(queryResultRow2));
Request request = requestCapture.getValue();
assertEquals("name:entity*", request.getQueryString());
assertEquals(0, request.getAdditionalSelectProperties().size());
assertEquals(0, request.getProperties().size());
verify(typeSystem, queryFactory, query);
}
@Test
public void testGetResources_noResults() throws Exception {
AtlasTypeSystem typeSystem = createStrictMock(AtlasTypeSystem.class);
QueryFactory queryFactory = createStrictMock(QueryFactory.class);
AtlasQuery query = createStrictMock(AtlasQuery.class);
Capture<Request> requestCapture = newCapture();
// empty result shouldn't result in exception for collection query
Collection<Map<String, Object>> queryResult = new ArrayList<>();
// mock expectations
expect(queryFactory.createEntityQuery(capture(requestCapture))).andReturn(query);
expect(query.execute()).andReturn(queryResult);
replay(typeSystem, queryFactory, query);
EntityResourceProvider provider = new EntityResourceProvider(typeSystem);
provider.setQueryFactory(queryFactory);
Request userRequest = new CollectionRequest(Collections.<String, Object>emptyMap(), "name:entity*");
Result result = provider.getResources(userRequest);
assertEquals(0, result.getPropertyMaps().size());
Request request = requestCapture.getValue();
assertEquals("name:entity*", request.getQueryString());
assertEquals(0, request.getAdditionalSelectProperties().size());
assertEquals(0, request.getProperties().size());
verify(typeSystem, queryFactory, query);
}
@Test(expectedExceptions = UnsupportedOperationException.class)
public void testCreateResource() throws Exception {
AtlasTypeSystem typeSystem = createStrictMock(AtlasTypeSystem.class);
QueryFactory queryFactory = createStrictMock(QueryFactory.class);
AtlasQuery query = createStrictMock(AtlasQuery.class);
// mock expectations
replay(typeSystem, queryFactory, query);
Map<String, Object> requestProperties = new HashMap<>();
requestProperties.put("id", "1");
Request userRequest = new InstanceRequest(requestProperties);
EntityResourceProvider provider = new EntityResourceProvider(typeSystem);
provider.setQueryFactory(queryFactory);
provider.createResource(userRequest);
}
@Test(expectedExceptions = UnsupportedOperationException.class)
public void testCreateResources() throws Exception {
AtlasTypeSystem typeSystem = createStrictMock(AtlasTypeSystem.class);
QueryFactory queryFactory = createStrictMock(QueryFactory.class);
AtlasQuery query = createStrictMock(AtlasQuery.class);
// mock expectations
replay(typeSystem, queryFactory, query);
Map<String, Object> requestProperties = new HashMap<>();
requestProperties.put("id", "1");
Request userRequest = new InstanceRequest(requestProperties);
EntityResourceProvider provider = new EntityResourceProvider(typeSystem);
provider.setQueryFactory(queryFactory);
provider.createResources(userRequest);
}
}
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
* <p/>
* http://www.apache.org/licenses/LICENSE-2.0
* <p/>
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.atlas.catalog;
import org.apache.atlas.catalog.exception.CatalogException;
import org.apache.atlas.catalog.exception.InvalidPayloadException;
import org.apache.atlas.catalog.exception.ResourceAlreadyExistsException;
import org.apache.atlas.catalog.exception.ResourceNotFoundException;
import org.apache.atlas.catalog.query.AtlasQuery;
import org.apache.atlas.catalog.query.QueryFactory;
import org.easymock.Capture;
import org.testng.annotations.Test;
import java.util.*;
import static org.easymock.EasyMock.*;
import static org.testng.Assert.assertEquals;
import static org.testng.Assert.assertNull;
import static org.testng.Assert.assertTrue;
/**
* Unit tests for EntityTagResourceProvider.
*/
public class EntityTagResourceProviderTest {
@Test
public void testGetResource() throws Exception {
AtlasTypeSystem typeSystem = createStrictMock(AtlasTypeSystem.class);
QueryFactory queryFactory = createStrictMock(QueryFactory.class);
AtlasQuery query = createStrictMock(AtlasQuery.class);
Capture<Request> requestCapture = newCapture();
Collection<Map<String, Object>> queryResult = new ArrayList<>();
Map<String, Object> queryResultRow = new HashMap<>();
queryResult.add(queryResultRow);
queryResultRow.put("name", "taxonomyName.termName");
queryResultRow.put("description", "test term description");
// mock expectations
expect(queryFactory.createEntityTagQuery(capture(requestCapture))).andReturn(query);
expect(query.execute()).andReturn(queryResult);
replay(typeSystem, queryFactory, query);
EntityTagResourceProvider provider = new EntityTagResourceProvider(typeSystem);
provider.setQueryFactory(queryFactory);
Map<String, Object> requestProperties = new HashMap<>();
requestProperties.put("name", "taxonomyName.termName");
requestProperties.put("id", "1");
Request userRequest = new InstanceRequest(requestProperties);
Result result = provider.getResourceById(userRequest);
assertEquals(1, result.getPropertyMaps().size());
assertEquals(queryResultRow, result.getPropertyMaps().iterator().next());
Request request = requestCapture.getValue();
assertNull(request.getQueryString());
assertEquals(0, request.getAdditionalSelectProperties().size());
assertEquals(2, request.getProperties().size());
assertEquals("taxonomyName.termName", request.getProperties().get("name"));
assertEquals(Request.Cardinality.INSTANCE, request.getCardinality());
verify(typeSystem, queryFactory, query);
}
@Test(expectedExceptions = ResourceNotFoundException.class)
public void testGetResource_404() throws Exception {
AtlasTypeSystem typeSystem = createStrictMock(AtlasTypeSystem.class);
QueryFactory queryFactory = createStrictMock(QueryFactory.class);
AtlasQuery query = createStrictMock(AtlasQuery.class);
Capture<Request> requestCapture = newCapture();
// empty response should result in a ResourceNotFoundException
Collection<Map<String, Object>> emptyResponse = new ArrayList<>();
// mock expectations
expect(queryFactory.createEntityTagQuery(capture(requestCapture))).andReturn(query);
expect(query.execute()).andReturn(emptyResponse);
replay(typeSystem, queryFactory, query);
EntityTagResourceProvider provider = new EntityTagResourceProvider(typeSystem);
provider.setQueryFactory(queryFactory);
Map<String, Object> requestProperties = new HashMap<>();
requestProperties.put("name", "taxonomyName.termName");
requestProperties.put("id", "1");
Request request = new InstanceRequest(requestProperties);
provider.getResourceById(request);
}
@Test
public void testGetResources() throws Exception {
AtlasTypeSystem typeSystem = createStrictMock(AtlasTypeSystem.class);
QueryFactory queryFactory = createStrictMock(QueryFactory.class);
AtlasQuery query = createStrictMock(AtlasQuery.class);
Capture<Request> requestCapture = newCapture();
Collection<Map<String, Object>> queryResult = new ArrayList<>();
Map<String, Object> queryResultRow1 = new HashMap<>();
queryResult.add(queryResultRow1);
queryResultRow1.put("name", "testTaxonomy.termName");
queryResultRow1.put("description", "test term description");
Map<String, Object> queryResultRow2 = new HashMap<>();
queryResult.add(queryResultRow2);
queryResultRow2.put("name", "testTaxonomy.termName2");
queryResultRow2.put("description", "test term 2 description");
// mock expectations
expect(queryFactory.createEntityTagQuery(capture(requestCapture))).andReturn(query);
expect(query.execute()).andReturn(queryResult);
replay(typeSystem, queryFactory, query);
EntityTagResourceProvider provider = new EntityTagResourceProvider(typeSystem);
provider.setQueryFactory(queryFactory);
Map<String, Object> requestProperties = new HashMap<>();
requestProperties.put("id", "1");
Request userRequest = new CollectionRequest(requestProperties, "name:testTaxonomy.*");
// invoke test method
Result result = provider.getResources(userRequest);
assertEquals(2, result.getPropertyMaps().size());
assertTrue(result.getPropertyMaps().contains(queryResultRow1));
assertTrue(result.getPropertyMaps().contains(queryResultRow2));
Request request = requestCapture.getValue();
assertEquals("name:testTaxonomy.*", request.getQueryString());
assertEquals(0, request.getAdditionalSelectProperties().size());
verify(typeSystem, queryFactory, query);
}
@Test
public void testGetResources_noResults() throws Exception {
AtlasTypeSystem typeSystem = createStrictMock(AtlasTypeSystem.class);
QueryFactory queryFactory = createStrictMock(QueryFactory.class);
AtlasQuery query = createStrictMock(AtlasQuery.class);
Capture<Request> requestCapture = newCapture();
Collection<Map<String, Object>> queryResult = new ArrayList<>();
// mock expectations
expect(queryFactory.createEntityTagQuery(capture(requestCapture))).andReturn(query);
expect(query.execute()).andReturn(queryResult);
replay(typeSystem, queryFactory, query);
EntityTagResourceProvider provider = new EntityTagResourceProvider(typeSystem);
provider.setQueryFactory(queryFactory);
Map<String, Object> requestProperties = new HashMap<>();
requestProperties.put("id", "1");
Request userRequest = new CollectionRequest(requestProperties, "name:testTaxonomy.*");
// invoke test method
Result result = provider.getResources(userRequest);
assertEquals(0, result.getPropertyMaps().size());
Request request = requestCapture.getValue();
assertEquals("name:testTaxonomy.*", request.getQueryString());
assertEquals(0, request.getAdditionalSelectProperties().size());
verify(typeSystem, queryFactory, query);
}
@Test(expectedExceptions = InvalidPayloadException.class)
public void testCreateResource_invalidRequest__noName() throws Exception {
AtlasTypeSystem typeSystem = createStrictMock(AtlasTypeSystem.class);
QueryFactory queryFactory = createStrictMock(QueryFactory.class);
AtlasQuery query = createStrictMock(AtlasQuery.class);
replay(typeSystem, queryFactory, query);
Map<String, Object> requestProperties = new HashMap<>();
// missing 'name' property should result in InvalidPayloadException
requestProperties.put("description", "description");
Request userRequest = new InstanceRequest(requestProperties);
EntityTagResourceProvider provider = new EntityTagResourceProvider(typeSystem);
provider.setQueryFactory(queryFactory);
provider.createResource(userRequest);
}
@Test
public void testCreateResource() throws Exception {
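// tagging an entity first resolves the term via TermResourceProvider.getResourceById()
// and then creates a trait instance on the entity, so both collaborators are mocked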
AtlasTypeSystem typeSystem = createStrictMock(AtlasTypeSystem.class);
QueryFactory queryFactory = createStrictMock(QueryFactory.class);
AtlasQuery query = createStrictMock(AtlasQuery.class);
ResourceProvider termResourceProvider = createStrictMock(TermResourceProvider.class);
Capture<Request> termRequestCapture = newCapture();
Collection<Map<String, Object>> termQueryResult = new ArrayList<>();
Map<String, Object> termQueryResultRow = new HashMap<>();
termQueryResult.add(termQueryResultRow);
termQueryResultRow.put("name", "testTaxonomy.termName");
termQueryResultRow.put("type", "testTaxonomy.termName");
termQueryResultRow.put("available_as_tag", true);
termQueryResultRow.put("description", "term description");
Result termResult = new Result(termQueryResult);
// mock expectations
expect(termResourceProvider.getResourceById(capture(termRequestCapture))).andReturn(termResult);
Map<String, Object> tagProperties = new HashMap<>();
tagProperties.put("name", "testTaxonomy.termName");
tagProperties.put("description", "term description");
typeSystem.createTraitInstance("11-22-33", "testTaxonomy.termName", tagProperties);
replay(typeSystem, queryFactory, query, termResourceProvider);
EntityTagResourceProvider provider = new TestEntityTagResourceProvider(typeSystem, termResourceProvider);
provider.setQueryFactory(queryFactory);
Map<String, Object> requestProperties = new HashMap<>();
requestProperties.put("name", "testTaxonomy.termName");
requestProperties.put("id", "11-22-33");
Request userRequest = new InstanceRequest(requestProperties);
provider.createResource(userRequest);
Request termRequest = termRequestCapture.getValue();
Map<String, Object> termRequestProps = termRequest.getProperties();
assertEquals(1, termRequestProps.size());
TermPath termPath = (TermPath) termRequestProps.get("termPath");
assertEquals("testTaxonomy.termName", termPath.getFullyQualifiedName());
assertEquals(1, termRequest.getAdditionalSelectProperties().size());
assertEquals("type", termRequest.getAdditionalSelectProperties().iterator().next());
assertNull(termRequest.getQueryString());
verify(typeSystem, queryFactory, query, termResourceProvider);
}
@Test(expectedExceptions = CatalogException.class)
public void testCreateResource_invalidRequest__termNotAvailableForTagging() throws Exception {
AtlasTypeSystem typeSystem = createStrictMock(AtlasTypeSystem.class);
QueryFactory queryFactory = createStrictMock(QueryFactory.class);
AtlasQuery query = createStrictMock(AtlasQuery.class);
ResourceProvider termResourceProvider = createStrictMock(TermResourceProvider.class);
Capture<Request> termRequestCapture = newCapture();
Collection<Map<String, Object>> termQueryResult = new ArrayList<>();
Map<String, Object> termQueryResultRow = new HashMap<>();
termQueryResult.add(termQueryResultRow);
termQueryResultRow.put("name", "testTaxonomy.termName");
termQueryResultRow.put("type", "testTaxonomy.termName");
// false value for 'available_as_tag' should result in an exception
termQueryResultRow.put("available_as_tag", false);
termQueryResultRow.put("description", "term description");
Result termResult = new Result(termQueryResult);
// mock expectations
expect(termResourceProvider.getResourceById(capture(termRequestCapture))).andReturn(termResult);
replay(typeSystem, queryFactory, query, termResourceProvider);
EntityTagResourceProvider provider = new TestEntityTagResourceProvider(typeSystem, termResourceProvider);
provider.setQueryFactory(queryFactory);
Map<String, Object> requestProperties = new HashMap<>();
requestProperties.put("name", "testTaxonomy.termName");
requestProperties.put("id", "11-22-33");
Request userRequest = new InstanceRequest(requestProperties);
provider.createResource(userRequest);
}
@Test(expectedExceptions = ResourceAlreadyExistsException.class)
public void testCreateResource_invalidRequest__alreadyExists() throws Exception {
AtlasTypeSystem typeSystem = createStrictMock(AtlasTypeSystem.class);
QueryFactory queryFactory = createStrictMock(QueryFactory.class);
AtlasQuery query = createStrictMock(AtlasQuery.class);
ResourceProvider termResourceProvider = createStrictMock(TermResourceProvider.class);
Capture<Request> termRequestCapture = newCapture();
Collection<Map<String, Object>> termQueryResult = new ArrayList<>();
Map<String, Object> termQueryResultRow = new HashMap<>();
termQueryResult.add(termQueryResultRow);
termQueryResultRow.put("name", "testTaxonomy.termName");
termQueryResultRow.put("type", "testTaxonomy.termName");
termQueryResultRow.put("available_as_tag", true);
termQueryResultRow.put("description", "term description");
Result termResult = new Result(termQueryResult);
// mock expectations
expect(termResourceProvider.getResourceById(capture(termRequestCapture))).andReturn(termResult);
Map<String, Object> tagProperties = new HashMap<>();
tagProperties.put("name", "testTaxonomy.termName");
tagProperties.put("description", "term description");
typeSystem.createTraitInstance("11-22-33", "testTaxonomy.termName", tagProperties);
expectLastCall().andThrow(new ResourceAlreadyExistsException(""));
replay(typeSystem, queryFactory, query, termResourceProvider);
EntityTagResourceProvider provider = new TestEntityTagResourceProvider(typeSystem, termResourceProvider);
provider.setQueryFactory(queryFactory);
Map<String, Object> requestProperties = new HashMap<>();
requestProperties.put("name", "testTaxonomy.termName");
requestProperties.put("id", "11-22-33");
Request userRequest = new InstanceRequest(requestProperties);
provider.createResource(userRequest);
}
@Test
public void testCreateResources() throws Exception {
AtlasTypeSystem typeSystem = createMock(AtlasTypeSystem.class);
QueryFactory queryFactory = createStrictMock(QueryFactory.class);
AtlasQuery entityQuery = createMock(AtlasQuery.class);
ResourceProvider termResourceProvider = createMock(TermResourceProvider.class);
Capture<Request> entityRequestCapture = newCapture();
Capture<Request> termRequestCapture1 = newCapture();
Capture<Request> termRequestCapture2 = newCapture();
Collection<Map<String, Object>> entityQueryResult = new ArrayList<>();
Map<String, Object> entityQueryResultRow = new HashMap<>();
entityQueryResultRow.put("id", "1");
entityQueryResult.add(entityQueryResultRow);
Map<String, Object> entityQueryResultRow2 = new HashMap<>();
entityQueryResultRow2.put("id", "2");
entityQueryResult.add(entityQueryResultRow2);
Collection<Map<String, Object>> termQueryResult1 = new ArrayList<>();
Map<String, Object> termQueryResultRow1 = new HashMap<>();
termQueryResult1.add(termQueryResultRow1);
termQueryResultRow1.put("name", "testTaxonomy.termName1");
termQueryResultRow1.put("type", "testTaxonomy.termName1");
termQueryResultRow1.put("available_as_tag", true);
termQueryResultRow1.put("description", "term description");
Result termResult1 = new Result(termQueryResult1);
Collection<Map<String, Object>> termQueryResult2 = new ArrayList<>();
Map<String, Object> termQueryResultRow2 = new HashMap<>();
termQueryResult2.add(termQueryResultRow2);
termQueryResultRow2.put("name", "testTaxonomy.termName2");
termQueryResultRow2.put("type", "testTaxonomy.termName2");
termQueryResultRow2.put("available_as_tag", true);
termQueryResultRow2.put("description", "term 2 description");
Result termResult2 = new Result(termQueryResult2);
// mock expectations
expect(queryFactory.createEntityQuery(capture(entityRequestCapture))).andReturn(entityQuery);
expect(entityQuery.execute()).andReturn(entityQueryResult);
expect(termResourceProvider.getResourceById(capture(termRequestCapture1))).andReturn(termResult1);
expect(termResourceProvider.getResourceById(capture(termRequestCapture2))).andReturn(termResult2);
Map<String, Object> tagProperties1 = new HashMap<>();
tagProperties1.put("name", "testTaxonomy.termName1");
tagProperties1.put("description", "term description");
// each tag is associated with each entity
typeSystem.createTraitInstance("1", "testTaxonomy.termName1", tagProperties1);
typeSystem.createTraitInstance("2", "testTaxonomy.termName1", tagProperties1);
Map<String, Object> tagProperties2 = new HashMap<>();
tagProperties2.put("name", "testTaxonomy.termName2");
tagProperties2.put("description", "term 2 description");
// each tag is associated with each entity
typeSystem.createTraitInstance("1", "testTaxonomy.termName2", tagProperties2);
typeSystem.createTraitInstance("2", "testTaxonomy.termName2", tagProperties2);
replay(typeSystem, queryFactory, entityQuery, termResourceProvider);
// end mock expectations
EntityTagResourceProvider provider = new TestEntityTagResourceProvider(typeSystem, termResourceProvider);
provider.setQueryFactory(queryFactory);
Map<String, Object> requestProps = new HashMap<>();
Collection<Map<String, String>> tagMaps = new ArrayList<>();
requestProps.put("tags", tagMaps);
Map<String, String> tagMap1 = new HashMap<>();
tagMap1.put("name", "testTaxonomy.termName1");
tagMaps.add(tagMap1);
Map<String, String> tagMap2 = new HashMap<>();
tagMap2.put("name", "testTaxonomy.termName2");
tagMaps.add(tagMap2);
Request userRequest = new CollectionRequest(requestProps, "name:foo*");
// invoke method being tested
Collection<String> createResult = provider.createResources(userRequest);
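// 2 matching entities x 2 requested tags => 4 created entity tag resources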
assertEquals(4, createResult.size());
assertTrue(createResult.contains("v1/entities/1/tags/testTaxonomy.termName1"));
assertTrue(createResult.contains("v1/entities/1/tags/testTaxonomy.termName2"));
assertTrue(createResult.contains("v1/entities/2/tags/testTaxonomy.termName1"));
assertTrue(createResult.contains("v1/entities/2/tags/testTaxonomy.termName2"));
Request entityRequest = entityRequestCapture.getValue();
assertEquals("name:foo*", entityRequest.getQueryString());
assertEquals(Request.Cardinality.COLLECTION, entityRequest.getCardinality());
Request termRequest1 = termRequestCapture1.getValue();
assertNull(termRequest1.getQueryString());
assertEquals(Request.Cardinality.INSTANCE, termRequest1.getCardinality());
Map<String, Object> termRequestProps = termRequest1.getProperties();
assertEquals(1, termRequestProps.size());
TermPath termPath = (TermPath) termRequestProps.get("termPath");
assertEquals("testTaxonomy.termName1", termPath.getFullyQualifiedName());
Request termRequest2 = termRequestCapture2.getValue();
assertNull(termRequest2.getQueryString());
assertEquals(Request.Cardinality.INSTANCE, termRequest2.getCardinality());
Map<String, Object> termRequestProps2 = termRequest2.getProperties();
assertEquals(1, termRequestProps2.size());
TermPath termPath2 = (TermPath) termRequestProps2.get("termPath");
assertEquals("testTaxonomy.termName2", termPath2.getFullyQualifiedName());
verify(typeSystem, queryFactory, entityQuery, termResourceProvider);
}
//todo: test behavior of createResources in case of partial success after behavior is defined
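// test subclass that returns the mocked TermResourceProvider rather than instantiating a real one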
private static class TestEntityTagResourceProvider extends EntityTagResourceProvider {
private ResourceProvider testTermResourceProvider;
public TestEntityTagResourceProvider(AtlasTypeSystem typeSystem, ResourceProvider termResourceProvider) {
super(typeSystem);
testTermResourceProvider = termResourceProvider;
}
@Override
protected synchronized ResourceProvider getTermResourceProvider() {
return testTermResourceProvider;
}
}
}
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
* <p/>
* http://www.apache.org/licenses/LICENSE-2.0
* <p/>
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.atlas.catalog;
import org.testng.annotations.Test;
import java.util.ArrayList;
import java.util.Collection;
import java.util.HashMap;
import java.util.Map;
import static org.testng.Assert.assertEquals;
import static org.testng.Assert.assertNull;
import static org.testng.Assert.assertTrue;
/**
* Unit tests for InstanceRequest.
*/
public class InstanceRequestTest {
@Test
public void testRequestProperties() {
Map<String, Object> properties = new HashMap<>();
properties.put("foo", "fooValue");
properties.put("someBoolean", true);
Request request = new InstanceRequest(properties);
assertEquals(Request.Cardinality.INSTANCE, request.getCardinality());
assertEquals(properties, request.getProperties());
assertEquals("fooValue", request.getProperty("foo"));
assertTrue(request.<Boolean>getProperty("someBoolean"));
assertNull(request.getProperty("other"));
assertTrue(request.getAdditionalSelectProperties().isEmpty());
}
@Test
public void testSelectProperties() {
Map<String, Object> properties = new HashMap<>();
properties.put("foo", "fooValue");
properties.put("someBoolean", true);
Request request = new InstanceRequest(properties);
Collection<String> additionalSelectProps = new ArrayList<>();
additionalSelectProps.add("prop1");
additionalSelectProps.add("prop2");
request.addAdditionalSelectProperties(additionalSelectProps);
Collection<String> requestAdditionalSelectProps = request.getAdditionalSelectProperties();
assertEquals(2, requestAdditionalSelectProps.size());
assertTrue(requestAdditionalSelectProps.contains("prop1"));
assertTrue(requestAdditionalSelectProps.contains("prop2"));
}
}
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
* <p/>
* http://www.apache.org/licenses/LICENSE-2.0
* <p/>
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.atlas.catalog;
import org.testng.annotations.Test;
import javax.ws.rs.core.UriInfo;
import java.net.URI;
import java.util.*;
import static org.easymock.EasyMock.createStrictMock;
import static org.easymock.EasyMock.expect;
import static org.easymock.EasyMock.replay;
import static org.testng.Assert.assertEquals;
/**
* Unit tests for JsonSerializer.
*/
public class JsonSerializerTest {
@Test
public void testSerialize() throws Exception {
UriInfo uriInfo = createStrictMock(UriInfo.class);
URI uri = new URI("http://test.com:8080/");
expect(uriInfo.getBaseUri()).andReturn(uri);
replay(uriInfo);
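// relative 'href' values in the result maps should be expanded using the mocked base URI during serialization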
Collection<Map<String, Object>> resultMaps = new ArrayList<>();
// result map 1
ResourceComparator resourceComparator = new ResourceComparator();
Map<String, Object> resultMap1 = new TreeMap<>(resourceComparator);
resultMaps.add(resultMap1);
resultMap1.put("prop1", "property 1 value");
resultMap1.put("booleanProp", true);
resultMap1.put("numberProp", 100);
resultMap1.put("href", "v1/testResources/foo");
ArrayList<String> listProp = new ArrayList<>();
listProp.add("one");
listProp.add("two");
resultMap1.put("listProp", listProp);
Map<String, Object> mapProp = new TreeMap<>(resourceComparator);
mapProp.put("mapProp1", "mapProp1Value");
ArrayList<String> mapListProp = new ArrayList<>();
mapListProp.add("mapListOne");
mapListProp.add("mapListTwo");
mapProp.put("mapListProp", mapListProp);
mapProp.put("href", "v1/testResources/foobar");
resultMap1.put("mapProp", mapProp);
// result map 2
Map<String, Object> resultMap2 = new TreeMap<>(resourceComparator);
resultMaps.add(resultMap2);
resultMap2.put("nullProp", null);
resultMap2.put("href", "v1/testResources/bar");
ArrayList<Map<String, Object>> listProp2 = new ArrayList<>();
listProp2.add(Collections.<String, Object>singletonMap("listMapProp", "listMapPropValue"));
resultMap2.put("listProp", listProp2);
Result result = new Result(resultMaps);
JsonSerializer serializer = new JsonSerializer();
String resultJson = serializer.serialize(result, uriInfo);
assertEquals(resultJson, EXPECTED_JSON);
}
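// expected output: properties ordered by ResourceComparator and relative hrefs expanded to absolute URLs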
private static final String EXPECTED_JSON =
"[\n" +
" {\n" +
" \"href\": \"http://test.com:8080/v1/testResources/foo\",\n" +
" \"booleanProp\": true,\n" +
" \"numberProp\": 100,\n" +
" \"prop1\": \"property 1 value\",\n" +
" \"listProp\": [\n" +
" \"one\",\n" +
" \"two\"\n" +
" ],\n" +
" \"mapProp\": {\n" +
" \"href\": \"http://test.com:8080/v1/testResources/foobar\",\n" +
" \"mapProp1\": \"mapProp1Value\",\n" +
" \"mapListProp\": [\n" +
" \"mapListOne\",\n" +
" \"mapListTwo\"\n" +
" ]\n" +
" }\n" +
" },\n" +
" {\n" +
" \"href\": \"http://test.com:8080/v1/testResources/bar\",\n" +
" \"nullProp\": null,\n" +
" \"listProp\": [\n" +
" {\n" +
" \"listMapProp\": \"listMapPropValue\"\n" +
" }\n" +
" ]\n" +
" }\n" +
"]";
}
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
* <p/>
* http://www.apache.org/licenses/LICENSE-2.0
* <p/>
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.atlas.catalog;
import org.testng.annotations.Test;
import java.util.*;
import static org.testng.Assert.assertEquals;
/**
* Unit tests for ResourceComparator.
*/
public class ResourceComparatorTest {
@Test
public void testCompare() {
Map<String, Object> map = new TreeMap<>(new ResourceComparator());
map.put("a", "zzzzz");
map.put("name", 1);
map.put("z", "fdsfdsds");
map.put("d", new ArrayList<>());
map.put("id", 1);
map.put("e", false);
map.put("c", 1);
map.put("href", "dfdfgdf");
map.put("b", new HashMap<>());
map.put("description", 1);
map.put("f", 20);
map.put("type", 1);
Iterator<String> iter = map.keySet().iterator();
assertEquals(iter.next(), "href");
assertEquals(iter.next(), "name");
assertEquals(iter.next(), "id");
assertEquals(iter.next(), "description");
assertEquals(iter.next(), "type");
assertEquals(iter.next(), "a");
assertEquals(iter.next(), "b");
assertEquals(iter.next(), "c");
assertEquals(iter.next(), "d");
assertEquals(iter.next(), "e");
assertEquals(iter.next(), "f");
assertEquals(iter.next(), "z");
}
}
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
* <p/>
* http://www.apache.org/licenses/LICENSE-2.0
* <p/>
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.atlas.catalog;
import org.apache.atlas.catalog.definition.ResourceDefinition;
import org.apache.atlas.catalog.definition.TaxonomyResourceDefinition;
import org.apache.atlas.catalog.exception.InvalidPayloadException;
import org.apache.atlas.catalog.exception.ResourceAlreadyExistsException;
import org.apache.atlas.catalog.exception.ResourceNotFoundException;
import org.apache.atlas.catalog.query.AtlasQuery;
import org.apache.atlas.catalog.query.QueryFactory;
import org.easymock.Capture;
import org.testng.annotations.Test;
import java.util.*;
import static org.easymock.EasyMock.*;
import static org.testng.Assert.assertEquals;
import static org.testng.Assert.assertNull;
import static org.testng.Assert.assertTrue;
/**
* Unit tests for TaxonomyResourceProvider.
*/
public class TaxonomyResourceProviderTest {
@Test
public void testGetResource() throws Exception {
AtlasTypeSystem typeSystem = createStrictMock(AtlasTypeSystem.class);
QueryFactory queryFactory = createStrictMock(QueryFactory.class);
AtlasQuery query = createStrictMock(AtlasQuery.class);
Capture<Request> requestCapture = newCapture();
Collection<Map<String, Object>> queryResult = new ArrayList<>();
Map<String, Object> queryResultRow = new HashMap<>();
queryResult.add(queryResultRow);
queryResultRow.put("name", "taxonomyName");
queryResultRow.put("description", "test taxonomy description");
queryResultRow.put("creation_time", "04/20/2016");
// mock expectations
expect(queryFactory.createTaxonomyQuery(capture(requestCapture))).andReturn(query);
expect(query.execute()).andReturn(queryResult);
replay(typeSystem, queryFactory, query);
TaxonomyResourceProvider provider = new TaxonomyResourceProvider(typeSystem);
provider.setQueryFactory(queryFactory);
Map<String, Object> requestProperties = new HashMap<>();
requestProperties.put("name", "taxonomyName");
Request userRequest = new InstanceRequest(requestProperties);
Result result = provider.getResourceById(userRequest);
assertEquals(1, result.getPropertyMaps().size());
assertEquals(queryResultRow, result.getPropertyMaps().iterator().next());
Request request = requestCapture.getValue();
assertNull(request.getQueryString());
assertEquals(0, request.getAdditionalSelectProperties().size());
assertEquals(requestProperties, request.getProperties());
verify(typeSystem, queryFactory, query);
}
@Test(expectedExceptions = ResourceNotFoundException.class)
public void testGetResource_404() throws Exception {
AtlasTypeSystem typeSystem = createStrictMock(AtlasTypeSystem.class);
QueryFactory queryFactory = createStrictMock(QueryFactory.class);
AtlasQuery query = createStrictMock(AtlasQuery.class);
Capture<Request> requestCapture = newCapture();
// empty response should result in a ResourceNotFoundException
Collection<Map<String, Object>> emptyResponse = new ArrayList<>();
// mock expectations
expect(queryFactory.createTaxonomyQuery(capture(requestCapture))).andReturn(query);
expect(query.execute()).andReturn(emptyResponse);
replay(typeSystem, queryFactory, query);
TaxonomyResourceProvider provider = new TaxonomyResourceProvider(typeSystem);
provider.setQueryFactory(queryFactory);
Map<String, Object> requestProperties = new HashMap<>();
requestProperties.put("name", "taxonomyName");
Request request = new InstanceRequest(requestProperties);
provider.getResourceById(request);
verify(typeSystem, queryFactory, query);
}
@Test
public void testGetResources() throws Exception {
AtlasTypeSystem typeSystem = createStrictMock(AtlasTypeSystem.class);
QueryFactory queryFactory = createStrictMock(QueryFactory.class);
AtlasQuery query = createStrictMock(AtlasQuery.class);
Capture<Request> requestCapture = newCapture();
Collection<Map<String, Object>> queryResult = new ArrayList<>();
Map<String, Object> queryResultRow1 = new HashMap<>();
queryResult.add(queryResultRow1);
queryResultRow1.put("mame", "taxonomyName1");
queryResultRow1.put("description", "test taxonomy description");
queryResultRow1.put("creation_time", "04/20/2016");
Map<String, Object> queryResultRow2 = new HashMap<>();
queryResult.add(queryResultRow2);
queryResultRow2.put("mame", "taxonomyName2");
queryResultRow2.put("description", "test taxonomy description 2");
queryResultRow2.put("creation_time", "04/21/2016");
// mock expectations
expect(queryFactory.createTaxonomyQuery(capture(requestCapture))).andReturn(query);
expect(query.execute()).andReturn(queryResult);
replay(typeSystem, queryFactory, query);
TaxonomyResourceProvider provider = new TaxonomyResourceProvider(typeSystem);
provider.setQueryFactory(queryFactory);
Request userRequest = new CollectionRequest(Collections.<String, Object>emptyMap(), "name:taxonomy*");
Result result = provider.getResources(userRequest);
assertEquals(2, result.getPropertyMaps().size());
assertTrue(result.getPropertyMaps().contains(queryResultRow1));
assertTrue(result.getPropertyMaps().contains(queryResultRow2));
Request request = requestCapture.getValue();
assertEquals("name:taxonomy*", request.getQueryString());
assertEquals(0, request.getAdditionalSelectProperties().size());
assertEquals(0, request.getProperties().size());
verify(typeSystem, queryFactory, query);
}
@Test
public void testGetResources_noResults() throws Exception {
AtlasTypeSystem typeSystem = createStrictMock(AtlasTypeSystem.class);
QueryFactory queryFactory = createStrictMock(QueryFactory.class);
AtlasQuery query = createStrictMock(AtlasQuery.class);
Capture<Request> requestCapture = newCapture();
// empty result shouldn't result in exception for collection query
Collection<Map<String, Object>> queryResult = new ArrayList<>();
// mock expectations
expect(queryFactory.createTaxonomyQuery(capture(requestCapture))).andReturn(query);
expect(query.execute()).andReturn(queryResult);
replay(typeSystem, queryFactory, query);
TaxonomyResourceProvider provider = new TaxonomyResourceProvider(typeSystem);
provider.setQueryFactory(queryFactory);
Request userRequest = new CollectionRequest(Collections.<String, Object>emptyMap(), "name:taxonomy*");
Result result = provider.getResources(userRequest);
assertEquals(0, result.getPropertyMaps().size());
Request request = requestCapture.getValue();
assertEquals("name:taxonomy*", request.getQueryString());
assertEquals(0, request.getAdditionalSelectProperties().size());
assertEquals(0, request.getProperties().size());
verify(typeSystem, queryFactory, query);
}
@Test(expectedExceptions = InvalidPayloadException.class)
public void testCreateResource_invalidRequest__noName() throws Exception {
AtlasTypeSystem typeSystem = createStrictMock(AtlasTypeSystem.class);
QueryFactory queryFactory = createStrictMock(QueryFactory.class);
AtlasQuery query = createStrictMock(AtlasQuery.class);
// mock expectations
replay(typeSystem, queryFactory, query);
// taxonomy create request must contain 'name' property
Map<String, Object> requestProperties = new HashMap<>();
requestProperties.put("description", "test");
Request userRequest = new InstanceRequest(requestProperties);
TaxonomyResourceProvider provider = new TaxonomyResourceProvider(typeSystem);
provider.setQueryFactory(queryFactory);
provider.createResource(userRequest);
}
@Test(expectedExceptions = ResourceAlreadyExistsException.class)
public void testCreateResource_invalidRequest__alreadyExists() throws Exception {
AtlasTypeSystem typeSystem = createStrictMock(AtlasTypeSystem.class);
QueryFactory queryFactory = createStrictMock(QueryFactory.class);
AtlasQuery query = createStrictMock(AtlasQuery.class);
Capture<Request> requestCapture = newCapture();
// query is executed to see if resource already exists
Collection<Map<String, Object>> queryResult = new ArrayList<>();
Map<String, Object> queryResultRow = new HashMap<>();
queryResult.add(queryResultRow);
queryResultRow.put("mame", "taxonomyName");
queryResultRow.put("description", "test taxonomy description");
queryResultRow.put("creation_time", "04/20/2016");
// mock expectations
expect(queryFactory.createTaxonomyQuery(capture(requestCapture))).andReturn(query);
// returning result for query should result in ResourceAlreadyExistsException
expect(query.execute()).andReturn(queryResult);
replay(typeSystem, queryFactory, query);
// taxonomy create request must contain 'name' property
Map<String, Object> requestProperties = new HashMap<>();
requestProperties.put("name", "taxonomyName");
Request userRequest = new InstanceRequest(requestProperties);
TaxonomyResourceProvider provider = new TaxonomyResourceProvider(typeSystem);
provider.setQueryFactory(queryFactory);
provider.createResource(userRequest);
}
@Test
public void testCreateResource() throws Exception {
AtlasTypeSystem typeSystem = createStrictMock(AtlasTypeSystem.class);
QueryFactory queryFactory = createStrictMock(QueryFactory.class);
AtlasQuery query = createStrictMock(AtlasQuery.class);
Capture<ResourceDefinition> resourceDefinitionCapture = newCapture();
Capture<Request> requestCapture = newCapture();
// empty response indicates that resource doesn't already exist
Collection<Map<String, Object>> queryResult = new ArrayList<>();
// mock expectations
expect(queryFactory.createTaxonomyQuery(capture(requestCapture))).andReturn(query);
expect(query.execute()).andReturn(queryResult);
typeSystem.createEntity(capture(resourceDefinitionCapture), capture(requestCapture));
replay(typeSystem, queryFactory, query);
Map<String, Object> requestProperties = new HashMap<>();
requestProperties.put("name", "taxonomyName");
Request userRequest = new InstanceRequest(requestProperties);
TaxonomyResourceProvider provider = new TaxonomyResourceProvider(typeSystem);
provider.setQueryFactory(queryFactory);
provider.createResource(userRequest);
assertEquals(new TaxonomyResourceDefinition().getTypeName(),
resourceDefinitionCapture.getValue().getTypeName());
Request request = requestCapture.getValue();
assertNull(request.getQueryString());
assertEquals(requestProperties, request.getProperties());
verify(typeSystem, queryFactory, query);
}
@Test(expectedExceptions = UnsupportedOperationException.class)
public void testCreateResources() throws Exception {
AtlasTypeSystem typeSystem = createStrictMock(AtlasTypeSystem.class);
QueryFactory queryFactory = createStrictMock(QueryFactory.class);
// mock expectations
replay(typeSystem, queryFactory);
Map<String, Object> requestProperties = new HashMap<>();
requestProperties.put("name", "taxonomyName");
Request userRequest = new InstanceRequest(requestProperties);
TaxonomyResourceProvider provider = new TaxonomyResourceProvider(typeSystem);
provider.setQueryFactory(queryFactory);
provider.createResources(userRequest);
}
}
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
* <p/>
* http://www.apache.org/licenses/LICENSE-2.0
* <p/>
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.atlas.catalog;
import org.apache.atlas.catalog.definition.ResourceDefinition;
import org.apache.atlas.catalog.exception.InvalidPayloadException;
import org.apache.atlas.catalog.exception.ResourceAlreadyExistsException;
import org.apache.atlas.catalog.exception.ResourceNotFoundException;
import org.apache.atlas.catalog.query.AtlasQuery;
import org.apache.atlas.catalog.query.QueryFactory;
import org.easymock.Capture;
import org.easymock.EasyMock;
import org.testng.annotations.Test;
import java.util.*;
import static org.easymock.EasyMock.*;
import static org.testng.Assert.assertEquals;
import static org.testng.Assert.assertNull;
import static org.testng.Assert.assertTrue;
/**
* Unit tests for TermResourceProvider.
*/
public class TermResourceProviderTest {
@Test
public void testGetResource() throws Exception {
AtlasTypeSystem typeSystem = createStrictMock(AtlasTypeSystem.class);
QueryFactory queryFactory = createStrictMock(QueryFactory.class);
AtlasQuery query = createStrictMock(AtlasQuery.class);
Capture<Request> requestCapture = newCapture();
TermPath termPath = new TermPath("testTaxonomy", "termName");
Collection<Map<String, Object>> queryResult = new ArrayList<>();
Map<String, Object> queryResultRow = new HashMap<>();
queryResult.add(queryResultRow);
queryResultRow.put("name", "testTaxonomy.termName");
queryResultRow.put("description", "test term description");
queryResultRow.put("creation_time", "04/20/2016");
queryResultRow.put("acceptable_use", "anything");
queryResultRow.put("available_as_tag", true);
Map<String, Object> hierarchyMap = new HashMap<>();
queryResultRow.put("hierarchy", hierarchyMap);
hierarchyMap.put("path", "/");
hierarchyMap.put("short_name", "termName");
hierarchyMap.put("taxonomy", "testTaxonomy");
// mock expectations
expect(queryFactory.createTermQuery(capture(requestCapture))).andReturn(query);
expect(query.execute()).andReturn(queryResult);
replay(typeSystem, queryFactory, query);
TermResourceProvider provider = new TermResourceProvider(typeSystem);
provider.setQueryFactory(queryFactory);
Map<String, Object> requestProperties = new HashMap<>();
requestProperties.put("termPath", termPath);
Request userRequest = new InstanceRequest(requestProperties);
Result result = provider.getResourceById(userRequest);
assertEquals(result.getPropertyMaps().size(), 1);
assertEquals(result.getPropertyMaps().iterator().next(), queryResultRow);
Request request = requestCapture.getValue();
assertNull(request.getQueryString());
assertEquals(request.getAdditionalSelectProperties().size(), 0);
assertEquals(request.getProperties().size(), 2);
assertEquals(request.getProperties().get("termPath"), termPath);
assertEquals(request.getProperties().get("name"), termPath.getFullyQualifiedName());
verify(typeSystem, queryFactory, query);
}
@Test(expectedExceptions = ResourceNotFoundException.class)
public void testGetResource_404() throws Exception {
AtlasTypeSystem typeSystem = createStrictMock(AtlasTypeSystem.class);
QueryFactory queryFactory = createStrictMock(QueryFactory.class);
AtlasQuery query = createStrictMock(AtlasQuery.class);
Capture<Request> requestCapture = newCapture();
// empty response should result in a ResourceNotFoundException
Collection<Map<String, Object>> emptyResponse = new ArrayList<>();
// mock expectations
expect(queryFactory.createTermQuery(capture(requestCapture))).andReturn(query);
expect(query.execute()).andReturn(emptyResponse);
replay(typeSystem, queryFactory, query);
TermResourceProvider provider = new TermResourceProvider(typeSystem);
provider.setQueryFactory(queryFactory);
Map<String, Object> requestProperties = new HashMap<>();
requestProperties.put("termPath", new TermPath("taxonomyName.badTermName"));
Request request = new InstanceRequest(requestProperties);
provider.getResourceById(request);
}
@Test
public void testGetResources() throws Exception {
AtlasTypeSystem typeSystem = createStrictMock(AtlasTypeSystem.class);
QueryFactory queryFactory = createStrictMock(QueryFactory.class);
AtlasQuery query = createStrictMock(AtlasQuery.class);
Capture<Request> requestCapture = newCapture();
TermPath termPath = new TermPath("testTaxonomy", null);
Collection<Map<String, Object>> queryResult = new ArrayList<>();
Map<String, Object> queryResultRow1 = new HashMap<>();
queryResult.add(queryResultRow1);
queryResultRow1.put("name", "testTaxonomy.termName");
queryResultRow1.put("description", "test term description");
queryResultRow1.put("creation_time", "04/20/2016");
queryResultRow1.put("acceptable_use", "anything");
queryResultRow1.put("available_as_tag", true);
Map<String, Object> hierarchyMap = new HashMap<>();
queryResultRow1.put("hierarchy", hierarchyMap);
hierarchyMap.put("path", "/");
hierarchyMap.put("short_name", "termName");
hierarchyMap.put("taxonomy", "testTaxonomy");
Map<String, Object> queryResultRow2 = new HashMap<>();
queryResult.add(queryResultRow2);
queryResultRow2.put("name", "testTaxonomy.termName2");
queryResultRow2.put("description", "test term 2 description");
queryResultRow2.put("creation_time", "04/21/2016");
queryResultRow2.put("acceptable_use", "anything");
queryResultRow2.put("available_as_tag", true);
Map<String, Object> hierarchyMap2 = new HashMap<>();
queryResultRow2.put("hierarchy", hierarchyMap2);
hierarchyMap2.put("path", "/");
hierarchyMap2.put("short_name", "termName2");
hierarchyMap2.put("taxonomy", "testTaxonomy");
// mock expectations
expect(queryFactory.createTermQuery(capture(requestCapture))).andReturn(query);
expect(query.execute()).andReturn(queryResult);
replay(typeSystem, queryFactory, query);
TermResourceProvider provider = new TermResourceProvider(typeSystem);
provider.setQueryFactory(queryFactory);
Map<String, Object> requestProperties = new HashMap<>();
requestProperties.put("termPath", termPath);
Request userRequest = new CollectionRequest(requestProperties, "name:taxonomy*");
// invoke test method
Result result = provider.getResources(userRequest);
assertEquals(result.getPropertyMaps().size(), 2);
assertTrue(result.getPropertyMaps().contains(queryResultRow1));
assertTrue(result.getPropertyMaps().contains(queryResultRow2));
Request request = requestCapture.getValue();
assertEquals(request.getQueryString(), "name:taxonomy*");
assertEquals(request.getAdditionalSelectProperties().size(), 0);
assertEquals(request.getProperties().size(), 1);
verify(typeSystem, queryFactory, query);
}
@Test
public void testGetResources_noResults() throws Exception {
AtlasTypeSystem typeSystem = createStrictMock(AtlasTypeSystem.class);
QueryFactory queryFactory = createStrictMock(QueryFactory.class);
AtlasQuery query = createStrictMock(AtlasQuery.class);
Capture<Request> requestCapture = newCapture();
TermPath termPath = new TermPath("testTaxonomy", "termName");
// empty result shouldn't result in exception for collection query
Collection<Map<String, Object>> queryResult = new ArrayList<>();
// mock expectations
expect(queryFactory.createTermQuery(capture(requestCapture))).andReturn(query);
expect(query.execute()).andReturn(queryResult);
replay(typeSystem, queryFactory, query);
TermResourceProvider provider = new TermResourceProvider(typeSystem);
provider.setQueryFactory(queryFactory);
Map<String, Object> requestProperties = new HashMap<>();
requestProperties.put("termPath", termPath);
Request userRequest = new CollectionRequest(requestProperties, "name:taxonomy*");
// invoke test method
Result result = provider.getResources(userRequest);
assertEquals(0, result.getPropertyMaps().size());
Request request = requestCapture.getValue();
assertEquals(request.getQueryString(), "name:taxonomy*");
assertEquals(request.getAdditionalSelectProperties().size(), 0);
assertEquals(request.getProperties().size(), 1);
verify(typeSystem, queryFactory, query);
}
@Test(expectedExceptions = InvalidPayloadException.class)
public void testCreateResource_invalidRequest__noName() throws Exception {
AtlasTypeSystem typeSystem = createStrictMock(AtlasTypeSystem.class);
QueryFactory queryFactory = createStrictMock(QueryFactory.class);
AtlasQuery query = createStrictMock(AtlasQuery.class);
// null term name should result in InvalidPayloadException
TermPath termPath = new TermPath("testTaxonomy", null);
// mock expectations
replay(typeSystem, queryFactory, query);
Map<String, Object> requestProperties = new HashMap<>();
requestProperties.put("termPath", termPath);
Request userRequest = new InstanceRequest(requestProperties);
TermResourceProvider provider = new TermResourceProvider(typeSystem);
provider.setQueryFactory(queryFactory);
provider.createResource(userRequest);
}
@Test
public void testCreateResource() throws Exception {
AtlasTypeSystem typeSystem = createStrictMock(AtlasTypeSystem.class);
QueryFactory queryFactory = createStrictMock(QueryFactory.class);
AtlasQuery query = createStrictMock(AtlasQuery.class);
Capture<ResourceDefinition> resourceDefinitionCapture = newCapture();
ResourceProvider taxonomyResourceProvider = createStrictMock(TaxonomyResourceProvider.class);
Capture<Request> taxonomyRequestCapture = newCapture();
Collection<Map<String, Object>> taxonomyQueryResult = new ArrayList<>();
Map<String, Object> taxonomyQueryResultRow = new HashMap<>();
taxonomyQueryResult.add(taxonomyQueryResultRow);
taxonomyQueryResultRow.put("name", "testTaxonomy");
taxonomyQueryResultRow.put("id", "11-22-33");
Result taxonomyResult = new Result(taxonomyQueryResult);
Map<String, Object> expectedRequestProps = new HashMap<>();
expectedRequestProps.put("name", "testTaxonomy.termName");
// when not specified, the default value of 'true' should be set
expectedRequestProps.put("available_as_tag", true);
// mock expectations
expect(taxonomyResourceProvider.getResourceById(capture(taxonomyRequestCapture))).andReturn(taxonomyResult);
typeSystem.createTraitType(capture(resourceDefinitionCapture), eq("testTaxonomy.termName"), EasyMock.<String>isNull());
typeSystem.createTraitInstance("11-22-33", "testTaxonomy.termName", expectedRequestProps);
replay(typeSystem, queryFactory, query, taxonomyResourceProvider);
TermResourceProvider provider = new TestTermResourceProvider(typeSystem, taxonomyResourceProvider);
provider.setQueryFactory(queryFactory);
TermPath termPath = new TermPath("testTaxonomy", "termName");
Map<String, Object> requestProperties = new HashMap<>();
requestProperties.put("termPath", termPath);
Request userRequest = new InstanceRequest(requestProperties);
provider.createResource(userRequest);
Request taxonomyRequest = taxonomyRequestCapture.getValue();
Map<String, Object> taxonomyRequestProps = taxonomyRequest.getProperties();
assertEquals(taxonomyRequestProps.size(), 1);
assertEquals(taxonomyRequestProps.get("name"), "testTaxonomy");
assertEquals(taxonomyRequest.getAdditionalSelectProperties().size(), 1);
assertEquals(taxonomyRequest.getAdditionalSelectProperties().iterator().next(), "id");
assertNull(taxonomyRequest.getQueryString());
ResourceDefinition resourceDefinition = resourceDefinitionCapture.getValue();
assertEquals(resourceDefinition.getTypeName(), "Term");
verify(typeSystem, queryFactory, query, taxonomyResourceProvider);
}
@Test(expectedExceptions = ResourceAlreadyExistsException.class)
public void testCreateResource_invalidRequest__alreadyExists() throws Exception {
AtlasTypeSystem typeSystem = createStrictMock(AtlasTypeSystem.class);
QueryFactory queryFactory = createStrictMock(QueryFactory.class);
AtlasQuery query = createStrictMock(AtlasQuery.class);
Capture<ResourceDefinition> resourceDefinitionCapture = newCapture();
ResourceProvider taxonomyResourceProvider = createStrictMock(TaxonomyResourceProvider.class);
Capture<Request> taxonomyRequestCapture = newCapture();
Collection<Map<String, Object>> taxonomyQueryResult = new ArrayList<>();
Map<String, Object> taxonomyQueryResultRow = new HashMap<>();
taxonomyQueryResult.add(taxonomyQueryResultRow);
taxonomyQueryResultRow.put("name", "testTaxonomy");
taxonomyQueryResultRow.put("id", "11-22-33");
Result taxonomyResult = new Result(taxonomyQueryResult);
// mock expectations
expect(taxonomyResourceProvider.getResourceById(capture(taxonomyRequestCapture))).andReturn(taxonomyResult);
typeSystem.createTraitType(capture(resourceDefinitionCapture), eq("testTaxonomy.termName"), EasyMock.<String>isNull());
expectLastCall().andThrow(new ResourceAlreadyExistsException(""));
replay(typeSystem, queryFactory, query, taxonomyResourceProvider);
TermResourceProvider provider = new TestTermResourceProvider(typeSystem, taxonomyResourceProvider);
provider.setQueryFactory(queryFactory);
TermPath termPath = new TermPath("testTaxonomy", "termName");
Map<String, Object> requestProperties = new HashMap<>();
requestProperties.put("termPath", termPath);
Request userRequest = new InstanceRequest(requestProperties);
provider.createResource(userRequest);
}
@Test(expectedExceptions = UnsupportedOperationException.class)
public void testCreateResources() throws Exception {
AtlasTypeSystem typeSystem = createStrictMock(AtlasTypeSystem.class);
QueryFactory queryFactory = createStrictMock(QueryFactory.class);
// mock expectations
replay(typeSystem, queryFactory);
TermPath termPath = new TermPath("testTaxonomy", "termName");
Map<String, Object> requestProperties = new HashMap<>();
requestProperties.put("termPath", termPath);
Request userRequest = new InstanceRequest(requestProperties);
TermResourceProvider provider = new TermResourceProvider(typeSystem);
provider.setQueryFactory(queryFactory);
provider.createResources(userRequest);
}
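// test subclass that returns the mocked TaxonomyResourceProvider rather than instantiating a real one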
private static class TestTermResourceProvider extends TermResourceProvider {
private ResourceProvider testTaxonomyResourceProvider;
public TestTermResourceProvider(AtlasTypeSystem typeSystem, ResourceProvider taxonomyResourceProvider) {
super(typeSystem);
testTaxonomyResourceProvider = taxonomyResourceProvider;
}
@Override
protected synchronized ResourceProvider getTaxonomyResourceProvider() {
return testTaxonomyResourceProvider;
}
}
}
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
* <p/>
* http://www.apache.org/licenses/LICENSE-2.0
* <p/>
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.atlas.catalog;
import com.tinkerpop.blueprints.Vertex;
import org.apache.atlas.catalog.definition.ResourceDefinition;
import org.apache.atlas.repository.Constants;
import org.testng.annotations.Test;
import java.util.*;
import static org.easymock.EasyMock.*;
import static org.testng.Assert.*;
/**
* Unit tests for VertexWrapper.
*/
public class VertexWrapperTest {
@Test
public void testGetVertex() {
Vertex v = createStrictMock(Vertex.class);
ResourceDefinition resourceDefinition = createStrictMock(ResourceDefinition.class);
// just return null for these because they aren't used in this test
expect(resourceDefinition.getPropertyMapper()).andReturn(null);
expect(resourceDefinition.getPropertyValueFormatters()).andReturn(null);
expect(v.<String>getProperty(Constants.ENTITY_TYPE_PROPERTY_KEY)).andReturn("testType");
replay(v, resourceDefinition);
VertexWrapper vWrapper = new VertexWrapper(v, resourceDefinition);
assertEquals(vWrapper.getVertex(), v);
verify(v, resourceDefinition);
}
@SuppressWarnings("unchecked")
@Test
public void testGetProperty() {
String testType = "testType";
String propName = "propName";
String qualifiedPropName = "Prefix.propName";
String propValue = "val";
String formattedValue = "value";
Vertex v = createStrictMock(Vertex.class);
PropertyMapper propertyMapper = createStrictMock(PropertyMapper.class);
PropertyValueFormatter formatter = createStrictMock(PropertyValueFormatter.class);
// mock expectations
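// clean property name is mapped to its fully qualified vertex name, the raw value is read
// from the vertex and then passed through the registered formatter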
expect(v.<String>getProperty(Constants.ENTITY_TYPE_PROPERTY_KEY)).andReturn(testType);
expect(propertyMapper.toFullyQualifiedName(propName, testType)).andReturn(qualifiedPropName);
expect(v.getProperty(qualifiedPropName)).andReturn(propValue);
expect(formatter.format(propValue)).andReturn(formattedValue);
replay(v, propertyMapper, formatter);
VertexWrapper vWrapper = new VertexWrapper(v, propertyMapper, Collections.singletonMap(propName, formatter));
assertEquals(vWrapper.getProperty(propName), formattedValue);
// now remove prop
vWrapper.removeProperty(propName);
assertNull(vWrapper.getProperty(propName));
verify(v, propertyMapper, formatter);
}
@SuppressWarnings("unchecked")
@Test
public void testGetProperty2() {
String testType = "testType";
String propName = "propName";
String qualifiedPropName = "Prefix.propName";
String propValue = "val";
String formattedValue = "value";
Vertex v = createStrictMock(Vertex.class);
ResourceDefinition resourceDefinition = createStrictMock(ResourceDefinition.class);
PropertyMapper propertyMapper = createStrictMock(PropertyMapper.class);
PropertyValueFormatter formatter = createStrictMock(PropertyValueFormatter.class);
// mock expectations
expect(v.<String>getProperty(Constants.ENTITY_TYPE_PROPERTY_KEY)).andReturn(testType);
expect(resourceDefinition.getPropertyMapper()).andReturn(propertyMapper);
expect(resourceDefinition.getPropertyValueFormatters()).andReturn(Collections.singletonMap(propName, formatter));
expect(propertyMapper.toFullyQualifiedName(propName, testType)).andReturn(qualifiedPropName);
expect(v.getProperty(qualifiedPropName)).andReturn(propValue);
expect(formatter.format(propValue)).andReturn(formattedValue);
replay(v, resourceDefinition, propertyMapper, formatter);
VertexWrapper vWrapper = new VertexWrapper(v, resourceDefinition);
assertEquals(vWrapper.getProperty(propName), formattedValue);
// now remove prop
vWrapper.removeProperty(propName);
assertNull(vWrapper.getProperty(propName));
verify(v, resourceDefinition, propertyMapper, formatter);
}
@Test
public void testGetProperty_removed() {
String testType = "testType";
String propName = "propName";
Vertex v = createStrictMock(Vertex.class);
expect(v.<String>getProperty(Constants.ENTITY_TYPE_PROPERTY_KEY)).andReturn(testType);
// vertex shouldn't be asked for the removed property
replay(v);
VertexWrapper vWrapper = new VertexWrapper(v, null, Collections.<String, PropertyValueFormatter>emptyMap());
vWrapper.removeProperty(propName);
assertNull(vWrapper.getProperty(propName));
verify(v);
}
@Test
public void testGetPropertyKeys() {
String testType = "testType";
// vertex returns unordered set
Set<String> propertyKeys = new HashSet<>();
propertyKeys.add("foobar");
propertyKeys.add("Prefix.foo");
propertyKeys.add("Prefix.bar");
Vertex v = createStrictMock(Vertex.class);
PropertyMapper propertyMapper = createMock(PropertyMapper.class);
// mock expectations
expect(v.<String>getProperty(Constants.ENTITY_TYPE_PROPERTY_KEY)).andReturn(testType);
expect(v.getPropertyKeys()).andReturn(propertyKeys);
expect(propertyMapper.toCleanName("Prefix.bar", testType)).andReturn("bar");
expect(propertyMapper.toCleanName("Prefix.foo", testType)).andReturn("foo");
expect(propertyMapper.toCleanName("foobar", testType)).andReturn("foobar");
replay(v, propertyMapper);
VertexWrapper vWrapper = new VertexWrapper(v, propertyMapper,
Collections.<String, PropertyValueFormatter>emptyMap());
Collection<String> resultKeys = vWrapper.getPropertyKeys();
Iterator<String> propIterator = resultKeys.iterator();
assertEquals(resultKeys.size(), 3);
// natural ordering is applied in vertex wrapper
assertEquals(propIterator.next(), "bar");
assertEquals(propIterator.next(), "foo");
assertEquals(propIterator.next(), "foobar");
verify(v, propertyMapper);
}
@Test
public void testGetPropertyKeys_removed() {
String testType = "testType";
Set<String> propertyKeys = new TreeSet<>();
propertyKeys.add("Prefix.foo");
propertyKeys.add("Prefix.bar");
propertyKeys.add("foobar");
Vertex v = createStrictMock(Vertex.class);
PropertyMapper propertyMapper = createStrictMock(PropertyMapper.class);
// mock expectations
expect(v.<String>getProperty(Constants.ENTITY_TYPE_PROPERTY_KEY)).andReturn(testType);
expect(v.getPropertyKeys()).andReturn(propertyKeys);
// natural ordering provided by TreeSet
expect(propertyMapper.toCleanName("Prefix.bar", testType)).andReturn("bar");
expect(propertyMapper.toCleanName("Prefix.foo", testType)).andReturn("foo");
expect(propertyMapper.toCleanName("foobar", testType)).andReturn("foobar");
replay(v, propertyMapper);
VertexWrapper vWrapper = new VertexWrapper(v, propertyMapper,
Collections.<String, PropertyValueFormatter>emptyMap());
// remove props
vWrapper.removeProperty("foo");
vWrapper.removeProperty("foobar");
Collection<String> resultKeys = vWrapper.getPropertyKeys();
assertEquals(resultKeys.size(), 1);
assertTrue(resultKeys.contains("bar"));
verify(v, propertyMapper);
}
@Test
public void testGetPropertyMap() {
String testType = "testType";
Set<String> propertyKeys = new HashSet<>();
propertyKeys.add("Prefix.foo");
propertyKeys.add("Prefix.bar");
propertyKeys.add("foobar");
Vertex v = createMock(Vertex.class);
PropertyMapper propertyMapper = createMock(PropertyMapper.class);
PropertyValueFormatter formatter = createMock(PropertyValueFormatter.class);
Map<String, PropertyValueFormatter> valueFormatters = new HashMap<>();
valueFormatters.put("foo", formatter);
valueFormatters.put("bar", formatter);
// mock expectations
expect(v.<String>getProperty(Constants.ENTITY_TYPE_PROPERTY_KEY)).andReturn(testType);
expect(v.getPropertyKeys()).andReturn(propertyKeys);
expect(v.getProperty("Prefix.foo")).andReturn("Prefix.foo:Value");
expect(v.getProperty("Prefix.bar")).andReturn("Prefix.bar:Value");
expect(v.getProperty("foobar")).andReturn("foobarValue");
expect(propertyMapper.toCleanName("Prefix.bar", testType)).andReturn("bar");
expect(propertyMapper.toCleanName("Prefix.foo", testType)).andReturn("foo");
expect(propertyMapper.toCleanName("foobar", testType)).andReturn("foobar");
expect(formatter.format("Prefix.foo:Value")).andReturn("fooValue");
expect(formatter.format("Prefix.bar:Value")).andReturn("barValue");
replay(v, propertyMapper, formatter);
VertexWrapper vWrapper = new VertexWrapper(v, propertyMapper, valueFormatters);
Map<String, Object> resultMap = vWrapper.getPropertyMap();
assertEquals(resultMap.size(), 3);
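// getPropertyMap() is expected to order entries by clean property name, so iteration order is asserted below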
Iterator<Map.Entry<String, Object>> iter = resultMap.entrySet().iterator();
Map.Entry<String, Object> entry1 = iter.next();
assertEquals(entry1.getKey(), "bar");
assertEquals(entry1.getValue(), "barValue");
Map.Entry<String, Object> entry2 = iter.next();
assertEquals(entry2.getKey(), "foo");
assertEquals(entry2.getValue(), "fooValue");
Map.Entry<String, Object> entry3 = iter.next();
assertEquals(entry3.getKey(), "foobar");
assertEquals(entry3.getValue(), "foobarValue");
verify(v, propertyMapper, formatter);
}
@Test
public void testGetPropertyMap_removed() {
String testType = "testType";
Set<String> propertyKeys = new HashSet<>();
propertyKeys.add("Prefix.foo");
propertyKeys.add("Prefix.bar");
propertyKeys.add("foobar");
Vertex v = createMock(Vertex.class);
PropertyMapper propertyMapper = createMock(PropertyMapper.class);
PropertyValueFormatter formatter = createMock(PropertyValueFormatter.class);
Map<String, PropertyValueFormatter> valueFormatters = new HashMap<>();
valueFormatters.put("foo", formatter);
valueFormatters.put("bar", formatter);
// mock expectations
expect(v.<String>getProperty(Constants.ENTITY_TYPE_PROPERTY_KEY)).andReturn(testType);
expect(v.getPropertyKeys()).andReturn(propertyKeys);
expect(v.getProperty("Prefix.bar")).andReturn("Prefix.bar:Value");
expect(v.getProperty("foobar")).andReturn("foobarValue");
expect(propertyMapper.toCleanName("Prefix.bar", testType)).andReturn("bar");
expect(propertyMapper.toCleanName("Prefix.foo", testType)).andReturn("foo");
expect(propertyMapper.toCleanName("foobar", testType)).andReturn("foobar");
expect(formatter.format("Prefix.bar:Value")).andReturn("barValue");
replay(v, propertyMapper, formatter);
VertexWrapper vWrapper = new VertexWrapper(v, propertyMapper, valueFormatters);
//remove "foo" property
vWrapper.removeProperty("foo");
Map<String, Object> resultMap = vWrapper.getPropertyMap();
assertEquals(resultMap.size(), 2);
Iterator<Map.Entry<String, Object>> iter = resultMap.entrySet().iterator();
Map.Entry<String, Object> entry1 = iter.next();
assertEquals(entry1.getKey(), "bar");
assertEquals(entry1.getValue(), "barValue");
Map.Entry<String, Object> entry2 = iter.next();
assertEquals(entry2.getKey(), "foobar");
assertEquals(entry2.getValue(), "foobarValue");
verify(v, propertyMapper, formatter);
}
@Test
public void testIsPropertyRemoved() {
String testType = "testType";
Vertex v = createMock(Vertex.class);
expect(v.<String>getProperty(Constants.ENTITY_TYPE_PROPERTY_KEY)).andReturn(testType);
replay(v);
VertexWrapper vWrapper = new VertexWrapper(v, null,
Collections.<String, PropertyValueFormatter>emptyMap());
vWrapper.removeProperty("foo");
assertTrue(vWrapper.isPropertyRemoved("foo"));
assertFalse(vWrapper.isPropertyRemoved("bar"));
}
}
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
* <p/>
* http://www.apache.org/licenses/LICENSE-2.0
* <p/>
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.atlas.catalog.definition;
import org.apache.atlas.catalog.CollectionRequest;
import org.apache.atlas.catalog.InstanceRequest;
import org.apache.atlas.catalog.Request;
import org.apache.atlas.catalog.projection.Projection;
import org.apache.atlas.catalog.projection.Relation;
import org.testng.annotations.Test;
import java.util.Collections;
import java.util.HashMap;
import java.util.Map;
import static org.testng.Assert.assertEquals;
import static org.testng.Assert.assertNull;
import static org.testng.Assert.assertTrue;
/**
* Unit tests for EntityResourceDefinition.
*/
public class EntityResourceDefinitionTest {
@Test
public void testGetIdPropertyName() {
ResourceDefinition entityDefinition = new EntityResourceDefinition();
assertEquals(entityDefinition.getIdPropertyName(), "id");
}
@Test
public void testGetTypeName() {
ResourceDefinition entityDefinition = new EntityResourceDefinition();
assertNull(entityDefinition.getTypeName());
}
@Test
public void testResolveHref() {
Map<String, Object> resourceProps = new HashMap<>();
resourceProps.put("id", "111-222-333");
resourceProps.put("name", "foo");
ResourceDefinition entityDefinition = new EntityResourceDefinition();
String href = entityDefinition.resolveHref(resourceProps);
assertEquals(href, "v1/entities/111-222-333");
}
// Because entity creation isn't currently supported, validate() is essentially a no-op.
@Test
public void testValidate() throws Exception {
Request request = new InstanceRequest(Collections.<String, Object>emptyMap());
ResourceDefinition entityDefinition = new EntityResourceDefinition();
entityDefinition.validate(request);
}
// Because we don't currently support entity creation, no properties are registered
@Test
public void testGetPropertyDefinitions() {
ResourceDefinition entityDefinition = new EntityResourceDefinition();
assertTrue(entityDefinition.getPropertyDefinitions().isEmpty());
}
@Test
public void testFilterProperties_Instance() {
Map<String, Object> resourceProps = new HashMap<>();
resourceProps.put("id", "111-222-333");
resourceProps.put("name", "nameVal");
resourceProps.put("type", "someType");
resourceProps.put("foo", "fooVal");
resourceProps.put("bar", "barVal");
resourceProps.put("fooBar", "fooBarVal");
resourceProps.put("other", "otherVal");
Request request = new InstanceRequest(resourceProps);
ResourceDefinition entityDefinition = new EntityResourceDefinition();
// no filtering should occur for entity instances
assertEquals(entityDefinition.filterProperties(request, resourceProps), resourceProps);
}
@Test
public void testFilterProperties_Collection() {
Map<String, Object> resourceProps = new HashMap<>();
resourceProps.put("id", "111-222-333");
resourceProps.put("name", "nameVal");
resourceProps.put("type", "someType");
resourceProps.put("foo", "fooVal");
resourceProps.put("bar", "barVal");
resourceProps.put("fooBar", "fooBarVal");
resourceProps.put("other", "otherVal");
Request request = new CollectionRequest(resourceProps, "someProperty:someValue");
request.addAdditionalSelectProperties(Collections.singleton("foo"));
ResourceDefinition entityDefinition = new EntityResourceDefinition();
// for collection requests, only registered collection properties plus explicitly added select properties are returned
Map<String, Object> filteredProps = entityDefinition.filterProperties(request, resourceProps);
assertEquals(filteredProps.size(), 4);
// registered collection props
assertTrue(filteredProps.containsKey("name"));
assertTrue(filteredProps.containsKey("id"));
assertTrue(filteredProps.containsKey("type"));
// added prop
assertTrue(filteredProps.containsKey("foo"));
}
@Test
public void testGetProjections() {
ResourceDefinition entityDefinition = new EntityResourceDefinition();
Map<String, Projection> projections = entityDefinition.getProjections();
assertEquals(projections.size(), 3);
assertTrue(projections.containsKey("tags"));
assertTrue(projections.containsKey("traits"));
assertTrue(projections.containsKey("default"));
}
@Test
public void testGetRelations() {
ResourceDefinition entityDefinition = new EntityResourceDefinition();
Map<String, Relation> relations = entityDefinition.getRelations();
assertEquals(relations.size(), 2);
assertTrue(relations.containsKey("tags"));
assertTrue(relations.containsKey("traits"));
}
}
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
* <p/>
* http://www.apache.org/licenses/LICENSE-2.0
* <p/>
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.atlas.catalog.definition;
import org.apache.atlas.catalog.CollectionRequest;
import org.apache.atlas.catalog.InstanceRequest;
import org.apache.atlas.catalog.Request;
import org.apache.atlas.catalog.exception.InvalidPayloadException;
import org.apache.atlas.catalog.projection.Projection;
import org.apache.atlas.catalog.projection.Relation;
import org.apache.atlas.typesystem.types.AttributeDefinition;
import org.testng.annotations.Test;
import java.util.*;
import static org.testng.Assert.assertEquals;
import static org.testng.Assert.assertNull;
import static org.testng.Assert.assertTrue;
/**
* Unit tests for EntityTagResourceDefinition.
*/
public class EntityTagResourceDefinitionTest {
@Test
public void testGetIdPropertyName() {
ResourceDefinition entityTagDefinition = new EntityTagResourceDefinition();
assertEquals(entityTagDefinition.getIdPropertyName(), "name");
}
@Test
public void testGetTypeName() {
ResourceDefinition entityTagDefinition = new EntityTagResourceDefinition();
assertNull(entityTagDefinition.getTypeName());
}
@Test
public void testResolveHref() {
Map<String, Object> resourceProps = new HashMap<>();
resourceProps.put("name", "taxonomy1.term1.term11");
resourceProps.put(EntityTagResourceDefinition.ENTITY_GUID_PROPERTY, "11-22-33");
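// the entity tag href is expected to combine the tagged entity's guid with the fully qualified term name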
ResourceDefinition entityTagDefinition = new EntityTagResourceDefinition();
String href = entityTagDefinition.resolveHref(resourceProps);
assertEquals(href, "v1/entities/11-22-33/tags/taxonomy1.term1.term11");
}
@Test
public void testValidate() throws Exception {
Map<String, Object> properties = new HashMap<>();
properties.put("name", "taxonomy1.termName");
Request request = new InstanceRequest(properties);
ResourceDefinition entityTagDefinition = new EntityTagResourceDefinition();
entityTagDefinition.validate(request);
}
@Test(expectedExceptions = InvalidPayloadException.class)
public void testValidate_missingName() throws Exception {
Map<String, Object> properties = new HashMap<>();
Request request = new InstanceRequest(properties);
ResourceDefinition entityTagDefinition = new EntityTagResourceDefinition();
entityTagDefinition.validate(request);
}
@Test(expectedExceptions = InvalidPayloadException.class)
public void testValidate_invalidProperty() throws Exception {
Map<String, Object> properties = new HashMap<>();
properties.put("name", "foo");
properties.put("description", "desc");
Request request = new InstanceRequest(properties);
ResourceDefinition entityTagDefinition = new EntityTagResourceDefinition();
entityTagDefinition.validate(request);
}
@Test
public void testGetPropertyDefinitions() {
ResourceDefinition entityTagDefinition = new EntityTagResourceDefinition();
Collection<AttributeDefinition> propertyDefinitions = entityTagDefinition.getPropertyDefinitions();
assertEquals(propertyDefinitions.size(), 1);
Set<String> defNames = new HashSet<>();
for (AttributeDefinition def : propertyDefinitions) {
defNames.add(def.name);
}
assertTrue(defNames.contains("name"));
}
@Test
public void testFilterProperties_Instance() {
Map<String, Object> resourceProps = new HashMap<>();
resourceProps.put("id", "111-222-333");
resourceProps.put("name", "nameVal");
resourceProps.put("type", "someType");
resourceProps.put("foo", "fooVal");
resourceProps.put("bar", "barVal");
resourceProps.put("description", "desc");
resourceProps.put("creation_time", "2016:10:10");
resourceProps.put("acceptable_use", "something");
resourceProps.put("available_as_tag", true);
resourceProps.put("other", "otherVal");
Request request = new InstanceRequest(resourceProps);
request.addAdditionalSelectProperties(Collections.singleton("foo"));
ResourceDefinition entityTagDefinition = new EntityTagResourceDefinition();
Map<String, Object> filteredProperties = entityTagDefinition.filterProperties(request, resourceProps);
assertEquals(filteredProperties.size(), 4);
// registered collection props
assertTrue(filteredProperties.containsKey("name"));
assertTrue(filteredProperties.containsKey("description"));
assertTrue(filteredProperties.containsKey("creation_time"));
// added prop
assertTrue(filteredProperties.containsKey("foo"));
}
@Test
public void testFilterProperties_Collection() {
Map<String, Object> resourceProps = new HashMap<>();
resourceProps.put("id", "111-222-333");
resourceProps.put("name", "nameVal");
resourceProps.put("type", "someType");
resourceProps.put("foo", "fooVal");
resourceProps.put("bar", "barVal");
resourceProps.put("description", "desc");
resourceProps.put("creation_time", "2016:10:10");
resourceProps.put("acceptable_use", "something");
resourceProps.put("available_as_tag", true);
resourceProps.put("other", "otherVal");
Request request = new CollectionRequest(resourceProps, "someProperty:someValue");
request.addAdditionalSelectProperties(Collections.singleton("foo"));
ResourceDefinition entityTagDefinition = new EntityTagResourceDefinition();
Map<String, Object> filteredProps = entityTagDefinition.filterProperties(request, resourceProps);
assertEquals(filteredProps.size(), 3);
// registered collection props
assertTrue(filteredProps.containsKey("name"));
assertTrue(filteredProps.containsKey("description"));
// added prop
assertTrue(filteredProps.containsKey("foo"));
}
@Test
public void testGetProjections() {
ResourceDefinition entityTagDefinition = new EntityTagResourceDefinition();
Map<String, Projection> projections = entityTagDefinition.getProjections();
assertEquals(projections.size(), 1);
assertTrue(projections.containsKey("terms"));
}
@Test
public void testGetRelations() {
ResourceDefinition entityTagDefinition = new EntityTagResourceDefinition();
Map<String, Relation> relations = entityTagDefinition.getRelations();
assertTrue(relations.isEmpty());
}
}
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
* <p/>
* http://www.apache.org/licenses/LICENSE-2.0
* <p/>
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.atlas.catalog.definition;
import org.apache.atlas.catalog.CollectionRequest;
import org.apache.atlas.catalog.InstanceRequest;
import org.apache.atlas.catalog.Request;
import org.apache.atlas.catalog.exception.InvalidPayloadException;
import org.apache.atlas.catalog.projection.Projection;
import org.apache.atlas.catalog.projection.Relation;
import org.apache.atlas.typesystem.types.AttributeDefinition;
import org.testng.annotations.Test;
import java.util.*;
import static org.testng.Assert.assertEquals;
import static org.testng.Assert.assertTrue;
/**
* Unit tests for TaxonomyResourceDefinition.
*/
public class TaxonomyResourceDefinitionTest {
@Test
public void testGetIdPropertyName() {
ResourceDefinition taxonomyDefinition = new TaxonomyResourceDefinition();
assertEquals(taxonomyDefinition.getIdPropertyName(), "name");
}
@Test
public void testGetTypeName() {
ResourceDefinition taxonomyDefinition = new TaxonomyResourceDefinition();
assertEquals(taxonomyDefinition.getTypeName(), "Taxonomy");
}
@Test
public void testResolveHref() {
Map<String, Object> resourceProps = new HashMap<>();
resourceProps.put("id", "111-222-333");
resourceProps.put("name", "foo");
ResourceDefinition taxonomyDefinition = new TaxonomyResourceDefinition();
String href = taxonomyDefinition.resolveHref(resourceProps);
assertEquals(href, "v1/taxonomies/foo");
}
@Test
public void testValidate() throws Exception {
Map<String, Object> properties = new HashMap<>();
properties.put("name", "taxonomyName");
properties.put("description", "foo");
Request request = new InstanceRequest(properties);
ResourceDefinition taxonomyDefinition = new TaxonomyResourceDefinition();
taxonomyDefinition.validate(request);
}
@Test(expectedExceptions = InvalidPayloadException.class)
public void testValidate_missingName() throws Exception {
Map<String, Object> properties = new HashMap<>();
properties.put("description", "foo");
Request request = new InstanceRequest(properties);
ResourceDefinition taxonomyDefinition = new TaxonomyResourceDefinition();
taxonomyDefinition.validate(request);
}
@Test(expectedExceptions = InvalidPayloadException.class)
public void testValidate_invalidProperty() throws Exception {
Map<String, Object> properties = new HashMap<>();
properties.put("name", "foo");
properties.put("unknownProperty", "value");
Request request = new InstanceRequest(properties);
ResourceDefinition taxonomyDefinition = new TaxonomyResourceDefinition();
taxonomyDefinition.validate(request);
}
@Test
public void testGetPropertyDefinitions() {
ResourceDefinition taxonomyDefinition = new TaxonomyResourceDefinition();
Collection<AttributeDefinition> propertyDefinitions = taxonomyDefinition.getPropertyDefinitions();
assertEquals(propertyDefinitions.size(), 2);
Set<String> defNames = new HashSet<>();
for (AttributeDefinition def : propertyDefinitions) {
defNames.add(def.name);
}
assertTrue(defNames.contains("name"));
assertTrue(defNames.contains("description"));
}
@Test
public void testFilterProperties_Instance() {
Map<String, Object> resourceProps = new HashMap<>();
resourceProps.put("id", "111-222-333");
resourceProps.put("name", "nameVal");
resourceProps.put("type", "someType");
resourceProps.put("foo", "fooVal");
resourceProps.put("bar", "barVal");
resourceProps.put("description", "desc");
resourceProps.put("creation_time", "2016:10:10");
Request request = new InstanceRequest(resourceProps);
request.addAdditionalSelectProperties(Collections.singleton("foo"));
ResourceDefinition taxonomyDefinition = new TaxonomyResourceDefinition();
Map<String, Object> filteredProperties = taxonomyDefinition.filterProperties(request, resourceProps);
assertEquals(filteredProperties.size(), 4);
// registered collection props
assertTrue(filteredProperties.containsKey("name"));
assertTrue(filteredProperties.containsKey("description"));
assertTrue(filteredProperties.containsKey("creation_time"));
// added prop
assertTrue(filteredProperties.containsKey("foo"));
}
@Test
public void testFilterProperties_Collection() {
Map<String, Object> resourceProps = new HashMap<>();
resourceProps.put("id", "111-222-333");
resourceProps.put("name", "nameVal");
resourceProps.put("type", "someType");
resourceProps.put("foo", "fooVal");
resourceProps.put("bar", "barVal");
resourceProps.put("description", "desc");
resourceProps.put("creation_time", "2016:10:10");
Request request = new CollectionRequest(resourceProps, "someProperty:someValue");
request.addAdditionalSelectProperties(Collections.singleton("foo"));
ResourceDefinition taxonomyDefinition = new TaxonomyResourceDefinition();
Map<String, Object> filteredProps = taxonomyDefinition.filterProperties(request, resourceProps);
assertEquals(filteredProps.size(), 3);
// registered collection props
assertTrue(filteredProps.containsKey("name"));
assertTrue(filteredProps.containsKey("description"));
// added prop
assertTrue(filteredProps.containsKey("foo"));
}
@Test
public void testGetProjections() {
ResourceDefinition taxonomyDefinition = new TaxonomyResourceDefinition();
Map<String, Projection> projections = taxonomyDefinition.getProjections();
assertEquals(projections.size(), 1);
assertTrue(projections.containsKey("terms"));
}
@Test
public void testGetRelations() {
ResourceDefinition taxonomyDefinition = new TaxonomyResourceDefinition();
Map<String, Relation> relations = taxonomyDefinition.getRelations();
assertTrue(relations.isEmpty());
}
}
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
* <p/>
* http://www.apache.org/licenses/LICENSE-2.0
* <p/>
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.atlas.catalog.definition;
import org.apache.atlas.catalog.CollectionRequest;
import org.apache.atlas.catalog.InstanceRequest;
import org.apache.atlas.catalog.Request;
import org.apache.atlas.catalog.exception.InvalidPayloadException;
import org.apache.atlas.catalog.projection.Projection;
import org.apache.atlas.catalog.projection.Relation;
import org.apache.atlas.typesystem.types.AttributeDefinition;
import org.testng.annotations.Test;
import java.util.*;
import static org.testng.Assert.assertEquals;
import static org.testng.Assert.assertTrue;
/**
* Unit tests for TermResourceDefinition.
*/
public class TermResourceDefinitionTest {
@Test
public void testGetIdPropertyName() {
ResourceDefinition termDefinition = new TermResourceDefinition();
assertEquals(termDefinition.getIdPropertyName(), "name");
}
@Test
public void testGetTypeName() {
ResourceDefinition termDefinition = new TermResourceDefinition();
assertEquals(termDefinition.getTypeName(), "Term");
}
@Test
public void testResolveHref() {
Map<String, Object> resourceProps = new HashMap<>();
resourceProps.put("name", "taxonomy1.term1.term11");
ResourceDefinition termDefinition = new TermResourceDefinition();
String href = termDefinition.resolveHref(resourceProps);
assertEquals(href, "v1/taxonomies/taxonomy1/terms/term1/terms/term11");
}
@Test
public void testValidate() throws Exception {
Map<String, Object> properties = new HashMap<>();
properties.put("name", "taxonomy1.termName");
properties.put("description", "foo");
properties.put("available_as_tag", true);
properties.put("acceptable_use", "something");
Request request = new InstanceRequest(properties);
ResourceDefinition termDefinition = new TermResourceDefinition();
termDefinition.validate(request);
}
@Test
public void testValidate_nameOnly() throws Exception {
Map<String, Object> properties = new HashMap<>();
properties.put("name", "taxonomy1.termName");
Request request = new InstanceRequest(properties);
ResourceDefinition termDefinition = new TermResourceDefinition();
termDefinition.validate(request);
}
@Test(expectedExceptions = InvalidPayloadException.class)
public void testValidate_invalidTermName() throws Exception {
Map<String, Object> properties = new HashMap<>();
properties.put("name", "NotQualifiedTermName");
properties.put("description", "foo");
properties.put("available_as_tag", true);
Request request = new InstanceRequest(properties);
ResourceDefinition termDefinition = new TermResourceDefinition();
termDefinition.validate(request);
}
@Test(expectedExceptions = InvalidPayloadException.class)
public void testValidate_missingName() throws Exception {
Map<String, Object> properties = new HashMap<>();
properties.put("description", "foo");
Request request = new InstanceRequest(properties);
ResourceDefinition termDefinition = new TermResourceDefinition();
termDefinition.validate(request);
}
@Test(expectedExceptions = InvalidPayloadException.class)
public void testValidate_invalidProperty() throws Exception {
Map<String, Object> properties = new HashMap<>();
properties.put("name", "foo");
properties.put("unknownProperty", "value");
Request request = new InstanceRequest(properties);
ResourceDefinition termDefinition = new TermResourceDefinition();
termDefinition.validate(request);
}
@Test
public void testGetPropertyDefinitions() {
ResourceDefinition termDefinition = new TermResourceDefinition();
Collection<AttributeDefinition> propertyDefinitions = termDefinition.getPropertyDefinitions();
assertEquals(propertyDefinitions.size(), 4);
Set<String> defNames = new HashSet<>();
for (AttributeDefinition def : propertyDefinitions) {
defNames.add(def.name);
}
assertTrue(defNames.contains("name"));
assertTrue(defNames.contains("description"));
assertTrue(defNames.contains("available_as_tag"));
assertTrue(defNames.contains("acceptable_use"));
}
@Test
public void testFilterProperties_Instance() {
Map<String, Object> resourceProps = new HashMap<>();
resourceProps.put("id", "111-222-333");
resourceProps.put("name", "nameVal");
resourceProps.put("type", "someType");
resourceProps.put("foo", "fooVal");
resourceProps.put("bar", "barVal");
resourceProps.put("description", "desc");
resourceProps.put("creation_time", "2016:10:10");
resourceProps.put("acceptable_use", "something");
resourceProps.put("available_as_tag", true);
resourceProps.put("other", "otherVal");
Request request = new InstanceRequest(resourceProps);
request.addAdditionalSelectProperties(Collections.singleton("foo"));
ResourceDefinition termDefinition = new TermResourceDefinition();
Map<String, Object> filteredProperties = termDefinition.filterProperties(request, resourceProps);
assertEquals(filteredProperties.size(), 6);
// registered collection props
assertTrue(filteredProperties.containsKey("name"));
assertTrue(filteredProperties.containsKey("description"));
assertTrue(filteredProperties.containsKey("available_as_tag"));
assertTrue(filteredProperties.containsKey("acceptable_use"));
assertTrue(filteredProperties.containsKey("creation_time"));
// added prop
assertTrue(filteredProperties.containsKey("foo"));
}
@Test
public void testFilterProperties_Collection() {
Map<String, Object> resourceProps = new HashMap<>();
resourceProps.put("id", "111-222-333");
resourceProps.put("name", "nameVal");
resourceProps.put("type", "someType");
resourceProps.put("foo", "fooVal");
resourceProps.put("bar", "barVal");
resourceProps.put("description", "desc");
resourceProps.put("creation_time", "2016:10:10");
resourceProps.put("acceptable_use", "something");
resourceProps.put("available_as_tag", true);
resourceProps.put("other", "otherVal");
Request request = new CollectionRequest(resourceProps, "someProperty:someValue");
request.addAdditionalSelectProperties(Collections.singleton("foo"));
ResourceDefinition termDefinition = new TermResourceDefinition();
Map<String, Object> filteredProps = termDefinition.filterProperties(request, resourceProps);
assertEquals(filteredProps.size(), 3);
// registered collection props
assertTrue(filteredProps.containsKey("name"));
assertTrue(filteredProps.containsKey("description"));
// added prop
assertTrue(filteredProps.containsKey("foo"));
}
@Test
public void testGetProjections() {
ResourceDefinition termDefinition = new TermResourceDefinition();
Map<String, Projection> projections = termDefinition.getProjections();
assertEquals(projections.size(), 2);
assertTrue(projections.containsKey("terms"));
assertTrue(projections.containsKey("hierarchy"));
}
@Test
public void testGetRelations() {
ResourceDefinition termDefinition = new TermResourceDefinition();
Map<String, Relation> relations = termDefinition.getRelations();
assertTrue(relations.isEmpty());
}
}
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
* <p/>
* http://www.apache.org/licenses/LICENSE-2.0
* <p/>
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.atlas.catalog.query;
import org.apache.atlas.catalog.VertexWrapper;
import org.testng.annotations.Test;
import static org.easymock.EasyMock.createStrictMock;
import static org.easymock.EasyMock.replay;
import static org.easymock.EasyMock.verify;
import static org.testng.Assert.assertFalse;
import static org.testng.Assert.assertNull;
import static org.testng.Assert.assertTrue;
/**
* Unit tests for AlwaysQueryExpression.
*/
public class AlwaysQueryExpressionTest {
@Test
public void testEvaluate() {
VertexWrapper v = createStrictMock(VertexWrapper.class);
replay(v);
QueryExpression expression = new AlwaysQueryExpression();
// always returns true
assertTrue(expression.evaluate(v));
verify(v);
}
@Test
public void testEvaluate_negated() {
VertexWrapper v = createStrictMock(VertexWrapper.class);
replay(v);
QueryExpression expression = new AlwaysQueryExpression();
expression.setNegate();
// negation inverts the result, so the expression always evaluates to false
assertFalse(expression.evaluate(v));
assertTrue(expression.isNegate());
verify(v);
}
@Test
public void testGetProperties() {
VertexWrapper v = createStrictMock(VertexWrapper.class);
replay(v);
QueryExpression expression = new AlwaysQueryExpression();
assertTrue(expression.getProperties().isEmpty());
verify(v);
}
@Test
public void testAsPipe() {
VertexWrapper v = createStrictMock(VertexWrapper.class);
replay(v);
QueryExpression expression = new AlwaysQueryExpression();
assertNull(expression.asPipe());
verify(v);
}
}
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
* <p/>
* http://www.apache.org/licenses/LICENSE-2.0
* <p/>
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.atlas.catalog.query;
import org.apache.atlas.catalog.CollectionRequest;
import org.apache.atlas.catalog.InstanceRequest;
import org.apache.atlas.catalog.Request;
import org.apache.atlas.catalog.TermPath;
import org.apache.atlas.catalog.definition.EntityResourceDefinition;
import org.apache.atlas.catalog.definition.EntityTagResourceDefinition;
import org.testng.annotations.Test;
import java.util.Collections;
import java.util.HashMap;
import java.util.Map;
import static org.testng.Assert.assertEquals;
/**
* Unit tests for QueryFactory.
*/
public class QueryFactoryTest {
@Test
public void testCreateTaxonomyQuery() throws Exception {
Map<String, Object> requestProps = new HashMap<>();
requestProps.put("name", "test_taxonomy");
Request request = new InstanceRequest(requestProps);
QueryFactory factory = new QueryFactory();
AtlasTaxonomyQuery query = (AtlasTaxonomyQuery) factory.createTaxonomyQuery(request);
QueryExpression queryExpression = query.getQueryExpression();
assertEquals(queryExpression.getClass(), TermQueryExpression.class);
assertEquals(queryExpression.getField(), "name");
assertEquals(queryExpression.getExpectedValue(), "test_taxonomy");
assertEquals(query.getRequest(), request);
assertEquals(query.getResourceDefinition().getTypeName(), "Taxonomy");
}
@Test
public void testCreateTermQuery() throws Exception {
Map<String, Object> requestProps = new HashMap<>();
requestProps.put("name", "test_taxonomy.term1");
requestProps.put("termPath", new TermPath("test_taxonomy.term1"));
Request request = new InstanceRequest(requestProps);
QueryFactory factory = new QueryFactory();
AtlasTermQuery query = (AtlasTermQuery) factory.createTermQuery(request);
QueryExpression queryExpression = query.getQueryExpression();
assertEquals(queryExpression.getClass(), TermQueryExpression.class);
assertEquals(queryExpression.getField(), "name");
assertEquals(queryExpression.getExpectedValue(), "test_taxonomy.term1");
assertEquals(query.getRequest(), request);
assertEquals(query.getResourceDefinition().getTypeName(), "Term");
}
@Test
public void testCreateEntityQuery() throws Exception {
Map<String, Object> requestProps = new HashMap<>();
requestProps.put("id", "foo");
Request request = new InstanceRequest(requestProps);
QueryFactory factory = new QueryFactory();
AtlasEntityQuery query = (AtlasEntityQuery) factory.createEntityQuery(request);
QueryExpression queryExpression = query.getQueryExpression();
assertEquals(queryExpression.getClass(), TermQueryExpression.class);
assertEquals(queryExpression.getField(), "id");
assertEquals(queryExpression.getExpectedValue(), "foo");
assertEquals(query.getRequest(), request);
assertEquals(query.getResourceDefinition().getClass(), EntityResourceDefinition.class);
}
@Test
public void testCreateEntityTagQuery() throws Exception {
Map<String, Object> requestProps = new HashMap<>();
requestProps.put("id", "entity_id");
requestProps.put("name", "test_taxonomy.term1");
Request request = new InstanceRequest(requestProps);
QueryFactory factory = new QueryFactory();
AtlasEntityTagQuery query = (AtlasEntityTagQuery) factory.createEntityTagQuery(request);
QueryExpression queryExpression = query.getQueryExpression();
assertEquals(queryExpression.getClass(), TermQueryExpression.class);
assertEquals(queryExpression.getField(), "name");
assertEquals(queryExpression.getExpectedValue(), "test_taxonomy.term1");
assertEquals(query.getRequest(), request);
assertEquals(query.getResourceDefinition().getClass(), EntityTagResourceDefinition.class);
}
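// The remaining tests exercise collection requests, where the generated expression type is derived
// from the query string syntax (term, prefix, range, wildcard, boolean, projection).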
@Test
public void testCollectionQuery_TermQuery() throws Exception {
String queryString = "name:test_taxonomy";
Request request = new CollectionRequest(Collections.<String, Object>emptyMap(), queryString);
QueryFactory factory = new QueryFactory();
AtlasTaxonomyQuery query = (AtlasTaxonomyQuery) factory.createTaxonomyQuery(request);
QueryExpression queryExpression = query.getQueryExpression();
assertEquals(queryExpression.getClass(), TermQueryExpression.class);
assertEquals(queryExpression.getField(), "name");
assertEquals(queryExpression.getExpectedValue(), "test_taxonomy");
assertEquals(query.getRequest(), request);
assertEquals(query.getResourceDefinition().getTypeName(), "Taxonomy");
}
@Test
public void testCollectionQuery_PrefixQuery() throws Exception {
String queryString = "name:t*";
Request request = new CollectionRequest(Collections.<String, Object>emptyMap(), queryString);
QueryFactory factory = new QueryFactory();
AtlasTaxonomyQuery query = (AtlasTaxonomyQuery) factory.createTaxonomyQuery(request);
QueryExpression queryExpression = query.getQueryExpression();
assertEquals(queryExpression.getClass(), PrefixQueryExpression.class);
assertEquals(queryExpression.getField(), "name");
assertEquals(queryExpression.getExpectedValue(), "t");
assertEquals(query.getRequest(), request);
assertEquals(query.getResourceDefinition().getTypeName(), "Taxonomy");
}
@Test
public void testCollectionQuery_TermRangeQuery() throws Exception {
String queryString = "creation_time:[2013-01-01:07:29:00 TO 2017-01-02]";
Request request = new CollectionRequest(Collections.<String, Object>emptyMap(), queryString);
QueryFactory factory = new QueryFactory();
AtlasTaxonomyQuery query = (AtlasTaxonomyQuery) factory.createTaxonomyQuery(request);
QueryExpression queryExpression = query.getQueryExpression();
assertEquals(queryExpression.getClass(), TermRangeQueryExpression.class);
assertEquals(queryExpression.getField(), "creation_time");
assertEquals(query.getRequest(), request);
assertEquals(query.getResourceDefinition().getTypeName(), "Taxonomy");
}
@Test
public void testCollectionQuery_WildcardQuery() throws Exception {
String queryString = "name:ta?onomy";
Request request = new CollectionRequest(Collections.<String, Object>emptyMap(), queryString);
QueryFactory factory = new QueryFactory();
AtlasTaxonomyQuery query = (AtlasTaxonomyQuery) factory.createTaxonomyQuery(request);
QueryExpression queryExpression = query.getQueryExpression();
assertEquals(queryExpression.getClass(), WildcardQueryExpression.class);
assertEquals(queryExpression.getField(), "name");
assertEquals(queryExpression.getExpectedValue(), "ta?onomy");
assertEquals(query.getRequest(), request);
assertEquals(query.getResourceDefinition().getTypeName(), "Taxonomy");
}
@Test
public void testCollectionQuery_BooleanQuery() throws Exception {
String queryString = "name:foo OR name:bar";
Request request = new CollectionRequest(Collections.<String, Object>emptyMap(), queryString);
QueryFactory factory = new QueryFactory();
AtlasTaxonomyQuery query = (AtlasTaxonomyQuery) factory.createTaxonomyQuery(request);
QueryExpression queryExpression = query.getQueryExpression();
assertEquals(queryExpression.getClass(), BooleanQueryExpression.class);
assertEquals(query.getRequest(), request);
assertEquals(query.getResourceDefinition().getTypeName(), "Taxonomy");
}
@Test
public void testCollectionQuery_ProjectionQuery() throws Exception {
String queryString = "relation/name:foo";
Request request = new CollectionRequest(Collections.<String, Object>emptyMap(), queryString);
QueryFactory factory = new QueryFactory();
AtlasTaxonomyQuery query = (AtlasTaxonomyQuery) factory.createTaxonomyQuery(request);
QueryExpression queryExpression = query.getQueryExpression();
assertEquals(queryExpression.getClass(), ProjectionQueryExpression.class);
ProjectionQueryExpression projectionExpression = (ProjectionQueryExpression) queryExpression;
QueryExpression underlyingExpression = projectionExpression.getUnderlyingExpression();
assertEquals(underlyingExpression.getClass(), TermQueryExpression.class);
assertEquals(underlyingExpression.getField(), QueryFactory.escape("relation/name"));
assertEquals(underlyingExpression.getExpectedValue(), "foo");
assertEquals(query.getRequest(), request);
assertEquals(query.getResourceDefinition().getTypeName(), "Taxonomy");
}
}
@@ -2,8 +2,8 @@
##r-READ, w-WRITE, u-UPDATE, d-DELETE
##Policy_Name;;User_Name1:Operations_Allowed,User_Name2:Operations_Allowed;;Group_Name1:Operations_Allowed,Group_Name2:Operations_Allowed;;Resource_Type1:Resource_Name,Resource_Type2:Resource_Name
##
-adminPolicy;;admin:rwud;;ROLE_ADMIN:rwud;;type:*,entity:*,operation:*
-typeReadPolicy;;nixon:rw;;;;type:*,entity:*
-classReadPolicy;;saqeeb:r;;;;type:*,entity:*
-dataScientistPolicy;;;;DATA_SCIENTIST:r;;type:*,entity:*
-dataStewardPolicy;;;;DATA_STEWARD:rwu;;type:*,entity:*
+adminPolicy;;admin:rwud;;ROLE_ADMIN:rwud;;type:*,entity:*,operation:*,taxonomy:*,term:*
+typeReadPolicy;;nixon:rw;;;;type:*,entity:*,taxonomy:*,term:*
+classReadPolicy;;saqeeb:r;;;;type:*,entity:*,taxonomy:*,term:*
+dataScientistPolicy;;;;DATA_SCIENTIST:r;;type:*,entity:*,taxonomy:*,term:*
+dataStewardPolicy;;;;DATA_STEWARD:rwu;;type:*,entity:*,taxonomy:*,term:*
@@ -463,6 +463,7 @@
<module>graphdb</module>
<module>titan</module>
<module>repository</module>
+<module>catalog</module>
<!-- <module>dashboard</module> -->
<module>dashboardv2</module>
<module>webapp</module>
@@ -1087,6 +1088,12 @@
<version>${project.version}</version>
</dependency>
+<dependency>
+<groupId>org.apache.atlas</groupId>
+<artifactId>atlas-catalog</artifactId>
+<version>${project.version}</version>
+</dependency>
<!--Scala dependencies-->
<dependency>
<groupId>org.scala-lang</groupId>
@@ -1238,7 +1245,7 @@
<dependency>
<groupId>org.easymock</groupId>
<artifactId>easymock</artifactId>
-<version>2.4</version>
+<version>3.4</version>
<scope>test</scope>
</dependency>
@@ -21,6 +21,7 @@ ATLAS-409 Atlas will not import avro tables with schema read from a file (dosset
ATLAS-379 Create sqoop and falcon metadata addons (venkatnrangan,bvellanki,sowmyaramesh via shwethags)
ALL CHANGES:
+ATLAS-491 Business Catalog / Taxonomy (jspeidel via yhemanth)
ATLAS-713 Entity lineage based on entity id (shwethags)
ATLAS-736 UI - BUG :: displaying timestamp values for hive_db description (kevalbhatt18 via yhemanth)
ATLAS-784 Configure config.store.uri for Falcon hook IT (yhemanth)
@@ -313,10 +313,14 @@ public class DefaultMetadataService implements MetadataService, ActiveStateChang
ITypedReferenceableInstance[] typedInstances = deserializeClassInstances(entityInstanceDefinition);
-final List<String> guids = repository.createEntities(typedInstances);
+List<String> guids = createEntities(typedInstances);
+return new JSONArray(guids).toString();
+}
+public List<String> createEntities(ITypedReferenceableInstance[] typedInstances) throws AtlasException {
+final List<String> guids = repository.createEntities(typedInstances);
onEntitiesAdded(guids);
-return new JSONArray(guids).toString();
+return guids;
}
private ITypedReferenceableInstance[] deserializeClassInstances(String entityInstanceDefinition)
@@ -327,18 +331,7 @@ public class DefaultMetadataService implements MetadataService, ActiveStateChang
for (int index = 0; index < referableInstances.length(); index++) {
Referenceable entityInstance =
InstanceSerialization.fromJsonReferenceable(referableInstances.getString(index), true);
-final String entityTypeName = entityInstance.getTypeName();
-ParamChecker.notEmpty(entityTypeName, "Entity type cannot be null");
-ClassType entityType = typeSystem.getDataType(ClassType.class, entityTypeName);
-//Both assigned id and values are required for full update
-//classtype.convert() will remove values if id is assigned. So, set temp id, convert and
-// then replace with original id
-Id origId = entityInstance.getId();
-entityInstance.replaceWithNewId(new Id(entityInstance.getTypeName()));
-ITypedReferenceableInstance typedInstrance = entityType.convert(entityInstance, Multiplicity.REQUIRED);
-((ReferenceableInstance)typedInstrance).replaceWithNewId(origId);
+ITypedReferenceableInstance typedInstrance = getTypedReferenceableInstance(entityInstance);
instances[index] = typedInstrance;
}
return instances;
@@ -350,6 +343,23 @@ public class DefaultMetadataService implements MetadataService, ActiveStateChang
}
}
@Override
public ITypedReferenceableInstance getTypedReferenceableInstance(Referenceable entityInstance) throws AtlasException {
final String entityTypeName = entityInstance.getTypeName();
ParamChecker.notEmpty(entityTypeName, "Entity type cannot be null");
ClassType entityType = typeSystem.getDataType(ClassType.class, entityTypeName);
//Both assigned id and values are required for full update
//classtype.convert() will remove values if id is assigned. So, set temp id, convert and
// then replace with original id
Id origId = entityInstance.getId();
entityInstance.replaceWithNewId(new Id(entityInstance.getTypeName()));
ITypedReferenceableInstance typedInstrance = entityType.convert(entityInstance, Multiplicity.REQUIRED);
((ReferenceableInstance)typedInstrance).replaceWithNewId(origId);
return typedInstrance;
}
/**
* Return the definition for the given guid.
*
@@ -579,6 +589,10 @@ public class DefaultMetadataService implements MetadataService, ActiveStateChang
ParamChecker.notEmpty(traitInstanceDefinition, "trait instance definition");
ITypedStruct traitInstance = deserializeTraitInstance(traitInstanceDefinition);
addTrait(guid, traitInstance);
}
public void addTrait(String guid, ITypedStruct traitInstance) throws AtlasException {
final String traitName = traitInstance.getTypeName();
// ensure trait type is already registered with the TS
@@ -591,7 +605,7 @@ public class DefaultMetadataService implements MetadataService, ActiveStateChang
// ensure trait is not already defined
Preconditions
.checkArgument(!getTraitNames(guid).contains(traitName), "trait=%s is already defined for entity=%s",
traitName, guid);
repository.addTrait(guid, traitInstance);
@@ -601,8 +615,12 @@ public class DefaultMetadataService implements MetadataService, ActiveStateChang
private ITypedStruct deserializeTraitInstance(String traitInstanceDefinition)
throws AtlasException {
+return createTraitInstance(InstanceSerialization.fromJsonStruct(traitInstanceDefinition, true));
+}
+@Override
+public ITypedStruct createTraitInstance(Struct traitInstance) throws AtlasException {
try {
-Struct traitInstance = InstanceSerialization.fromJsonStruct(traitInstanceDefinition, true);
final String entityTypeName = traitInstance.getTypeName();
ParamChecker.notEmpty(entityTypeName, "entity type");
@@ -21,7 +21,10 @@ package org.apache.atlas.services;
import org.apache.atlas.AtlasException;
import org.apache.atlas.EntityAuditEvent;
import org.apache.atlas.listener.EntityChangeListener;
import org.apache.atlas.typesystem.ITypedReferenceableInstance;
import org.apache.atlas.typesystem.ITypedStruct;
import org.apache.atlas.typesystem.Referenceable;
import org.apache.atlas.typesystem.Struct;
import org.apache.atlas.typesystem.types.DataTypes;
import org.codehaus.jettison.json.JSONObject;
@@ -80,6 +83,26 @@
String createEntities(String entityDefinition) throws AtlasException;
/**
* Get a typed entity instance.
*
* @param entity entity
* @return typed entity instance
*
* @throws AtlasException if any failure occurs
*/
ITypedReferenceableInstance getTypedReferenceableInstance(Referenceable entity) throws AtlasException;
/**
* Create entity instances.
*
* @param typedInstances instances to create
* @return collection of guids for created entities
*
* @throws AtlasException if unable to create the entities
*/
List<String> createEntities(ITypedReferenceableInstance[] typedInstances) throws AtlasException;
/**
* Return the definition for the given guid.
*
* @param guid guid
@@ -166,6 +189,11 @@
*/
void addTrait(String guid, String traitInstanceDefinition) throws AtlasException;
//todo:
void addTrait(String guid, ITypedStruct traitInstance) throws AtlasException;
ITypedStruct createTraitInstance(Struct traitInstance) throws AtlasException;
/**
* Deletes a given trait from an existing entity represented by a guid.
*
@@ -108,6 +108,11 @@
<groupId>org.apache.hadoop</groupId>
<artifactId>hadoop-hdfs</artifactId>
</dependency>
+<dependency>
+<groupId>org.apache.atlas</groupId>
+<artifactId>atlas-catalog</artifactId>
+</dependency>
<!-- supports simple auth handler -->
<dependency>
@@ -19,8 +19,6 @@
package org.apache.atlas.authorize;
import java.util.ArrayList;
-import java.util.HashSet;
-import java.util.List;
import java.util.List;
import javax.servlet.ServletException;
@@ -55,9 +53,13 @@ public class AtlasAuthorizationUtils {
}
if (u.startsWith(BASE_URL)) {
u = parse(u, BASE_URL);
+} else {
+// strip of leading '/'
+u = u.substring(1);
+}
String[] split = u.split("/");
-return split[0];
+String api = split[0];
+return (! api.equals("v1")) ? api : String.format("v1/%s", split[1]);
}
public static AtlasActionTypes getAtlasAction(String method) {
@@ -99,7 +101,7 @@ public class AtlasAuthorizationUtils {
if (api.startsWith("types")) {
resourceTypes.add(AtlasResourceTypes.TYPE);
-} else if ((api.startsWith("discovery") && api.contains("gremlin")) || api.startsWith("admin")
+} else if ((api.startsWith("discovery") && contextPath.contains("gremlin")) || api.startsWith("admin")
|| api.startsWith("graph")) {
resourceTypes.add(AtlasResourceTypes.OPERATION);
} else if ((api.startsWith("entities") && contextPath.contains("traits")) || api.startsWith("discovery")) {
@@ -107,6 +109,21 @@
resourceTypes.add(AtlasResourceTypes.TYPE);
} else if (api.startsWith("entities") || api.startsWith("lineage")) {
resourceTypes.add(AtlasResourceTypes.ENTITY);
+} else if (api.startsWith("v1/taxonomies")) {
+resourceTypes.add(AtlasResourceTypes.TAXONOMY);
+// taxonomies are modeled as entities
+resourceTypes.add(AtlasResourceTypes.ENTITY);
+if (contextPath.contains("terms")) {
+resourceTypes.add(AtlasResourceTypes.TERM);
+// terms are modeled as traits
+resourceTypes.add(AtlasResourceTypes.TYPE);
+}
+} else if (api.startsWith("v1/entities")) {
+resourceTypes.add(AtlasResourceTypes.ENTITY);
+if (contextPath.contains("tags")) {
+// tags are modeled as traits
+resourceTypes.add(AtlasResourceTypes.TYPE);
+}
} else {
LOG.error("Unable to find Atlas Resource corresponding to : " + api);
throw new ServletException("Unable to find Atlas Resource corresponding to : " + api);
@@ -19,5 +19,5 @@
package org.apache.atlas.authorize;
public enum AtlasResourceTypes {
-ENTITY, TYPE, OPERATION;
+ENTITY, TYPE, OPERATION, TAXONOMY, TERM;
}
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
* <p/>
* http://www.apache.org/licenses/LICENSE-2.0
* <p/>
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.atlas.web.resources;
import com.google.gson.Gson;
import com.google.gson.JsonSyntaxException;
import org.apache.atlas.catalog.*;
import org.apache.atlas.catalog.exception.*;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import javax.ws.rs.core.Context;
import javax.ws.rs.core.UriInfo;
import javax.xml.bind.annotation.XmlRootElement;
import java.io.UnsupportedEncodingException;
import java.net.URLDecoder;
import java.util.Collection;
import java.util.Map;
/**
* Base class for all v1 API services.
*/
public abstract class BaseService {
private static final Gson gson = new Gson();
private final Logger LOG = LoggerFactory.getLogger(getClass());
protected Result getResource(ResourceProvider provider, Request request)
throws ResourceNotFoundException {
try {
return provider.getResourceById(request);
} catch (RuntimeException e) {
throw wrapRuntimeException(e);
}
}
protected Result getResources(ResourceProvider provider, Request request)
throws ResourceNotFoundException, InvalidQueryException {
try {
return provider.getResources(request);
} catch (RuntimeException e) {
throw wrapRuntimeException(e);
}
}
protected void createResource(ResourceProvider provider, Request request) throws CatalogException {
try {
provider.createResource(request);
} catch (RuntimeException e) {
throw wrapRuntimeException(e);
}
}
protected Collection<String> createResources(ResourceProvider provider, Request request) throws CatalogException {
try {
return provider.createResources(request);
} catch (RuntimeException e) {
throw wrapRuntimeException(e);
}
}
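// Returns the raw query string portion of the request URI, or null if none is present.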
protected String getQueryString(@Context UriInfo ui) {
String uri = ui.getRequestUri().toASCIIString();
int qsBegin = uri.indexOf("?");
return (qsBegin == -1) ? null : uri.substring(qsBegin + 1);
}
protected <T extends Map> T parsePayload(String body) throws InvalidPayloadException {
T properties;
try {
properties = gson.<T>fromJson(body, Map.class);
} catch (JsonSyntaxException e) {
LOG.info("Unable to parse json in request body", e);
throw new InvalidPayloadException("Request payload contains invalid JSON: " + e.getMessage());
}
return properties;
}
private RuntimeException wrapRuntimeException(RuntimeException e) {
return e instanceof CatalogRuntimeException ? e : new CatalogRuntimeException(e);
}
protected String decode(String s) throws CatalogException {
try {
return s == null ? null : URLDecoder.decode(s, "UTF-8");
} catch (UnsupportedEncodingException e) {
throw new CatalogException("Unable to decode URL: " + e.getMessage(), 500);
}
}
@XmlRootElement
// The name of this class is used as the collection name in the returned JSON when a collection is returned.
public static class Results {
public String href;
public int status;
public Results() {
// required by JAXB
}
public Results(String href, int status) {
this.href = href;
this.status = status;
}
}
}
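For context, a minimal standalone sketch (not part of this commit; the class name and sample payloads are illustrative) of the Gson behavior that parsePayload() relies on: a JSON object body deserializes to a java.util.Map, and malformed JSON raises JsonSyntaxException, which BaseService then reports as an InvalidPayloadException.
import com.google.gson.Gson;
import com.google.gson.JsonSyntaxException;
import java.util.Map;
public class PayloadParsingSketch {
    public static void main(String[] args) {
        Gson gson = new Gson();
        // Well-formed body: deserializes to a Map, as consumed by createTaxonomy()/createTerm().
        Map<?, ?> properties = gson.fromJson("{\"description\": \"my taxonomy\"}", Map.class);
        System.out.println(properties.get("description"));   // prints: my taxonomy
        // Malformed body: Gson throws JsonSyntaxException; BaseService.parsePayload()
        // converts this into an InvalidPayloadException for the caller.
        try {
            gson.fromJson("{not valid json}", Map.class);
        } catch (JsonSyntaxException e) {
            System.out.println("invalid payload: " + e.getMessage());
        }
    }
}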
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.atlas.web.resources;
import org.apache.atlas.catalog.exception.CatalogException;
import org.apache.atlas.web.util.Servlets;
import javax.ws.rs.core.Response;
import javax.ws.rs.ext.ExceptionMapper;
import javax.ws.rs.ext.Provider;
import javax.xml.bind.annotation.XmlRootElement;
/**
* Exception mapper for CatalogException.
*/
@Provider
public class CatalogExceptionMapper implements ExceptionMapper<CatalogException> {
@Override
public Response toResponse(CatalogException e) {
return Response.status(e.getStatus()).entity(
new ErrorBean(e)).type(Servlets.JSON_MEDIA_TYPE).build();
}
@XmlRootElement
public static class ErrorBean {
public int status;
public String message;
public ErrorBean() {
// required for JAXB
}
public ErrorBean(CatalogException ex) {
this.status = ex.getStatus();
this.message = ex.getMessage();
}
public int getStatus() {
return status;
}
public String getMessage() {
return message;
}
}
}
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.atlas.web.resources;
import org.apache.atlas.catalog.exception.CatalogRuntimeException;
import org.apache.atlas.web.util.Servlets;
import javax.ws.rs.core.Response;
import javax.ws.rs.ext.ExceptionMapper;
import javax.ws.rs.ext.Provider;
import javax.xml.bind.annotation.XmlRootElement;
import java.io.PrintWriter;
import java.io.StringWriter;
/**
 * Exception mapper for CatalogRuntimeException.
*/
@Provider
public class CatalogRuntimeExceptionMapper implements ExceptionMapper<CatalogRuntimeException> {
@Override
public Response toResponse(CatalogRuntimeException e) {
return Response.status(e.getStatusCode()).entity(
new ErrorBean(e)).type(Servlets.JSON_MEDIA_TYPE).build();
}
@XmlRootElement
public static class ErrorBean {
private static final String MSG_PREFIX = "An unexpected error has occurred. ";
public int status;
public String message;
public String stackTrace;
//todo: error code, developerMsg ...
public ErrorBean() {
// required for JAXB
}
public ErrorBean(CatalogRuntimeException ex) {
this.status = 500;
this.message = String.format("%s%s : %s", MSG_PREFIX, ex.toString(), ex.getCause().toString());
this.stackTrace = getStackTraceFromException(ex);
}
public int getStatus() {
return status;
}
public String getMessage() {
return message;
}
public String getStackTrace() {
return stackTrace;
}
private String getStackTraceFromException(RuntimeException e) {
StringWriter sw = new StringWriter();
e.printStackTrace(new PrintWriter(sw));
return sw.toString();
}
}
}
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
* <p/>
* http://www.apache.org/licenses/LICENSE-2.0
* <p/>
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.atlas.web.resources;
import org.apache.atlas.catalog.*;
import org.apache.atlas.catalog.exception.CatalogException;
import org.apache.atlas.services.MetadataService;
import org.apache.atlas.web.util.Servlets;
import javax.inject.Inject;
import javax.inject.Singleton;
import javax.ws.rs.*;
import javax.ws.rs.core.*;
import java.util.*;
/**
* Service which handles API requests for v1 entity resources.
*/
@Path("v1/entities")
@Singleton
public class EntityService extends BaseService {
private final EntityResourceProvider entityResourceProvider;
private final EntityTagResourceProvider entityTagResourceProvider;
private static JsonSerializer m_serializer = new JsonSerializer();
@Inject
public EntityService(MetadataService metadataService) {
DefaultTypeSystem typeSystem = new DefaultTypeSystem(metadataService);
entityResourceProvider = new EntityResourceProvider(typeSystem);
entityTagResourceProvider = new EntityTagResourceProvider(typeSystem);
}
@GET
@Produces(Servlets.JSON_MEDIA_TYPE)
public Response getEntities(@Context HttpHeaders headers, @Context UriInfo ui) throws CatalogException {
String queryString = decode(getQueryString(ui));
BaseRequest request = new CollectionRequest(Collections.<String, Object>emptyMap(), queryString);
Result result = getResources(entityResourceProvider, request);
return Response.status(Response.Status.OK).entity(m_serializer.serialize(result, ui)).build();
}
@GET
@Path("{entityId}")
@Produces(Servlets.JSON_MEDIA_TYPE)
public Response getEntity(@Context HttpHeaders headers,
@Context UriInfo ui,
@PathParam("entityId") String entityId) throws CatalogException {
BaseRequest request = new InstanceRequest(Collections.<String, Object>singletonMap("id", entityId));
Result result = getResource(entityResourceProvider, request);
return Response.status(Response.Status.OK).entity(m_serializer.serialize(result, ui)).build();
}
@GET
@Path("{entityId}/tags/{tag}")
@Produces(Servlets.JSON_MEDIA_TYPE)
public Response getEntityTag(@Context HttpHeaders headers,
@Context UriInfo ui,
@PathParam("entityId") String entityId,
@PathParam("tag") String tagName) throws CatalogException {
Map<String, Object> properties = new HashMap<>();
properties.put("id", entityId);
properties.put("name", tagName);
Result result = getResource(entityTagResourceProvider, new InstanceRequest(properties));
return Response.status(Response.Status.OK).entity(m_serializer.serialize(result, ui)).build();
}
@GET
@Path("{entityId}/tags")
@Produces(Servlets.JSON_MEDIA_TYPE)
public Response getEntityTags(@Context HttpHeaders headers,
@Context UriInfo ui,
@PathParam("entityId") String entityGuid) throws CatalogException {
BaseRequest request = new CollectionRequest(Collections.<String, Object>singletonMap("id", entityGuid),
decode(getQueryString(ui)));
Result result = getResources(entityTagResourceProvider, request);
return Response.status(Response.Status.OK).entity(m_serializer.serialize(result, ui)).build();
}
@POST
@Path("{entityId}/tags/{tag}")
@Produces(Servlets.JSON_MEDIA_TYPE)
public Response tagEntity(String body,
@Context HttpHeaders headers,
@Context UriInfo ui,
@PathParam("entityId") String entityId,
@PathParam("tag") String tagName) throws CatalogException {
Map<String, Object> properties = new HashMap<>();
properties.put("id", entityId);
properties.put("name", tagName);
createResource(entityTagResourceProvider, new InstanceRequest(properties));
return Response.status(Response.Status.CREATED).entity(
new Results(ui.getRequestUri().toString(), 201)).build();
}
@POST
@Produces(Servlets.JSON_MEDIA_TYPE)
public Response tagEntities(String body,
@Context HttpHeaders headers,
@Context UriInfo ui) throws CatalogException {
Map<String, Object> properties = parsePayload(body);
if (properties.get("tags") == null || properties.size() != 1) {
throw new CatalogException(
"Invalid Request, no 'tags' property specified. Creation of entity resource not supported.", 400);
}
String queryString = decode(getQueryString(ui));
Collection<String> createResults = createResources(
entityTagResourceProvider, new CollectionRequest(properties, queryString));
Collection<Results> result = new ArrayList<>();
for (String relativeUrl : createResults) {
result.add(new Results(ui.getBaseUri().toString() + relativeUrl, 201));
}
return Response.status(Response.Status.CREATED).entity(
new GenericEntity<Collection<Results>>(result) {}).build();
}
}
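A hypothetical client sketch, not part of this commit, showing how the v1 entity endpoints above might be exercised over HTTP. The host, port, entity id, and tag name are assumptions (Atlas commonly listens on port 21000); any authentication configured for the server is ignored here.
import java.io.BufferedReader;
import java.io.InputStreamReader;
import java.net.HttpURLConnection;
import java.net.URL;
public class EntityServiceClientSketch {
    // assumed base URL for a local Atlas instance
    private static final String BASE_URL = "http://localhost:21000/api/atlas";
    public static void main(String[] args) throws Exception {
        // GET /api/atlas/v1/entities returns the entity collection as JSON
        HttpURLConnection get = (HttpURLConnection) new URL(BASE_URL + "/v1/entities").openConnection();
        get.setRequestMethod("GET");
        get.setRequestProperty("Accept", "application/json");
        try (BufferedReader reader = new BufferedReader(new InputStreamReader(get.getInputStream()))) {
            String line;
            while ((line = reader.readLine()) != null) {
                System.out.println(line);
            }
        }
        // POST /api/atlas/v1/entities/{entityId}/tags/{tag} associates a tag (trait)
        // with an entity; tagEntity() responds with 201 and an href to the new resource.
        HttpURLConnection post = (HttpURLConnection) new URL(BASE_URL + "/v1/entities/111/tags/PII").openConnection();
        post.setRequestMethod("POST");
        post.setDoOutput(true);
        post.getOutputStream().close();   // empty request body
        System.out.println("tagEntity status: " + post.getResponseCode());
    }
}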
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
* <p/>
* http://www.apache.org/licenses/LICENSE-2.0
* <p/>
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.atlas.web.resources;
import org.apache.atlas.catalog.*;
import org.apache.atlas.catalog.Request;
import org.apache.atlas.catalog.exception.CatalogException;
import org.apache.atlas.catalog.exception.InvalidPayloadException;
import org.apache.atlas.services.MetadataService;
import org.apache.atlas.web.util.Servlets;
import javax.inject.Inject;
import javax.inject.Singleton;
import javax.ws.rs.*;
import javax.ws.rs.core.*;
import java.util.Collections;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
/**
* Service which handles API requests for taxonomy and term resources.
*/
@Path("v1/taxonomies")
@Singleton
public class TaxonomyService extends BaseService {
private final TaxonomyResourceProvider taxonomyResourceProvider;
private final TermResourceProvider termResourceProvider;
private static JsonSerializer serializer = new JsonSerializer();
@Inject
public TaxonomyService(MetadataService metadataService) {
DefaultTypeSystem typeSystem = new DefaultTypeSystem(metadataService);
taxonomyResourceProvider = new TaxonomyResourceProvider(typeSystem);
termResourceProvider = new TermResourceProvider(typeSystem);
}
@GET
@Path("{taxonomyName}")
@Produces(Servlets.JSON_MEDIA_TYPE)
public Response getTaxonomy(@Context HttpHeaders headers,
@Context UriInfo ui,
@PathParam("taxonomyName") String taxonomyName) throws CatalogException {
Map<String, Object> properties = new HashMap<>();
properties.put("name", taxonomyName);
Result result = getResource(taxonomyResourceProvider, new InstanceRequest(properties));
return Response.status(Response.Status.OK).entity(serializer.serialize(result, ui)).build();
}
@GET
@Produces(Servlets.JSON_MEDIA_TYPE)
public Response getTaxonomies(@Context HttpHeaders headers, @Context UriInfo ui) throws CatalogException {
String queryString = decode(getQueryString(ui));
Request request = new CollectionRequest(Collections.<String, Object>emptyMap(), queryString);
Result result = getResources(taxonomyResourceProvider, request);
return Response.status(Response.Status.OK).entity(serializer.serialize(result, ui)).build();
}
@POST
@Path("{taxonomyName}")
@Produces(Servlets.JSON_MEDIA_TYPE)
public Response createTaxonomy(String body,
@Context HttpHeaders headers,
@Context UriInfo ui,
@PathParam("taxonomyName") String taxonomyName) throws CatalogException {
Map<String, Object> properties = parsePayload(body);
properties.put("name", taxonomyName);
createResource(taxonomyResourceProvider, new InstanceRequest(properties));
return Response.status(Response.Status.CREATED).entity(
new Results(ui.getRequestUri().toString(), 201)).build();
}
@GET
@Path("{taxonomyName}/terms/{termName}")
@Produces(Servlets.JSON_MEDIA_TYPE)
public Response getTaxonomyTerm(@Context HttpHeaders headers,
@Context UriInfo ui,
@PathParam("taxonomyName") String taxonomyName,
@PathParam("termName") String termName) throws CatalogException {
TermPath termPath = new TermPath(taxonomyName, termName);
Map<String, Object> properties = new HashMap<>();
properties.put("termPath", termPath);
Result result = getResource(termResourceProvider, new InstanceRequest(properties));
return Response.status(Response.Status.OK).entity(serializer.serialize(result, ui)).build();
}
@GET
@Path("{taxonomyName}/terms")
@Produces(Servlets.JSON_MEDIA_TYPE)
public Response getTaxonomyTerms(@Context HttpHeaders headers,
@Context UriInfo ui,
@PathParam("taxonomyName") String taxonomyName) throws CatalogException {
String queryString = decode(getQueryString(ui));
TermPath termPath = new TermPath(taxonomyName, null);
Request request = new CollectionRequest(
Collections.<String, Object>singletonMap("termPath", termPath), queryString);
Result result = getResources(termResourceProvider, request);
return Response.status(Response.Status.OK).entity(serializer.serialize(result, ui)).build();
}
@GET
@Path("{taxonomyName}/terms/{rootTerm}/{remainder:.*}")
@Produces(Servlets.JSON_MEDIA_TYPE)
public Response getSubTerms(@Context HttpHeaders headers,
@Context UriInfo ui,
@PathParam("taxonomyName") String taxonomyName,
@PathParam("rootTerm") String rootTerm,
@PathParam("remainder") String remainder) throws CatalogException {
Result result;
List<PathSegment> pathSegments = ui.getPathSegments();
int lastIndex = pathSegments.size() - 1;
String lastSegment = pathSegments.get(lastIndex).getPath();
String termName = String.format("%s%s", rootTerm,
remainder.replaceAll("/?terms/?([.]*)", "$1."));
String queryString = decode(getQueryString(ui));
TermPath termPath = new TermPath(taxonomyName, termName);
Map<String, Object> properties = new HashMap<>();
properties.put("termPath", termPath);
if (lastSegment.equals("terms") || (lastSegment.isEmpty() && pathSegments.get(lastIndex - 1).getPath().equals("terms"))) {
result = getResources(termResourceProvider, new CollectionRequest(properties, queryString));
} else {
result = getResource(termResourceProvider, new InstanceRequest(properties));
}
return Response.status(Response.Status.OK).entity(serializer.serialize(result, ui)).build();
}
@POST
@Path("{taxonomyName}/terms/{termName}")
@Produces(Servlets.JSON_MEDIA_TYPE)
public Response createTerm(String body,
@Context HttpHeaders headers,
@Context UriInfo ui,
@PathParam("taxonomyName") String taxonomyName,
@PathParam("termName") String termName) throws CatalogException {
Map<String, Object> properties = parsePayload(body);
validateName(termName);
properties.put("termPath", new TermPath(taxonomyName, termName));
createResource(termResourceProvider, new InstanceRequest(properties));
return Response.status(Response.Status.CREATED).entity(
new Results(ui.getRequestUri().toString(), 201)).build();
}
@POST
@Path("{taxonomyName}/terms/{termName}/{remainder:.*}")
@Produces(Servlets.JSON_MEDIA_TYPE)
public Response createSubTerms(String body,
@Context HttpHeaders headers,
@Context UriInfo ui,
@PathParam("taxonomyName") String taxonomyName,
@PathParam("termName") String termName,
@PathParam("remainder") String remainder) throws CatalogException {
Map<String, Object> properties = parsePayload(body);
String[] pathTokens = remainder.split("/");
validateName(pathTokens[pathTokens.length - 1]);
properties.put("termPath", new TermPath(taxonomyName, String.format("%s%s", termName,
remainder.replaceAll("/?terms/?([.]*)", "$1."))));
createResource(termResourceProvider, new InstanceRequest(properties));
return Response.status(Response.Status.CREATED).entity(
new Results(ui.getRequestUri().toString(), 201)).build();
}
private void validateName(String name) throws InvalidPayloadException {
if (name.contains(".")) {
throw new InvalidPayloadException("The \"name\" property may not contain the character '.'");
}
}
}
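The nested-term handling in getSubTerms() and createSubTerms() relies on the replaceAll() expression that collapses intermediate "terms" path segments into a dotted term path. A standalone sketch of that conversion (not part of this commit; the class name and sample URL values are illustrative):
public class TermPathConversionSketch {
    public static void main(String[] args) {
        // For GET /api/atlas/v1/taxonomies/t1/terms/term1/terms/term2/terms/term3,
        // JAX-RS binds rootTerm = "term1" and remainder = "terms/term2/terms/term3".
        String rootTerm = "term1";
        String remainder = "terms/term2/terms/term3";
        // Same expression used by the service: each "terms" segment (and its
        // surrounding slashes) becomes a '.' separator in the term path.
        String termName = String.format("%s%s", rootTerm,
                remainder.replaceAll("/?terms/?([.]*)", "$1."));
        System.out.println(termName);   // prints: term1.term2.term3
    }
}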
......@@ -30,6 +30,11 @@
org.apache.atlas.web.resources,org.apache.atlas.web.params
</param-value>
</context-param>
<context-param>
<param-name>com.sun.jersey.api.json.POJOMappingFeature</param-name>
<param-value>true</param-value>
</context-param>
<!--
More information can be found here:
......
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.atlas.authorize;
import org.testng.annotations.Test;
import java.util.List;
import static org.testng.Assert.assertEquals;
import static org.testng.Assert.assertTrue;
/**
* Unit tests for AtlasAuthorizationUtils.
*/
public class AtlasAuthorizationUtilsTest {
@Test
public void testGetApi() {
String contextPath = "/api/atlas/entities";
assertEquals(AtlasAuthorizationUtils.getApi(contextPath), "entities");
contextPath = "/api/atlas/entities/111/traits";
assertEquals(AtlasAuthorizationUtils.getApi(contextPath), "entities");
contextPath = "/api/atlas/v1/entities";
assertEquals(AtlasAuthorizationUtils.getApi(contextPath), "v1/entities");
contextPath = "/api/atlas/v1/entities/111/tags";
assertEquals(AtlasAuthorizationUtils.getApi(contextPath), "v1/entities");
// Not sure of this use case, but the code appears to support URLs that don't
// begin with the base URL.
contextPath = "/foo/bar";
assertEquals(AtlasAuthorizationUtils.getApi(contextPath), "foo");
}
@Test
public void testGetAtlasResourceType() throws Exception {
String contextPath = "/api/atlas/types";
List<AtlasResourceTypes> resourceTypes = AtlasAuthorizationUtils.getAtlasResourceType(contextPath);
assertEquals(resourceTypes.size(), 1);
assertTrue(resourceTypes.contains(AtlasResourceTypes.TYPE));
contextPath = "/api/atlas/admin/foo";
resourceTypes = AtlasAuthorizationUtils.getAtlasResourceType(contextPath);
assertEquals(resourceTypes.size(), 1);
assertTrue(resourceTypes.contains(AtlasResourceTypes.OPERATION));
contextPath = "/api/atlas/graph/foo";
resourceTypes = AtlasAuthorizationUtils.getAtlasResourceType(contextPath);
assertEquals(resourceTypes.size(), 1);
assertTrue(resourceTypes.contains(AtlasResourceTypes.OPERATION));
contextPath = "/api/atlas/discovery/search/gremlin";
resourceTypes = AtlasAuthorizationUtils.getAtlasResourceType(contextPath);
assertEquals(resourceTypes.size(), 1);
assertTrue(resourceTypes.contains(AtlasResourceTypes.OPERATION));
contextPath = "/api/atlas/entities/111/traits";
resourceTypes = AtlasAuthorizationUtils.getAtlasResourceType(contextPath);
assertEquals(resourceTypes.size(), 2);
assertTrue(resourceTypes.contains(AtlasResourceTypes.ENTITY));
assertTrue(resourceTypes.contains(AtlasResourceTypes.TYPE));
contextPath = "/api/atlas/discovery/search";
resourceTypes = AtlasAuthorizationUtils.getAtlasResourceType(contextPath);
assertEquals(resourceTypes.size(), 2);
assertTrue(resourceTypes.contains(AtlasResourceTypes.ENTITY));
assertTrue(resourceTypes.contains(AtlasResourceTypes.TYPE));
contextPath = "/api/atlas/entities?type=Column";
resourceTypes = AtlasAuthorizationUtils.getAtlasResourceType(contextPath);
assertEquals(resourceTypes.size(), 1);
assertTrue(resourceTypes.contains(AtlasResourceTypes.ENTITY));
contextPath = "/api/atlas/lineage";
resourceTypes = AtlasAuthorizationUtils.getAtlasResourceType(contextPath);
assertEquals(resourceTypes.size(), 1);
assertTrue(resourceTypes.contains(AtlasResourceTypes.ENTITY));
contextPath = "/api/atlas/v1/taxonomies";
resourceTypes = AtlasAuthorizationUtils.getAtlasResourceType(contextPath);
assertEquals(resourceTypes.size(), 2);
assertTrue(resourceTypes.contains(AtlasResourceTypes.TAXONOMY));
assertTrue(resourceTypes.contains(AtlasResourceTypes.ENTITY));
contextPath = "/api/atlas/v1/taxonomies/taxonomy1/terms";
resourceTypes = AtlasAuthorizationUtils.getAtlasResourceType(contextPath);
assertEquals(resourceTypes.size(), 4);
assertTrue(resourceTypes.contains(AtlasResourceTypes.TAXONOMY));
assertTrue(resourceTypes.contains(AtlasResourceTypes.ENTITY));
assertTrue(resourceTypes.contains(AtlasResourceTypes.TERM));
assertTrue(resourceTypes.contains(AtlasResourceTypes.TYPE));
contextPath = "/api/atlas/v1/entities/111";
resourceTypes = AtlasAuthorizationUtils.getAtlasResourceType(contextPath);
assertEquals(resourceTypes.size(), 1);
assertTrue(resourceTypes.contains(AtlasResourceTypes.ENTITY));
contextPath = "/api/atlas/v1/entities/111/tags/foo";
resourceTypes = AtlasAuthorizationUtils.getAtlasResourceType(contextPath);
assertEquals(resourceTypes.size(), 2);
assertTrue(resourceTypes.contains(AtlasResourceTypes.ENTITY));
assertTrue(resourceTypes.contains(AtlasResourceTypes.TYPE));
}
}