Commit aeec7618 by Shwetha GS

ATLAS-383 tests for classtype.convert() with id (sumasai via shwethags)

parent 30a2ec1f
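
What the new test exercises, in plain terms: ClassType.convert() must accept a class-reference attribute whose value is a Referenceable carrying only an id (guid plus type name) and no attribute values, and resolve that reference instead of failing on missing attributes. Below is a minimal sketch of the pattern, not the committed code: it assumes it runs inside DefaultMetadataServiceTest, where createInstance(), the table fixture, and the TestUtils constants introduced in this change are in scope, and the method name is hypothetical.

    // Hypothetical condensed variant of the test added below; assumes the
    // DefaultMetadataServiceTest fixtures (createInstance, table) are in scope.
    @Test
    public void idOnlyReferenceIsAccepted() throws Exception {
        // Create a storage descriptor entity and remember its guid.
        Referenceable sd = new Referenceable(TestUtils.STORAGE_DESC_TYPE);
        sd.set(AtlasClient.REFERENCEABLE_ATTRIBUTE_NAME, TestUtils.randomString());
        sd.set("compressed", "false");
        String sdGuid = createInstance(sd);

        // Re-reference the same entity by id only: guid + type name, null values map.
        Referenceable sdByIdOnly = new Referenceable(sdGuid, TestUtils.STORAGE_DESC_TYPE, null);

        // Use the id-only reference as the required "sd" attribute of a partition;
        // ClassType.convert() should resolve it rather than reject the missing values.
        Referenceable partition = new Referenceable(TestUtils.PARTITION_CLASS_TYPE);
        partition.set(AtlasClient.REFERENCEABLE_ATTRIBUTE_NAME, "part-unique");
        partition.set("values", ImmutableList.of("2014-10-01"));
        partition.set("table", table);
        partition.set("sd", sdByIdOnly);
        Assert.assertNotNull(createInstance(partition));
    }
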
@@ -5,6 +5,7 @@ Apache Atlas Release Notes
INCOMPATIBLE CHANGES:
ALL CHANGES:
+ATLAS-383 tests for classtype.convert() with id (sumasai via shwethags)
ATLAS-263 Searching for a multi word trait always returns empty result (girishrp via shwethags)
--Release 0.6-incubating
......
@@ -187,12 +187,14 @@ public final class TestUtils {
public static final String DATABASE_TYPE = "hive_database";
public static final String DATABASE_NAME = "foo";
public static final String TABLE_TYPE = "hive_table";
-public static final String PARTITION_TYPE = "partition_type";
-public static final String SERDE_TYPE = "serdeType";
public static final String TABLE_NAME = "bar";
public static final String CLASSIFICATION = "classification";
public static final String PII = "PII";
public static final String SUPER_TYPE_NAME = "Base";
+public static final String STORAGE_DESC_TYPE = "hive_storagedesc";
+public static final String PARTITION_STRUCT_TYPE = "partition_struct_type";
+public static final String PARTITION_CLASS_TYPE = "partition_class_type";
+public static final String SERDE_TYPE = "serdeType";
public static TypesDef defineHiveTypes() {
HierarchicalTypeDefinition<ClassType> superTypeDefinition =
@@ -222,9 +224,46 @@ public final class TestUtils {
createRequiredAttrDef("name", DataTypes.STRING_TYPE),
createRequiredAttrDef("type", DataTypes.STRING_TYPE));
-StructTypeDefinition partitionDefinition = new StructTypeDefinition("partition_type",
+StructTypeDefinition partitionDefinition = new StructTypeDefinition("partition_struct_type",
new AttributeDefinition[]{createRequiredAttrDef("name", DataTypes.STRING_TYPE),});
+AttributeDefinition[] attributeDefinitions = new AttributeDefinition[]{
+new AttributeDefinition("cols", String.format("array<%s>", "column_type"),
+Multiplicity.OPTIONAL, true, null),
+new AttributeDefinition("location", DataTypes.STRING_TYPE.getName(), Multiplicity.OPTIONAL, false,
+null),
+new AttributeDefinition("inputFormat", DataTypes.STRING_TYPE.getName(), Multiplicity.OPTIONAL, false,
+null),
+new AttributeDefinition("outputFormat", DataTypes.STRING_TYPE.getName(), Multiplicity.OPTIONAL, false,
+null),
+new AttributeDefinition("compressed", DataTypes.BOOLEAN_TYPE.getName(), Multiplicity.REQUIRED, false,
+null),
+new AttributeDefinition("numBuckets", DataTypes.INT_TYPE.getName(), Multiplicity.OPTIONAL, false,
+null),
+};
+HierarchicalTypeDefinition<ClassType> storageDescClsDef =
+new HierarchicalTypeDefinition<>(ClassType.class, STORAGE_DESC_TYPE,
+ImmutableList.of(SUPER_TYPE_NAME), attributeDefinitions);
+AttributeDefinition[] partClsAttributes = new AttributeDefinition[]{
+new AttributeDefinition("values", DataTypes.arrayTypeName(DataTypes.STRING_TYPE.getName()),
+Multiplicity.OPTIONAL, false, null),
+new AttributeDefinition("table", TABLE_TYPE, Multiplicity.REQUIRED, false, null),
+new AttributeDefinition("createTime", DataTypes.LONG_TYPE.getName(), Multiplicity.OPTIONAL, false,
+null),
+new AttributeDefinition("lastAccessTime", DataTypes.LONG_TYPE.getName(), Multiplicity.OPTIONAL, false,
+null),
+new AttributeDefinition("sd", STORAGE_DESC_TYPE, Multiplicity.REQUIRED, true,
+null),
+new AttributeDefinition("columns", DataTypes.arrayTypeName("column_type"),
+Multiplicity.OPTIONAL, true, null),
+new AttributeDefinition("parameters", new DataTypes.MapType(DataTypes.STRING_TYPE, DataTypes.STRING_TYPE).getName(), Multiplicity.OPTIONAL, false, null),};
+HierarchicalTypeDefinition<ClassType> partClsDef =
+new HierarchicalTypeDefinition<>(ClassType.class, "partition_class_type",
+ImmutableList.of(SUPER_TYPE_NAME), partClsAttributes);
HierarchicalTypeDefinition<ClassType> tableTypeDefinition =
createClassTypeDef(TABLE_TYPE, ImmutableList.of(SUPER_TYPE_NAME),
TypesUtil.createUniqueRequiredAttrDef("name", DataTypes.STRING_TYPE),
@@ -241,7 +280,7 @@ public final class TestUtils {
new AttributeDefinition("columns", String.format("array<%s>", "column_type"),
Multiplicity.OPTIONAL, true, null),
// array of structs
new AttributeDefinition("partitions", String.format("array<%s>", "partition_type"),
new AttributeDefinition("partitions", String.format("array<%s>", "partition_struct_type"),
Multiplicity.OPTIONAL, true, null),
// map of primitives
new AttributeDefinition("parametersMap",
@@ -255,7 +294,7 @@ public final class TestUtils {
//map of structs
new AttributeDefinition("partitionsMap",
DataTypes.mapTypeName(DataTypes.STRING_TYPE.getName(),
"partition_type"),
"partition_struct_type"),
Multiplicity.OPTIONAL, true, null),
// struct reference
new AttributeDefinition("serde1", "serdeType", Multiplicity.OPTIONAL, false, null),
@@ -279,7 +318,7 @@ public final class TestUtils {
return TypesUtil.getTypesDef(ImmutableList.of(enumTypeDefinition),
ImmutableList.of(structTypeDefinition, partitionDefinition),
ImmutableList.of(classificationTypeDefinition, fetlClassificationTypeDefinition, piiTypeDefinition),
-ImmutableList.of(superTypeDefinition, databaseTypeDefinition, columnsDefinition, tableTypeDefinition));
+ImmutableList.of(superTypeDefinition, databaseTypeDefinition, columnsDefinition, tableTypeDefinition, storageDescClsDef, partClsDef));
}
public static Collection<IDataType> createHiveTypes(TypeSystem typeSystem) throws Exception {
......
@@ -561,7 +561,7 @@ public class GraphBackedMetadataRepositoryTest {
// HashMap<String, Struct> partitionsMap = new HashMap<>();
ArrayList<Struct> partitions = new ArrayList<>();
for (int index = 0; index < 5; index++) {
Struct partitionInstance = new Struct("partition_type");
Struct partitionInstance = new Struct(TestUtils.PARTITION_STRUCT_TYPE);
final String name = "partition_" + index;
partitionInstance.set("name", name);
......
@@ -215,7 +215,7 @@ public class GraphRepoMapperScaleTest {
ArrayList<Struct> partitions = new ArrayList<>();
for (int index = 0; index < 5; index++) {
Struct partitionInstance = new Struct("partition_type");
Struct partitionInstance = new Struct(TestUtils.PARTITION_STRUCT_TYPE);
partitionInstance.set("name", "partition_" + "-" + uberIndex + "-" + index);
partitions.add(partitionInstance);
}
......
@@ -22,6 +22,10 @@ import com.google.common.collect.ImmutableList;
import com.google.inject.Inject;
import com.thinkaurelius.titan.core.TitanGraph;
import com.thinkaurelius.titan.core.util.TitanCleanup;
+import org.apache.atlas.AtlasClient;
+import org.apache.atlas.typesystem.exception.TypeNotFoundException;
+import org.apache.atlas.typesystem.exception.EntityNotFoundException;
+import org.apache.atlas.utils.ParamChecker;
import org.apache.atlas.RepositoryMetadataModule;
import org.apache.atlas.TestUtils;
import org.apache.atlas.repository.graph.GraphProvider;
@@ -29,15 +33,12 @@ import org.apache.atlas.services.MetadataService;
import org.apache.atlas.typesystem.Referenceable;
import org.apache.atlas.typesystem.Struct;
import org.apache.atlas.typesystem.TypesDef;
-import org.apache.atlas.typesystem.exception.EntityNotFoundException;
-import org.apache.atlas.typesystem.exception.TypeNotFoundException;
import org.apache.atlas.typesystem.json.InstanceSerialization;
import org.apache.atlas.typesystem.json.TypesSerialization;
import org.apache.atlas.typesystem.persistence.Id;
import org.apache.atlas.typesystem.types.EnumValue;
import org.apache.atlas.typesystem.types.TypeSystem;
import org.apache.atlas.typesystem.types.ValueConversionException;
-import org.apache.atlas.utils.ParamChecker;
import org.apache.commons.lang.RandomStringUtils;
import org.codehaus.jettison.json.JSONArray;
import org.testng.Assert;
@@ -208,7 +209,7 @@ public class DefaultMetadataServiceTest {
public void testUpdateEntityWithMap() throws Exception {
final Map<String, Struct> partsMap = new HashMap<>();
partsMap.put("part0", new Struct("partition_type",
partsMap.put("part0", new Struct(TestUtils.PARTITION_STRUCT_TYPE,
new HashMap<String, Object>() {{
put("name", "test");
}}));
@@ -223,7 +224,7 @@ public class DefaultMetadataServiceTest {
Assert.assertTrue(partsMap.get("part0").equalsContents(((Map<String, Struct>)tableDefinition.get("partitionsMap")).get("part0")));
//update map - add a map key
partsMap.put("part1", new Struct("partition_type",
partsMap.put("part1", new Struct(TestUtils.PARTITION_STRUCT_TYPE,
new HashMap<String, Object>() {{
put("name", "test1");
}}));
@@ -239,7 +240,7 @@ public class DefaultMetadataServiceTest {
//update map - remove a key and add another key
partsMap.remove("part0");
partsMap.put("part2", new Struct("partition_type",
partsMap.put("part2", new Struct(TestUtils.PARTITION_STRUCT_TYPE,
new HashMap<String, Object>() {{
put("name", "test2");
}}));
@@ -440,6 +441,29 @@ public class DefaultMetadataServiceTest {
Assert.assertNull(((Struct)tableDefinition.get("serde1")).get("description"));
}
+@Test
+public void testCreateEntityWithReferenceableHavingIdNoValue() throws Exception {
+//ATLAS-383 Test
+Referenceable sdReferenceable = new Referenceable(TestUtils.STORAGE_DESC_TYPE);
+sdReferenceable.set(AtlasClient.REFERENCEABLE_ATTRIBUTE_NAME, TestUtils.randomString());
+sdReferenceable.set("compressed", "false");
+sdReferenceable.set("location", "hdfs://tmp/hive-user");
+String sdGuid = createInstance(sdReferenceable);
+Referenceable sdRef2 = new Referenceable(sdGuid, TestUtils.STORAGE_DESC_TYPE, null);
+Referenceable partRef = new Referenceable(TestUtils.PARTITION_CLASS_TYPE);
+partRef.set(AtlasClient.REFERENCEABLE_ATTRIBUTE_NAME, "part-unique");
+partRef.set("values", ImmutableList.of("2014-10-01"));
+partRef.set("table", table);
+partRef.set("sd", sdRef2);
+String partGuid = createInstance(partRef);
+Assert.assertNotNull(partGuid);
+}
@Test
public void testClassUpdate() throws Exception {
//Create new db instance
@@ -486,10 +510,10 @@ public class DefaultMetadataServiceTest {
//Add array of structs
TestUtils.dumpGraph(graphProvider.get());
-final Struct partition1 = new Struct(TestUtils.PARTITION_TYPE);
+final Struct partition1 = new Struct(TestUtils.PARTITION_STRUCT_TYPE);
partition1.set("name", "part1");
-final Struct partition2 = new Struct(TestUtils.PARTITION_TYPE);
+final Struct partition2 = new Struct(TestUtils.PARTITION_STRUCT_TYPE);
partition2.set("name", "part2");
List<Struct> partitions = new ArrayList<Struct>(){{ add(partition1); add(partition2); }};
@@ -508,7 +532,7 @@ public class DefaultMetadataServiceTest {
Assert.assertTrue(partitions.get(0).equalsContents(partitionsActual.get(0)));
//add a new element to array of struct
-final Struct partition3 = new Struct(TestUtils.PARTITION_TYPE);
+final Struct partition3 = new Struct(TestUtils.PARTITION_STRUCT_TYPE);
partition3.set("name", "part3");
partitions.add(partition3);
table.set("partitions", partitions);
@@ -555,7 +579,7 @@ public class DefaultMetadataServiceTest {
Assert.assertTrue(partitions.get(0).equalsContents(partitionsActual.get(0)));
//add a repeated element to array of struct
-final Struct partition4 = new Struct(TestUtils.PARTITION_TYPE);
+final Struct partition4 = new Struct(TestUtils.PARTITION_STRUCT_TYPE);
partition4.set("name", "part4");
partitions.add(partition4);
table.set("partitions", partitions);
......