Commit f694a910 by Vimal Sharma

ATLAS-1315 Fix webapp Integration tests (ayubkhan,apoorvnaik via svimal2106)

parent cea70bc6
@@ -9,6 +9,7 @@ ATLAS-1060 Add composite indexes for exact match performance improvements for al
 ATLAS-1127 Modify creation and modification timestamps to Date instead of Long(sumasai)
 ALL CHANGES:
+ATLAS-1315 Fix webapp Integration tests (ayubkhan,apoorvnaik via svimal2106)
 ATLAS-1313 Tests SSLAndKerberosTest.testService and SSLTest.testService are failing (ayubkhan via svimal2106)
 ATLAS-1116 Performance monitoring of backend methods in API requests (shwethags)
 ATLAS-1310 attempt LDAP authentication only when enabled (mneethiraj)
...
@@ -64,6 +64,7 @@ public class EntityNotificationIT extends BaseResourceIT {
     @Inject
     private NotificationInterface notificationInterface;
     private Id tableId;
+    private Id dbId;
     private String traitName;
     private NotificationConsumer<EntityNotification> notificationConsumer;
@@ -71,6 +72,8 @@ public class EntityNotificationIT extends BaseResourceIT {
     public void setUp() throws Exception {
         super.setUp();
         createTypeDefinitions();
+        Referenceable HiveDBInstance = createHiveDBInstance(DATABASE_NAME);
+        dbId = createInstance(HiveDBInstance);
         List<NotificationConsumer<EntityNotification>> consumers =
                 notificationInterface.createConsumers(NotificationInterface.NotificationType.ENTITIES, 1);
@@ -80,11 +83,7 @@ public class EntityNotificationIT extends BaseResourceIT {
     @Test
     public void testCreateEntity() throws Exception {
-        Referenceable hiveDBInstance = createHiveDBInstance(DATABASE_NAME);
-        Id dbID = createInstance(hiveDBInstance);
-        hiveDBInstance.replaceWithNewId(dbID);
-        Referenceable tableInstance = createHiveTableInstance(hiveDBInstance, TABLE_NAME);
+        Referenceable tableInstance = createHiveTableInstance(DATABASE_NAME, TABLE_NAME, dbId);
         tableId = createInstance(tableInstance);
         final String guid = tableId._getId();
@@ -110,11 +109,10 @@
     public void testDeleteEntity() throws Exception {
         final String tableName = "table-" + randomString();
         final String dbName = "db-" + randomString();
-        Referenceable hiveDBInstance = createHiveDBInstance(dbName);
-        Id dbID = createInstance(hiveDBInstance);
-        hiveDBInstance.replaceWithNewId(dbID);
-        Referenceable tableInstance = createHiveTableInstance(hiveDBInstance, tableName);
+        Referenceable HiveDBInstance = createHiveDBInstance(dbName);
+        Id dbId = createInstance(HiveDBInstance);
+        Referenceable tableInstance = createHiveTableInstance(dbName, tableName, dbId);
         final Id tableId = createInstance(tableInstance);
         final String guid = tableId._getId();
...
@@ -56,7 +56,10 @@ import org.slf4j.LoggerFactory;
 import org.testng.Assert;
 import org.testng.annotations.BeforeClass;
+import java.util.HashMap;
 import java.util.List;
+import java.util.Collections;
+import java.util.Map;
 /**
  * Base class for integration tests.
@@ -90,11 +93,32 @@ public abstract class BaseResourceIT {
     }
     protected void createType(TypesDef typesDef) throws Exception {
-        try {
+        try{
+            if ( !typesDef.enumTypes().isEmpty() ){
+                String sampleType = typesDef.enumTypesAsJavaList().get(0).name;
+                serviceClient.getType(sampleType);
+                LOG.info("Checking enum type existence");
+            }
+            else if( !typesDef.structTypes().isEmpty()){
+                StructTypeDefinition sampleType = typesDef.structTypesAsJavaList().get(0);
+                serviceClient.getType(sampleType.typeName);
+                LOG.info("Checking struct type existence");
+            }
+            else if( !typesDef.traitTypes().isEmpty()){
+                HierarchicalTypeDefinition<TraitType> sampleType = typesDef.traitTypesAsJavaList().get(0);
+                serviceClient.getType(sampleType.typeName);
+                LOG.info("Checking trait type existence");
+            }
+            else{
+                HierarchicalTypeDefinition<ClassType> sampleType = typesDef.classTypesAsJavaList().get(0);
+                serviceClient.getType(sampleType.typeName);
+                LOG.info("Checking class type existence");
+            }
+            LOG.info("Types already exist. Skipping type creation");
+        } catch(AtlasServiceException ase) {
+            //Expected if type doesnt exist
             String typesAsJSON = TypesSerialization.toJson(typesDef);
             createType(typesAsJSON);
-        } catch(AtlasServiceException ase) {
-            LOG.info("Types failed. Tests might malfunction");
         }
     }
@@ -118,6 +142,24 @@ public abstract class BaseResourceIT {
         return null;
     }
+    protected TypesDef getTypesDef(ImmutableList<EnumTypeDefinition> enums,
+                                   ImmutableList<StructTypeDefinition> structs,
+                                   ImmutableList<HierarchicalTypeDefinition<TraitType>> traits,
+                                   ImmutableList<HierarchicalTypeDefinition<ClassType>> classes){
+        enums = (enums != null) ? enums : ImmutableList
+                .<EnumTypeDefinition>of();
+        structs =
+                (structs != null) ? structs : ImmutableList.<StructTypeDefinition>of();
+        traits = (traits != null) ? traits : ImmutableList
+                .<HierarchicalTypeDefinition<TraitType>>of();
+        classes = (classes != null) ? classes : ImmutableList
+                .<HierarchicalTypeDefinition<ClassType>>of();
+        return TypesUtil.getTypesDef(enums, structs, traits, classes);
+    }
     protected static final String DATABASE_TYPE = "hive_db";
     protected static final String HIVE_TABLE_TYPE = "hive_table";
     protected static final String COLUMN_TYPE = "hive_column";
@@ -179,14 +221,22 @@ public abstract class BaseResourceIT {
                 TypesUtil.createTraitTypeDef("sec", ImmutableSet.<String>of());
         HierarchicalTypeDefinition<TraitType> financeTrait =
                 TypesUtil.createTraitTypeDef("finance", ImmutableSet.<String>of());
-        TypesDef typesDef = TypesUtil.getTypesDef(ImmutableList.of(enumTypeDefinition),
-                ImmutableList.of(structTypeDefinition),
-                ImmutableList.of(classificationTrait, piiTrait, phiTrait, pciTrait, soxTrait, secTrait, financeTrait),
-//                ImmutableList.<HierarchicalTypeDefinition<ClassType>>of());
-                ImmutableList.of(dbClsDef, columnClsDef, tblClsDef, loadProcessClsDef));
-        createType(typesDef);
+        /*HierarchicalTypeDefinition<TraitType> factTrait =
+                TypesUtil.createTraitTypeDef("Fact", ImmutableSet.<String>of());
+        HierarchicalTypeDefinition<TraitType> etlTrait =
+                TypesUtil.createTraitTypeDef("ETL", ImmutableSet.<String>of());
+        HierarchicalTypeDefinition<TraitType> dimensionTrait =
+                TypesUtil.createTraitTypeDef("Dimension", ImmutableSet.<String>of());
+        HierarchicalTypeDefinition<TraitType> metricTrait =
+                TypesUtil.createTraitTypeDef("Metric", ImmutableSet.<String>of());*/
+        createType(getTypesDef(ImmutableList.of(enumTypeDefinition), null, null, null));
+        createType(getTypesDef(null, ImmutableList.of(structTypeDefinition), null, null));
+        createType(getTypesDef(null, null,
+                ImmutableList.of(classificationTrait, piiTrait, phiTrait, pciTrait,
+                        soxTrait, secTrait, financeTrait), null));
+        createType(getTypesDef(null, null, null,
+                ImmutableList.of(dbClsDef, columnClsDef, tblClsDef, loadProcessClsDef)));
     }
     AttributeDefinition attrDef(String name, IDataType dT) {
@@ -208,7 +258,17 @@ public abstract class BaseResourceIT {
         return RandomStringUtils.randomAlphanumeric(10);
     }
-    protected Referenceable createHiveTableInstance(Referenceable databaseInstance, String tableName) throws Exception {
+    protected Referenceable createHiveTableInstance(String dbName, String tableName, Id dbId) throws Exception {
+        Map<String, Object> values = new HashMap<>();
+        values.put("name", dbName);
+        values.put("description", "foo database");
+        values.put(AtlasClient.REFERENCEABLE_ATTRIBUTE_NAME, dbName);
+        values.put("owner", "user1");
+        values.put("clusterName", "cl1");
+        values.put("parameters", Collections.EMPTY_MAP);
+        values.put("location", "/tmp");
+        Referenceable databaseInstance = new Referenceable(dbId._getId(), dbId.getTypeName(), values);
         Referenceable tableInstance =
                 new Referenceable(HIVE_TABLE_TYPE, "classification", "pii", "phi", "pci", "sox", "sec", "finance");
         tableInstance.set("name", tableName);
@@ -246,6 +306,11 @@
         databaseInstance.set("qualifiedName", dbName);
         databaseInstance.set("clusterName", randomString());
         databaseInstance.set("description", "foo database");
+        databaseInstance.set(AtlasClient.REFERENCEABLE_ATTRIBUTE_NAME, dbName);
+        databaseInstance.set("owner", "user1");
+        databaseInstance.set("clusterName", "cl1");
+        databaseInstance.set("parameters", Collections.EMPTY_MAP);
+        databaseInstance.set("location", "/tmp");
         return databaseInstance;
     }
...
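
For context, a minimal sketch of how an integration test built on BaseResourceIT might use the refactored helpers after this change. The helper names (createTypeDefinitions, createHiveDBInstance, createInstance, createHiveTableInstance, randomString) are taken from the diff above; the test class, its constants, and the assertion are illustrative assumptions, not part of this commit.

import org.apache.atlas.typesystem.Referenceable;
import org.apache.atlas.typesystem.persistence.Id;
import org.testng.Assert;
import org.testng.annotations.BeforeClass;
import org.testng.annotations.Test;

// Illustrative sketch only -- not part of this commit. It shows the call pattern the
// refactor enables: the hive_db instance is registered once in setUp(), and
// createHiveTableInstance() now takes the database name and Id rather than the
// database Referenceable.
public class ExampleResourceIT extends BaseResourceIT {          // hypothetical test class

    private static final String DATABASE_NAME = "example_db";   // hypothetical db name

    private Id dbId;

    @BeforeClass
    public void setUp() throws Exception {
        super.setUp();
        createTypeDefinitions();                                 // skips types that already exist
        Referenceable db = createHiveDBInstance(DATABASE_NAME);  // build the hive_db Referenceable
        dbId = createInstance(db);                               // register it once and keep its Id
    }

    @Test
    public void testCreateTable() throws Exception {
        // New signature: (dbName, tableName, dbId) instead of (databaseInstance, tableName).
        Referenceable table = createHiveTableInstance(DATABASE_NAME, "table-" + randomString(), dbId);
        Id tableId = createInstance(table);
        Assert.assertNotNull(tableId._getId());
    }
}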