Commit 8cd74965 by nikhilbonte, committed by nixonrodrigues

ATLAS-3283 Export-import UTs are getting skipped

parent 08b76391
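
Why these UTs ended up skipped rather than failed: the export-import test helpers convert every resource-loading IOException into TestNG's SkipException (see the createTypes and getRequest hunks below), so a bad fixture reference, such as the case-mismatched "typesDef-new-classification" corrected in this commit, quietly turns the affected tests into skips. A minimal standalone illustration of that TestNG behavior (hypothetical class, not part of this commit):

    import org.testng.SkipException;
    import org.testng.annotations.Test;

    public class SkipOnLoadFailureDemo {
        @Test
        public void loadsFixture() {
            // A SkipException thrown mid-test marks it SKIPPED, not FAILED,
            // so the build stays green while the assertions never run.
            throw new SkipException("'typesdef-new-classification' could not be loaded.");
        }
    }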
@@ -66,11 +66,15 @@ public class ExportIncrementalTest extends ExportImportTestBase {
     private final String EXPORT_REQUEST_INCREMENTAL = "export-incremental";
     private final String EXPORT_REQUEST_CONNECTED = "export-connected";
 
+    private AtlasClassificationType classificationTypeT1;
     private long nextTimestamp;
 
     @BeforeClass
     public void setup() throws IOException, AtlasBaseException {
         basicSetup(typeDefStore, typeRegistry);
+        RequestContext.get().setImportInProgress(true);
+        classificationTypeT1 = createNewClassification();
+
         createEntities(entityStore, ENTITIES_SUB_DIR, new String[] { "db", "table-columns"});
         final String[] entityGuids = {DB_GUID, TABLE_GUID};
         verifyCreatedEntities(entityStore, entityGuids, 2);
@@ -108,8 +112,7 @@ public class ExportIncrementalTest extends ExportImportTestBase {
     public void atT1_NewClassificationAttachedToTable_ReturnsChangedTable() throws AtlasBaseException {
         final int expectedEntityCount = 1;
 
-        AtlasClassificationType ct = createNewClassification();
-        entityStore.addClassifications(TABLE_GUID, ImmutableList.of(ct.createDefaultValue()));
+        entityStore.addClassifications(TABLE_GUID, ImmutableList.of(classificationTypeT1.createDefaultValue()));
 
         AtlasExportRequest request = getIncrementalRequest(nextTimestamp);
         ZipSource source = runExportWithParameters(exportService, request);
@@ -127,7 +130,7 @@ public class ExportIncrementalTest extends ExportImportTestBase {
     }
 
     private AtlasClassificationType createNewClassification() {
-        createTypes(typeDefStore, ENTITIES_SUB_DIR,"typesDef-new-classification");
+        createTypes(typeDefStore, ENTITIES_SUB_DIR,"typesdef-new-classification");
         return typeRegistry.getClassificationTypeByName("T1");
     }
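
The filename fix above is the crux of the commit: the JSON fixture on disk is evidently lowercase, so the mixed-case lookup fails and the resulting IOException becomes a SkipException inside createTypes. Classpath resource lookup is exact-match inside jars and on case-sensitive filesystems; a small hypothetical demo (the /json/ path is an assumption, not taken from this commit):

    import java.io.InputStream;

    public class ResourceCaseDemo {
        public static void main(String[] args) {
            // Only the name that exactly matches the packaged file resolves;
            // the mixed-case variant comes back null and the caller's
            // IOException path kicks in.
            InputStream wrong = ResourceCaseDemo.class
                    .getResourceAsStream("/json/typesDef-new-classification.json");
            InputStream right = ResourceCaseDemo.class
                    .getResourceAsStream("/json/typesdef-new-classification.json");
            System.out.println("mixed case resolves: " + (wrong != null));
            System.out.println("lower case resolves: " + (right != null));
        }
    }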
@@ -151,7 +154,6 @@ public class ExportIncrementalTest extends ExportImportTestBase {
         long postUpdateTableEntityTimestamp = tableEntity.getEntity().getUpdateTime().getTime();
         assertEquals(preExportTableEntityTimestamp, postUpdateTableEntityTimestamp);
-
         nextTimestamp = updateTimesampForNextIncrementalExport(source);
     }
 
     @Test(dependsOnMethods = "atT2_NewClassificationAttachedToColumn_ReturnsChangedColumn")
@@ -179,7 +181,7 @@ public class ExportIncrementalTest extends ExportImportTestBase {
             return request;
         } catch (IOException e) {
-            throw new SkipException(String.format("getIncrementalRequest: '%s' could not be laoded.", EXPORT_REQUEST_INCREMENTAL));
+            throw new SkipException(String.format("getIncrementalRequest: '%s' could not be loaded.", EXPORT_REQUEST_INCREMENTAL));
         }
     }
@@ -187,7 +189,7 @@ public class ExportIncrementalTest extends ExportImportTestBase {
         try {
             return TestResourceFileUtils.readObjectFromJson(ENTITIES_SUB_DIR, EXPORT_REQUEST_CONNECTED, AtlasExportRequest.class);
         } catch (IOException e) {
-            throw new SkipException(String.format("getIncrementalRequest: '%s' could not be laoded.", EXPORT_REQUEST_CONNECTED));
+            throw new SkipException(String.format("getIncrementalRequest: '%s' could not be loaded.", EXPORT_REQUEST_CONNECTED));
         }
     }
@@ -72,6 +72,7 @@ public class ExportSkipLineageTest extends ExportImportTestBase {
     public void setup() throws IOException, AtlasBaseException {
         loadBaseModel(typeDefStore, typeRegistry);
         loadHiveModel(typeDefStore, typeRegistry);
+        RequestContext.get().setImportInProgress(true);
 
         entityStore = new AtlasEntityStoreV2(deleteDelegate, typeRegistry, mockChangeNotifier, graphMapper);
         createEntities(entityStore, ENTITIES_SUB_DIR, new String[]{"db", "table-columns", "table-view", "table-table-lineage"});
@@ -112,7 +113,7 @@ public class ExportSkipLineageTest extends ExportImportTestBase {
             return request;
         } catch (IOException e) {
-            throw new SkipException(String.format("getRequest: '%s' could not be laoded.", filename));
+            throw new SkipException(String.format("getRequest: '%s' could not be loaded.", filename));
         }
     }
 }
@@ -458,10 +458,6 @@ public class ImportServiceTest extends ExportImportTestBase {
         assertEquals(importTransforms.getTransforms().get("hive_table").get("qualifiedName").size(), 2);
     }
 
-    @Test(dataProvider = "empty-zip", expectedExceptions = AtlasBaseException.class)
-    public void importEmptyZip(ZipSource zipSource) {
-    }
-
     @Test(expectedExceptions = AtlasBaseException.class)
     public void importEmptyZip() throws IOException, AtlasBaseException {
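
The deleted importEmptyZip(ZipSource) overload had an empty body and fed off an "empty-zip" data provider; when a provider throws SkipException while building its fixture, TestNG silently skips every test it supplies, so the overload never exercised anything. The surviving no-provider variant raises the expected AtlasBaseException inside the test itself. A reduced illustration of the provider pitfall (hypothetical code, not from this commit):

    import org.testng.SkipException;
    import org.testng.annotations.DataProvider;
    import org.testng.annotations.Test;

    public class ProviderSkipDemo {
        @DataProvider(name = "empty-zip")
        public Object[][] emptyZip() {
            // Fixture construction fails here; TestNG marks every test fed
            // by this provider as skipped rather than failed.
            throw new SkipException("could not open empty.zip");
        }

        @Test(dataProvider = "empty-zip")
        public void neverRuns(Object zipSource) {
            // unreachable: the provider always aborts first
        }
    }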
@@ -19,6 +19,7 @@
 package org.apache.atlas.repository.impexp;
 
+import org.apache.atlas.RequestContext;
 import org.apache.atlas.TestModules;
 import org.apache.atlas.exception.AtlasBaseException;
 import org.apache.atlas.repository.graphdb.AtlasGraph;
@@ -58,6 +59,7 @@ public class IncrementalExportEntityProviderTest extends ExportImportTestBase {
     @BeforeClass
     public void setup() throws IOException, AtlasBaseException {
         basicSetup(typeDefStore, typeRegistry);
+        RequestContext.get().setImportInProgress(true);
         createEntities(entityStore, ENTITIES_SUB_DIR, new String[] { "db", "table-columns"});
         final String[] entityGuids = {DB_GUID, TABLE_GUID};
         verifyCreatedEntities(entityStore, entityGuids, 2);
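
Each fixed setup() now flags the request context with setImportInProgress(true) before creating fixture entities, so the entity store treats them as it would during a real import. A hedged sketch of scoping that thread-local flag per test class; using RequestContext.clear() as the teardown is an assumption, it does not appear in this diff:

    import org.apache.atlas.RequestContext;
    import org.testng.annotations.AfterClass;
    import org.testng.annotations.BeforeClass;

    public class ImportModeScopingSketch {
        @BeforeClass
        public void setup() {
            // Route fixture creation through the import code path.
            RequestContext.get().setImportInProgress(true);
        }

        @AfterClass
        public void teardown() {
            RequestContext.clear();  // assumed cleanup of thread-local state
        }
    }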
@@ -193,7 +193,7 @@ public class ReplicationEntityAttributeTest extends ExportImportTestBase {
             REPLICATED_TO_CLUSTER_NAME = (String) request.getOptions().get(OPTION_KEY_REPLICATED_TO);
             return request;
         } catch (IOException e) {
-            throw new SkipException(String.format("getExportRequestWithReplicationOption: '%s' could not be laoded.", EXPORT_REQUEST_FILE));
+            throw new SkipException(String.format("getExportRequestWithReplicationOption: '%s' could not be loaded.", EXPORT_REQUEST_FILE));
         }
     }
@@ -203,7 +203,7 @@ public class ReplicationEntityAttributeTest extends ExportImportTestBase {
             REPLICATED_FROM_CLUSTER_NAME = request.getOptions().get(AtlasImportRequest.OPTION_KEY_REPLICATED_FROM);
             return request;
         } catch (IOException e) {
-            throw new SkipException(String.format("getExportRequestWithReplicationOption: '%s' could not be laoded.", IMPORT_REQUEST_FILE));
+            throw new SkipException(String.format("getExportRequestWithReplicationOption: '%s' could not be loaded.", IMPORT_REQUEST_FILE));
         }
     }
 }
@@ -192,7 +192,7 @@ public class ZipFileResourceTestUtils {
         try {
             return TestResourceFileUtils.readObjectFromJson(entitiesSubDir, fileName, AtlasTypesDef.class);
         } catch (IOException e) {
-            throw new SkipException(String.format("createTypes: '%s' could not be laoded.", fileName));
+            throw new SkipException(String.format("createTypes: '%s' could not be loaded.", fileName));
         }
     }
@@ -201,7 +201,7 @@ public class ZipFileResourceTestUtils {
         try {
             return TestResourceFileUtils.readObjectFromJson(entitiesSubDir, fileName, AtlasEntity.AtlasEntityWithExtInfo.class);
         } catch (IOException e) {
-            throw new SkipException(String.format("createTypes: '%s' could not be laoded.", fileName));
+            throw new SkipException(String.format("createTypes: '%s' could not be loaded.", fileName));
         }
     }
@@ -220,7 +220,7 @@ public class ZipFileResourceTestUtils {
         assertTrue((response.getCreatedEntities() != null && response.getCreatedEntities().size() > 0) ||
                 (response.getMutatedEntities() != null && response.getMutatedEntities().size() > 0));
     } catch (AtlasBaseException e) {
-        throw new SkipException(String.format("createAtlasEntity: could not load '%s'.", atlasEntity.getEntity().getTypeName()));
+        throw new SkipException(String.format("createAtlasEntity: could not loaded '%s'.", atlasEntity.getEntity().getTypeName()));
     }
 }