Commit 8a32ccaa by ashutoshm, committed by Sarath Subramanian

ATLAS-1709: Unit tests for import and export APIs

parent 42426a1b
@@ -15,7 +15,7 @@
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.atlas.web.resources;
package org.apache.atlas.repository.impexp;
import org.apache.atlas.AtlasErrorCode;
import org.apache.atlas.AtlasException;
@@ -30,8 +30,8 @@ import org.apache.atlas.model.instance.AtlasEntity.AtlasEntityWithExtInfo;
import org.apache.atlas.model.instance.AtlasObjectId;
import org.apache.atlas.model.typedef.AtlasBaseTypeDef;
import org.apache.atlas.model.typedef.AtlasClassificationDef;
import org.apache.atlas.model.typedef.AtlasEnumDef;
import org.apache.atlas.model.typedef.AtlasEntityDef;
import org.apache.atlas.model.typedef.AtlasEnumDef;
import org.apache.atlas.model.typedef.AtlasStructDef;
import org.apache.atlas.model.typedef.AtlasStructDef.AtlasAttributeDef;
import org.apache.atlas.model.typedef.AtlasTypesDef;
@@ -40,8 +40,8 @@ import org.apache.atlas.repository.graphdb.AtlasGraph;
import org.apache.atlas.repository.store.graph.v1.EntityGraphRetriever;
import org.apache.atlas.type.AtlasArrayType;
import org.apache.atlas.type.AtlasClassificationType;
import org.apache.atlas.type.AtlasEnumType;
import org.apache.atlas.type.AtlasEntityType;
import org.apache.atlas.type.AtlasEnumType;
import org.apache.atlas.type.AtlasMapType;
import org.apache.atlas.type.AtlasStructType;
import org.apache.atlas.type.AtlasStructType.AtlasAttribute;
@@ -86,7 +86,7 @@ public class ExportService {
public AtlasExportResult run(ZipSink exportSink, AtlasExportRequest request, String userName, String hostName,
String requestingIP) throws AtlasBaseException {
long startTime = System.currentTimeMillis();
AtlasExportResult result = new AtlasExportResult(request, userName, hostName, requestingIP, startTime);
AtlasExportResult result = new AtlasExportResult(request, userName, requestingIP, hostName, startTime);
ExportContext context = new ExportContext(result, exportSink);
try {
......
@@ -15,13 +15,17 @@
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.atlas.web.resources;
package org.apache.atlas.repository.impexp;
import org.apache.atlas.AtlasErrorCode;
import org.apache.atlas.exception.AtlasBaseException;
import org.apache.atlas.model.impexp.AtlasImportRequest;
import org.apache.atlas.model.impexp.AtlasImportResult;
import org.apache.atlas.model.typedef.*;
import org.apache.atlas.model.typedef.AtlasClassificationDef;
import org.apache.atlas.model.typedef.AtlasEntityDef;
import org.apache.atlas.model.typedef.AtlasEnumDef;
import org.apache.atlas.model.typedef.AtlasStructDef;
import org.apache.atlas.model.typedef.AtlasTypesDef;
import org.apache.atlas.repository.store.bootstrap.AtlasTypeDefStoreInitializer;
import org.apache.atlas.repository.store.graph.AtlasEntityStore;
import org.apache.atlas.store.AtlasTypeDefStore;
......
@@ -15,7 +15,7 @@
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.atlas.web.resources;
package org.apache.atlas.repository.impexp;
public enum ZipExportFileNames {
ATLAS_EXPORT_INFO_NAME("atlas-export-info"),
......
@@ -15,7 +15,7 @@
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.atlas.web.resources;
package org.apache.atlas.repository.impexp;
import org.apache.atlas.exception.AtlasBaseException;
import org.apache.atlas.model.impexp.AtlasExportResult;
......
@@ -15,9 +15,10 @@
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.atlas.web.resources;
package org.apache.atlas.repository.impexp;
import org.apache.atlas.exception.AtlasBaseException;
import org.apache.atlas.model.impexp.AtlasExportResult;
import org.apache.atlas.model.instance.AtlasEntity;
import org.apache.atlas.model.instance.AtlasEntity.AtlasEntityWithExtInfo;
import org.apache.atlas.model.typedef.AtlasTypesDef;
@@ -27,9 +28,9 @@ import org.codehaus.jackson.type.TypeReference;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.IOException;
import java.io.InputStream;
import java.util.HashMap;
import java.util.Iterator;
import java.util.List;
@@ -43,12 +44,12 @@ import static org.apache.atlas.AtlasErrorCode.JSON_ERROR_OBJECT_MAPPER_NULL_RETU
public class ZipSource implements EntityImportStream {
private static final Logger LOG = LoggerFactory.getLogger(ZipSource.class);
private final ByteArrayInputStream inputStream;
private final InputStream inputStream;
private List<String> creationOrder;
private Iterator<String> iterator;
private Map<String, String> guidEntityJsonMap;
public ZipSource(ByteArrayInputStream inputStream) throws IOException {
public ZipSource(InputStream inputStream) throws IOException {
this.inputStream = inputStream;
guidEntityJsonMap = new HashMap<>();
@@ -63,6 +64,13 @@ public class ZipSource implements EntityImportStream {
return convertFromJson(AtlasTypesDef.class, s);
}
public AtlasExportResult getExportResult() throws AtlasBaseException {
final String fileName = ZipExportFileNames.ATLAS_EXPORT_INFO_NAME.toString();
String s = getFromCache(fileName);
return convertFromJson(AtlasExportResult.class, s);
}
private void setCreationOrder() {
String fileName = ZipExportFileNames.ATLAS_EXPORT_ORDER_NAME.toString();
@@ -77,8 +85,6 @@ public class ZipSource implements EntityImportStream {
private void updateGuidZipEntryMap() throws IOException {
inputStream.reset();
ZipInputStream zipInputStream = new ZipInputStream(inputStream);
ZipEntry zipEntry = zipInputStream.getNextEntry();
while (zipEntry != null) {
......
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.atlas.repository.impexp;
import com.google.inject.Inject;
import org.apache.atlas.RepositoryMetadataModule;
import org.apache.atlas.exception.AtlasBaseException;
import org.apache.atlas.repository.store.graph.AtlasEntityStore;
import org.apache.atlas.store.AtlasTypeDefStore;
import org.apache.atlas.type.AtlasTypeRegistry;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.testng.annotations.DataProvider;
import org.testng.annotations.Guice;
import org.testng.annotations.Test;
import java.io.IOException;
import static org.apache.atlas.repository.impexp.ZipFileResourceTestUtils.getZipSource;
import static org.apache.atlas.repository.impexp.ZipFileResourceTestUtils.loadModelFromJson;
import static org.apache.atlas.repository.impexp.ZipFileResourceTestUtils.runAndVerifyQuickStart_v1_Import;
@Guice(modules = RepositoryMetadataModule.class)
public class ImportServiceReportingTest {
private static final Logger LOG = LoggerFactory.getLogger(ImportServiceReportingTest.class);
@Inject
AtlasTypeRegistry typeRegistry;
@Inject
private AtlasTypeDefStore typeDefStore;
@Inject
private AtlasEntityStore entityStore;
}
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.atlas.repository.impexp;
import com.google.inject.Inject;
import org.apache.atlas.RepositoryMetadataModule;
import org.apache.atlas.exception.AtlasBaseException;
import org.apache.atlas.repository.store.graph.AtlasEntityStore;
import org.apache.atlas.store.AtlasTypeDefStore;
import org.apache.atlas.type.AtlasTypeRegistry;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.testng.ITestContext;
import org.testng.annotations.DataProvider;
import org.testng.annotations.Guice;
import org.testng.annotations.Test;
import java.io.IOException;
import static org.apache.atlas.repository.impexp.ZipFileResourceTestUtils.getZipSource;
import static org.apache.atlas.repository.impexp.ZipFileResourceTestUtils.loadModelFromJson;
import static org.apache.atlas.repository.impexp.ZipFileResourceTestUtils.runAndVerifyQuickStart_v1_Import;
@Guice(modules = RepositoryMetadataModule.class)
public class ImportServiceTest {
private static final Logger LOG = LoggerFactory.getLogger(ImportServiceTest.class);
@Inject
AtlasTypeRegistry typeRegistry;
@Inject
private AtlasTypeDefStore typeDefStore;
@Inject
private AtlasEntityStore entityStore;
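// The data providers below wrap sample export archives from src/test/resources in a ZipSource (see ZipFileResourceTestUtils.getZipSource).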
@DataProvider(name = "sales")
public static Object[][] getDataFromQuickStart_v1_Sales(ITestContext context) throws IOException {
return getZipSource("sales-v1-full.zip");
}
@Test(dataProvider = "sales")
public void importDB1_Succeeds(ZipSource zipSource) throws AtlasBaseException, IOException {
loadModelFromJson("0010-base_model.json", typeDefStore, typeRegistry);
runAndVerifyQuickStart_v1_Import(new ImportService(typeDefStore, entityStore, typeRegistry), zipSource);
}
@DataProvider(name = "reporting")
public static Object[][] getDataFromReporting() throws IOException {
return getZipSource("reporting-v1-full.zip");
}
@Test(dataProvider = "reporting")
public void importDB2_Succeeds(ZipSource zipSource) throws AtlasBaseException, IOException {
loadModelFromJson("0010-base_model.json", typeDefStore, typeRegistry);
runAndVerifyQuickStart_v1_Import(new ImportService(typeDefStore, entityStore, typeRegistry), zipSource);
}
@DataProvider(name = "logging")
public static Object[][] getDataFromLogging(ITestContext context) throws IOException {
return getZipSource("logging-v1-full.zip");
}
@Test(dataProvider = "logging")
public void importDB3_Succeeds(ZipSource zipSource) throws AtlasBaseException, IOException {
loadModelFromJson("0010-base_model.json", typeDefStore, typeRegistry);
runAndVerifyQuickStart_v1_Import(new ImportService(typeDefStore, entityStore, typeRegistry), zipSource);
}
}
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.atlas.repository.impexp;
import com.google.common.collect.Sets;
import org.apache.atlas.exception.AtlasBaseException;
import org.apache.atlas.model.impexp.AtlasExportResult;
import org.apache.atlas.model.impexp.AtlasImportRequest;
import org.apache.atlas.model.impexp.AtlasImportResult;
import org.apache.atlas.model.typedef.AtlasTypesDef;
import org.apache.atlas.repository.store.bootstrap.AtlasTypeDefStoreInitializer;
import org.apache.atlas.store.AtlasTypeDefStore;
import org.apache.atlas.type.AtlasType;
import org.apache.atlas.type.AtlasTypeRegistry;
import org.apache.commons.io.FileUtils;
import org.apache.solr.common.StringUtils;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.testng.Assert;
import java.io.File;
import java.io.FileInputStream;
import java.io.FileNotFoundException;
import java.io.IOException;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Set;
public class ZipFileResourceTestUtils {
public static final Logger LOG = LoggerFactory.getLogger(ZipFileResourceTestUtils.class);
public static FileInputStream getFileInputStream(String fileName) {
final String userDir = System.getProperty("user.dir");
String filePath = getFilePath(userDir, fileName);
File f = new File(filePath);
FileInputStream fs = null;
try {
fs = new FileInputStream(f);
} catch (FileNotFoundException e) {
LOG.error("File could not be found at: %s", filePath, e);
}
return fs;
}
private static String getFilePath(String startPath, String fileName) {
return startPath + "/src/test/resources/" + fileName;
}
public static String getModelJson(String fileName) throws IOException {
final String userDir = System.getProperty("user.dir");
String filePath = userDir + "/../addons/models/" + fileName;
File f = new File(filePath);
String s = FileUtils.readFileToString(f);
Assert.assertFalse(StringUtils.isEmpty(s), "Model file should not be empty!");
return s;
}
public static Object[][] getZipSource(String fileName) throws IOException {
FileInputStream fs = ZipFileResourceTestUtils.getFileInputStream(fileName);
return new Object[][]{{new ZipSource(fs)}};
}
public static void verifyImportedEntities(List<String> creationOrder, List<String> processedEntities) {
Set<String> lhs = com.google.common.collect.Sets.newHashSet(creationOrder);
Set<String> rhs = com.google.common.collect.Sets.newHashSet(processedEntities);
Set<String> difference = Sets.difference(lhs, rhs);
Assert.assertNotNull(difference);
Assert.assertEquals(difference.size(), 0);
}
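// Compares entity counters from the export metrics against the import metrics; withExtInfo/Column/StorageDesc breakdowns are skipped
// and import keys are normalized by stripping their ":created"/":updated" suffixes.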
public static void verifyImportedMetrics(AtlasExportResult exportResult, AtlasImportResult importResult) {
Map<String, Integer> metricsForCompare = getImportMetricsForCompare(importResult);
for (Map.Entry<String, Integer> entry : exportResult.getMetrics().entrySet()) {
if (!entry.getKey().startsWith("entity") ||
entry.getKey().contains("withExtInfo") ||
entry.getKey().contains("Column") ||
entry.getKey().contains("StorageDesc")) continue;
Assert.assertTrue(metricsForCompare.containsKey(entry.getKey()));
Assert.assertEquals(entry.getValue(), metricsForCompare.get(entry.getKey()));
}
}
private static Map<String,Integer> getImportMetricsForCompare(AtlasImportResult result) {
Map<String, Integer> r = new HashMap<>();
for (Map.Entry<String, Integer> entry : result.getMetrics().entrySet()) {
r.put(entry.getKey().replace(":updated", "").replace(":created", ""), entry.getValue());
}
return r;
}
public static void loadModelFromJson(String fileName, AtlasTypeDefStore typeDefStore, AtlasTypeRegistry typeRegistry) throws IOException, AtlasBaseException {
AtlasTypesDef typesFromJson = getAtlasTypesDefFromFile(fileName);
createTypesAsNeeded(typesFromJson, typeDefStore, typeRegistry);
}
private static void createTypesAsNeeded(AtlasTypesDef typesFromJson, AtlasTypeDefStore typeDefStore, AtlasTypeRegistry typeRegistry) throws AtlasBaseException {
AtlasTypesDef typesToCreate = AtlasTypeDefStoreInitializer.getTypesToCreate(typesFromJson, typeRegistry);
if (!typesToCreate.isEmpty()) {
typeDefStore.createTypesDef(typesToCreate);
}
}
private static AtlasTypesDef getAtlasTypesDefFromFile(String fileName) throws IOException {
String sampleTypes = ZipFileResourceTestUtils.getModelJson(fileName);
return AtlasType.fromJson(sampleTypes, AtlasTypesDef.class);
}
public static AtlasImportRequest getDefaultImportRequest() {
return new AtlasImportRequest();
}
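// Runs the import as user 'admin' (localhost, 1.0.0.0) and asserts the operation status is SUCCESS.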
public static AtlasImportResult runImportWithParameters(ImportService importService, AtlasImportRequest request, ZipSource source) throws AtlasBaseException, IOException {
final String requestingIP = "1.0.0.0";
final String hostName = "localhost";
final String userName = "admin";
AtlasImportResult result = importService.run(source, request, userName, hostName, requestingIP);
Assert.assertEquals(result.getOperationStatus(), AtlasImportResult.OperationStatus.SUCCESS);
return result;
}
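// End-to-end check: import the archive, then verify metrics and processed entities against the AtlasExportResult and creation order embedded in the zip.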
public static void runAndVerifyQuickStart_v1_Import(ImportService importService, ZipSource zipSource) throws AtlasBaseException, IOException {
AtlasExportResult exportResult = zipSource.getExportResult();
List<String> creationOrder = zipSource.getCreationOrder();
AtlasImportRequest request = getDefaultImportRequest();
AtlasImportResult result = runImportWithParameters(importService, request, zipSource);
Assert.assertNotNull(result);
verifyImportedMetrics(exportResult, result);
verifyImportedEntities(creationOrder, result.getProcessedEntities());
}
}
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
* <p>
* http://www.apache.org/licenses/LICENSE-2.0
* <p>
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.atlas.repository.impexp;
import org.apache.atlas.exception.AtlasBaseException;
import org.apache.atlas.model.impexp.AtlasExportRequest;
import org.apache.atlas.model.impexp.AtlasExportResult;
import org.apache.atlas.model.instance.AtlasObjectId;
import org.apache.atlas.type.AtlasType;
import org.testng.Assert;
import org.testng.annotations.Test;
import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
import java.util.zip.ZipEntry;
import java.util.zip.ZipInputStream;
public class ZipSinkTest {
private ByteArrayOutputStream byteArrayOutputStream = new ByteArrayOutputStream();
private ZipSink zipSink;
private List<String> defaultExportOrder = new ArrayList<>(Arrays.asList("a", "b", "c", "d"));
private AtlasExportResult defaultExportResult;
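// Writes the default export order into the in-memory zip via ZipSink and closes it.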
private void initZipSinkWithExportOrder() throws AtlasBaseException {
zipSink = new ZipSink(byteArrayOutputStream);
zipSink.setExportOrder(defaultExportOrder);
zipSink.close();
}
private AtlasExportResult getDefaultExportResult() {
AtlasExportRequest request = new AtlasExportRequest();
List<AtlasObjectId> itemsToExport = new ArrayList<>();
itemsToExport.add(new AtlasObjectId("hive_db", "qualifiedName", "default"));
request.setItemsToExport(itemsToExport);
defaultExportResult = new AtlasExportResult(request, "admin", "1.0.0.0", "root", 100);
return defaultExportResult;
}
private ZipInputStream getZipInputStreamForDefaultExportOrder() throws AtlasBaseException {
initZipSinkWithExportOrder();
ByteArrayInputStream bis = new ByteArrayInputStream(byteArrayOutputStream.toByteArray());
return new ZipInputStream(bis);
}
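// Drains the current zip entry and returns its contents as a string.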
private String getZipEntryAsStream(ZipInputStream zis) throws IOException {
byte[] buf = new byte[1024];
int n = 0;
ByteArrayOutputStream bos = new ByteArrayOutputStream();
while ((n = zis.read(buf, 0, 1024)) > -1) {
bos.write(buf, 0, n);
}
Assert.assertNotNull(bos);
return bos.toString();
}
@Test
public void correctInit_succeeds() throws AtlasBaseException {
initZipSinkWithExportOrder();
Assert.assertTrue(true);
Assert.assertNotNull(zipSink);
}
@Test
public void zipWithExactlyOneEntry_succeeds() {
try {
ZipInputStream zis = getZipInputStreamForDefaultExportOrder();
try {
Assert.assertNotNull(zis.getNextEntry());
Assert.assertNull(zis.getNextEntry());
} catch (IOException e) {
Assert.fail("Reading the zip entries should not throw IOException.");
}
} catch (AtlasBaseException e) {
Assert.fail("No exception should be thrown.");
}
}
@Test
public void verifyExportOrderEntryName_verifies() throws AtlasBaseException, IOException {
ZipInputStream zis = getZipInputStreamForDefaultExportOrder();
ZipEntry ze = zis.getNextEntry();
Assert.assertEquals(ze.getName().replace(".json", ""), ZipExportFileNames.ATLAS_EXPORT_ORDER_NAME.toString());
}
@Test
public void zipWithExactlyOneEntry_ContentsVerified() throws AtlasBaseException, IOException {
ZipInputStream zis = getZipInputStreamForDefaultExportOrder();
zis.getNextEntry();
Assert.assertEquals(getZipEntryAsStream(zis).replace("\"", "'"), "['a','b','c','d']");
}
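// Entries are expected in write order: the export-order file first, then the export-info (result) file.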
@Test
public void zipWithExactlyTwoEntries_ContentsVerified() throws AtlasBaseException, IOException {
ByteArrayOutputStream byteOutputStream = new ByteArrayOutputStream();
useZipSinkToCreateZipWithTwoEntries(byteOutputStream);
ByteArrayInputStream bis = new ByteArrayInputStream(byteOutputStream.toByteArray());
ZipInputStream zipStream = new ZipInputStream(bis);
ZipEntry entry = zipStream.getNextEntry();
Assert.assertEquals(getZipEntryAsStream(zipStream), "[\"a\",\"b\",\"c\",\"d\"]");
Assert.assertEquals(entry.getName().replace(".json", ""), ZipExportFileNames.ATLAS_EXPORT_ORDER_NAME.toString());
entry = zipStream.getNextEntry();
Assert.assertEquals(entry.getName().replace(".json", ""), ZipExportFileNames.ATLAS_EXPORT_INFO_NAME.toString());
Assert.assertTrue(compareJsonWithObject(getZipEntryAsStream(zipStream), defaultExportResult));
}
private void useZipSinkToCreateZipWithTwoEntries(ByteArrayOutputStream byteOutputStream) throws AtlasBaseException {
ZipSink zs = new ZipSink(byteOutputStream);
zs.setExportOrder(defaultExportOrder);
zs.setResult(getDefaultExportResult());
zs.close();
}
private boolean compareJsonWithObject(String s, AtlasExportResult defaultExportResult) {
String json = AtlasType.toJson(defaultExportResult);
return json.equals(s);
}
}
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.atlas.repository.impexp;
import org.apache.atlas.exception.AtlasBaseException;
import org.apache.atlas.model.instance.AtlasEntity;
import org.apache.atlas.model.typedef.AtlasTypesDef;
import org.testng.Assert;
import org.testng.annotations.DataProvider;
import org.testng.annotations.Test;
import java.io.ByteArrayInputStream;
import java.io.FileInputStream;
import java.io.IOException;
import java.util.List;
public class ZipSourceTest {
@DataProvider(name = "zipFileStocks")
public static Object[][] getDataFromZipFile() throws IOException {
FileInputStream fs = ZipFileResourceTestUtils.getFileInputStream("stocks.zip");
return new Object[][] {{ new ZipSource(fs) }};
}
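// A byte stream that is not a valid zip archive should yield a null creation order.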
@Test
public void improperInit_ReturnsNullCreationOrder() throws IOException, AtlasBaseException {
byte[] bytes = new byte[10];
ByteArrayInputStream bais = new ByteArrayInputStream(bytes);
ZipSource zs = new ZipSource(bais);
List<String> s = zs.getCreationOrder();
Assert.assertNull(s);
}
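// stocks.zip is expected to contain 4 entities and 6 entity type definitions.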
@Test(dataProvider = "zipFileStocks")
public void examineContents_BehavesAsExpected(ZipSource zipSource) throws IOException, AtlasBaseException {
List<String> creationOrder = zipSource.getCreationOrder();
Assert.assertNotNull(creationOrder);
Assert.assertEquals(creationOrder.size(), 4);
AtlasTypesDef typesDef = zipSource.getTypesDef();
Assert.assertNotNull(typesDef);
Assert.assertEquals(typesDef.getEntityDefs().size(), 6);
useCreationOrderToFetchEntitiesWithExtInfo(zipSource, creationOrder);
useCreationOrderToFetchEntities(zipSource, creationOrder);
attemptToFetchNonExistentGuid_ReturnsNull(zipSource, "non-existent-guid");
verifyGuidRemovalOnImportComplete(zipSource, creationOrder.get(0));
}
private void useCreationOrderToFetchEntities(ZipSource zipSource, List<String> creationOrder) {
for (String guid : creationOrder) {
AtlasEntity e = zipSource.getByGuid(guid);
Assert.assertNotNull(e);
}
}
private void verifyGuidRemovalOnImportComplete(ZipSource zipSource, String guid) {
AtlasEntity e = zipSource.getByGuid(guid);
Assert.assertNotNull(e);
zipSource.onImportComplete(guid);
e = zipSource.getByGuid(guid);
Assert.assertNull(e);
}
private void attemptToFetchNonExistentGuid_ReturnsNull(ZipSource zipSource, String guid) {
AtlasEntity e = zipSource.getByGuid(guid);
Assert.assertNull(e);
}
private void useCreationOrderToFetchEntitiesWithExtInfo(ZipSource zipSource, List<String> creationOrder) throws AtlasBaseException {
for (String guid : creationOrder) {
AtlasEntity.AtlasEntityExtInfo e = zipSource.getEntityWithExtInfo(guid);
Assert.assertNotNull(e);
}
}
@Test(dataProvider = "zipFileStocks")
public void iteratorBehavior_WorksAsExpected(ZipSource zipSource) throws IOException, AtlasBaseException {
Assert.assertTrue(zipSource.hasNext());
List<String> creationOrder = zipSource.getCreationOrder();
for (int i = 0; i < creationOrder.size(); i++) {
AtlasEntity e = zipSource.next();
Assert.assertEquals(e.getGuid(), creationOrder.get(i));
}
Assert.assertFalse(zipSource.hasNext());
}
}
@@ -31,6 +31,10 @@ import org.apache.atlas.model.impexp.AtlasExportResult;
import org.apache.atlas.model.impexp.AtlasImportRequest;
import org.apache.atlas.model.impexp.AtlasImportResult;
import org.apache.atlas.model.metrics.AtlasMetrics;
import org.apache.atlas.repository.impexp.ExportService;
import org.apache.atlas.repository.impexp.ImportService;
import org.apache.atlas.repository.impexp.ZipSink;
import org.apache.atlas.repository.impexp.ZipSource;
import org.apache.atlas.repository.store.graph.AtlasEntityStore;
import org.apache.atlas.services.MetricsService;
import org.apache.atlas.store.AtlasTypeDefStore;
@@ -53,7 +57,13 @@ import org.springframework.security.core.context.SecurityContextHolder;
import javax.inject.Singleton;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
import javax.ws.rs.*;
import javax.ws.rs.Consumes;
import javax.ws.rs.GET;
import javax.ws.rs.POST;
import javax.ws.rs.Path;
import javax.ws.rs.Produces;
import javax.ws.rs.QueryParam;
import javax.ws.rs.WebApplicationException;
import javax.ws.rs.core.Context;
import javax.ws.rs.core.MediaType;
import javax.ws.rs.core.Response;
@@ -359,7 +369,7 @@ public class AdminResource {
try {
AtlasImportRequest request = new AtlasImportRequest(Servlets.getParameterMap(httpServletRequest));
ByteArrayInputStream inputStream = new ByteArrayInputStream(bytes);
ImportService importService = new ImportService(this.typesDefStore, this.entityStore, this.typeRegistry);
ImportService importService = new ImportService(this.typesDefStore, this.entityStore, this.typeRegistry);
ZipSource zipSource = new ZipSource(inputStream);
......