Commit 9368c8a0 by Ashutosh Mestry, committed by Madhan Neethiraj

ATLAS-2460: migration-import implementation (#2 - UT refactoring)

parent 3ded3ee2
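For orientation, a minimal sketch of what a per-dataset migration test looks like after this refactoring, assuming the MigrationBaseAsserts helpers introduced in the last hunk below. The class name SampleDbMigrationTest and the dataset directory "sample_db" are hypothetical; the real instances in this commit are HiveParititionTest ("parts_db") and HiveStocksTest ("stocks_db") in the diffs that follow.

package org.apache.atlas.repository.migration;

import com.google.inject.Inject;
import org.apache.atlas.TestModules;
import org.apache.atlas.exception.AtlasBaseException;
import org.apache.atlas.repository.graphdb.AtlasGraph;
import org.testng.annotations.Guice;
import org.testng.annotations.Test;

import java.io.IOException;

@Guice(modules = TestModules.TestOnlyModule.class)
public class SampleDbMigrationTest extends MigrationBaseAsserts {   // hypothetical test class
    @Inject
    public SampleDbMigrationTest(AtlasGraph graph) {
        super(graph);
    }

    @Test
    public void migrateSampleDb() throws AtlasBaseException, IOException {
        // Loads the base and hive type models, then runs DataMigrationService.FileImporter
        // against the named test-resource directory; see MigrationBaseAsserts below.
        runFileImporter("sample_db");   // hypothetical dataset directory

        // Assertion helpers are inherited from MigrationBaseAsserts; counts here are illustrative.
        assertHiveVertices(1, 1, 7);
    }
}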
@@ -21,67 +21,29 @@ package org.apache.atlas.repository.migration;
 import com.google.inject.Inject;
 import org.apache.atlas.TestModules;
 import org.apache.atlas.exception.AtlasBaseException;
-import org.apache.atlas.repository.graph.AtlasGraphProvider;
-import org.apache.atlas.repository.graph.GraphBackedSearchIndexer;
 import org.apache.atlas.repository.graph.GraphHelper;
 import org.apache.atlas.repository.graphdb.AtlasEdgeDirection;
 import org.apache.atlas.repository.graphdb.AtlasGraph;
 import org.apache.atlas.repository.graphdb.AtlasVertex;
-import org.apache.atlas.repository.store.bootstrap.AtlasTypeDefStoreInitializer;
-import org.apache.atlas.runner.LocalSolrRunner;
-import org.apache.atlas.store.AtlasTypeDefStore;
-import org.apache.atlas.type.AtlasTypeRegistry;
-import org.apache.atlas.utils.TestResourceFileUtils;
-import org.testng.annotations.AfterClass;
 import org.testng.annotations.Guice;
 import org.testng.annotations.Test;
 import java.io.IOException;
 import java.util.List;
-import static org.apache.atlas.graph.GraphSandboxUtil.useLocalSolr;
-import static org.apache.atlas.repository.impexp.ZipFileResourceTestUtils.loadModelFromJson;
 import static org.testng.Assert.assertEquals;
 @Guice(modules = TestModules.TestOnlyModule.class)
-public class HiveParititionIT extends MigrationBaseAsserts {
-    @Inject
-    private AtlasTypeDefStore typeDefStore;
-    @Inject
-    private AtlasTypeRegistry typeRegistry;
-    @Inject
-    private AtlasTypeDefStoreInitializer storeInitializer;
-    @Inject
-    private GraphBackedSearchIndexer indexer;
+public class HiveParititionTest extends MigrationBaseAsserts {
     @Inject
-    public HiveParititionIT(AtlasGraph graph) {
+    public HiveParititionTest(AtlasGraph graph) {
         super(graph);
     }
-    @AfterClass
-    public void clear() throws Exception {
-        AtlasGraphProvider.cleanup();
-        if (useLocalSolr()) {
-            LocalSolrRunner.stop();
-        }
-    }
     @Test
     public void fileImporterTest() throws IOException, AtlasBaseException {
-        loadModelFromJson("0000-Area0/0010-base_model.json", typeDefStore, typeRegistry);
-        loadModelFromJson("1000-Hadoop/1030-hive_model.json", typeDefStore, typeRegistry);
-        String directoryName = TestResourceFileUtils.getDirectory("parts_db");
-        DataMigrationService.FileImporter fi = new DataMigrationService.FileImporter(typeDefStore, typeRegistry,
-                storeInitializer, directoryName, indexer);
-        fi.run();
+        runFileImporter("parts_db");
         assertPartitionKeyProperty(getVertex("hive_table", "t1"), 1);
         assertPartitionKeyProperty(getVertex("hive_table", "tv1"), 1);
...
@@ -18,65 +18,26 @@
 package org.apache.atlas.repository.migration;
 import com.google.inject.Inject;
-import org.apache.atlas.RequestContextV1;
 import org.apache.atlas.TestModules;
-import org.apache.atlas.TestUtilsV2;
 import org.apache.atlas.exception.AtlasBaseException;
-import org.apache.atlas.repository.graph.AtlasGraphProvider;
 import org.apache.atlas.repository.graphdb.AtlasEdgeDirection;
 import org.apache.atlas.repository.graphdb.AtlasGraph;
-import org.apache.atlas.runner.LocalSolrRunner;
-import org.apache.atlas.store.AtlasTypeDefStore;
-import org.apache.atlas.type.AtlasTypeRegistry;
-import org.apache.atlas.utils.TestResourceFileUtils;
-import org.testng.ITestContext;
-import org.testng.annotations.*;
+import org.testng.annotations.Guice;
+import org.testng.annotations.Test;
-import java.io.FileInputStream;
 import java.io.IOException;
-import static org.apache.atlas.graph.GraphSandboxUtil.useLocalSolr;
-import static org.apache.atlas.repository.impexp.ZipFileResourceTestUtils.loadModelFromJson;
 @Guice(modules = TestModules.TestOnlyModule.class)
-public class HiveStocksIT extends MigrationBaseAsserts {
-    @Inject
-    private AtlasTypeDefStore typeDefStore;
-    @Inject
-    private AtlasTypeRegistry typeRegistry;
+public class HiveStocksTest extends MigrationBaseAsserts {
     @Inject
-    public HiveStocksIT(AtlasGraph graph) {
+    public HiveStocksTest(AtlasGraph graph) {
         super(graph);
     }
-    @BeforeTest
-    public void setupTest() {
-        RequestContextV1.clear();
-        RequestContextV1.get().setUser(TestUtilsV2.TEST_USER, null);
-    }
-    @AfterClass
-    public void clear() throws Exception {
-        AtlasGraphProvider.cleanup();
-        if (useLocalSolr()) {
-            LocalSolrRunner.stop();
-        }
-    }
-    @DataProvider(name = "stocks-2-branch08-tag")
-    public static Object[][] getStocksTag(ITestContext context) throws IOException {
-        return new Object[][]{{ TestResourceFileUtils.getFileInputStream("stocks-2-0.8-extended-tag.json") }};
-    }
-    @Test(dataProvider = "stocks-2-branch08-tag")
-    public void migrateFromEarlierVersionWithTag(FileInputStream fs) throws AtlasBaseException, IOException {
-        loadModelFromJson("0000-Area0/0010-base_model.json", typeDefStore, typeRegistry);
-        loadModelFromJson("1000-Hadoop/1030-hive_model.json", typeDefStore, typeRegistry);
-        typeDefStore.loadLegacyData(RelationshipCacheGenerator.get(typeRegistry), fs);
+    @Test
+    public void migrateStocks() throws AtlasBaseException, IOException {
+        runFileImporter("stocks_db");
         assertHiveVertices(1, 1, 7);
         assertTypeCountNameGuid("hive_db", 1, "stocks", "4e13b36b-9c54-4616-9001-1058221165d0");
@@ -97,6 +58,6 @@ public class HiveStocksIT extends MigrationBaseAsserts {
         assertEdges(getVertex("hive_table", "stocks_daily").getEdges(AtlasEdgeDirection.OUT).iterator(), 1, 1, "hive_db_tables");
         assertEdges(getVertex("hive_column", "high").getEdges(AtlasEdgeDirection.OUT).iterator(), 1,1, "hive_table_columns");
-        assertMigrationStatus(164);
+        assertMigrationStatus(187);
     }
 }
@@ -18,13 +18,26 @@
 package org.apache.atlas.repository.migration;
+import com.google.inject.Inject;
+import org.apache.atlas.exception.AtlasBaseException;
+import org.apache.atlas.repository.graph.AtlasGraphProvider;
+import org.apache.atlas.repository.graph.GraphBackedSearchIndexer;
 import org.apache.atlas.repository.graph.GraphHelper;
 import org.apache.atlas.repository.graphdb.*;
+import org.apache.atlas.repository.store.bootstrap.AtlasTypeDefStoreInitializer;
+import org.apache.atlas.runner.LocalSolrRunner;
+import org.apache.atlas.store.AtlasTypeDefStore;
+import org.apache.atlas.type.AtlasTypeRegistry;
+import org.apache.atlas.utils.TestResourceFileUtils;
 import org.apache.commons.lang.StringUtils;
+import org.testng.annotations.AfterClass;
+import java.io.IOException;
 import java.util.Iterator;
 import java.util.List;
+import static org.apache.atlas.graph.GraphSandboxUtil.useLocalSolr;
+import static org.apache.atlas.repository.impexp.ZipFileResourceTestUtils.loadModelFromJson;
 import static org.testng.Assert.assertEquals;
 import static org.testng.Assert.assertNotNull;
 import static org.testng.AssertJUnit.assertTrue;
@@ -34,12 +47,47 @@ public class MigrationBaseAsserts {
     private final String TYPE_NAME_PROPERTY = "__typeName";
     private final String R_GUID_PROPERTY_NAME = "_r__guid";
+    @Inject
+    private AtlasTypeDefStore typeDefStore;
+    @Inject
+    private AtlasTypeRegistry typeRegistry;
+    @Inject
+    private AtlasTypeDefStoreInitializer storeInitializer;
+    @Inject
+    private GraphBackedSearchIndexer indexer;
     protected AtlasGraph graph;
     protected MigrationBaseAsserts(AtlasGraph graph) {
         this.graph = graph;
     }
+    @AfterClass
+    public void clear() throws Exception {
+        AtlasGraphProvider.cleanup();
+        if (useLocalSolr()) {
+            LocalSolrRunner.stop();
+        }
+    }
+    private void loadTypesFromJson() throws IOException, AtlasBaseException {
+        loadModelFromJson("0000-Area0/0010-base_model.json", typeDefStore, typeRegistry);
+        loadModelFromJson("1000-Hadoop/1030-hive_model.json", typeDefStore, typeRegistry);
+    }
+    protected void runFileImporter(String directoryToImport) throws IOException, AtlasBaseException {
+        loadTypesFromJson();
+        String directoryName = TestResourceFileUtils.getDirectory(directoryToImport);
+        DataMigrationService.FileImporter fi = new DataMigrationService.FileImporter(typeDefStore, typeRegistry,
+                storeInitializer, directoryName, indexer);
+        fi.run();
+    }
     protected void assertHiveVertices(int dbCount, int tableCount, int columnCount) {
         int i = 0;
...
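The rest of MigrationBaseAsserts is collapsed above, so the assertion helpers that the two tests call are not visible in this commit view. As rough orientation only, their shapes as inferred from the call sites in the diffs might look like the outline below; the parameter names and exact semantics are assumptions, not taken from the source.

// Hypothetical outline of the MigrationBaseAsserts helpers, inferred from call sites above.
// Only the call sites in this commit are authoritative; names and meanings are guesses.
import java.util.Iterator;

import org.apache.atlas.repository.graphdb.AtlasEdge;
import org.apache.atlas.repository.graphdb.AtlasVertex;

abstract class MigrationAssertsOutline {
    // getVertex("hive_table", "t1"): look up a vertex by type name and entity name
    protected abstract AtlasVertex getVertex(String typeName, String name);

    // assertHiveVertices(1, 1, 7): expected hive_db / hive_table / hive_column vertex counts
    protected abstract void assertHiveVertices(int dbCount, int tableCount, int columnCount);

    // assertTypeCountNameGuid("hive_db", 1, "stocks", "<guid>"): instance count, name and GUID for a type
    protected abstract void assertTypeCountNameGuid(String typeName, int count, String name, String guid);

    // assertEdges(iterator, 1, 1, "hive_db_tables"): edge counts and the expected edge label
    protected abstract void assertEdges(Iterator<AtlasEdge> edges, int count, int expected, String edgeLabel);

    // assertMigrationStatus(187): expected element count recorded by the migration status
    protected abstract void assertMigrationStatus(int expectedCount);

    // assertPartitionKeyProperty(vertex, 1): expected partition-key count on a hive_table vertex
    protected abstract void assertPartitionKeyProperty(AtlasVertex vertex, int expectedCount);
}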