Commit 1dcc3073 by David Radley Committed by Madhan Neethiraj

ATLAS-2112: move the Atlas models to subfolders

parent 9d38c6a9
...@@ -368,10 +368,10 @@ ...@@ -368,10 +368,10 @@
<goal>copy-resources</goal> <goal>copy-resources</goal>
</goals> </goals>
<configuration> <configuration>
<outputDirectory>${basedir}/target/models</outputDirectory> <outputDirectory>${basedir}/target/models/1000-Hadoop</outputDirectory>
<resources> <resources>
<resource> <resource>
<directory>${basedir}/../models</directory> <directory>${basedir}/../models/1000-Hadoop</directory>
<filtering>true</filtering> <filtering>true</filtering>
</resource> </resource>
</resources> </resources>
......
...@@ -417,10 +417,10 @@ ...@@ -417,10 +417,10 @@
<goal>copy-resources</goal> <goal>copy-resources</goal>
</goals> </goals>
<configuration> <configuration>
<outputDirectory>${basedir}/target/models</outputDirectory> <outputDirectory>${basedir}/target/models/1000-Hadoop</outputDirectory>
<resources> <resources>
<resource> <resource>
<directory>${basedir}/../models</directory> <directory>${basedir}/../models/1000-Hadoop</directory>
<filtering>true</filtering> <filtering>true</filtering>
</resource> </resource>
</resources> </resources>
......
...@@ -429,10 +429,10 @@ ...@@ -429,10 +429,10 @@
<goal>copy-resources</goal> <goal>copy-resources</goal>
</goals> </goals>
<configuration> <configuration>
<outputDirectory>${basedir}/target/models</outputDirectory> <outputDirectory>${basedir}/target/models/1000-Hadoop</outputDirectory>
<resources> <resources>
<resource> <resource>
<directory>${basedir}/../models</directory> <directory>${basedir}/../models/1000-Hadoop</directory>
<filtering>true</filtering> <filtering>true</filtering>
</resource> </resource>
</resources> </resources>
......
...@@ -501,10 +501,10 @@ ...@@ -501,10 +501,10 @@
<goal>copy-resources</goal> <goal>copy-resources</goal>
</goals> </goals>
<configuration> <configuration>
<outputDirectory>${basedir}/target/models</outputDirectory> <outputDirectory>${basedir}/target/models/1000-Hadoop</outputDirectory>
<resources> <resources>
<resource> <resource>
<directory>${basedir}/../models</directory> <directory>${basedir}/../models/1000-Hadoop</directory>
<filtering>true</filtering> <filtering>true</filtering>
</resource> </resource>
</resources> </resources>
......
...@@ -69,6 +69,7 @@ import static org.codehaus.jackson.annotate.JsonAutoDetect.Visibility.PUBLIC_ONL ...@@ -69,6 +69,7 @@ import static org.codehaus.jackson.annotate.JsonAutoDetect.Visibility.PUBLIC_ONL
@Service @Service
public class AtlasTypeDefStoreInitializer implements ActiveStateChangeHandler { public class AtlasTypeDefStoreInitializer implements ActiveStateChangeHandler {
private static final Logger LOG = LoggerFactory.getLogger(AtlasTypeDefStoreInitializer.class); private static final Logger LOG = LoggerFactory.getLogger(AtlasTypeDefStoreInitializer.class);
public static final String PATCHES_FOLDER_NAME = "patches";
private final AtlasTypeDefStore atlasTypeDefStore; private final AtlasTypeDefStore atlasTypeDefStore;
private final AtlasTypeRegistry atlasTypeRegistry; private final AtlasTypeRegistry atlasTypeRegistry;
...@@ -97,28 +98,63 @@ public class AtlasTypeDefStoreInitializer implements ActiveStateChangeHandler { ...@@ -97,28 +98,63 @@ public class AtlasTypeDefStoreInitializer implements ActiveStateChangeHandler {
LOG.info("<== AtlasTypeDefStoreInitializer.init()"); LOG.info("<== AtlasTypeDefStoreInitializer.init()");
} }
/**
* This method looks for folders in alphabetical order in the models directory. It loads each of these folders and their associated patches in order.
* It then loads any models in the top level folder and its patches.
*
* This allows models to be grouped into folders to help manageability.
*
*/
private void loadBootstrapTypeDefs() { private void loadBootstrapTypeDefs() {
LOG.info("==> AtlasTypeDefStoreInitializer.loadBootstrapTypeDefs()"); LOG.info("==> AtlasTypeDefStoreInitializer.loadBootstrapTypeDefs()");
String atlasHomeDir = System.getProperty("atlas.home"); String atlasHomeDir = System.getProperty("atlas.home");
String typesDirName = (StringUtils.isEmpty(atlasHomeDir) ? "." : atlasHomeDir) + File.separator + "models"; String modelsDirName = (StringUtils.isEmpty(atlasHomeDir) ? "." : atlasHomeDir) + File.separator + "models";
File typesDir = new File(typesDirName);
File[] typeDefFiles = typesDir.exists() ? typesDir.listFiles() : null;
if (typeDefFiles == null || typeDefFiles.length == 0) { if (modelsDirName == null || modelsDirName.length() == 0) {
LOG.info("Types directory {} does not exist or not readable or has no typedef files", typesDirName); LOG.info("Types directory {} does not exist or not readable or has no typedef files", modelsDirName);
} else {
// look for folders we need to load models from
File topModeltypesDir = new File(modelsDirName);
File[] modelsDirContents = topModeltypesDir.exists() ? topModeltypesDir.listFiles() : null;
Arrays.sort(modelsDirContents);
for (File folder : modelsDirContents) {
if (folder.isFile()) {
// ignore files
continue;
} else if (!folder.getName().equals(PATCHES_FOLDER_NAME)){
// load the models alphabetically in the subfolders apart from patches
loadModelsInFolder(folder);
}
}
return; // load any files in the top models folder and any associated patches.
loadModelsInFolder(topModeltypesDir);
}
LOG.info("<== AtlasTypeDefStoreInitializer.loadBootstrapTypeDefs()");
} }
/**
* Load all the model files in the supplied folder followed by the contents of the patches folder.
* @param typesDir
*/
private void loadModelsInFolder(File typesDir) {
LOG.info("==> AtlasTypeDefStoreInitializer({})", typesDir);
String typesDirName = typesDir.getName();
File[] typeDefFiles = typesDir.exists() ? typesDir.listFiles() : null;
if (typeDefFiles == null || typeDefFiles.length == 0) {
LOG.info("Types directory {} does not exist or not readable or has no typedef files", typesDirName );
} else {
// sort the files by filename // sort the files by filename
Arrays.sort(typeDefFiles); Arrays.sort(typeDefFiles);
for (File typeDefFile : typeDefFiles) { for (File typeDefFile : typeDefFiles) {
if (!typeDefFile.isFile()) { if (typeDefFile.isFile()) {
continue;
}
try { try {
String jsonStr = new String(Files.readAllBytes(typeDefFile.toPath()), StandardCharsets.UTF_8); String jsonStr = new String(Files.readAllBytes(typeDefFile.toPath()), StandardCharsets.UTF_8);
AtlasTypesDef typesDef = AtlasType.fromJson(jsonStr, AtlasTypesDef.class); AtlasTypesDef typesDef = AtlasType.fromJson(jsonStr, AtlasTypesDef.class);
...@@ -144,10 +180,11 @@ public class AtlasTypeDefStoreInitializer implements ActiveStateChangeHandler { ...@@ -144,10 +180,11 @@ public class AtlasTypeDefStoreInitializer implements ActiveStateChangeHandler {
LOG.error("error while registering types in file {}", typeDefFile.getAbsolutePath(), t); LOG.error("error while registering types in file {}", typeDefFile.getAbsolutePath(), t);
} }
} }
}
applyTypePatches(typesDirName); applyTypePatches(typesDir.getPath());
}
LOG.info("<== AtlasTypeDefStoreInitializer.loadBootstrapTypeDefs()"); LOG.info("<== AtlasTypeDefStoreInitializer({})", typesDir);
} }
public static AtlasTypesDef getTypesToCreate(AtlasTypesDef typesDef, AtlasTypeRegistry typeRegistry) { public static AtlasTypesDef getTypesToCreate(AtlasTypesDef typesDef, AtlasTypeRegistry typeRegistry) {
...@@ -327,15 +364,14 @@ public class AtlasTypeDefStoreInitializer implements ActiveStateChangeHandler { ...@@ -327,15 +364,14 @@ public class AtlasTypeDefStoreInitializer implements ActiveStateChangeHandler {
} }
private void applyTypePatches(String typesDirName) { private void applyTypePatches(String typesDirName) {
String typePatchesDirName = typesDirName + File.separator + "patches"; String typePatchesDirName = typesDirName + File.separator + PATCHES_FOLDER_NAME;
File typePatchesDir = new File(typePatchesDirName); File typePatchesDir = new File(typePatchesDirName);
File[] typePatchFiles = typePatchesDir.exists() ? typePatchesDir.listFiles() : null; File[] typePatchFiles = typePatchesDir.exists() ? typePatchesDir.listFiles() : null;
if (typePatchFiles == null || typePatchFiles.length == 0) { if (typePatchFiles == null || typePatchFiles.length == 0) {
LOG.info("Type patches directory {} does not exist or not readable or has no patches", typePatchesDirName); LOG.info("Type patches directory {} does not exist or not readable or has no patches", typePatchesDirName);
} else {
return; LOG.info("Type patches directory {} is being processed", typePatchesDirName);
}
// sort the files by filename // sort the files by filename
Arrays.sort(typePatchFiles); Arrays.sort(typePatchFiles);
...@@ -355,9 +391,7 @@ public class AtlasTypeDefStoreInitializer implements ActiveStateChangeHandler { ...@@ -355,9 +391,7 @@ public class AtlasTypeDefStoreInitializer implements ActiveStateChangeHandler {
} }
for (File typePatchFile : typePatchFiles) { for (File typePatchFile : typePatchFiles) {
if (!typePatchFile.isFile()) { if (typePatchFile.isFile()) {
continue;
}
LOG.info("Applying patches in file {}", typePatchFile.getAbsolutePath()); LOG.info("Applying patches in file {}", typePatchFile.getAbsolutePath());
...@@ -392,6 +426,8 @@ public class AtlasTypeDefStoreInitializer implements ActiveStateChangeHandler { ...@@ -392,6 +426,8 @@ public class AtlasTypeDefStoreInitializer implements ActiveStateChangeHandler {
} }
} }
} }
}
}
/** /**
* typedef patch details * typedef patch details
......
...@@ -68,7 +68,7 @@ public class ImportServiceTest { ...@@ -68,7 +68,7 @@ public class ImportServiceTest {
@Test(dataProvider = "sales") @Test(dataProvider = "sales")
public void importDB1(ZipSource zipSource) throws AtlasBaseException, IOException { public void importDB1(ZipSource zipSource) throws AtlasBaseException, IOException {
loadModelFromJson("0010-base_model.json", typeDefStore, typeRegistry); loadModelFromJson("0000-Area0/0010-base_model.json", typeDefStore, typeRegistry);
runAndVerifyQuickStart_v1_Import(new ImportService(typeDefStore, entityStore, typeRegistry), zipSource); runAndVerifyQuickStart_v1_Import(new ImportService(typeDefStore, entityStore, typeRegistry), zipSource);
} }
...@@ -79,7 +79,7 @@ public class ImportServiceTest { ...@@ -79,7 +79,7 @@ public class ImportServiceTest {
@Test(dataProvider = "reporting") @Test(dataProvider = "reporting")
public void importDB2(ZipSource zipSource) throws AtlasBaseException, IOException { public void importDB2(ZipSource zipSource) throws AtlasBaseException, IOException {
loadModelFromJson("0010-base_model.json", typeDefStore, typeRegistry); loadModelFromJson("0000-Area0/0010-base_model.json", typeDefStore, typeRegistry);
runAndVerifyQuickStart_v1_Import(new ImportService(typeDefStore, entityStore, typeRegistry), zipSource); runAndVerifyQuickStart_v1_Import(new ImportService(typeDefStore, entityStore, typeRegistry), zipSource);
} }
...@@ -90,7 +90,7 @@ public class ImportServiceTest { ...@@ -90,7 +90,7 @@ public class ImportServiceTest {
@Test(dataProvider = "logging") @Test(dataProvider = "logging")
public void importDB3(ZipSource zipSource) throws AtlasBaseException, IOException { public void importDB3(ZipSource zipSource) throws AtlasBaseException, IOException {
loadModelFromJson("0010-base_model.json", typeDefStore, typeRegistry); loadModelFromJson("0000-Area0/0010-base_model.json", typeDefStore, typeRegistry);
runAndVerifyQuickStart_v1_Import(new ImportService(typeDefStore, entityStore, typeRegistry), zipSource); runAndVerifyQuickStart_v1_Import(new ImportService(typeDefStore, entityStore, typeRegistry), zipSource);
} }
...@@ -101,7 +101,7 @@ public class ImportServiceTest { ...@@ -101,7 +101,7 @@ public class ImportServiceTest {
@Test(dataProvider = "salesNewTypeAttrs", dependsOnMethods = "importDB1") @Test(dataProvider = "salesNewTypeAttrs", dependsOnMethods = "importDB1")
public void importDB4(ZipSource zipSource) throws AtlasBaseException, IOException { public void importDB4(ZipSource zipSource) throws AtlasBaseException, IOException {
loadModelFromJson("0010-base_model.json", typeDefStore, typeRegistry); loadModelFromJson("0000-Area0/0010-base_model.json", typeDefStore, typeRegistry);
runImportWithParameters(new ImportService(typeDefStore, entityStore, typeRegistry), getDefaultImportRequest(), zipSource); runImportWithParameters(new ImportService(typeDefStore, entityStore, typeRegistry), getDefaultImportRequest(), zipSource);
} }
...@@ -150,8 +150,8 @@ public class ImportServiceTest { ...@@ -150,8 +150,8 @@ public class ImportServiceTest {
@Test(dataProvider = "ctas") @Test(dataProvider = "ctas")
public void importCTAS(ZipSource zipSource) throws IOException, AtlasBaseException { public void importCTAS(ZipSource zipSource) throws IOException, AtlasBaseException {
loadModelFromJson("0010-base_model.json", typeDefStore, typeRegistry); loadModelFromJson("0000-Area0/0010-base_model.json", typeDefStore, typeRegistry);
loadModelFromJson("1030-hive_model.json", typeDefStore, typeRegistry); loadModelFromJson("1000-Hadoop/1030-hive_model.json", typeDefStore, typeRegistry);
runImportWithNoParameters(getImportService(), zipSource); runImportWithNoParameters(getImportService(), zipSource);
} }
......
...@@ -688,10 +688,10 @@ ...@@ -688,10 +688,10 @@
<goal>copy-resources</goal> <goal>copy-resources</goal>
</goals> </goals>
<configuration> <configuration>
<outputDirectory>${basedir}/target/models</outputDirectory> <outputDirectory>${basedir}/target/models/1000-Hadoop</outputDirectory>
<resources> <resources>
<resource> <resource>
<directory>${basedir}/../addons/models</directory> <directory>${basedir}/../addons/models/1000-Hadoop</directory>
<filtering>true</filtering> <filtering>true</filtering>
</resource> </resource>
</resources> </resources>
......
Markdown is supported
0% or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment