Commit 1dcc3073 by David Radley, committed by Madhan Neethiraj

ATLAS-2112: move the Atlas models to subfolders

parent 9d38c6a9
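
This commit moves the bundled Atlas model JSON files from a single flat models directory into numbered subfolders, and teaches the bootstrap type loader to walk those subfolders in alphabetical order, applying each folder's patches after its models and loading the top-level folder last. For orientation, the layout implied by the paths in this diff looks roughly like the following (the folder contents shown are illustrative, not exhaustive):

    models/
        0000-Area0/
            0010-base_model.json
            patches/                 (applied after this folder's models)
        1000-Hadoop/
            1030-hive_model.json
            patches/
        patches/                     (top-level patches, applied last)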
@@ -368,10 +368,10 @@
                 <goal>copy-resources</goal>
               </goals>
               <configuration>
-                <outputDirectory>${basedir}/target/models</outputDirectory>
+                <outputDirectory>${basedir}/target/models/1000-Hadoop</outputDirectory>
                 <resources>
                   <resource>
-                    <directory>${basedir}/../models</directory>
+                    <directory>${basedir}/../models/1000-Hadoop</directory>
                     <filtering>true</filtering>
                   </resource>
                 </resources>
...
@@ -417,10 +417,10 @@
                 <goal>copy-resources</goal>
               </goals>
               <configuration>
-                <outputDirectory>${basedir}/target/models</outputDirectory>
+                <outputDirectory>${basedir}/target/models/1000-Hadoop</outputDirectory>
                 <resources>
                   <resource>
-                    <directory>${basedir}/../models</directory>
+                    <directory>${basedir}/../models/1000-Hadoop</directory>
                     <filtering>true</filtering>
                   </resource>
                 </resources>
...
@@ -429,10 +429,10 @@
                 <goal>copy-resources</goal>
               </goals>
               <configuration>
-                <outputDirectory>${basedir}/target/models</outputDirectory>
+                <outputDirectory>${basedir}/target/models/1000-Hadoop</outputDirectory>
                 <resources>
                   <resource>
-                    <directory>${basedir}/../models</directory>
+                    <directory>${basedir}/../models/1000-Hadoop</directory>
                     <filtering>true</filtering>
                   </resource>
                 </resources>
...
@@ -501,10 +501,10 @@
                 <goal>copy-resources</goal>
               </goals>
               <configuration>
-                <outputDirectory>${basedir}/target/models</outputDirectory>
+                <outputDirectory>${basedir}/target/models/1000-Hadoop</outputDirectory>
                 <resources>
                   <resource>
-                    <directory>${basedir}/../models</directory>
+                    <directory>${basedir}/../models/1000-Hadoop</directory>
                     <filtering>true</filtering>
                   </resource>
                 </resources>
...
@@ -69,6 +69,7 @@ import static org.codehaus.jackson.annotate.JsonAutoDetect.Visibility.PUBLIC_ONL
 @Service
 public class AtlasTypeDefStoreInitializer implements ActiveStateChangeHandler {
     private static final Logger LOG = LoggerFactory.getLogger(AtlasTypeDefStoreInitializer.class);
+    public static final String PATCHES_FOLDER_NAME = "patches";
 
     private final AtlasTypeDefStore atlasTypeDefStore;
     private final AtlasTypeRegistry atlasTypeRegistry;
@@ -97,57 +98,93 @@ public class AtlasTypeDefStoreInitializer implements ActiveStateChangeHandler {
         LOG.info("<== AtlasTypeDefStoreInitializer.init()");
     }
 
+    /**
+     * This method looks for folders in the models directory and loads them in alphabetical order, each folder followed by its associated patches.
+     * It then loads any models in the top-level folder, followed by its patches.
+     *
+     * This allows models to be grouped into folders to improve manageability.
+     */
     private void loadBootstrapTypeDefs() {
         LOG.info("==> AtlasTypeDefStoreInitializer.loadBootstrapTypeDefs()");
 
         String atlasHomeDir = System.getProperty("atlas.home");
-        String typesDirName = (StringUtils.isEmpty(atlasHomeDir) ? "." : atlasHomeDir) + File.separator + "models";
-        File typesDir = new File(typesDirName);
-        File[] typeDefFiles = typesDir.exists() ? typesDir.listFiles() : null;
+        String modelsDirName = (StringUtils.isEmpty(atlasHomeDir) ? "." : atlasHomeDir) + File.separator + "models";
 
-        if (typeDefFiles == null || typeDefFiles.length == 0) {
-            LOG.info("Types directory {} does not exist or not readable or has no typedef files", typesDirName);
+        if (modelsDirName == null || modelsDirName.length() == 0) {
+            LOG.info("Types directory {} does not exist or not readable or has no typedef files", modelsDirName);
+        } else {
+            // look for folders we need to load models from
+            File topModeltypesDir = new File(modelsDirName);
+            File[] modelsDirContents = topModeltypesDir.exists() ? topModeltypesDir.listFiles() : null;
 
-            return;
-        }
+            Arrays.sort(modelsDirContents);
 
-        // sort the files by filename
-        Arrays.sort(typeDefFiles);
+            for (File folder : modelsDirContents) {
+                if (folder.isFile()) {
+                    // ignore files
+                    continue;
+                } else if (!folder.getName().equals(PATCHES_FOLDER_NAME)) {
+                    // load the models in the subfolders alphabetically, apart from patches
+                    loadModelsInFolder(folder);
+                }
+            }
 
-        for (File typeDefFile : typeDefFiles) {
-            if (!typeDefFile.isFile()) {
-                continue;
-            }
+            // load any files in the top-level models folder and any associated patches
+            loadModelsInFolder(topModeltypesDir);
+        }
 
-            try {
-                String jsonStr = new String(Files.readAllBytes(typeDefFile.toPath()), StandardCharsets.UTF_8);
-                AtlasTypesDef typesDef = AtlasType.fromJson(jsonStr, AtlasTypesDef.class);
+        LOG.info("<== AtlasTypeDefStoreInitializer.loadBootstrapTypeDefs()");
+    }
 
-                if (typesDef == null || typesDef.isEmpty()) {
-                    LOG.info("No type in file {}", typeDefFile.getAbsolutePath());
+    /**
+     * Load all the model files in the supplied folder, followed by the contents of its patches folder.
+     * @param typesDir the folder to load model files from
+     */
+    private void loadModelsInFolder(File typesDir) {
+        LOG.info("==> AtlasTypeDefStoreInitializer({})", typesDir);
 
-                    continue;
-                }
+        String typesDirName = typesDir.getName();
+        File[] typeDefFiles = typesDir.exists() ? typesDir.listFiles() : null;
 
-                AtlasTypesDef typesToCreate = getTypesToCreate(typesDef, atlasTypeRegistry);
-                AtlasTypesDef typesToUpdate = getTypesToUpdate(typesDef, atlasTypeRegistry);
+        if (typeDefFiles == null || typeDefFiles.length == 0) {
+            LOG.info("Types directory {} does not exist or not readable or has no typedef files", typesDirName);
+        } else {
+            // sort the files by filename
+            Arrays.sort(typeDefFiles);
 
-                if (!typesToCreate.isEmpty() || !typesToUpdate.isEmpty()) {
-                    atlasTypeDefStore.createUpdateTypesDef(typesToCreate, typesToUpdate);
+            for (File typeDefFile : typeDefFiles) {
+                if (typeDefFile.isFile()) {
+                    try {
+                        String jsonStr = new String(Files.readAllBytes(typeDefFile.toPath()), StandardCharsets.UTF_8);
+                        AtlasTypesDef typesDef = AtlasType.fromJson(jsonStr, AtlasTypesDef.class);
 
-                    LOG.info("Created/Updated types defined in file {}", typeDefFile.getAbsolutePath());
-                } else {
-                    LOG.info("No new type in file {}", typeDefFile.getAbsolutePath());
+                        if (typesDef == null || typesDef.isEmpty()) {
+                            LOG.info("No type in file {}", typeDefFile.getAbsolutePath());
+                            continue;
+                        }
+
+                        AtlasTypesDef typesToCreate = getTypesToCreate(typesDef, atlasTypeRegistry);
+                        AtlasTypesDef typesToUpdate = getTypesToUpdate(typesDef, atlasTypeRegistry);
+
+                        if (!typesToCreate.isEmpty() || !typesToUpdate.isEmpty()) {
+                            atlasTypeDefStore.createUpdateTypesDef(typesToCreate, typesToUpdate);
+
+                            LOG.info("Created/Updated types defined in file {}", typeDefFile.getAbsolutePath());
+                        } else {
+                            LOG.info("No new type in file {}", typeDefFile.getAbsolutePath());
+                        }
+                    } catch (Throwable t) {
+                        LOG.error("error while registering types in file {}", typeDefFile.getAbsolutePath(), t);
+                    }
                 }
-            } catch (Throwable t) {
-                LOG.error("error while registering types in file {}", typeDefFile.getAbsolutePath(), t);
             }
+
+            applyTypePatches(typesDir.getPath());
         }
 
-        applyTypePatches(typesDirName);
-
-        LOG.info("<== AtlasTypeDefStoreInitializer.loadBootstrapTypeDefs()");
+        LOG.info("<== AtlasTypeDefStoreInitializer({})", typesDir);
     }
 
     public static AtlasTypesDef getTypesToCreate(AtlasTypesDef typesDef, AtlasTypeRegistry typeRegistry) {
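
The load order above relies on Arrays.sort() over the File[] returned by listFiles(): java.io.File's natural ordering compares pathnames lexicographically, so numeric prefixes such as 0000- and 1000- decide which model folders load first. A minimal standalone sketch of that mechanism (the folder names are illustrative):

import java.io.File;
import java.util.Arrays;

public class ModelFolderOrderSketch {
    public static void main(String[] args) {
        // hypothetical subfolders of the models directory
        File[] folders = {
                new File("models/1000-Hadoop"),
                new File("models/0000-Area0")
        };

        // File.compareTo() orders pathnames lexicographically,
        // so the numeric prefix determines the load order
        Arrays.sort(folders);

        for (File folder : folders) {
            System.out.println(folder.getName()); // prints 0000-Area0, then 1000-Hadoop
        }
    }
}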
@@ -327,68 +364,67 @@ public class AtlasTypeDefStoreInitializer implements ActiveStateChangeHandler {
     }
 
     private void applyTypePatches(String typesDirName) {
-        String typePatchesDirName = typesDirName + File.separator + "patches";
+        String typePatchesDirName = typesDirName + File.separator + PATCHES_FOLDER_NAME;
         File typePatchesDir = new File(typePatchesDirName);
         File[] typePatchFiles = typePatchesDir.exists() ? typePatchesDir.listFiles() : null;
 
         if (typePatchFiles == null || typePatchFiles.length == 0) {
             LOG.info("Type patches directory {} does not exist or not readable or has no patches", typePatchesDirName);
+        } else {
+            LOG.info("Type patches directory {} is being processed", typePatchesDirName);
 
-            return;
-        }
+            // sort the files by filename
+            Arrays.sort(typePatchFiles);
 
-        // sort the files by filename
-        Arrays.sort(typePatchFiles);
+            PatchHandler[] patchHandlers = new PatchHandler[] {
+                    new AddAttributePatchHandler(atlasTypeDefStore, atlasTypeRegistry),
+                    new UpdateTypeDefOptionsPatchHandler(atlasTypeDefStore, atlasTypeRegistry),
+                    new UpdateAttributePatchHandler(atlasTypeDefStore, atlasTypeRegistry)
+            };
 
-        PatchHandler[] patchHandlers = new PatchHandler[] {
-                new AddAttributePatchHandler(atlasTypeDefStore, atlasTypeRegistry),
-                new UpdateTypeDefOptionsPatchHandler(atlasTypeDefStore, atlasTypeRegistry),
-                new UpdateAttributePatchHandler(atlasTypeDefStore, atlasTypeRegistry)
-        };
+            Map<String, PatchHandler> patchHandlerRegistry = new HashMap<>();
 
-        Map<String, PatchHandler> patchHandlerRegistry = new HashMap<>();
-
-        for (PatchHandler patchHandler : patchHandlers) {
-            for (String supportedAction : patchHandler.getSupportedActions()) {
-                patchHandlerRegistry.put(supportedAction, patchHandler);
+            for (PatchHandler patchHandler : patchHandlers) {
+                for (String supportedAction : patchHandler.getSupportedActions()) {
+                    patchHandlerRegistry.put(supportedAction, patchHandler);
+                }
             }
-        }
 
-        for (File typePatchFile : typePatchFiles) {
-            if (!typePatchFile.isFile()) {
-                continue;
-            }
+            for (File typePatchFile : typePatchFiles) {
+                if (typePatchFile.isFile()) {
+                    LOG.info("Applying patches in file {}", typePatchFile.getAbsolutePath());
 
-            LOG.info("Applying patches in file {}", typePatchFile.getAbsolutePath());
+                    try {
+                        String jsonStr = new String(Files.readAllBytes(typePatchFile.toPath()), StandardCharsets.UTF_8);
+                        TypeDefPatches patches = AtlasType.fromJson(jsonStr, TypeDefPatches.class);
 
-            try {
-                String jsonStr = new String(Files.readAllBytes(typePatchFile.toPath()), StandardCharsets.UTF_8);
-                TypeDefPatches patches = AtlasType.fromJson(jsonStr, TypeDefPatches.class);
+                        if (patches == null || CollectionUtils.isEmpty(patches.getPatches())) {
+                            LOG.info("No patches in file {}", typePatchFile.getAbsolutePath());
+                            continue;
+                        }
 
-                if (patches == null || CollectionUtils.isEmpty(patches.getPatches())) {
-                    LOG.info("No patches in file {}", typePatchFile.getAbsolutePath());
-                    continue;
-                }
+                        for (TypeDefPatch patch : patches.getPatches()) {
+                            PatchHandler patchHandler = patchHandlerRegistry.get(patch.getAction());
 
-                for (TypeDefPatch patch : patches.getPatches()) {
-                    PatchHandler patchHandler = patchHandlerRegistry.get(patch.getAction());
+                            if (patchHandler == null) {
+                                LOG.error("Unknown patch action {} in file {}. Ignored",
+                                          patch.getAction(), typePatchFile.getAbsolutePath());
+                                continue;
+                            }
 
-                    if (patchHandler == null) {
-                        LOG.error("Unknown patch action {} in file {}. Ignored",
-                                  patch.getAction(), typePatchFile.getAbsolutePath());
-                        continue;
+                            try {
+                                patchHandler.applyPatch(patch);
+                            } catch (AtlasBaseException excp) {
+                                LOG.error("Failed to apply {} patch in file {}. Ignored", patch.getAction(), typePatchFile.getAbsolutePath(), excp);
+                            }
+                        }
+                    } catch (Throwable t) {
+                        LOG.error("Failed to apply patches in file {}. Ignored", typePatchFile.getAbsolutePath(), t);
                     }
-
-                    try {
-                        patchHandler.applyPatch(patch);
-                    } catch (AtlasBaseException excp) {
-                        LOG.error("Failed to apply {} patch in file {}. Ignored", patch.getAction(), typePatchFile.getAbsolutePath(), excp);
-                    }
                 }
-            } catch (Throwable t) {
-                LOG.error("Failed to apply patches in file {}. Ignored", typePatchFile.getAbsolutePath(), t);
             }
         }
     }
...
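
applyTypePatches() resolves each patch's action string against a registry built from every handler's getSupportedActions(), and an unknown action is logged and skipped rather than aborting the load. A minimal sketch of that dispatch pattern (the Handler interface and the action name below are stand-ins, not the Atlas types):

import java.util.HashMap;
import java.util.Map;

public class PatchDispatchSketch {
    // hypothetical stand-in for Atlas's PatchHandler
    interface Handler {
        String[] getSupportedActions();
        void applyPatch(String action);
    }

    public static void main(String[] args) {
        Handler addAttribute = new Handler() {
            @Override public String[] getSupportedActions() { return new String[] { "ADD_ATTRIBUTE" }; }
            @Override public void applyPatch(String action) { System.out.println("applied " + action); }
        };

        // build the action -> handler registry, as the initializer does
        Map<String, Handler> registry = new HashMap<>();
        for (Handler handler : new Handler[] { addAttribute }) {
            for (String action : handler.getSupportedActions()) {
                registry.put(action, handler);
            }
        }

        // dispatch: a null lookup result would be logged and skipped
        Handler handler = registry.get("ADD_ATTRIBUTE");
        if (handler != null) {
            handler.applyPatch("ADD_ATTRIBUTE");
        }
    }
}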
@@ -68,7 +68,7 @@ public class ImportServiceTest {
     @Test(dataProvider = "sales")
     public void importDB1(ZipSource zipSource) throws AtlasBaseException, IOException {
-        loadModelFromJson("0010-base_model.json", typeDefStore, typeRegistry);
+        loadModelFromJson("0000-Area0/0010-base_model.json", typeDefStore, typeRegistry);
         runAndVerifyQuickStart_v1_Import(new ImportService(typeDefStore, entityStore, typeRegistry), zipSource);
     }
...
@@ -79,7 +79,7 @@ public class ImportServiceTest {
     @Test(dataProvider = "reporting")
     public void importDB2(ZipSource zipSource) throws AtlasBaseException, IOException {
-        loadModelFromJson("0010-base_model.json", typeDefStore, typeRegistry);
+        loadModelFromJson("0000-Area0/0010-base_model.json", typeDefStore, typeRegistry);
         runAndVerifyQuickStart_v1_Import(new ImportService(typeDefStore, entityStore, typeRegistry), zipSource);
     }
...
@@ -90,7 +90,7 @@ public class ImportServiceTest {
     @Test(dataProvider = "logging")
     public void importDB3(ZipSource zipSource) throws AtlasBaseException, IOException {
-        loadModelFromJson("0010-base_model.json", typeDefStore, typeRegistry);
+        loadModelFromJson("0000-Area0/0010-base_model.json", typeDefStore, typeRegistry);
         runAndVerifyQuickStart_v1_Import(new ImportService(typeDefStore, entityStore, typeRegistry), zipSource);
     }
...
@@ -101,7 +101,7 @@ public class ImportServiceTest {
     @Test(dataProvider = "salesNewTypeAttrs", dependsOnMethods = "importDB1")
     public void importDB4(ZipSource zipSource) throws AtlasBaseException, IOException {
-        loadModelFromJson("0010-base_model.json", typeDefStore, typeRegistry);
+        loadModelFromJson("0000-Area0/0010-base_model.json", typeDefStore, typeRegistry);
         runImportWithParameters(new ImportService(typeDefStore, entityStore, typeRegistry), getDefaultImportRequest(), zipSource);
     }
...
@@ -150,8 +150,8 @@ public class ImportServiceTest {
     @Test(dataProvider = "ctas")
     public void importCTAS(ZipSource zipSource) throws IOException, AtlasBaseException {
-        loadModelFromJson("0010-base_model.json", typeDefStore, typeRegistry);
-        loadModelFromJson("1030-hive_model.json", typeDefStore, typeRegistry);
+        loadModelFromJson("0000-Area0/0010-base_model.json", typeDefStore, typeRegistry);
+        loadModelFromJson("1000-Hadoop/1030-hive_model.json", typeDefStore, typeRegistry);
         runImportWithNoParameters(getImportService(), zipSource);
     }
...
@@ -688,10 +688,10 @@
                 <goal>copy-resources</goal>
               </goals>
               <configuration>
-                <outputDirectory>${basedir}/target/models</outputDirectory>
+                <outputDirectory>${basedir}/target/models/1000-Hadoop</outputDirectory>
                 <resources>
                   <resource>
-                    <directory>${basedir}/../addons/models</directory>
+                    <directory>${basedir}/../addons/models/1000-Hadoop</directory>
                     <filtering>true</filtering>
                   </resource>
                 </resources>
...