Commit 7edf5221 by Venkat Ranganathan

Populate indexes if any in the DB. Create a new process type

parent ee060fcc
...@@ -21,6 +21,7 @@ package org.apache.hadoop.metadata.hivetypes;
import org.apache.hadoop.hive.metastore.HiveMetaStoreClient;
import org.apache.hadoop.hive.metastore.api.Database;
import org.apache.hadoop.hive.metastore.api.FieldSchema;
import org.apache.hadoop.hive.metastore.api.Index;
import org.apache.hadoop.hive.metastore.api.MetaException;
import org.apache.hadoop.hive.metastore.api.Order;
import org.apache.hadoop.hive.metastore.api.Partition;
...@@ -58,7 +59,10 @@ public class HiveImporter {
private List<Id> dbInstances;
private List<Id> tableInstances;
private List<Id> partitionInstances;
private List<Id> indexInstances;
private List<Id> columnInstances;
private List<Id> processInstances;
private class Pair<L, R> {
...@@ -111,7 +115,10 @@ public class HiveImporter {
dbInstances = new ArrayList<>();
tableInstances = new ArrayList<>();
partitionInstances = new ArrayList<>();
indexInstances = new ArrayList<>();
columnInstances = new ArrayList<>();
processInstances = new ArrayList<>();
}
public List<Id> getDBInstances() {
...@@ -130,6 +137,14 @@ public class HiveImporter {
return columnInstances;
}
public List<Id> getIndexInstances() {
return indexInstances;
}
public List<Id> getProcessInstances() {
return processInstances;
}
public void importHiveMetadata() throws MetadataException {
LOG.info("Importing hive metadata");
...@@ -175,6 +190,7 @@ public class HiveImporter {
}
}
private void importDatabase(String db) throws MetadataException {
try {
LOG.info("Importing objects from database : " + db);
...@@ -280,6 +296,29 @@ public class HiveImporter {
}
}
}
List<Index> indexes = hiveMetastoreClient.listIndexes(db, table, Short.MAX_VALUE);
if (indexes.size() > 0) {
for (Index index : indexes) {
Referenceable indexRef = new Referenceable(HiveTypeSystem.DefinedTypes.HIVE_INDEX.name());
indexRef.set("indexName", index.getIndexName());
indexRef.set("indexHandlerClass", index.getIndexHandlerClass());
setReferenceInstanceAttribute(indexRef, "dbName", dbRefTyped);
indexRef.set("createTime", index.getCreateTime());
indexRef.set("lastAccessTime", index.getLastAccessTime());
indexRef.set("origTableName", index.getOrigTableName());
indexRef.set("indexTableName", index.getIndexTableName());
sdRefTyped = fillStorageDescStruct(index.getSd());
setReferenceInstanceAttribute(indexRef, "sd", sdRefTyped);
indexRef.set("parameters", index.getParameters());
tableRef.set("deferredRebuild", index.isDeferredRebuild());
InstancePair indexRefTyped = createInstance(indexRef);
if (usingMemRepository()) {
indexInstances.add(indexRefTyped.left().getId());
}
}
}
}
} catch (Exception te) {
throw new MetadataException(te);
......
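For reference, the index population above is driven by the Hive metastore client's listIndexes call, and the thrift Index getters it returns are what HiveImporter copies onto the HIVE_INDEX Referenceable. The following is a minimal standalone sketch of that metastore call, assuming a reachable metastore configured via HiveConf defaults; the class name, database name, and table name are placeholders, not part of this commit:

import java.util.List;
import org.apache.hadoop.hive.conf.HiveConf;
import org.apache.hadoop.hive.metastore.HiveMetaStoreClient;
import org.apache.hadoop.hive.metastore.api.Index;

public class ListIndexesSketch {
    public static void main(String[] args) throws Exception {
        // Assumes a reachable Hive metastore; HiveConf picks up hive-site.xml from the classpath.
        HiveMetaStoreClient client = new HiveMetaStoreClient(new HiveConf());
        // "default" and "my_table" are placeholder names, not values taken from this commit.
        List<Index> indexes = client.listIndexes("default", "my_table", Short.MAX_VALUE);
        for (Index index : indexes) {
            // Same getters that HiveImporter maps onto the HIVE_INDEX attributes above.
            System.out.println(index.getIndexName() + " -> " + index.getIndexTableName());
        }
        client.close();
    }
}

As in the importer, Short.MAX_VALUE is passed as the maximum index count so the metastore returns all indexes rather than a bounded page.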
...@@ -76,6 +76,7 @@ public class HiveTypeSystem {
HIVE_FUNCTION,
HIVE_ROLE,
HIVE_TYPE,
HIVE_PROCESS,
//HIVE_VIEW,
}
...@@ -132,6 +133,7 @@ public class HiveTypeSystem {
createIndexClass();
createFunctionClass();
createRoleClass();
createProcessClass();
for (EnumTypeDefinition def : getEnumTypeDefinitions()) {
enumTypes.add(typeSystem.defineEnumType(def));
...@@ -174,7 +176,8 @@ public class HiveTypeSystem {
(HierarchicalType) typeMap.get(DefinedTypes.HIVE_PARTITION.name()),
(HierarchicalType) typeMap.get(DefinedTypes.HIVE_INDEX.name()),
(HierarchicalType) typeMap.get(DefinedTypes.HIVE_FUNCTION.name()),
(HierarchicalType) typeMap.get(DefinedTypes.HIVE_ROLE.name()),
(HierarchicalType) typeMap.get(DefinedTypes.HIVE_PROCESS.name())
);
} else {
return ImmutableList.of();
...@@ -445,12 +448,12 @@ public class HiveTypeSystem {
private void createIndexClass() throws MetadataException {
AttributeDefinition[] attributeDefinitions = new AttributeDefinition[]{
new AttributeDefinition("indexName", DataTypes.STRING_TYPE.getName(), Multiplicity.REQUIRED, false, null),
new AttributeDefinition("indexHandlerClass", DataTypes.STRING_TYPE.getName(), Multiplicity.REQUIRED, false, null),
new AttributeDefinition("dbName", DefinedTypes.HIVE_DB.name(), Multiplicity.REQUIRED, false, null),
new AttributeDefinition("createTime", DataTypes.INT_TYPE.getName(), Multiplicity.OPTIONAL, false, null),
new AttributeDefinition("lastAccessTime", DataTypes.INT_TYPE.getName(), Multiplicity.OPTIONAL, false, null),
new AttributeDefinition("origTableName", DefinedTypes.HIVE_TABLE.name(), Multiplicity.REQUIRED, false, null),
new AttributeDefinition("indexTableName", DefinedTypes.HIVE_TABLE.name(), Multiplicity.OPTIONAL, false, null),
new AttributeDefinition("sd", DefinedTypes.HIVE_STORAGEDESC.name(), Multiplicity.REQUIRED, false, null),
new AttributeDefinition("parameters", mapStrToStrMap.getName(), Multiplicity.OPTIONAL, false, null),
new AttributeDefinition("deferredRebuild", DataTypes.BOOLEAN_TYPE.getName(), Multiplicity.OPTIONAL, false, null),
...@@ -499,4 +502,24 @@ public class HiveTypeSystem {
}
private void createProcessClass() throws MetadataException {
AttributeDefinition[] attributeDefinitions = new AttributeDefinition[]{
new AttributeDefinition("processName", DataTypes.STRING_TYPE.getName(), Multiplicity.REQUIRED, false, null),
new AttributeDefinition("startTime", DataTypes.INT_TYPE.getName(), Multiplicity.REQUIRED, false, null),
new AttributeDefinition("endTime", DataTypes.INT_TYPE.getName(), Multiplicity.REQUIRED, false, null),
new AttributeDefinition("userName", DataTypes.STRING_TYPE.getName(), Multiplicity.REQUIRED, false, null),
new AttributeDefinition("sourceTableNames", String.format("array<%s>", DefinedTypes.HIVE_TABLE.name()), Multiplicity.OPTIONAL, false, null),
new AttributeDefinition("targetTableNames", String.format("array<%s>", DefinedTypes.HIVE_TABLE.name()), Multiplicity.OPTIONAL, false, null),
new AttributeDefinition("jobDefinition", DataTypes.STRING_TYPE.getName(), Multiplicity.REQUIRED, false, null),
};
HierarchicalTypeDefinition<ClassType> definition =
new HierarchicalTypeDefinition<>(ClassType.class, DefinedTypes.HIVE_PROCESS.name(),
null, attributeDefinitions);
classTypeDefinitions.put(DefinedTypes.HIVE_PROCESS.name(), definition);
LOG.debug("Created definition for " + DefinedTypes.HIVE_PROCESS.name());
}
}
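createProcessClass() only registers the new HIVE_PROCESS type; nothing in this commit populates process instances yet, so processInstances stays empty. As a rough sketch only, code placed inside HiveImporter could build an instance matching the attribute definitions above, reusing the same Referenceable, createInstance and usingMemRepository calls shown in the index handling; every concrete value below is an invented placeholder:

Referenceable processRef = new Referenceable(HiveTypeSystem.DefinedTypes.HIVE_PROCESS.name());
processRef.set("processName", "nightly-load");    // placeholder name
processRef.set("startTime", 1424000000);          // INT_TYPE per the definition above
processRef.set("endTime", 1424003600);
processRef.set("userName", "hive");
processRef.set("jobDefinition", "INSERT OVERWRITE TABLE target SELECT * FROM source");
// sourceTableNames and targetTableNames are optional arrays of HIVE_TABLE references;
// they would be set to lists of table instances and are omitted in this sketch.
InstancePair processRefTyped = createInstance(processRef);
if (usingMemRepository()) {
    processInstances.add(processRefTyped.left().getId());
}

With something along these lines in place, getProcessInstances() would return the Ids that the new test loops in HiveGraphRepositoryTest and HiveTypeSystemTest below iterate over.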
...@@ -117,6 +117,22 @@ public class HiveGraphRepositoryTest {
ITypedReferenceableInstance instance = repository.getEntityDefinition(id);
bw.write(instance.toString());
}
LOG.info("Defined Index instances");
idList =
repository.getEntityList(HiveTypeSystem.DefinedTypes.HIVE_INDEX.name());
for (String id : idList) {
ITypedReferenceableInstance instance = repository.getEntityDefinition(id);
bw.write(instance.toString());
}
LOG.info("Defined Process instances");
idList =
repository.getEntityList(HiveTypeSystem.DefinedTypes.HIVE_PROCESS.name());
for (String id : idList) {
ITypedReferenceableInstance instance = repository.getEntityDefinition(id);
bw.write(instance.toString());
}
bw.flush();
bw.close();
}
......
...@@ -82,6 +82,16 @@ public class HiveTypeSystemTest {
ITypedReferenceableInstance instance = mr.get(id);
bw.write(instance.toString());
}
LOG.info("Defined Index instances");
for (Id id : hImporter.getIndexInstances()) {
ITypedReferenceableInstance instance = mr.get(id);
bw.write(instance.toString());
}
LOG.info("Defined Process instances");
for (Id id : hImporter.getProcessInstances()) {
ITypedReferenceableInstance instance = mr.get(id);
bw.write(instance.toString());
}
bw.flush();
bw.close();
}
......