Commit d3db08d2 by Venkat Ranganathan

Minor cleanup - walk instances by type - set log level to INFO by default

parent 61079cac
@@ -46,14 +46,23 @@ public class HiveImporter {
     private IRepository repository;
     private HiveTypeSystem hiveTypeSystem;
-    private List<Id> instances;
+    private List<Id> dbInstances;
+    private List<Id> tableInstances;
+    private List<Id> partitionInstances;
+    private List<Id> columnInstances;
 
     public HiveImporter(IRepository repo, HiveTypeSystem hts, HiveMetaStoreClient hmc) throws RepositoryException {
         this.repository = repo;
         this.hiveMetastoreClient = hmc;
         this.hiveTypeSystem = hts;
         typeSystem = TypeSystem.getInstance();
-        instances = new ArrayList<>();
+        dbInstances = new ArrayList<>();
+        tableInstances = new ArrayList<>();
+        partitionInstances = new ArrayList<>();
+        columnInstances = new ArrayList<>();
 
         if (repository == null) {
             LOG.error("repository is null");
             throw new RuntimeException("repository is null");
@@ -64,8 +73,20 @@ public class HiveImporter {
     }
 
-    public List<Id> getInstances() {
-        return instances;
+    public List<Id> getDBInstances() {
+        return dbInstances;
+    }
+
+    public List<Id> getTableInstances() {
+        return tableInstances;
+    }
+
+    public List<Id> getPartitionInstances() {
+        return partitionInstances;
+    }
+
+    public List<Id> getColumnInstances() {
+        return columnInstances;
     }
 
     public void importHiveMetadata() throws MetadataException {
@@ -94,7 +115,7 @@ public class HiveImporter {
             dbRef.set("ownerName", hiveDB.getOwnerName());
             dbRef.set("ownerType", hiveDB.getOwnerType().toString());
             ITypedReferenceableInstance dbRefTyped = repository.create(dbRef);
-            instances.add(dbRefTyped.getId());
+            dbInstances.add(dbRefTyped.getId());
             importTables(db, dbRefTyped);
         } catch (NoSuchObjectException nsoe) {
             throw new MetadataException(nsoe);
@@ -148,7 +169,7 @@ public class HiveImporter {
         tableRef.set("temporary", hiveTable.isTemporary());
         ITypedReferenceableInstance tableRefTyped = repository.create(tableRef);
-        instances.add(tableRefTyped.getId());
+        tableInstances.add(tableRefTyped.getId());
 
         List<Partition> tableParts = hiveMetastoreClient.listPartitions(db, table, Short.MAX_VALUE);
@@ -165,7 +186,7 @@ public class HiveImporter {
                 partRef.set("sd", sdStruct);
                 partRef.set("parameters", hivePart.getParameters());
                 ITypedReferenceableInstance partRefTyped = repository.create(partRef);
-                instances.add(partRefTyped.getId());
+                partitionInstances.add(partRefTyped.getId());
             }
         }
     }
@@ -186,7 +207,7 @@ public class HiveImporter {
         Struct sdStruct = new Struct(storageDescName);
 
-        LOG.info("Filling storage descriptor information for " + storageDesc);
+        LOG.debug("Filling storage descriptor information for " + storageDesc);
 
         String serdeInfoName = HiveTypeSystem.DefinedTypes.HIVE_SERDE.name();
         Struct serdeInfoStruct = new Struct(serdeInfoName);
@@ -195,7 +216,7 @@ public class HiveImporter {
         serdeInfoStruct.set("serializationLib", serdeInfo.getSerializationLib());
         serdeInfoStruct.set("parameters", serdeInfo.getParameters());
 
-        LOG.info("serdeInfo = " + serdeInfo);
+        LOG.debug("serdeInfo = " + serdeInfo);
 
         StructType serdeInfotype = (StructType) hiveTypeSystem.getDataType(serdeInfoName);
         ITypedStruct serdeInfoStructTyped =
@@ -231,6 +252,7 @@ public class HiveImporter {
                 colRef.set("comment", fs.getComment());
                 ITypedReferenceableInstance colRefTyped = repository.create(colRef);
                 fieldsList.add(colRefTyped);
+                columnInstances.add(colRefTyped.getId());
             }
             sdStruct.set("cols", fieldsList);
...
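Taken together, the HiveImporter changes above replace the single mixed `instances` list with one `List<Id>` per Hive entity type, each populated at the point where the corresponding repository instance is created. A minimal sketch of how a caller might walk the imported instances by type through the new accessors; it assumes `mr` (the repository), `hts`, and `LOG` are initialized as in the HiveTypeSystemTest diff further below:

    // Sketch only: mr (the IRepository), hts (HiveTypeSystem) and LOG are
    // assumed to be set up as in the HiveTypeSystemTest diff below.
    HiveImporter importer = new HiveImporter(mr, hts, new HiveMetaStoreClient(new HiveConf()));
    importer.importHiveMetadata();

    // Each accessor returns only the Ids created for that entity type, so a
    // caller can walk one type at a time instead of filtering a mixed list.
    for (Id dbId : importer.getDBInstances()) {
        ITypedReferenceableInstance db = mr.get(dbId); // resolve an Id to its stored instance
        LOG.info(db.toString());
    }
    for (Id tableId : importer.getTableInstances()) {
        LOG.info(mr.get(tableId).toString());
    }
    // getPartitionInstances() and getColumnInstances() follow the same pattern.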
@@ -292,7 +292,7 @@ public class HiveTypeSystem {
             new AttributeDefinition("compressed", DataTypes.BOOLEAN_TYPE.getName(), Multiplicity.REQUIRED, false, null),
             new AttributeDefinition("numBuckets", DataTypes.INT_TYPE.getName(), Multiplicity.OPTIONAL, false, null),
             new AttributeDefinition("serdeInfo", DefinedTypes.HIVE_SERDE.name(), Multiplicity.OPTIONAL, false, null),
-            new AttributeDefinition("bucketCols", String.format("array<%s>",DataTypes.STRING_TYPE.getName()), Multiplicity.OPTIONAL, false, null),
+            new AttributeDefinition("bucketCols", String.format("array<%s>", DataTypes.STRING_TYPE.getName()), Multiplicity.OPTIONAL, false, null),
             new AttributeDefinition("sortCols", String.format("array<%s>", DefinedTypes.HIVE_ORDER.name()), Multiplicity.OPTIONAL, false, null),
             new AttributeDefinition("parameters", mapStrToStrMap.getName(), Multiplicity.OPTIONAL, false, null),
             //new AttributeDefinition("skewedInfo", DefinedTypes.HIVE_SKEWEDINFO.name(), Multiplicity.OPTIONAL, false, null),
...
@@ -16,13 +16,13 @@
 # limitations under the License.
 #
 
-org.apache.hadoop.metadata=DEBUG, console
+org.apache.hadoop.metadata=INFO, console
 org.apache.hadoop=INFO, console
 org.apache.hive=INFO, console
 org.apache.hcatalog=INFO, console
 
-metadata.root.logger=DEBUG,console,DRFA
+metadata.root.logger=INFO,console,DRFA
 hive.root.logger=INFO,console,DRFA
 hcatalog.root.logger=INFO,console,DRFA
 metadata.log.dir=${user.dir}/metadata/logs
...
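This drops the metadata loggers from DEBUG to INFO by default, as the commit message states. If the old verbosity is needed for local debugging, overriding just the two metadata entries should suffice; a sketch against this same log4j configuration:

    # Restore verbose logging for the metadata module only (local debugging).
    org.apache.hadoop.metadata=DEBUG, console
    metadata.root.logger=DEBUG,console,DRFA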
@@ -52,8 +52,23 @@ public class HiveTypeSystemTest {
         HiveImporter himport = new HiveImporter(mr, hts, new HiveMetaStoreClient(new HiveConf()));
         himport.importHiveMetadata();
-        LOG.info("Defined instances");
-        for (Id id : himport.getInstances()) {
+        LOG.info("Defined DB instances");
+        for (Id id : himport.getDBInstances()) {
+            ITypedReferenceableInstance instance = mr.get(id);
+            LOG.info(instance.toString());
+        }
+        LOG.info("Defined Table instances");
+        for (Id id : himport.getTableInstances()) {
+            ITypedReferenceableInstance instance = mr.get(id);
+            LOG.info(instance.toString());
+        }
+        LOG.info("Defined Partition instances");
+        for (Id id : himport.getPartitionInstances()) {
+            ITypedReferenceableInstance instance = mr.get(id);
+            LOG.info(instance.toString());
+        }
+        LOG.info("Defined Column instances");
+        for (Id id : himport.getColumnInstances()) {
             ITypedReferenceableInstance instance = mr.get(id);
             LOG.info(instance.toString());
         }
...