Commit 0adfeb18 by Venkat Ranganathan

Fixed hive model: StorageDescriptor is now a class, removed creating a…

Fixed hive model: StorageDescriptor is now a class; removed the creation of a duplicate storage descriptor per partition (handling of partitions whose schema diverges from the table still needs fixing). Made the in-memory repository test work again. Validated the changes with TPC-DS. Fixed empty-array handling in the graph repository.
parent b10f93a2
@@ -60,6 +60,26 @@ public class HiveImporter {
     private List<Id> partitionInstances;
     private List<Id> columnInstances;
 
+    private class Pair<L, R> {
+        final L left;
+        final R right;
+
+        public Pair(L left, R right) {
+            this.left = left;
+            this.right = right;
+        }
+
+        public L left() { return this.left; }
+        public R right() { return this.right; }
+    }
+
+    private class InstancePair extends Pair<ITypedReferenceableInstance, Referenceable> {
+        public InstancePair(ITypedReferenceableInstance left, Referenceable right) {
+            super(left, right);
+        }
+    }
+
     public HiveImporter(MetadataRepository repo, HiveTypeSystem hts, HiveMetaStoreClient hmc) throws RepositoryException {
         this(hts, hmc);
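A note for reviewers on the new helper types: `Pair` carries both possible results of instance creation, and `InstancePair` pins the sides to `ITypedReferenceableInstance` (in-memory repository) and `Referenceable` (graph repository). A minimal standalone sketch of the pattern, with `String`/`Integer` standing in for the Atlas types so it compiles on its own:

```java
// Self-contained sketch of the Pair/InstancePair pattern above.
// String and Integer are stand-ins; the real code uses
// ITypedReferenceableInstance and Referenceable.
class Pair<L, R> {
    final L left;
    final R right;

    Pair(L left, R right) {
        this.left = left;
        this.right = right;
    }

    L left()  { return left; }
    R right() { return right; }
}

public class PairDemo {
    public static void main(String[] args) {
        // Exactly one side is populated, depending on which repository
        // produced the instance.
        Pair<String, Integer> fromMemRepo = new Pair<>("typed instance", null);
        Pair<String, Integer> fromGraphRepo = new Pair<>(null, 42);

        System.out.println(fromMemRepo.left());    // typed instance
        System.out.println(fromGraphRepo.right()); // 42
    }
}
```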
@@ -69,6 +89,7 @@ public class HiveImporter {
         }
         this.graphRepository = repo;
     }
 
     public HiveImporter(IRepository repo, HiveTypeSystem hts, HiveMetaStoreClient hmc) throws RepositoryException {
@@ -122,10 +143,15 @@ public class HiveImporter {
         }
     }
 
-    private Referenceable createInstance(Referenceable ref)
+    private boolean usingMemRepository() {
+        return this.graphRepository == null;
+    }
+
+    private InstancePair createInstance(Referenceable ref)
             throws MetadataException {
-        if (repository != null) {
-            return (Referenceable) repository.create(ref);
+        if (usingMemRepository()) {
+            return new InstancePair(repository.create(ref), null);
         } else {
             String typeName = ref.getTypeName();
             IDataType dataType = hiveTypeSystem.getDataType(typeName);
@@ -136,7 +162,16 @@ public class HiveImporter {
             System.out.println("creating instance of type " + typeName + " dataType " + dataType
                     + ", guid: " + guid);
 
-            return new Referenceable(guid, ref.getTypeName(), ref.getValuesMap());
+            return new InstancePair(null, new Referenceable(guid, ref.getTypeName(), ref.getValuesMap()));
+        }
+    }
+
+    private void setReferenceInstanceAttribute(Referenceable ref, String attr,
+                                               InstancePair instance) {
+        if (usingMemRepository()) {
+            ref.set(attr, instance.left());
+        } else {
+            ref.set(attr, instance.right());
         }
     }
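The shape of the change is visible here: `createInstance` now returns an `InstancePair` whose populated side depends on the backing store, and `setReferenceInstanceAttribute` centralizes the left/right choice. A hedged sketch of the same selection written once (the helper name `select` is illustrative, not part of the patch):

```java
public class SelectDemo {
    // Hypothetical one-liner equivalent of the usingMemRepository()
    // branches in the patch: pick whichever side of the pair applies.
    static Object select(boolean usingMemRepository, Object left, Object right) {
        return usingMemRepository ? left : right;
    }

    public static void main(String[] args) {
        System.out.println(select(true, "typed", "referenceable"));  // typed
        System.out.println(select(false, "typed", "referenceable")); // referenceable
    }
}
```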
@@ -152,15 +187,17 @@ public class HiveImporter {
             dbRef.set("parameters", hiveDB.getParameters());
             dbRef.set("ownerName", hiveDB.getOwnerName());
             dbRef.set("ownerType", hiveDB.getOwnerType().getValue());
-            Referenceable dbRefTyped = createInstance(dbRef);
-            dbInstances.add(dbRefTyped.getId());
+            InstancePair dbRefTyped = createInstance(dbRef);
+            if (usingMemRepository()) {
+                dbInstances.add(dbRefTyped.left().getId());
+            }
             importTables(db, dbRefTyped);
         } catch (Exception e) {
             throw new MetadataException(e);
         }
     }
 
-    private void importTables(String db, Referenceable dbRefTyped) throws MetadataException {
+    private void importTables(String db, InstancePair dbRefTyped) throws MetadataException {
         try {
             List<String> hiveTables = hiveMetastoreClient.getAllTables(db);
@@ -170,7 +207,7 @@ public class HiveImporter {
                 Table hiveTable = hiveMetastoreClient.getTable(db, table);
                 Referenceable tableRef = new Referenceable(HiveTypeSystem.DefinedTypes.HIVE_TABLE.name());
-                tableRef.set("dbName", dbRefTyped);
+                setReferenceInstanceAttribute(tableRef, "dbName", dbRefTyped);
                 tableRef.set("tableName", hiveTable.getTableName());
                 tableRef.set("owner", hiveTable.getOwner());
                 tableRef.set("createTime", hiveTable.getCreateTime());
@@ -178,10 +215,9 @@ public class HiveImporter {
                 tableRef.set("retention", hiveTable.getRetention());
 
                 StorageDescriptor storageDesc = hiveTable.getSd();
-                ITypedStruct sdStruct = fillStorageDescStruct(storageDesc);
-                tableRef.set("sd", sdStruct);
-                tableRef.set("columns", sdStruct.get("cols"));
-                List<Referenceable> partKeys = new ArrayList<>();
+                InstancePair sdRefTyped = fillStorageDescStruct(storageDesc);
+                setReferenceInstanceAttribute(tableRef, "sd", sdRefTyped);
+                List<InstancePair> partKeys = new ArrayList<>();
                 Referenceable colRef;
                 if (hiveTable.getPartitionKeysSize() > 0) {
                     for (FieldSchema fs : hiveTable.getPartitionKeys()) {
@@ -189,10 +225,22 @@ public class HiveImporter {
                         colRef.set("name", fs.getName());
                         colRef.set("type", fs.getType());
                         colRef.set("comment", fs.getComment());
-                        Referenceable colRefTyped = createInstance(colRef);
+                        InstancePair colRefTyped = createInstance(colRef);
                         partKeys.add(colRefTyped);
                     }
-                    tableRef.set("partitionKeys", partKeys);
+                    if (usingMemRepository()) {
+                        List<ITypedReferenceableInstance> keys = new ArrayList<>();
+                        for (InstancePair ip : partKeys) {
+                            keys.add(ip.left());
+                        }
+                        tableRef.set("partitionKeys", keys);
+                    } else {
+                        List<Referenceable> keys = new ArrayList<>();
+                        for (InstancePair ip : partKeys) {
+                            keys.add(ip.right());
+                        }
+                        tableRef.set("partitionKeys", keys);
+                    }
                 }
 
                 tableRef.set("parameters", hiveTable.getParameters());
                 if (hiveTable.isSetViewOriginalText()) {
@@ -204,26 +252,32 @@ public class HiveImporter {
                 tableRef.set("tableType", hiveTable.getTableType());
                 tableRef.set("temporary", hiveTable.isTemporary());
 
-                Referenceable tableRefTyped = createInstance(tableRef);
-                tableInstances.add(tableRefTyped.getId());
+                InstancePair tableRefTyped = createInstance(tableRef);
+                if (usingMemRepository()) {
+                    tableInstances.add(tableRefTyped.left().getId());
+                }
 
                 List<Partition> tableParts = hiveMetastoreClient.listPartitions(db, table, Short.MAX_VALUE);
+                hiveMetastoreClient.listPartitionSpecs(db, table, Short.MAX_VALUE);
 
                 if (tableParts.size() > 0) {
                     for (Partition hivePart : tableParts) {
                         Referenceable partRef = new Referenceable(HiveTypeSystem.DefinedTypes.HIVE_PARTITION.name());
                         partRef.set("values", hivePart.getValues());
-                        partRef.set("dbName", dbRefTyped);
-                        partRef.set("tableName", tableRefTyped);
+                        setReferenceInstanceAttribute(partRef, "dbName", dbRefTyped);
+                        setReferenceInstanceAttribute(partRef, "tableName", tableRefTyped);
                         partRef.set("createTime", hivePart.getCreateTime());
                         partRef.set("lastAccessTime", hivePart.getLastAccessTime());
-                        sdStruct = fillStorageDescStruct(hivePart.getSd());
-                        partRef.set("sd", sdStruct);
-                        partRef.set("columns", sdStruct.get("cols"));
+                        //sdStruct = fillStorageDescStruct(hivePart.getSd());
+                        // Instead of creating copies of the sdstruct for partitions we are reusing existing ones
+                        // will fix to identify partitions with differing schema.
+                        setReferenceInstanceAttribute(partRef, "sd", sdRefTyped);
                         partRef.set("parameters", hivePart.getParameters());
-                        Referenceable partRefTyped = createInstance(partRef);
-                        partitionInstances.add(partRefTyped.getId());
+                        InstancePair partRefTyped = createInstance(partRef);
+                        if (usingMemRepository()) {
+                            partitionInstances.add(partRefTyped.left().getId());
+                        }
                     }
                 }
             }
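As the new comments state, each partition now points at the storage descriptor created for its table instead of getting a private copy; divergent partition schemas are left as a follow-up. A small sketch of that reuse, assuming a hypothetical cache keyed by table name (not part of the patch):

```java
import java.util.HashMap;
import java.util.Map;

public class SdReuseDemo {
    // Hypothetical per-table cache illustrating the reuse: one storage
    // descriptor instance per table, shared by all of its partitions.
    static final Map<String, Object> SD_BY_TABLE = new HashMap<>();

    static Object sdFor(String tableName) {
        return SD_BY_TABLE.computeIfAbsent(tableName, t -> new Object());
    }

    public static void main(String[] args) {
        Object tableSd = sdFor("store_sales");
        Object partitionSd = sdFor("store_sales"); // same instance, no copy
        System.out.println(tableSd == partitionSd); // true
    }
}
```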
@@ -233,13 +287,12 @@ public class HiveImporter {
         }
     }
 
-    private ITypedStruct fillStorageDescStruct(StorageDescriptor storageDesc) throws Exception {
-        String storageDescName = HiveTypeSystem.DefinedTypes.HIVE_STORAGEDESC.name();
+    private InstancePair fillStorageDescStruct(StorageDescriptor storageDesc) throws Exception {
+        Referenceable sdRef = new Referenceable(HiveTypeSystem.DefinedTypes.HIVE_STORAGEDESC.name());
         SerDeInfo serdeInfo = storageDesc.getSerdeInfo();
         // SkewedInfo skewedInfo = storageDesc.getSkewedInfo();
-        Struct sdStruct = new Struct(storageDescName);
 
         LOG.debug("Filling storage descriptor information for " + storageDesc);
@@ -254,9 +307,9 @@ public class HiveImporter {
         StructType serdeInfotype = (StructType) hiveTypeSystem.getDataType(serdeInfoName);
         ITypedStruct serdeInfoStructTyped =
                 serdeInfotype.convert(serdeInfoStruct, Multiplicity.OPTIONAL);
-        sdStruct.set("serdeInfo", serdeInfoStructTyped);
+        sdRef.set("serdeInfo", serdeInfoStructTyped);
 
         // Will need to revisit this after we fix typesystem.
@@ -276,7 +329,8 @@ public class HiveImporter {
-        List<Referenceable> fieldsList = new ArrayList<>();
+        List<InstancePair> fieldsList = new ArrayList<>();
         Referenceable colRef;
         for (FieldSchema fs : storageDesc.getCols()) {
             LOG.debug("Processing field " + fs);
@@ -284,11 +338,25 @@ public class HiveImporter {
             colRef.set("name", fs.getName());
             colRef.set("type", fs.getType());
             colRef.set("comment", fs.getComment());
-            Referenceable colRefTyped = createInstance(colRef);
+            InstancePair colRefTyped = createInstance(colRef);
             fieldsList.add(colRefTyped);
-            columnInstances.add(colRefTyped.getId());
+            if (usingMemRepository()) {
+                columnInstances.add(colRefTyped.left().getId());
+            }
+        }
+
+        if (usingMemRepository()) {
+            List<ITypedReferenceableInstance> flds = new ArrayList<>();
+            for (InstancePair ip : fieldsList) {
+                flds.add(ip.left());
+            }
+            sdRef.set("cols", flds);
+        } else {
+            List<Referenceable> flds = new ArrayList<>();
+            for (InstancePair ip : fieldsList) {
+                flds.add(ip.right());
+            }
+            sdRef.set("cols", flds);
         }
-        sdStruct.set("cols", fieldsList);
 
         List<ITypedStruct> sortColsStruct = new ArrayList<>();
@@ -303,19 +371,20 @@ public class HiveImporter {
             sortColsStruct.add(sortColTyped);
         }
 
-        sdStruct.set("location", storageDesc.getLocation());
-        sdStruct.set("inputFormat", storageDesc.getInputFormat());
-        sdStruct.set("outputFormat", storageDesc.getOutputFormat());
-        sdStruct.set("compressed", storageDesc.isCompressed());
+        sdRef.set("location", storageDesc.getLocation());
+        sdRef.set("inputFormat", storageDesc.getInputFormat());
+        sdRef.set("outputFormat", storageDesc.getOutputFormat());
+        sdRef.set("compressed", storageDesc.isCompressed());
 
         if (storageDesc.getBucketCols().size() > 0) {
-            sdStruct.set("bucketCols", storageDesc.getBucketCols());
+            sdRef.set("bucketCols", storageDesc.getBucketCols());
         }
         if (sortColsStruct.size() > 0) {
-            sdStruct.set("sortCols", sortColsStruct);
+            sdRef.set("sortCols", sortColsStruct);
         }
-        sdStruct.set("parameters", storageDesc.getParameters());
-        sdStruct.set("storedAsSubDirectories", storageDesc.isStoredAsSubDirectories());
-        StructType storageDesctype = (StructType) hiveTypeSystem.getDataType(storageDescName);
-        return storageDesctype.convert(sdStruct, Multiplicity.OPTIONAL);
+        sdRef.set("parameters", storageDesc.getParameters());
+        sdRef.set("storedAsSubDirectories", storageDesc.isStoredAsSubDirectories());
+        InstancePair sdRefTyped = createInstance(sdRef);
+
+        return sdRefTyped;
     }
 }
@@ -61,7 +61,6 @@ public class HiveTypeSystem {
         // Structs
         HIVE_SERDE,
-        HIVE_STORAGEDESC,
         HIVE_SKEWEDINFO,
         HIVE_ORDER,
         HIVE_RESOURCEURI,
@@ -69,6 +68,7 @@ public class HiveTypeSystem {
         // Classes
         HIVE_DB,
+        HIVE_STORAGEDESC,
         HIVE_TABLE,
         HIVE_COLUMN,
         HIVE_PARTITION,
@@ -122,7 +122,7 @@ public class HiveTypeSystem {
         //createSkewedInfoStruct();
         createOrderStruct();
         createResourceUriStruct();
-        createStorageDescStruct();
+        createStorageDescClass();
         createDBClass();
         createTypeClass();
@@ -168,6 +168,7 @@ public class HiveTypeSystem {
         if (valid) {
             return ImmutableList.of(
                     (HierarchicalType) typeMap.get(DefinedTypes.HIVE_DB.name()),
+                    (HierarchicalType) typeMap.get(DefinedTypes.HIVE_STORAGEDESC.name()),
                     (HierarchicalType) typeMap.get(DefinedTypes.HIVE_TABLE.name()),
                     (HierarchicalType) typeMap.get(DefinedTypes.HIVE_COLUMN.name()),
                     (HierarchicalType) typeMap.get(DefinedTypes.HIVE_PARTITION.name()),
@@ -300,7 +301,7 @@ public class HiveTypeSystem {
-    private void createStorageDescStruct() throws MetadataException {
+    private void createStorageDescClass() throws MetadataException {
         AttributeDefinition[] attributeDefinitions = new AttributeDefinition[]{
                 new AttributeDefinition("cols", String.format("array<%s>", DefinedTypes.HIVE_COLUMN.name()), Multiplicity.COLLECTION, false, null),
                 new AttributeDefinition("location", DataTypes.STRING_TYPE.getName(), Multiplicity.OPTIONAL, false, null),
@@ -316,11 +317,10 @@ public class HiveTypeSystem {
                 new AttributeDefinition("storedAsSubDirectories", DataTypes.BOOLEAN_TYPE.getName(), Multiplicity.OPTIONAL, false, null),
         };
 
-        StructTypeDefinition definition =
-                new StructTypeDefinition(DefinedTypes.HIVE_STORAGEDESC.name(), attributeDefinitions);
-        structTypeDefinitionMap.put(DefinedTypes.HIVE_STORAGEDESC.name(), definition);
+        HierarchicalTypeDefinition<ClassType> definition =
+                new HierarchicalTypeDefinition<>(ClassType.class, DefinedTypes.HIVE_STORAGEDESC.name(),
+                        null, attributeDefinitions);
+        classTypeDefinitions.put(DefinedTypes.HIVE_STORAGEDESC.name(), definition);
         LOG.debug("Created definition for " + DefinedTypes.HIVE_STORAGEDESC.name());
     }
@@ -401,8 +401,8 @@ public class HiveTypeSystem {
                 new AttributeDefinition("createTime", DataTypes.INT_TYPE.getName(), Multiplicity.OPTIONAL, false, null),
                 new AttributeDefinition("lastAccessTime", DataTypes.INT_TYPE.getName(), Multiplicity.OPTIONAL, false, null),
                 new AttributeDefinition("sd", DefinedTypes.HIVE_STORAGEDESC.name(), Multiplicity.REQUIRED, false, null),
-                new AttributeDefinition("columns", String.format("array<%s>", DefinedTypes.HIVE_COLUMN.name()),
-                        Multiplicity.COLLECTION, true, null),
+                //new AttributeDefinition("columns", String.format("array<%s>", DefinedTypes.HIVE_COLUMN.name()),
+                //        Multiplicity.COLLECTION, true, null),
                 new AttributeDefinition("parameters", mapStrToStrMap.getName(), Multiplicity.OPTIONAL, false, null),
         };
@@ -426,8 +426,8 @@ public class HiveTypeSystem {
                 new AttributeDefinition("sd", DefinedTypes.HIVE_STORAGEDESC.name(), Multiplicity.OPTIONAL, false, null),
                 new AttributeDefinition("partitionKeys", String.format("array<%s>", DefinedTypes.HIVE_COLUMN.name()),
                         Multiplicity.OPTIONAL, false, null),
-                new AttributeDefinition("columns", String.format("array<%s>", DefinedTypes.HIVE_COLUMN.name()),
-                        Multiplicity.COLLECTION, true, null),
+                //new AttributeDefinition("columns", String.format("array<%s>", DefinedTypes.HIVE_COLUMN.name()),
+                //        Multiplicity.COLLECTION, true, null),
                 new AttributeDefinition("parameters", mapStrToStrMap.getName(), Multiplicity.OPTIONAL, false, null),
                 new AttributeDefinition("viewOriginalText", DataTypes.STRING_TYPE.getName(), Multiplicity.OPTIONAL, false, null),
                 new AttributeDefinition("viewExpandedText", DataTypes.STRING_TYPE.getName(), Multiplicity.OPTIONAL, false, null),
...
@@ -126,7 +126,8 @@
     <property>
         <name>hive.metastore.uris</name>
-        <value>thrift://10.10.11.207:9083</value>
+        <!-- <value>thrift://10.10.11.207:9083</value> -->
+        <value>thrift://localhost:9083</value>
     </property>
 
     <property>
...
@@ -39,6 +39,7 @@ import org.testng.annotations.BeforeClass;
 import org.testng.annotations.Test;
 
 import java.io.BufferedWriter;
+import java.io.File;
 import java.io.FileWriter;
 import java.util.List;

@@ -82,7 +83,9 @@ public class HiveGraphRepositoryTest {
         HiveImporter hImporter = new HiveImporter(repository, hts, new HiveMetaStoreClient(new HiveConf()));
         hImporter.importHiveMetadata();
         LOG.info("Defined DB instances");
-        FileWriter fw = new FileWriter("hiveobjs.txt");
+        File f = new File("./target/logs/hiveobjs.txt");
+        f.getParentFile().mkdirs();
+        FileWriter fw = new FileWriter(f);
         BufferedWriter bw = new BufferedWriter(fw);
         List<String> idList =
                 repository.getEntityList(HiveTypeSystem.DefinedTypes.HIVE_DB.name());
...
@@ -32,6 +32,7 @@ import org.testng.annotations.BeforeClass;
 import org.testng.annotations.Test;
 
 import java.io.BufferedWriter;
+import java.io.File;
 import java.io.FileWriter;
 import java.io.IOException;

@@ -59,8 +60,9 @@ public class HiveTypeSystemTest {
         HiveImporter hImporter = new HiveImporter(mr, hts, new HiveMetaStoreClient(new HiveConf()));
         hImporter.importHiveMetadata();
         LOG.info("Defined DB instances");
-        FileWriter fw = new FileWriter("hiveobjs.txt");
-        BufferedWriter bw = new BufferedWriter(fw);
+        File f = new File("./target/logs/hiveobjs.txt");
+        f.getParentFile().mkdirs();
+        FileWriter fw = new FileWriter(f);
+        BufferedWriter bw = new BufferedWriter(fw);
         for (Id id : hImporter.getDBInstances()) {
             ITypedReferenceableInstance instance = mr.get(id);
             bw.write(instance.toString());
...
@@ -932,7 +932,9 @@ public class GraphBackedMetadataRepository implements MetadataRepository {
         LOG.debug("mapping vertex {} to array {}", instanceVertex, attributeInfo.name);
         String propertyName = typedInstance.getTypeName() + "." + attributeInfo.name;
         String keys = instanceVertex.getProperty(propertyName);
+        if (keys == null || keys.length() == 0) {
+            return;
+        }
         DataTypes.ArrayType arrayType = (DataTypes.ArrayType) attributeInfo.dataType();
         final IDataType elementType = arrayType.getElemType();

@@ -983,7 +985,9 @@ public class GraphBackedMetadataRepository implements MetadataRepository {
         LOG.debug("mapping vertex {} to array {}", instanceVertex, attributeInfo.name);
         String propertyName = typedInstance.getTypeName() + "." + attributeInfo.name;
         String keys = instanceVertex.getProperty(propertyName);
+        if (keys == null || keys.length() == 0) {
+            return;
+        }
         DataTypes.MapType mapType = (DataTypes.MapType) attributeInfo.dataType();
         final IDataType elementType = mapType.getValueType();
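The "empty array handling" fix from the commit message is these two guards: when the vertex property that stores the serialized keys for an array or map attribute is missing or empty, the mapping should bail out instead of parsing. A standalone sketch of the guard (the comma separator is an assumption about the serialization, not taken from the patch):

```java
public class EmptyKeysGuard {
    // Sketch of the guard added above: an absent or empty key string means
    // the attribute was never populated, so there is nothing to map back.
    static String[] parseKeys(String keys) {
        if (keys == null || keys.length() == 0) {
            return new String[0];
        }
        return keys.split(",");
    }

    public static void main(String[] args) {
        System.out.println(parseKeys(null).length);    // 0
        System.out.println(parseKeys("").length);      // 0
        System.out.println(parseKeys("a,b,c").length); // 3
    }
}
```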