Commit ac594b24 by Aaron Dossett

Update applications.properties to use 'hive_table' instead of 'Table'

Update Hive Table type to use 'name' instead of 'tableName'.
Add Columns back to Hive Tables.
parent 4543c837
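The heart of the change is an attribute rename on the Hive table type: DSL lookups now filter on `name` rather than `tableName`, and the lineage configuration points at the `hive_table` type. A minimal, self-contained sketch of the lookup query the bridge builds after this commit (the table name below is illustrative only, not from the commit):

```java
public class TableLookupQueryExample {
    public static void main(String[] args) {
        String typeName = "hive_table";   // value now configured for metadata.lineage.hive.table.type.name
        String tableName = "sales_fact";  // hypothetical table, for illustration only
        // After this commit the table type exposes "name" instead of "tableName"
        String query = String.format("%s where name = \"%s\"", typeName, tableName);
        System.out.println(query);        // prints: hive_table where name = "sales_fact"
    }
}
```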
@@ -192,7 +192,7 @@ public class HiveMetaStoreBridge {
        //todo DSL support for reference doesn't work. is the usage right?
        // String query = String.format("%s where dbName = \"%s\" and tableName = \"%s\"", typeName, dbRef.getId().id,
        //         tableName);
-       String query = String.format("%s where tableName = \"%s\"", typeName, tableName);
+       String query = String.format("%s where name = \"%s\"", typeName, tableName);
        JSONArray results = dgiClient.searchByDSL(query);
        if (results.length() == 0) {
            return null;
@@ -223,6 +223,7 @@ public class HiveMetaStoreBridge {
    }

    public Referenceable registerTable(Referenceable dbReference, String dbName, String tableName) throws Exception {
+       LOG.info("Attempting to register table [" + tableName + "]");
        Referenceable tableRef = getTableReference(dbReference, tableName);
        if (tableRef == null) {
            LOG.info("Importing objects from " + dbName + "." + tableName);
@@ -230,7 +231,7 @@ public class HiveMetaStoreBridge {
            Table hiveTable = hiveClient.getTable(dbName, tableName);
            tableRef = new Referenceable(HiveDataTypes.HIVE_TABLE.getName());
-           tableRef.set("tableName", hiveTable.getTableName());
+           tableRef.set("name", hiveTable.getTableName());
            tableRef.set("owner", hiveTable.getOwner());
            //todo fix
            tableRef.set("createTime", hiveTable.getLastAccessTime());
@@ -274,8 +275,8 @@ public class HiveMetaStoreBridge {
            tableRef.set("tableType", hiveTable.getTableType());
            tableRef.set("temporary", hiveTable.isTemporary());
-           // List<Referenceable> fieldsList = getColumns(storageDesc);
-           // tableRef.set("columns", fieldsList);
+           List<Referenceable> colList = getColumns(hiveTable.getAllCols());
+           tableRef.set("columns", colList);
            tableRef = createInstance(tableRef);
        } else {
@@ -397,7 +398,7 @@ public class HiveMetaStoreBridge {
        }
        */
-       List<Referenceable> fieldsList = getColumns(storageDesc);
+       List<Referenceable> fieldsList = getColumns(storageDesc.getCols());
        sdReferenceable.set("cols", fieldsList);
        List<Struct> sortColsStruct = new ArrayList<>();
@@ -428,19 +429,19 @@ public class HiveMetaStoreBridge {
        return createInstance(sdReferenceable);
    }

-   private List<Referenceable> getColumns(StorageDescriptor storageDesc) throws Exception {
-       List<Referenceable> fieldsList = new ArrayList<>();
-       Referenceable colReferenceable;
-       for (FieldSchema fs : storageDesc.getCols()) {
+   private List<Referenceable> getColumns(List<FieldSchema> schemaList) throws Exception
+   {
+       List<Referenceable> colList = new ArrayList<>();
+       for (FieldSchema fs : schemaList) {
            LOG.debug("Processing field " + fs);
-           colReferenceable = new Referenceable(HiveDataTypes.HIVE_COLUMN.getName());
+           Referenceable colReferenceable = new Referenceable(HiveDataTypes.HIVE_COLUMN.getName());
            colReferenceable.set("name", fs.getName());
            colReferenceable.set("type", fs.getType());
            colReferenceable.set("comment", fs.getComment());
-           fieldsList.add(createInstance(colReferenceable));
+           colList.add(createInstance(colReferenceable));
        }
-       return fieldsList;
+       return colList;
    }

    public synchronized void registerHiveDataModel() throws Exception {
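The `getColumns` signature change above is what allows columns to be added back to tables: by accepting a `List<FieldSchema>` instead of a `StorageDescriptor`, the same helper now serves both `hiveTable.getAllCols()` (for the table's `columns` attribute) and `storageDesc.getCols()` (for the storage descriptor's `cols`). A standalone sketch of iterating `FieldSchema` values the way the helper does, assuming the Hive metastore API's `FieldSchema(name, type, comment)` constructor; the sample columns are made up and the bridge's `Referenceable`/`createInstance` plumbing is omitted:

```java
import java.util.Arrays;
import java.util.List;

import org.apache.hadoop.hive.metastore.api.FieldSchema;

public class FieldSchemaIterationExample {
    public static void main(String[] args) {
        // Hypothetical columns standing in for hiveTable.getAllCols() or storageDesc.getCols()
        List<FieldSchema> schemaList = Arrays.asList(
                new FieldSchema("id", "int", "surrogate key"),
                new FieldSchema("amount", "double", null));
        for (FieldSchema fs : schemaList) {
            // The bridge wraps each field in a hive_column Referenceable; here we only print it
            System.out.println(fs.getName() + " : " + fs.getType() + " : " + fs.getComment());
        }
    }
}
```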
......
@@ -367,7 +367,7 @@ public class HiveDataModelGenerator {
    private void createTableClass() throws MetadataException {
        AttributeDefinition[] attributeDefinitions = new AttributeDefinition[]{
-               new AttributeDefinition("tableName", DataTypes.STRING_TYPE.getName(),
+               new AttributeDefinition("name", DataTypes.STRING_TYPE.getName(),
                        Multiplicity.REQUIRED, false, null),
                new AttributeDefinition("dbName", HiveDataTypes.HIVE_DB.getName(),
                        Multiplicity.REQUIRED, false, null),
@@ -384,9 +384,9 @@ public class HiveDataModelGenerator {
                new AttributeDefinition("partitionKeys",
                        DataTypes.arrayTypeName(HiveDataTypes.HIVE_COLUMN.getName()),
                        Multiplicity.OPTIONAL, false, null),
-               // new AttributeDefinition("columns",
-               //         DataTypes.arrayTypeName(HiveDataTypes.HIVE_COLUMN.getName()),
-               //         Multiplicity.COLLECTION, true, null),
+               new AttributeDefinition("columns",
+                       DataTypes.arrayTypeName(HiveDataTypes.HIVE_COLUMN.getName()),
+                       Multiplicity.OPTIONAL, true, null),
                new AttributeDefinition("parameters", STRING_MAP_TYPE.getName(),
                        Multiplicity.OPTIONAL, false, null),
                new AttributeDefinition("viewOriginalText", DataTypes.STRING_TYPE.getName(),
......
@@ -148,9 +148,10 @@ public class HiveLineageService implements LineageService {
    public String getSchema(String tableName) throws DiscoveryException {
        // todo - validate if indeed this is a table type and exists
        String schemaQuery = HIVE_TABLE_TYPE_NAME
-               + " where name=\"" + tableName + "\", "
-               + HIVE_TABLE_COLUMNS_ATTRIBUTE_NAME;
-               // + " as column select column.name, column.dataType, column.comment";
+               + " where name=\"" + tableName + "\""
+               + ", " + HIVE_TABLE_COLUMNS_ATTRIBUTE_NAME
+               // + " as column select column.name, column.dataType, column.comment"
+               ;
        return discoveryService.searchByDSL(schemaQuery);
    }
}
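With the comma moved outside the quoted table name, `getSchema` now emits a query of the form `hive_table where name="<table>", columns`. A quick sketch of the string being built, assuming the two constants resolve to the values set in the properties file changed below; the table name is hypothetical:

```java
public class SchemaQueryExample {
    public static void main(String[] args) {
        String HIVE_TABLE_TYPE_NAME = "hive_table";            // assumed from metadata.lineage.hive.table.type.name
        String HIVE_TABLE_COLUMNS_ATTRIBUTE_NAME = "columns";  // assumed from metadata.lineage.hive.table.column.name
        String tableName = "sales_fact";                       // hypothetical table
        String schemaQuery = HIVE_TABLE_TYPE_NAME
                + " where name=\"" + tableName + "\""
                + ", " + HIVE_TABLE_COLUMNS_ATTRIBUTE_NAME;
        System.out.println(schemaQuery);  // hive_table where name="sales_fact", columns
    }
}
```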
@@ -30,7 +30,8 @@ metadata.graph.index.search.elasticsearch.create.sleep=2000
 ######### Hive Lineage Configs #########
 # This models follows the quick-start guide
-metadata.lineage.hive.table.type.name=Table
+#FOOBAR
+metadata.lineage.hive.table.type.name=hive_table
 metadata.lineage.hive.column.type.name=Column
 metadata.lineage.hive.table.column.name=columns
 metadata.lineage.hive.process.type.name=LoadProcess
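How these keys reach the lineage service is not shown in this diff; purely as an illustration of the mapping, here is how the renamed value could be read with plain java.util.Properties (the file path and loading mechanism are assumptions, not the project's actual configuration code):

```java
import java.io.FileInputStream;
import java.io.IOException;
import java.util.Properties;

public class LineageConfigReadExample {
    public static void main(String[] args) throws IOException {
        Properties props = new Properties();
        // Hypothetical path; the real location depends on how the webapp is deployed
        try (FileInputStream in = new FileInputStream("application.properties")) {
            props.load(in);
        }
        // After this commit the table type name resolves to "hive_table" rather than "Table"
        String tableTypeName = props.getProperty("metadata.lineage.hive.table.type.name");
        String columnsAttribute = props.getProperty("metadata.lineage.hive.table.column.name");
        System.out.println(tableTypeName + " / " + columnsAttribute);  // hive_table / columns
    }
}
```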
......
@@ -326,6 +326,10 @@
                             <name>metadata.home</name>
                             <value>${project.build.directory}</value>
                         </systemProperty>
+                        <systemProperty>
+                            <name>metadata.conf</name>
+                            <value>${project.build.directory}/../../webapp/src/main/resources</value>
+                        </systemProperty>
                     </systemProperties>
                     <stopKey>metadata-stop</stopKey>
                     <stopPort>41001</stopPort>
......