Commit a86f0f3e by Venkatesh Seetharam

Add Hive Bridge with major refactoring

parent e747e2ae
......@@ -34,10 +34,22 @@
<properties>
<hive.version>0.14.0</hive.version>
<hadoop.version>2.5.0</hadoop.version>
</properties>
<dependencies>
<dependency>
<groupId>org.apache.hadoop.metadata</groupId>
<artifactId>metadata-client</artifactId>
</dependency>
<dependency>
<groupId>org.apache.hadoop</groupId>
<artifactId>hadoop-client</artifactId>
<version>${hadoop.version}</version>
</dependency>
<dependency>
<groupId>org.apache.hive</groupId>
<artifactId>hive-metastore</artifactId>
<version>${hive.version}</version>
......@@ -96,11 +108,6 @@
</dependency>
<dependency>
<groupId>org.apache.hadoop</groupId>
<artifactId>hadoop-client</artifactId>
</dependency>
<dependency>
<groupId>log4j</groupId>
<artifactId>log4j</artifactId>
</dependency>
......
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.metadata.hive.bridge;
import org.apache.hadoop.hive.conf.HiveConf;
import org.apache.hadoop.hive.metastore.HiveMetaStoreClient;
import org.apache.hadoop.hive.metastore.api.Database;
import org.apache.hadoop.hive.metastore.api.FieldSchema;
import org.apache.hadoop.hive.metastore.api.Index;
import org.apache.hadoop.hive.metastore.api.Order;
import org.apache.hadoop.hive.metastore.api.Partition;
import org.apache.hadoop.hive.metastore.api.SerDeInfo;
import org.apache.hadoop.hive.metastore.api.StorageDescriptor;
import org.apache.hadoop.hive.metastore.api.Table;
import org.apache.hadoop.metadata.MetadataServiceClient;
import org.apache.hadoop.metadata.hive.model.HiveDataTypes;
import org.apache.hadoop.metadata.typesystem.Referenceable;
import org.apache.hadoop.metadata.typesystem.Struct;
import org.apache.hadoop.metadata.typesystem.json.InstanceSerialization;
import org.codehaus.jettison.json.JSONObject;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.util.ArrayList;
import java.util.List;
/**
 * A Bridge Utility that imports metadata from the Hive Meta Store
 * and registers them in DGI.
 */
public class HiveMetaStoreBridge {

    private static final Logger LOG = LoggerFactory.getLogger(HiveMetaStoreBridge.class);

    private final HiveMetaStoreClient hiveMetaStoreClient;
    private final MetadataServiceClient metadataServiceClient;

    /**
     * Construct a HiveMetaStoreBridge.
     *
     * @param baseUrl metadata service url
     * @throws Exception if the Hive metastore client or metadata service client
     *                   cannot be created
     */
    public HiveMetaStoreBridge(String baseUrl) throws Exception {
        hiveMetaStoreClient = createHiveMetaStoreClient();
        metadataServiceClient = new MetadataServiceClient(baseUrl);
    }

    /** Creates a metastore client from the default HiveConf (reads hive-site.xml from classpath). */
    private HiveMetaStoreClient createHiveMetaStoreClient() throws Exception {
        HiveConf conf = new HiveConf();
        return new HiveMetaStoreClient(conf);
    }

    /**
     * Imports all Hive metadata (databases, tables, partitions and indexes)
     * from the Hive Meta Store into the metadata service.
     */
    public void importHiveMetadata() throws Exception {
        LOG.info("Importing hive metadata");
        importDatabases();
    }

    /** Imports every database known to the metastore, one at a time. */
    private void importDatabases() throws Exception {
        List<String> databases = hiveMetaStoreClient.getAllDatabases();
        for (String databaseName : databases) {
            importDatabase(databaseName);
        }
    }

    /**
     * Imports a single database and all tables it contains.
     *
     * @param databaseName name of the Hive database to import
     */
    private void importDatabase(String databaseName) throws Exception {
        LOG.info("Importing objects from databaseName : {}", databaseName);

        Database hiveDB = hiveMetaStoreClient.getDatabase(databaseName);

        Referenceable dbRef = new Referenceable(HiveDataTypes.HIVE_DB.name());
        dbRef.set("name", hiveDB.getName());
        dbRef.set("description", hiveDB.getDescription());
        dbRef.set("locationUri", hiveDB.getLocationUri());
        dbRef.set("parameters", hiveDB.getParameters());
        dbRef.set("ownerName", hiveDB.getOwnerName());
        dbRef.set("ownerType", hiveDB.getOwnerType().getValue());

        Referenceable databaseReferenceable = createInstance(dbRef);

        importTables(databaseName, databaseReferenceable);
    }

    /**
     * Registers the given referenceable with the metadata service and returns a
     * new Referenceable carrying the guid assigned by the service.
     *
     * @param referenceable entity to register
     * @return a Referenceable with the service-assigned guid and the same type/values
     */
    private Referenceable createInstance(Referenceable referenceable) throws Exception {
        String typeName = referenceable.getTypeName();
        LOG.debug("creating instance of type {}", typeName);

        String entityJSON = InstanceSerialization.toJson(referenceable, true);
        LOG.debug("Submitting new entity= {}", entityJSON);
        JSONObject jsonObject = metadataServiceClient.createEntity(entityJSON);
        String guid = jsonObject.getString(MetadataServiceClient.RESULTS);
        LOG.debug("created instance for type {}, guid: {}", typeName, guid);

        return new Referenceable(guid, referenceable.getTypeName(), referenceable.getValuesMap());
    }

    /** Imports every table of the given database. */
    private void importTables(String databaseName,
                              Referenceable databaseReferenceable) throws Exception {
        List<String> hiveTables = hiveMetaStoreClient.getAllTables(databaseName);
        for (String tableName : hiveTables) {
            importTable(databaseName, tableName, databaseReferenceable);
        }
    }

    /**
     * Imports a single table along with its storage descriptor, partition keys,
     * partitions and indexes.
     *
     * @param db                    database the table belongs to
     * @param tableName             name of the table
     * @param databaseReferenceable registered database instance (with guid) to link to
     */
    private void importTable(String db, String tableName,
                             Referenceable databaseReferenceable) throws Exception {
        LOG.info("Importing objects from {}.{}", db, tableName);

        Table hiveTable = hiveMetaStoreClient.getTable(db, tableName);

        Referenceable tableRef = new Referenceable(HiveDataTypes.HIVE_TABLE.name());
        tableRef.set("tableName", hiveTable.getTableName());
        tableRef.set("owner", hiveTable.getOwner());
        tableRef.set("createTime", hiveTable.getCreateTime());
        tableRef.set("lastAccessTime", hiveTable.getLastAccessTime());
        tableRef.set("retention", hiveTable.getRetention());

        // add reference to the database
        tableRef.set("dbName", databaseReferenceable);

        // add reference to the StorageDescriptor
        StorageDescriptor storageDesc = hiveTable.getSd();
        Referenceable sdReferenceable = fillStorageDescStruct(storageDesc);
        tableRef.set("sd", sdReferenceable);

        // add reference to the Partition Keys
        if (hiveTable.getPartitionKeysSize() > 0) {
            List<Referenceable> partKeys = new ArrayList<>();
            for (FieldSchema fs : hiveTable.getPartitionKeys()) {
                Referenceable colRef = new Referenceable(HiveDataTypes.HIVE_COLUMN.name());
                colRef.set("name", fs.getName());
                colRef.set("type", fs.getType());
                colRef.set("comment", fs.getComment());
                partKeys.add(createInstance(colRef));
            }

            tableRef.set("partitionKeys", partKeys);
        }

        tableRef.set("parameters", hiveTable.getParameters());

        if (hiveTable.isSetViewOriginalText()) {
            tableRef.set("viewOriginalText", hiveTable.getViewOriginalText());
        }

        if (hiveTable.isSetViewExpandedText()) {
            tableRef.set("viewExpandedText", hiveTable.getViewExpandedText());
        }

        tableRef.set("tableType", hiveTable.getTableType());
        tableRef.set("temporary", hiveTable.isTemporary());

        Referenceable tableReferenceable = createInstance(tableRef);

        // Import Partitions
        importPartitions(db, tableName, databaseReferenceable, tableReferenceable, sdReferenceable);

        // Import Indexes
        // Fix: pass the registered table instance (tableReferenceable, which carries the
        // guid) rather than the pre-registration tableRef, matching importPartitions above.
        importIndexes(db, tableName, databaseReferenceable, tableReferenceable);
    }

    /** Imports all partitions of the given table, reusing the table's storage descriptor. */
    private void importPartitions(String db, String table,
                                  Referenceable dbReferenceable,
                                  Referenceable tableReferenceable,
                                  Referenceable sdReferenceable) throws Exception {
        List<Partition> tableParts = hiveMetaStoreClient.listPartitions(
                db, table, Short.MAX_VALUE);

        for (Partition hivePart : tableParts) {
            importPartition(hivePart, dbReferenceable, tableReferenceable, sdReferenceable);
        }
    }

    /**
     * Imports a single partition.
     *
     * @return the registered partition instance (with guid)
     */
    private Referenceable importPartition(Partition hivePart,
                                          Referenceable dbReferenceable,
                                          Referenceable tableReferenceable,
                                          Referenceable sdReferenceable) throws Exception {
        Referenceable partRef = new Referenceable(HiveDataTypes.HIVE_PARTITION.name());
        partRef.set("values", hivePart.getValues());

        partRef.set("dbName", dbReferenceable);
        partRef.set("tableName", tableReferenceable);

        partRef.set("createTime", hivePart.getCreateTime());
        partRef.set("lastAccessTime", hivePart.getLastAccessTime());

        // sdStruct = fillStorageDescStruct(hivePart.getSd());
        // Instead of creating copies of the sdstruct for partitions we are reusing existing
        // ones will fix to identify partitions with differing schema.
        partRef.set("sd", sdReferenceable);

        partRef.set("parameters", hivePart.getParameters());

        return createInstance(partRef);
    }

    /** Imports all indexes defined on the given table. */
    private void importIndexes(String db, String table,
                               Referenceable dbReferenceable,
                               Referenceable tableReferenceable) throws Exception {
        List<Index> indexes = hiveMetaStoreClient.listIndexes(db, table, Short.MAX_VALUE);

        for (Index index : indexes) {
            importIndex(index, dbReferenceable, tableReferenceable);
        }
    }

    /** Imports a single index definition. */
    private void importIndex(Index index,
                             Referenceable dbReferenceable,
                             Referenceable tableReferenceable) throws Exception {
        Referenceable indexRef = new Referenceable(HiveDataTypes.HIVE_INDEX.name());

        indexRef.set("indexName", index.getIndexName());
        indexRef.set("indexHandlerClass", index.getIndexHandlerClass());

        indexRef.set("dbName", dbReferenceable);

        indexRef.set("createTime", index.getCreateTime());
        indexRef.set("lastAccessTime", index.getLastAccessTime());
        // NOTE(review): these are set as plain table-name strings while the partition import
        // links Referenceables for table attributes — confirm against the HIVE_INDEX model.
        indexRef.set("origTableName", index.getOrigTableName());
        indexRef.set("indexTableName", index.getIndexTableName());

        Referenceable sdReferenceable = fillStorageDescStruct(index.getSd());
        indexRef.set("sd", sdReferenceable);

        indexRef.set("parameters", index.getParameters());

        // Fix: deferredRebuild is an attribute of the index, not the table — the original
        // mistakenly wrote it onto tableReferenceable, mutating the table and leaving the
        // index entity without it.
        indexRef.set("deferredRebuild", index.isDeferredRebuild());

        createInstance(indexRef);
    }

    /**
     * Builds and registers a HIVE_STORAGEDESC instance for the given storage
     * descriptor, including serde info, columns, sort columns and bucket columns.
     *
     * @return the registered storage descriptor instance (with guid)
     */
    private Referenceable fillStorageDescStruct(StorageDescriptor storageDesc) throws Exception {
        LOG.debug("Filling storage descriptor information for {}", storageDesc);

        Referenceable sdReferenceable = new Referenceable(HiveDataTypes.HIVE_STORAGEDESC.name());

        SerDeInfo serdeInfo = storageDesc.getSerdeInfo();
        LOG.debug("serdeInfo = {}", serdeInfo);
        // SkewedInfo skewedInfo = storageDesc.getSkewedInfo();

        String serdeInfoName = HiveDataTypes.HIVE_SERDE.name();
        Struct serdeInfoStruct = new Struct(serdeInfoName);

        serdeInfoStruct.set("name", serdeInfo.getName());
        serdeInfoStruct.set("serializationLib", serdeInfo.getSerializationLib());
        serdeInfoStruct.set("parameters", serdeInfo.getParameters());

        sdReferenceable.set("serdeInfo", serdeInfoStruct);

        // Will need to revisit this after we fix typesystem.
        /*
        LOG.info("skewedInfo = " + skewedInfo);
        String skewedInfoName = HiveDataTypes.HIVE_SKEWEDINFO.name();
        Struct skewedInfoStruct = new Struct(skewedInfoName);
        if (skewedInfo.getSkewedColNames().size() > 0) {
            skewedInfoStruct.set("skewedColNames", skewedInfo.getSkewedColNames());
            skewedInfoStruct.set("skewedColValues", skewedInfo.getSkewedColValues());
            skewedInfoStruct.set("skewedColValueLocationMaps",
                    skewedInfo.getSkewedColValueLocationMaps());
            StructType skewedInfotype = (StructType) hiveTypeSystem.getDataType(skewedInfoName);
            ITypedStruct skewedInfoStructTyped =
                    skewedInfotype.convert(skewedInfoStruct, Multiplicity.OPTIONAL);
            sdStruct.set("skewedInfo", skewedInfoStructTyped);
        }
        */

        List<Referenceable> fieldsList = new ArrayList<>();
        for (FieldSchema fs : storageDesc.getCols()) {
            LOG.debug("Processing field {}", fs);
            Referenceable colReferenceable = new Referenceable(HiveDataTypes.HIVE_COLUMN.name());
            colReferenceable.set("name", fs.getName());
            colReferenceable.set("type", fs.getType());
            colReferenceable.set("comment", fs.getComment());

            fieldsList.add(createInstance(colReferenceable));
        }
        sdReferenceable.set("cols", fieldsList);

        List<Struct> sortColsStruct = new ArrayList<>();
        for (Order sortcol : storageDesc.getSortCols()) {
            String hiveOrderName = HiveDataTypes.HIVE_ORDER.name();
            Struct colStruct = new Struct(hiveOrderName);
            colStruct.set("col", sortcol.getCol());
            colStruct.set("order", sortcol.getOrder());

            sortColsStruct.add(colStruct);
        }

        if (sortColsStruct.size() > 0) {
            sdReferenceable.set("sortCols", sortColsStruct);
        }

        sdReferenceable.set("location", storageDesc.getLocation());
        sdReferenceable.set("inputFormat", storageDesc.getInputFormat());
        sdReferenceable.set("outputFormat", storageDesc.getOutputFormat());
        sdReferenceable.set("compressed", storageDesc.isCompressed());

        if (storageDesc.getBucketCols().size() > 0) {
            sdReferenceable.set("bucketCols", storageDesc.getBucketCols());
        }

        sdReferenceable.set("parameters", storageDesc.getParameters());
        sdReferenceable.set("storedAsSubDirectories", storageDesc.isStoredAsSubDirectories());

        return createInstance(sdReferenceable);
    }

    /**
     * Resolves the metadata service base url from the command-line arguments,
     * defaulting to http://localhost:21000 when none is given.
     */
    static String getServerUrl(String[] args) {
        String baseUrl = "http://localhost:21000";
        if (args.length > 0) {
            baseUrl = args[0];
        }

        return baseUrl;
    }

    /** Entry point: imports all hive metadata into the service at argv[0] (or the default url). */
    public static void main(String[] argv) throws Exception {
        String baseUrl = getServerUrl(argv);
        HiveMetaStoreBridge hiveMetaStoreBridge = new HiveMetaStoreBridge(baseUrl);
        hiveMetaStoreBridge.importHiveMetadata();
    }
}
......@@ -127,9 +127,9 @@ public class HiveDataModelGenerator {
};
EnumTypeDefinition definition = new EnumTypeDefinition(
HiveDataTypes.HIVE_OBJECTTYPE.name(), values);
enumTypeDefinitionMap.put(HiveDataTypes.HIVE_OBJECTTYPE.name(), definition);
LOG.debug("Created definition for " + HiveDataTypes.HIVE_OBJECTTYPE.name());
HiveDataTypes.HIVE_OBJECT_TYPE.getName(), values);
enumTypeDefinitionMap.put(HiveDataTypes.HIVE_OBJECT_TYPE.getName(), definition);
LOG.debug("Created definition for " + HiveDataTypes.HIVE_OBJECT_TYPE.getName());
}
private void createHivePrincipalTypeEnum() throws MetadataException {
......@@ -140,10 +140,10 @@ public class HiveDataModelGenerator {
};
EnumTypeDefinition definition = new EnumTypeDefinition(
HiveDataTypes.HIVE_PRINCIPALTYPE.name(), values);
HiveDataTypes.HIVE_PRINCIPAL_TYPE.getName(), values);
enumTypeDefinitionMap.put(HiveDataTypes.HIVE_PRINCIPALTYPE.name(), definition);
LOG.debug("Created definition for " + HiveDataTypes.HIVE_PRINCIPALTYPE.name());
enumTypeDefinitionMap.put(HiveDataTypes.HIVE_PRINCIPAL_TYPE.getName(), definition);
LOG.debug("Created definition for " + HiveDataTypes.HIVE_PRINCIPAL_TYPE.getName());
}
private void createFunctionTypeEnum() throws MetadataException {
......@@ -152,9 +152,9 @@ public class HiveDataModelGenerator {
};
EnumTypeDefinition definition = new EnumTypeDefinition(
HiveDataTypes.HIVE_FUNCTIONTYPE.name(), values);
enumTypeDefinitionMap.put(HiveDataTypes.HIVE_FUNCTIONTYPE.name(), definition);
LOG.debug("Created definition for " + HiveDataTypes.HIVE_FUNCTIONTYPE.name());
HiveDataTypes.HIVE_FUNCTION_TYPE.getName(), values);
enumTypeDefinitionMap.put(HiveDataTypes.HIVE_FUNCTION_TYPE.getName(), definition);
LOG.debug("Created definition for " + HiveDataTypes.HIVE_FUNCTION_TYPE.getName());
}
private void createResourceTypeEnum() throws MetadataException {
......@@ -164,9 +164,9 @@ public class HiveDataModelGenerator {
new EnumValue("ARCHIVE", 3),
};
EnumTypeDefinition definition = new EnumTypeDefinition(
HiveDataTypes.HIVE_RESOURCETYPE.name(), values);
enumTypeDefinitionMap.put(HiveDataTypes.HIVE_RESOURCETYPE.name(), definition);
LOG.debug("Created definition for " + HiveDataTypes.HIVE_RESOURCETYPE.name());
HiveDataTypes.HIVE_RESOURCE_TYPE.getName(), values);
enumTypeDefinitionMap.put(HiveDataTypes.HIVE_RESOURCE_TYPE.getName(), definition);
LOG.debug("Created definition for " + HiveDataTypes.HIVE_RESOURCE_TYPE.getName());
}
private void createSerDeStruct() throws MetadataException {
......@@ -178,10 +178,10 @@ public class HiveDataModelGenerator {
new AttributeDefinition("parameters", STRING_MAP_TYPE.getName(),
Multiplicity.OPTIONAL, false, null),
};
StructTypeDefinition definition = new StructTypeDefinition(HiveDataTypes.HIVE_SERDE.name(),
attributeDefinitions);
structTypeDefinitionMap.put(HiveDataTypes.HIVE_SERDE.name(), definition);
LOG.debug("Created definition for " + HiveDataTypes.HIVE_SERDE.name());
StructTypeDefinition definition = new StructTypeDefinition(
HiveDataTypes.HIVE_SERDE.getName(), attributeDefinitions);
structTypeDefinitionMap.put(HiveDataTypes.HIVE_SERDE.getName(), definition);
LOG.debug("Created definition for " + HiveDataTypes.HIVE_SERDE.getName());
}
/*
......@@ -200,10 +200,10 @@ public class HiveDataModelGenerator {
Multiplicity.OPTIONAL, false, null),
};
StructTypeDefinition definition = new StructTypeDefinition(
DefinedTypes.HIVE_SKEWEDINFO.name(), attributeDefinitions);
DefinedTypes.HIVE_SKEWEDINFO.getName(), attributeDefinitions);
structTypeDefinitionMap.put(DefinedTypes.HIVE_SKEWEDINFO.name(), definition);
LOG.debug("Created definition for " + DefinedTypes.HIVE_SKEWEDINFO.name());
structTypeDefinitionMap.put(DefinedTypes.HIVE_SKEWEDINFO.getName(), definition);
LOG.debug("Created definition for " + DefinedTypes.HIVE_SKEWEDINFO.getName());
}
*/
......@@ -216,15 +216,15 @@ public class HiveDataModelGenerator {
};
StructTypeDefinition definition = new StructTypeDefinition(
HiveDataTypes.HIVE_ORDER.name(), attributeDefinitions);
structTypeDefinitionMap.put(HiveDataTypes.HIVE_ORDER.name(), definition);
LOG.debug("Created definition for " + HiveDataTypes.HIVE_ORDER.name());
HiveDataTypes.HIVE_ORDER.getName(), attributeDefinitions);
structTypeDefinitionMap.put(HiveDataTypes.HIVE_ORDER.getName(), definition);
LOG.debug("Created definition for " + HiveDataTypes.HIVE_ORDER.getName());
}
private void createStorageDescClass() throws MetadataException {
AttributeDefinition[] attributeDefinitions = new AttributeDefinition[]{
new AttributeDefinition("cols",
String.format("array<%s>", HiveDataTypes.HIVE_COLUMN.name()),
String.format("array<%s>", HiveDataTypes.HIVE_COLUMN.getName()),
Multiplicity.COLLECTION, false, null),
new AttributeDefinition("location", DataTypes.STRING_TYPE.getName(),
Multiplicity.OPTIONAL, false, null),
......@@ -236,41 +236,41 @@ public class HiveDataModelGenerator {
Multiplicity.REQUIRED, false, null),
new AttributeDefinition("numBuckets", DataTypes.INT_TYPE.getName(),
Multiplicity.OPTIONAL, false, null),
new AttributeDefinition("serdeInfo", HiveDataTypes.HIVE_SERDE.name(),
new AttributeDefinition("serdeInfo", HiveDataTypes.HIVE_SERDE.getName(),
Multiplicity.OPTIONAL, false, null),
new AttributeDefinition("bucketCols",
String.format("array<%s>", DataTypes.STRING_TYPE.getName()),
Multiplicity.OPTIONAL, false, null),
new AttributeDefinition("sortCols",
String.format("array<%s>", HiveDataTypes.HIVE_ORDER.name()),
String.format("array<%s>", HiveDataTypes.HIVE_ORDER.getName()),
Multiplicity.OPTIONAL, false, null),
new AttributeDefinition("parameters", STRING_MAP_TYPE.getName(),
Multiplicity.OPTIONAL, false, null),
//new AttributeDefinition("skewedInfo", DefinedTypes.HIVE_SKEWEDINFO.name(),
//new AttributeDefinition("skewedInfo", DefinedTypes.HIVE_SKEWEDINFO.getName(),
// Multiplicity.OPTIONAL, false, null),
new AttributeDefinition("storedAsSubDirectories", DataTypes.BOOLEAN_TYPE.getName(),
Multiplicity.OPTIONAL, false, null),
};
HierarchicalTypeDefinition<ClassType> definition = new HierarchicalTypeDefinition<>(
ClassType.class, HiveDataTypes.HIVE_STORAGEDESC.name(), null, attributeDefinitions);
classTypeDefinitions.put(HiveDataTypes.HIVE_STORAGEDESC.name(), definition);
LOG.debug("Created definition for " + HiveDataTypes.HIVE_STORAGEDESC.name());
ClassType.class, HiveDataTypes.HIVE_STORAGEDESC.getName(), null, attributeDefinitions);
classTypeDefinitions.put(HiveDataTypes.HIVE_STORAGEDESC.getName(), definition);
LOG.debug("Created definition for " + HiveDataTypes.HIVE_STORAGEDESC.getName());
}
/** Revisit later after nested array types are handled by the typesystem **/
private void createResourceUriStruct() throws MetadataException {
AttributeDefinition[] attributeDefinitions = new AttributeDefinition[]{
new AttributeDefinition("resourceType", HiveDataTypes.HIVE_RESOURCETYPE.name(),
new AttributeDefinition("resourceType", HiveDataTypes.HIVE_RESOURCE_TYPE.getName(),
Multiplicity.REQUIRED, false, null),
new AttributeDefinition("uri", DataTypes.STRING_TYPE.getName(),
Multiplicity.REQUIRED, false, null),
};
StructTypeDefinition definition = new StructTypeDefinition(
HiveDataTypes.HIVE_RESOURCEURI.name(), attributeDefinitions);
structTypeDefinitionMap.put(HiveDataTypes.HIVE_RESOURCEURI.name(), definition);
LOG.debug("Created definition for " + HiveDataTypes.HIVE_RESOURCEURI.name());
HiveDataTypes.HIVE_RESOURCEURI.getName(), attributeDefinitions);
structTypeDefinitionMap.put(HiveDataTypes.HIVE_RESOURCEURI.getName(), definition);
LOG.debug("Created definition for " + HiveDataTypes.HIVE_RESOURCEURI.getName());
}
private void createDBClass() throws MetadataException {
......@@ -285,15 +285,15 @@ public class HiveDataModelGenerator {
Multiplicity.OPTIONAL, false, null),
new AttributeDefinition("ownerName", DataTypes.STRING_TYPE.getName(),
Multiplicity.OPTIONAL, false, null),
new AttributeDefinition("ownerType", HiveDataTypes.HIVE_PRINCIPALTYPE.name(),
new AttributeDefinition("ownerType", HiveDataTypes.HIVE_PRINCIPAL_TYPE.getName(),
Multiplicity.OPTIONAL, false, null),
};
HierarchicalTypeDefinition<ClassType> definition =
new HierarchicalTypeDefinition<>(ClassType.class, HiveDataTypes.HIVE_DB.name(),
new HierarchicalTypeDefinition<>(ClassType.class, HiveDataTypes.HIVE_DB.getName(),
null, attributeDefinitions);
classTypeDefinitions.put(HiveDataTypes.HIVE_DB.name(), definition);
LOG.debug("Created definition for " + HiveDataTypes.HIVE_DB.name());
classTypeDefinitions.put(HiveDataTypes.HIVE_DB.getName(), definition);
LOG.debug("Created definition for " + HiveDataTypes.HIVE_DB.getName());
}
private void createTypeClass() throws MetadataException {
......@@ -305,21 +305,21 @@ public class HiveDataModelGenerator {
new AttributeDefinition("type2", DataTypes.STRING_TYPE.getName(),
Multiplicity.OPTIONAL, false, null),
new AttributeDefinition("fields", String.format("array<%s>",
HiveDataTypes.HIVE_COLUMN.name()), Multiplicity.OPTIONAL, false, null),
HiveDataTypes.HIVE_COLUMN.getName()), Multiplicity.OPTIONAL, false, null),
};
HierarchicalTypeDefinition<ClassType> definition =
new HierarchicalTypeDefinition<>(ClassType.class, HiveDataTypes.HIVE_TYPE.name(),
new HierarchicalTypeDefinition<>(ClassType.class, HiveDataTypes.HIVE_TYPE.getName(),
null, attributeDefinitions);
classTypeDefinitions.put(HiveDataTypes.HIVE_TYPE.name(), definition);
LOG.debug("Created definition for " + HiveDataTypes.HIVE_TYPE.name());
classTypeDefinitions.put(HiveDataTypes.HIVE_TYPE.getName(), definition);
LOG.debug("Created definition for " + HiveDataTypes.HIVE_TYPE.getName());
}
private void createColumnClass() throws MetadataException {
AttributeDefinition[] attributeDefinitions = new AttributeDefinition[]{
new AttributeDefinition("name", DataTypes.STRING_TYPE.getName(),
Multiplicity.REQUIRED, false, null),
//new AttributeDefinition("type", DefinedTypes.HIVE_TYPE.name(), Multiplicity
//new AttributeDefinition("type", DefinedTypes.HIVE_TYPE.getName(), Multiplicity
// .REQUIRED, false, null),
new AttributeDefinition("type", DataTypes.STRING_TYPE.getName(),
Multiplicity.REQUIRED, false, null),
......@@ -327,11 +327,11 @@ public class HiveDataModelGenerator {
Multiplicity.OPTIONAL, false, null),
};
HierarchicalTypeDefinition<ClassType> definition =
new HierarchicalTypeDefinition<>(ClassType.class, HiveDataTypes.HIVE_COLUMN.name(),
new HierarchicalTypeDefinition<>(
ClassType.class, HiveDataTypes.HIVE_COLUMN.getName(),
null, attributeDefinitions);
classTypeDefinitions.put(HiveDataTypes.HIVE_COLUMN.name(), definition);
LOG.debug("Created definition for " + HiveDataTypes.HIVE_COLUMN.name());
classTypeDefinitions.put(HiveDataTypes.HIVE_COLUMN.getName(), definition);
LOG.debug("Created definition for " + HiveDataTypes.HIVE_COLUMN.getName());
}
private void createPartitionClass() throws MetadataException {
......@@ -339,36 +339,34 @@ public class HiveDataModelGenerator {
AttributeDefinition[] attributeDefinitions = new AttributeDefinition[]{
new AttributeDefinition("values", DataTypes.STRING_TYPE.getName(),
Multiplicity.COLLECTION, false, null),
new AttributeDefinition("dbName", HiveDataTypes.HIVE_DB.name(),
new AttributeDefinition("dbName", HiveDataTypes.HIVE_DB.getName(),
Multiplicity.REQUIRED, false, null),
new AttributeDefinition("tableName", HiveDataTypes.HIVE_TABLE.name(),
new AttributeDefinition("tableName", HiveDataTypes.HIVE_TABLE.getName(),
Multiplicity.REQUIRED, false, null),
new AttributeDefinition("createTime", DataTypes.INT_TYPE.getName(),
Multiplicity.OPTIONAL, false, null),
new AttributeDefinition("lastAccessTime", DataTypes.INT_TYPE.getName(),
Multiplicity.OPTIONAL, false, null),
new AttributeDefinition("sd", HiveDataTypes.HIVE_STORAGEDESC.name(),
new AttributeDefinition("sd", HiveDataTypes.HIVE_STORAGEDESC.getName(),
Multiplicity.REQUIRED, false, null),
//new AttributeDefinition("columns", String.format("array<%s>", DefinedTypes
// .HIVE_COLUMN.name()),
// Multiplicity.COLLECTION, true, null),
// .HIVE_COLUMN.getName()), Multiplicity.COLLECTION, true, null),
new AttributeDefinition("parameters", STRING_MAP_TYPE.getName(),
Multiplicity.OPTIONAL, false, null),
};
HierarchicalTypeDefinition<ClassType> definition =
new HierarchicalTypeDefinition<>(ClassType.class,
HiveDataTypes.HIVE_PARTITION.name(),
null, attributeDefinitions);
classTypeDefinitions.put(HiveDataTypes.HIVE_PARTITION.name(), definition);
LOG.debug("Created definition for " + HiveDataTypes.HIVE_PARTITION.name());
HiveDataTypes.HIVE_PARTITION.getName(), null, attributeDefinitions);
classTypeDefinitions.put(HiveDataTypes.HIVE_PARTITION.getName(), definition);
LOG.debug("Created definition for " + HiveDataTypes.HIVE_PARTITION.getName());
}
private void createTableClass() throws MetadataException {
AttributeDefinition[] attributeDefinitions = new AttributeDefinition[]{
new AttributeDefinition("tableName", DataTypes.STRING_TYPE.getName(),
Multiplicity.REQUIRED, false, null),
new AttributeDefinition("dbName", HiveDataTypes.HIVE_DB.name(),
new AttributeDefinition("dbName", HiveDataTypes.HIVE_DB.getName(),
Multiplicity.REQUIRED, false, null),
new AttributeDefinition("owner", DataTypes.STRING_TYPE.getName(),
Multiplicity.OPTIONAL, false, null),
......@@ -378,13 +376,13 @@ public class HiveDataModelGenerator {
Multiplicity.OPTIONAL, false, null),
new AttributeDefinition("retention", DataTypes.INT_TYPE.getName(),
Multiplicity.OPTIONAL, false, null),
new AttributeDefinition("sd", HiveDataTypes.HIVE_STORAGEDESC.name(),
new AttributeDefinition("sd", HiveDataTypes.HIVE_STORAGEDESC.getName(),
Multiplicity.OPTIONAL, false, null),
new AttributeDefinition("partitionKeys",
String.format("array<%s>", HiveDataTypes.HIVE_COLUMN.name()),
String.format("array<%s>", HiveDataTypes.HIVE_COLUMN.getName()),
Multiplicity.OPTIONAL, false, null),
//new AttributeDefinition("columns", String.format("array<%s>", DefinedTypes
// .HIVE_COLUMN.name()),
// new AttributeDefinition("columns", // todo - ask venkat
// String.format("array<%s>", HiveDataTypes.HIVE_COLUMN.getName()),
// Multiplicity.COLLECTION, true, null),
new AttributeDefinition("parameters", STRING_MAP_TYPE.getName(),
Multiplicity.OPTIONAL, false, null),
......@@ -398,10 +396,10 @@ public class HiveDataModelGenerator {
Multiplicity.OPTIONAL, false, null),
};
HierarchicalTypeDefinition<ClassType> definition =
new HierarchicalTypeDefinition<>(ClassType.class, HiveDataTypes.HIVE_TABLE.name(),
new HierarchicalTypeDefinition<>(ClassType.class, HiveDataTypes.HIVE_TABLE.getName(),
null, attributeDefinitions);
classTypeDefinitions.put(HiveDataTypes.HIVE_TABLE.name(), definition);
LOG.debug("Created definition for " + HiveDataTypes.HIVE_TABLE.name());
classTypeDefinitions.put(HiveDataTypes.HIVE_TABLE.getName(), definition);
LOG.debug("Created definition for " + HiveDataTypes.HIVE_TABLE.getName());
}
private void createIndexClass() throws MetadataException {
......@@ -410,17 +408,17 @@ public class HiveDataModelGenerator {
Multiplicity.REQUIRED, false, null),
new AttributeDefinition("indexHandlerClass", DataTypes.STRING_TYPE.getName(),
Multiplicity.REQUIRED, false, null),
new AttributeDefinition("dbName", HiveDataTypes.HIVE_DB.name(),
new AttributeDefinition("dbName", HiveDataTypes.HIVE_DB.getName(),
Multiplicity.REQUIRED, false, null),
new AttributeDefinition("createTime", DataTypes.INT_TYPE.getName(),
Multiplicity.OPTIONAL, false, null),
new AttributeDefinition("lastAccessTime", DataTypes.INT_TYPE.getName(),
Multiplicity.OPTIONAL, false, null),
new AttributeDefinition("origTableName", HiveDataTypes.HIVE_TABLE.name(),
new AttributeDefinition("origTableName", HiveDataTypes.HIVE_TABLE.getName(),
Multiplicity.REQUIRED, false, null),
new AttributeDefinition("indexTableName", HiveDataTypes.HIVE_TABLE.name(),
new AttributeDefinition("indexTableName", HiveDataTypes.HIVE_TABLE.getName(),
Multiplicity.OPTIONAL, false, null),
new AttributeDefinition("sd", HiveDataTypes.HIVE_STORAGEDESC.name(),
new AttributeDefinition("sd", HiveDataTypes.HIVE_STORAGEDESC.getName(),
Multiplicity.REQUIRED, false, null),
new AttributeDefinition("parameters", STRING_MAP_TYPE.getName(),
Multiplicity.OPTIONAL, false, null),
......@@ -429,37 +427,36 @@ public class HiveDataModelGenerator {
};
HierarchicalTypeDefinition<ClassType> definition =
new HierarchicalTypeDefinition<>(ClassType.class, HiveDataTypes.HIVE_INDEX.name(),
new HierarchicalTypeDefinition<>(ClassType.class, HiveDataTypes.HIVE_INDEX.getName(),
null, attributeDefinitions);
classTypeDefinitions.put(HiveDataTypes.HIVE_INDEX.name(), definition);
LOG.debug("Created definition for " + HiveDataTypes.HIVE_INDEX.name());
classTypeDefinitions.put(HiveDataTypes.HIVE_INDEX.getName(), definition);
LOG.debug("Created definition for " + HiveDataTypes.HIVE_INDEX.getName());
}
private void createFunctionClass() throws MetadataException {
AttributeDefinition[] attributeDefinitions = new AttributeDefinition[]{
new AttributeDefinition("functionName", DataTypes.STRING_TYPE.getName(),
Multiplicity.REQUIRED, false, null),
new AttributeDefinition("dbName", HiveDataTypes.HIVE_DB.name(),
new AttributeDefinition("dbName", HiveDataTypes.HIVE_DB.getName(),
Multiplicity.REQUIRED, false, null),
new AttributeDefinition("className", DataTypes.INT_TYPE.getName(),
Multiplicity.OPTIONAL, false, null),
new AttributeDefinition("ownerName", DataTypes.INT_TYPE.getName(),
Multiplicity.OPTIONAL, false, null),
new AttributeDefinition("ownerType", HiveDataTypes.HIVE_PRINCIPALTYPE.name(),
new AttributeDefinition("ownerType", HiveDataTypes.HIVE_PRINCIPAL_TYPE.getName(),
Multiplicity.REQUIRED, false, null),
new AttributeDefinition("createTime", DataTypes.INT_TYPE.getName(),
Multiplicity.REQUIRED, false, null),
new AttributeDefinition("functionType", HiveDataTypes.HIVE_FUNCTIONTYPE.name(),
new AttributeDefinition("functionType", HiveDataTypes.HIVE_FUNCTION_TYPE.getName(),
Multiplicity.REQUIRED, false, null),
new AttributeDefinition("resourceUris", HiveDataTypes.HIVE_RESOURCEURI.name(),
new AttributeDefinition("resourceUris", HiveDataTypes.HIVE_RESOURCEURI.getName(),
Multiplicity.COLLECTION, false, null),
};
HierarchicalTypeDefinition<ClassType> definition =
new HierarchicalTypeDefinition<>(ClassType.class, HiveDataTypes.HIVE_FUNCTION.name(),
null, attributeDefinitions);
classTypeDefinitions.put(HiveDataTypes.HIVE_FUNCTION.name(), definition);
LOG.debug("Created definition for " + HiveDataTypes.HIVE_FUNCTION.name());
HierarchicalTypeDefinition<ClassType> definition = new HierarchicalTypeDefinition<>(
ClassType.class, HiveDataTypes.HIVE_FUNCTION.getName(), null, attributeDefinitions);
classTypeDefinitions.put(HiveDataTypes.HIVE_FUNCTION.getName(), definition);
LOG.debug("Created definition for " + HiveDataTypes.HIVE_FUNCTION.getName());
}
private void createRoleClass() throws MetadataException {
......@@ -471,12 +468,11 @@ public class HiveDataModelGenerator {
new AttributeDefinition("ownerName", DataTypes.STRING_TYPE.getName(),
Multiplicity.REQUIRED, false, null),
};
HierarchicalTypeDefinition<ClassType> definition =
new HierarchicalTypeDefinition<>(ClassType.class, HiveDataTypes.HIVE_ROLE.name(),
null, attributeDefinitions);
HierarchicalTypeDefinition<ClassType> definition = new HierarchicalTypeDefinition<>(
ClassType.class, HiveDataTypes.HIVE_ROLE.getName(), null, attributeDefinitions);
classTypeDefinitions.put(HiveDataTypes.HIVE_ROLE.name(), definition);
LOG.debug("Created definition for " + HiveDataTypes.HIVE_ROLE.name());
classTypeDefinitions.put(HiveDataTypes.HIVE_ROLE.getName(), definition);
LOG.debug("Created definition for " + HiveDataTypes.HIVE_ROLE.getName());
}
private void createProcessClass() throws MetadataException {
......@@ -490,10 +486,10 @@ public class HiveDataModelGenerator {
new AttributeDefinition("userName", DataTypes.STRING_TYPE.getName(),
Multiplicity.REQUIRED, false, null),
new AttributeDefinition("sourceTableNames",
String.format("array<%s>", HiveDataTypes.HIVE_TABLE.name()),
String.format("array<%s>", HiveDataTypes.HIVE_TABLE.getName()),
Multiplicity.OPTIONAL, false, null),
new AttributeDefinition("targetTableNames",
String.format("array<%s>", HiveDataTypes.HIVE_TABLE.name()),
String.format("array<%s>", HiveDataTypes.HIVE_TABLE.getName()),
Multiplicity.OPTIONAL, false, null),
new AttributeDefinition("queryText", DataTypes.STRING_TYPE.getName(),
Multiplicity.REQUIRED, false, null),
......@@ -506,9 +502,9 @@ public class HiveDataModelGenerator {
};
HierarchicalTypeDefinition<ClassType> definition = new HierarchicalTypeDefinition<>(
ClassType.class, HiveDataTypes.HIVE_PROCESS.name(), null, attributeDefinitions);
classTypeDefinitions.put(HiveDataTypes.HIVE_PROCESS.name(), definition);
LOG.debug("Created definition for " + HiveDataTypes.HIVE_PROCESS.name());
ClassType.class, HiveDataTypes.HIVE_PROCESS.getName(), null, attributeDefinitions);
classTypeDefinitions.put(HiveDataTypes.HIVE_PROCESS.getName(), definition);
LOG.debug("Created definition for " + HiveDataTypes.HIVE_PROCESS.getName());
}
public static void main(String[] args) throws Exception {
......
......@@ -24,10 +24,10 @@ package org.apache.hadoop.metadata.hive.model;
public enum HiveDataTypes {
// Enums
HIVE_OBJECTTYPE,
HIVE_PRINCIPALTYPE,
HIVE_RESOURCETYPE,
HIVE_FUNCTIONTYPE,
HIVE_OBJECT_TYPE,
HIVE_PRINCIPAL_TYPE,
HIVE_RESOURCE_TYPE,
HIVE_FUNCTION_TYPE,
// Structs
HIVE_SERDE,
......@@ -47,4 +47,9 @@ public enum HiveDataTypes {
HIVE_TYPE,
HIVE_PROCESS,
// HIVE_VIEW,
;
public String getName() {
return name().toLowerCase();
}
}
......@@ -46,6 +46,10 @@ import org.slf4j.LoggerFactory;
import java.util.ArrayList;
import java.util.List;
/**
* todo - this needs to be removed.
*/
@Deprecated
public class HiveImporter {
private static final Logger LOG =
......@@ -158,7 +162,7 @@ public class HiveImporter {
LOG.debug("creating instance of type " + typeName + " dataType " + dataType);
ITypedReferenceableInstance instance =
(ITypedReferenceableInstance) dataType.convert(ref, Multiplicity.OPTIONAL);
String guid = graphRepository.createEntity(instance, typeName);
String guid = graphRepository.createEntity(instance);
System.out.println("creating instance of type " + typeName + " dataType " + dataType
+ ", guid: " + guid);
......
{
"enumTypes":[
{
"name":"HIVE_FUNCTIONTYPE",
"enumValues":[
{
"value":"JAVA",
"ordinal":1
}
]
},
{
"name":"HIVE_PRINCIPALTYPE",
"enumValues":[
{
"value":"USER",
"ordinal":1
},
{
"value":"ROLE",
"ordinal":2
},
{
"value":"GROUP",
"ordinal":3
}
]
},
{
"name":"HIVE_OBJECTTYPE",
"name":"hive_object_type",
"enumValues":[
{
"value":"GLOBAL",
......@@ -52,7 +26,7 @@
]
},
{
"name":"HIVE_RESOURCETYPE",
"name":"hive_resource_type",
"enumValues":[
{
"value":"JAR",
......@@ -67,15 +41,41 @@
"ordinal":3
}
]
},
{
"name":"hive_principal_type",
"enumValues":[
{
"value":"USER",
"ordinal":1
},
{
"value":"ROLE",
"ordinal":2
},
{
"value":"GROUP",
"ordinal":3
}
]
},
{
"name":"hive_function_type",
"enumValues":[
{
"value":"JAVA",
"ordinal":1
}
]
}
],
"structTypes":[
{
"typeName":"HIVE_RESOURCEURI",
"typeName":"hive_order",
"attributeDefinitions":[
{
"name":"resourceType",
"dataTypeName":"HIVE_RESOURCETYPE",
"name":"col",
"dataTypeName":"string",
"multiplicity":"required",
"isComposite":false,
"isUnique":false,
......@@ -83,8 +83,8 @@
"reverseAttributeName":null
},
{
"name":"uri",
"dataTypeName":"string",
"name":"order",
"dataTypeName":"int",
"multiplicity":"required",
"isComposite":false,
"isUnique":false,
......@@ -94,11 +94,11 @@
]
},
{
"typeName":"HIVE_ORDER",
"typeName":"hive_resourceuri",
"attributeDefinitions":[
{
"name":"col",
"dataTypeName":"string",
"name":"resourceType",
"dataTypeName":"hive_resource_type",
"multiplicity":"required",
"isComposite":false,
"isUnique":false,
......@@ -106,8 +106,8 @@
"reverseAttributeName":null
},
{
"name":"order",
"dataTypeName":"int",
"name":"uri",
"dataTypeName":"string",
"multiplicity":"required",
"isComposite":false,
"isUnique":false,
......@@ -117,7 +117,7 @@
]
},
{
"typeName":"HIVE_SERDE",
"typeName":"hive_serde",
"attributeDefinitions":[
{
"name":"name",
......@@ -158,10 +158,10 @@
],
"hierarchicalMetaTypeName":"org.apache.hadoop.metadata.typesystem.types.ClassType",
"typeName":"HIVE_FUNCTION",
"typeName":"hive_process",
"attributeDefinitions":[
{
"name":"functionName",
"name":"processName",
"dataTypeName":"string",
"multiplicity":"required",
"isComposite":false,
......@@ -170,8 +170,8 @@
"reverseAttributeName":null
},
{
"name":"dbName",
"dataTypeName":"HIVE_DB",
"name":"startTime",
"dataTypeName":"int",
"multiplicity":"required",
"isComposite":false,
"isUnique":false,
......@@ -179,8 +179,26 @@
"reverseAttributeName":null
},
{
"name":"className",
"name":"endTime",
"dataTypeName":"int",
"multiplicity":"required",
"isComposite":false,
"isUnique":false,
"isIndexable":true,
"reverseAttributeName":null
},
{
"name":"userName",
"dataTypeName":"string",
"multiplicity":"required",
"isComposite":false,
"isUnique":false,
"isIndexable":true,
"reverseAttributeName":null
},
{
"name":"sourceTableNames",
"dataTypeName":"array<hive_table>",
"multiplicity":"optional",
"isComposite":false,
"isUnique":false,
......@@ -188,8 +206,8 @@
"reverseAttributeName":null
},
{
"name":"ownerName",
"dataTypeName":"int",
"name":"targetTableNames",
"dataTypeName":"array<hive_table>",
"multiplicity":"optional",
"isComposite":false,
"isUnique":false,
......@@ -197,8 +215,8 @@
"reverseAttributeName":null
},
{
"name":"ownerType",
"dataTypeName":"HIVE_PRINCIPALTYPE",
"name":"queryText",
"dataTypeName":"string",
"multiplicity":"required",
"isComposite":false,
"isUnique":false,
......@@ -206,8 +224,8 @@
"reverseAttributeName":null
},
{
"name":"createTime",
"dataTypeName":"int",
"name":"queryPlan",
"dataTypeName":"string",
"multiplicity":"required",
"isComposite":false,
"isUnique":false,
......@@ -215,8 +233,8 @@
"reverseAttributeName":null
},
{
"name":"functionType",
"dataTypeName":"HIVE_FUNCTIONTYPE",
"name":"queryId",
"dataTypeName":"string",
"multiplicity":"required",
"isComposite":false,
"isUnique":false,
......@@ -224,9 +242,9 @@
"reverseAttributeName":null
},
{
"name":"resourceUris",
"dataTypeName":"HIVE_RESOURCEURI",
"multiplicity":"collection",
"name":"queryGraph",
"dataTypeName":"string",
"multiplicity":"optional",
"isComposite":false,
"isUnique":false,
"isIndexable":true,
......@@ -239,10 +257,10 @@
],
"hierarchicalMetaTypeName":"org.apache.hadoop.metadata.typesystem.types.ClassType",
"typeName":"HIVE_PROCESS",
"typeName":"hive_function",
"attributeDefinitions":[
{
"name":"processName",
"name":"functionName",
"dataTypeName":"string",
"multiplicity":"required",
"isComposite":false,
......@@ -251,8 +269,8 @@
"reverseAttributeName":null
},
{
"name":"startTime",
"dataTypeName":"int",
"name":"dbName",
"dataTypeName":"hive_db",
"multiplicity":"required",
"isComposite":false,
"isUnique":false,
......@@ -260,26 +278,8 @@
"reverseAttributeName":null
},
{
"name":"endTime",
"name":"className",
"dataTypeName":"int",
"multiplicity":"required",
"isComposite":false,
"isUnique":false,
"isIndexable":true,
"reverseAttributeName":null
},
{
"name":"userName",
"dataTypeName":"string",
"multiplicity":"required",
"isComposite":false,
"isUnique":false,
"isIndexable":true,
"reverseAttributeName":null
},
{
"name":"sourceTableNames",
"dataTypeName":"array<HIVE_TABLE>",
"multiplicity":"optional",
"isComposite":false,
"isUnique":false,
......@@ -287,8 +287,8 @@
"reverseAttributeName":null
},
{
"name":"targetTableNames",
"dataTypeName":"array<HIVE_TABLE>",
"name":"ownerName",
"dataTypeName":"int",
"multiplicity":"optional",
"isComposite":false,
"isUnique":false,
......@@ -296,8 +296,8 @@
"reverseAttributeName":null
},
{
"name":"queryText",
"dataTypeName":"string",
"name":"ownerType",
"dataTypeName":"hive_principal_type",
"multiplicity":"required",
"isComposite":false,
"isUnique":false,
......@@ -305,8 +305,8 @@
"reverseAttributeName":null
},
{
"name":"queryPlan",
"dataTypeName":"string",
"name":"createTime",
"dataTypeName":"int",
"multiplicity":"required",
"isComposite":false,
"isUnique":false,
......@@ -314,8 +314,8 @@
"reverseAttributeName":null
},
{
"name":"queryId",
"dataTypeName":"string",
"name":"functionType",
"dataTypeName":"hive_function_type",
"multiplicity":"required",
"isComposite":false,
"isUnique":false,
......@@ -323,9 +323,9 @@
"reverseAttributeName":null
},
{
"name":"queryGraph",
"dataTypeName":"string",
"multiplicity":"optional",
"name":"resourceUris",
"dataTypeName":"hive_resourceuri",
"multiplicity":"collection",
"isComposite":false,
"isUnique":false,
"isIndexable":true,
......@@ -338,20 +338,11 @@
],
"hierarchicalMetaTypeName":"org.apache.hadoop.metadata.typesystem.types.ClassType",
"typeName":"HIVE_PARTITION",
"typeName":"hive_type",
"attributeDefinitions":[
{
"name":"values",
"name":"name",
"dataTypeName":"string",
"multiplicity":"collection",
"isComposite":false,
"isUnique":false,
"isIndexable":true,
"reverseAttributeName":null
},
{
"name":"dbName",
"dataTypeName":"HIVE_DB",
"multiplicity":"required",
"isComposite":false,
"isUnique":false,
......@@ -359,17 +350,8 @@
"reverseAttributeName":null
},
{
"name":"tableName",
"dataTypeName":"HIVE_TABLE",
"multiplicity":"required",
"isComposite":false,
"isUnique":false,
"isIndexable":true,
"reverseAttributeName":null
},
{
"name":"createTime",
"dataTypeName":"int",
"name":"type1",
"dataTypeName":"string",
"multiplicity":"optional",
"isComposite":false,
"isUnique":false,
......@@ -377,8 +359,8 @@
"reverseAttributeName":null
},
{
"name":"lastAccessTime",
"dataTypeName":"int",
"name":"type2",
"dataTypeName":"string",
"multiplicity":"optional",
"isComposite":false,
"isUnique":false,
......@@ -386,17 +368,8 @@
"reverseAttributeName":null
},
{
"name":"sd",
"dataTypeName":"HIVE_STORAGEDESC",
"multiplicity":"required",
"isComposite":false,
"isUnique":false,
"isIndexable":true,
"reverseAttributeName":null
},
{
"name":"parameters",
"dataTypeName":"map<string,string>",
"name":"fields",
"dataTypeName":"array<hive_column>",
"multiplicity":"optional",
"isComposite":false,
"isUnique":false,
......@@ -410,37 +383,28 @@
],
"hierarchicalMetaTypeName":"org.apache.hadoop.metadata.typesystem.types.ClassType",
"typeName":"HIVE_STORAGEDESC",
"typeName":"hive_table",
"attributeDefinitions":[
{
"name":"cols",
"dataTypeName":"array<HIVE_COLUMN>",
"multiplicity":"collection",
"isComposite":false,
"isUnique":false,
"isIndexable":true,
"reverseAttributeName":null
},
{
"name":"location",
"name":"tableName",
"dataTypeName":"string",
"multiplicity":"optional",
"multiplicity":"required",
"isComposite":false,
"isUnique":false,
"isIndexable":true,
"reverseAttributeName":null
},
{
"name":"inputFormat",
"dataTypeName":"string",
"multiplicity":"optional",
"name":"dbName",
"dataTypeName":"hive_db",
"multiplicity":"required",
"isComposite":false,
"isUnique":false,
"isIndexable":true,
"reverseAttributeName":null
},
{
"name":"outputFormat",
"name":"owner",
"dataTypeName":"string",
"multiplicity":"optional",
"isComposite":false,
......@@ -449,16 +413,16 @@
"reverseAttributeName":null
},
{
"name":"compressed",
"dataTypeName":"boolean",
"multiplicity":"required",
"name":"createTime",
"dataTypeName":"int",
"multiplicity":"optional",
"isComposite":false,
"isUnique":false,
"isIndexable":true,
"reverseAttributeName":null
},
{
"name":"numBuckets",
"name":"lastAccessTime",
"dataTypeName":"int",
"multiplicity":"optional",
"isComposite":false,
......@@ -467,8 +431,8 @@
"reverseAttributeName":null
},
{
"name":"serdeInfo",
"dataTypeName":"HIVE_SERDE",
"name":"retention",
"dataTypeName":"int",
"multiplicity":"optional",
"isComposite":false,
"isUnique":false,
......@@ -476,8 +440,8 @@
"reverseAttributeName":null
},
{
"name":"bucketCols",
"dataTypeName":"array<string>",
"name":"sd",
"dataTypeName":"hive_storagedesc",
"multiplicity":"optional",
"isComposite":false,
"isUnique":false,
......@@ -485,8 +449,8 @@
"reverseAttributeName":null
},
{
"name":"sortCols",
"dataTypeName":"array<HIVE_ORDER>",
"name":"partitionKeys",
"dataTypeName":"array<hive_column>",
"multiplicity":"optional",
"isComposite":false,
"isUnique":false,
......@@ -503,34 +467,16 @@
"reverseAttributeName":null
},
{
"name":"storedAsSubDirectories",
"dataTypeName":"boolean",
"multiplicity":"optional",
"isComposite":false,
"isUnique":false,
"isIndexable":true,
"reverseAttributeName":null
}
]
},
{
"superTypes":[
],
"hierarchicalMetaTypeName":"org.apache.hadoop.metadata.typesystem.types.ClassType",
"typeName":"HIVE_TYPE",
"attributeDefinitions":[
{
"name":"name",
"name":"viewOriginalText",
"dataTypeName":"string",
"multiplicity":"required",
"multiplicity":"optional",
"isComposite":false,
"isUnique":false,
"isIndexable":true,
"reverseAttributeName":null
},
{
"name":"type1",
"name":"viewExpandedText",
"dataTypeName":"string",
"multiplicity":"optional",
"isComposite":false,
......@@ -539,7 +485,7 @@
"reverseAttributeName":null
},
{
"name":"type2",
"name":"tableType",
"dataTypeName":"string",
"multiplicity":"optional",
"isComposite":false,
......@@ -548,8 +494,8 @@
"reverseAttributeName":null
},
{
"name":"fields",
"dataTypeName":"array<HIVE_COLUMN>",
"name":"temporary",
"dataTypeName":"boolean",
"multiplicity":"optional",
"isComposite":false,
"isUnique":false,
......@@ -563,11 +509,20 @@
],
"hierarchicalMetaTypeName":"org.apache.hadoop.metadata.typesystem.types.ClassType",
"typeName":"HIVE_COLUMN",
"typeName":"hive_partition",
"attributeDefinitions":[
{
"name":"name",
"name":"values",
"dataTypeName":"string",
"multiplicity":"collection",
"isComposite":false,
"isUnique":false,
"isIndexable":true,
"reverseAttributeName":null
},
{
"name":"dbName",
"dataTypeName":"hive_db",
"multiplicity":"required",
"isComposite":false,
"isUnique":false,
......@@ -575,8 +530,8 @@
"reverseAttributeName":null
},
{
"name":"type",
"dataTypeName":"string",
"name":"tableName",
"dataTypeName":"hive_table",
"multiplicity":"required",
"isComposite":false,
"isUnique":false,
......@@ -584,35 +539,26 @@
"reverseAttributeName":null
},
{
"name":"comment",
"dataTypeName":"string",
"name":"createTime",
"dataTypeName":"int",
"multiplicity":"optional",
"isComposite":false,
"isUnique":false,
"isIndexable":true,
"reverseAttributeName":null
}
]
},
{
"superTypes":[
],
"hierarchicalMetaTypeName":"org.apache.hadoop.metadata.typesystem.types.ClassType",
"typeName":"HIVE_ROLE",
"attributeDefinitions":[
},
{
"name":"roleName",
"dataTypeName":"string",
"multiplicity":"required",
"name":"lastAccessTime",
"dataTypeName":"int",
"multiplicity":"optional",
"isComposite":false,
"isUnique":false,
"isIndexable":true,
"reverseAttributeName":null
},
{
"name":"createTime",
"dataTypeName":"int",
"name":"sd",
"dataTypeName":"hive_storagedesc",
"multiplicity":"required",
"isComposite":false,
"isUnique":false,
......@@ -620,9 +566,9 @@
"reverseAttributeName":null
},
{
"name":"ownerName",
"dataTypeName":"string",
"multiplicity":"required",
"name":"parameters",
"dataTypeName":"map<string,string>",
"multiplicity":"optional",
"isComposite":false,
"isUnique":false,
"isIndexable":true,
......@@ -635,28 +581,19 @@
],
"hierarchicalMetaTypeName":"org.apache.hadoop.metadata.typesystem.types.ClassType",
"typeName":"HIVE_TABLE",
"typeName":"hive_storagedesc",
"attributeDefinitions":[
{
"name":"tableName",
"dataTypeName":"string",
"multiplicity":"required",
"isComposite":false,
"isUnique":false,
"isIndexable":true,
"reverseAttributeName":null
},
{
"name":"dbName",
"dataTypeName":"HIVE_DB",
"multiplicity":"required",
"name":"cols",
"dataTypeName":"array<hive_column>",
"multiplicity":"collection",
"isComposite":false,
"isUnique":false,
"isIndexable":true,
"reverseAttributeName":null
},
{
"name":"owner",
"name":"location",
"dataTypeName":"string",
"multiplicity":"optional",
"isComposite":false,
......@@ -665,17 +602,8 @@
"reverseAttributeName":null
},
{
"name":"createTime",
"dataTypeName":"int",
"multiplicity":"optional",
"isComposite":false,
"isUnique":false,
"isIndexable":true,
"reverseAttributeName":null
},
{
"name":"lastAccessTime",
"dataTypeName":"int",
"name":"inputFormat",
"dataTypeName":"string",
"multiplicity":"optional",
"isComposite":false,
"isUnique":false,
......@@ -683,8 +611,8 @@
"reverseAttributeName":null
},
{
"name":"retention",
"dataTypeName":"int",
"name":"outputFormat",
"dataTypeName":"string",
"multiplicity":"optional",
"isComposite":false,
"isUnique":false,
......@@ -692,17 +620,17 @@
"reverseAttributeName":null
},
{
"name":"sd",
"dataTypeName":"HIVE_STORAGEDESC",
"multiplicity":"optional",
"name":"compressed",
"dataTypeName":"boolean",
"multiplicity":"required",
"isComposite":false,
"isUnique":false,
"isIndexable":true,
"reverseAttributeName":null
},
{
"name":"partitionKeys",
"dataTypeName":"array<HIVE_COLUMN>",
"name":"numBuckets",
"dataTypeName":"int",
"multiplicity":"optional",
"isComposite":false,
"isUnique":false,
......@@ -710,8 +638,8 @@
"reverseAttributeName":null
},
{
"name":"parameters",
"dataTypeName":"map<string,string>",
"name":"serdeInfo",
"dataTypeName":"hive_serde",
"multiplicity":"optional",
"isComposite":false,
"isUnique":false,
......@@ -719,8 +647,8 @@
"reverseAttributeName":null
},
{
"name":"viewOriginalText",
"dataTypeName":"string",
"name":"bucketCols",
"dataTypeName":"array<string>",
"multiplicity":"optional",
"isComposite":false,
"isUnique":false,
......@@ -728,8 +656,8 @@
"reverseAttributeName":null
},
{
"name":"viewExpandedText",
"dataTypeName":"string",
"name":"sortCols",
"dataTypeName":"array<hive_order>",
"multiplicity":"optional",
"isComposite":false,
"isUnique":false,
......@@ -737,8 +665,8 @@
"reverseAttributeName":null
},
{
"name":"tableType",
"dataTypeName":"string",
"name":"parameters",
"dataTypeName":"map<string,string>",
"multiplicity":"optional",
"isComposite":false,
"isUnique":false,
......@@ -746,7 +674,7 @@
"reverseAttributeName":null
},
{
"name":"temporary",
"name":"storedAsSubDirectories",
"dataTypeName":"boolean",
"multiplicity":"optional",
"isComposite":false,
......@@ -761,7 +689,7 @@
],
"hierarchicalMetaTypeName":"org.apache.hadoop.metadata.typesystem.types.ClassType",
"typeName":"HIVE_INDEX",
"typeName":"hive_index",
"attributeDefinitions":[
{
"name":"indexName",
......@@ -783,7 +711,7 @@
},
{
"name":"dbName",
"dataTypeName":"HIVE_DB",
"dataTypeName":"hive_db",
"multiplicity":"required",
"isComposite":false,
"isUnique":false,
......@@ -810,7 +738,7 @@
},
{
"name":"origTableName",
"dataTypeName":"HIVE_TABLE",
"dataTypeName":"hive_table",
"multiplicity":"required",
"isComposite":false,
"isUnique":false,
......@@ -819,7 +747,7 @@
},
{
"name":"indexTableName",
"dataTypeName":"HIVE_TABLE",
"dataTypeName":"hive_table",
"multiplicity":"optional",
"isComposite":false,
"isUnique":false,
......@@ -828,7 +756,7 @@
},
{
"name":"sd",
"dataTypeName":"HIVE_STORAGEDESC",
"dataTypeName":"hive_storagedesc",
"multiplicity":"required",
"isComposite":false,
"isUnique":false,
......@@ -860,7 +788,43 @@
],
"hierarchicalMetaTypeName":"org.apache.hadoop.metadata.typesystem.types.ClassType",
"typeName":"HIVE_DB",
"typeName":"hive_role",
"attributeDefinitions":[
{
"name":"roleName",
"dataTypeName":"string",
"multiplicity":"required",
"isComposite":false,
"isUnique":false,
"isIndexable":true,
"reverseAttributeName":null
},
{
"name":"createTime",
"dataTypeName":"int",
"multiplicity":"required",
"isComposite":false,
"isUnique":false,
"isIndexable":true,
"reverseAttributeName":null
},
{
"name":"ownerName",
"dataTypeName":"string",
"multiplicity":"required",
"isComposite":false,
"isUnique":false,
"isIndexable":true,
"reverseAttributeName":null
}
]
},
{
"superTypes":[
],
"hierarchicalMetaTypeName":"org.apache.hadoop.metadata.typesystem.types.ClassType",
"typeName":"hive_db",
"attributeDefinitions":[
{
"name":"name",
......@@ -909,7 +873,43 @@
},
{
"name":"ownerType",
"dataTypeName":"HIVE_PRINCIPALTYPE",
"dataTypeName":"hive_principal_type",
"multiplicity":"optional",
"isComposite":false,
"isUnique":false,
"isIndexable":true,
"reverseAttributeName":null
}
]
},
{
"superTypes":[
],
"hierarchicalMetaTypeName":"org.apache.hadoop.metadata.typesystem.types.ClassType",
"typeName":"hive_column",
"attributeDefinitions":[
{
"name":"name",
"dataTypeName":"string",
"multiplicity":"required",
"isComposite":false,
"isUnique":false,
"isIndexable":true,
"reverseAttributeName":null
},
{
"name":"type",
"dataTypeName":"string",
"multiplicity":"required",
"isComposite":false,
"isUnique":false,
"isIndexable":true,
"reverseAttributeName":null
},
{
"name":"comment",
"dataTypeName":"string",
"multiplicity":"optional",
"isComposite":false,
"isUnique":false,
......@@ -919,4 +919,4 @@
]
}
]
}
\ No newline at end of file
}
Markdown is supported
0% or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or to comment