dataplatform / atlas · commit a86f0f3e

Commit a86f0f3e authored Mar 23, 2015 by Venkatesh Seetharam
Parent: e747e2ae

    Add Hive Bridge with major refactoring

Showing 6 changed files with 700 additions and 347 deletions (+700 −347)
  addons/hive-bridge/pom.xml                                                                           +12   −5
  addons/hive-bridge/src/main/java/org/apache/hadoop/metadata/hive/bridge/HiveMetaStoreBridge.java    +342   −0
  addons/hive-bridge/src/main/java/org/apache/hadoop/metadata/hive/model/HiveDataModelGenerator.java   +85  −89
  addons/hive-bridge/src/main/java/org/apache/hadoop/metadata/hive/model/HiveDataTypes.java             +9   −4
  addons/hive-bridge/src/main/java/org/apache/hadoop/metadata/hivetypes/HiveImporter.java               +5   −1
  src/main/examples/hive-data-model.json                                                              +247 −248
addons/hive-bridge/pom.xml

@@ -34,10 +34,22 @@
     <properties>
         <hive.version>0.14.0</hive.version>
+        <hadoop.version>2.5.0</hadoop.version>
     </properties>

     <dependencies>
+        <dependency>
+            <groupId>org.apache.hadoop.metadata</groupId>
+            <artifactId>metadata-client</artifactId>
+        </dependency>
+
+        <dependency>
+            <groupId>org.apache.hadoop</groupId>
+            <artifactId>hadoop-client</artifactId>
+            <version>${hadoop.version}</version>
+        </dependency>
+
         <dependency>
             <groupId>org.apache.hive</groupId>
             <artifactId>hive-metastore</artifactId>
             <version>${hive.version}</version>

@@ -96,11 +108,6 @@
         </dependency>
-        <dependency>
-            <groupId>org.apache.hadoop</groupId>
-            <artifactId>hadoop-client</artifactId>
-        </dependency>
-
         <dependency>
             <groupId>log4j</groupId>
             <artifactId>log4j</artifactId>
         </dependency>
addons/hive-bridge/src/main/java/org/apache/hadoop/metadata/hive/bridge/HiveMetaStoreBridge.java (new file, 0 → 100644)
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.metadata.hive.bridge;

import org.apache.hadoop.hive.conf.HiveConf;
import org.apache.hadoop.hive.metastore.HiveMetaStoreClient;
import org.apache.hadoop.hive.metastore.api.Database;
import org.apache.hadoop.hive.metastore.api.FieldSchema;
import org.apache.hadoop.hive.metastore.api.Index;
import org.apache.hadoop.hive.metastore.api.Order;
import org.apache.hadoop.hive.metastore.api.Partition;
import org.apache.hadoop.hive.metastore.api.SerDeInfo;
import org.apache.hadoop.hive.metastore.api.StorageDescriptor;
import org.apache.hadoop.hive.metastore.api.Table;
import org.apache.hadoop.metadata.MetadataServiceClient;
import org.apache.hadoop.metadata.hive.model.HiveDataTypes;
import org.apache.hadoop.metadata.typesystem.Referenceable;
import org.apache.hadoop.metadata.typesystem.Struct;
import org.apache.hadoop.metadata.typesystem.json.InstanceSerialization;
import org.codehaus.jettison.json.JSONObject;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import java.util.ArrayList;
import java.util.List;

/**
 * A Bridge Utility that imports metadata from the Hive Meta Store
 * and registers them in DGI.
 */
public class HiveMetaStoreBridge {

    private static final Logger LOG = LoggerFactory.getLogger(HiveMetaStoreBridge.class);

    private final HiveMetaStoreClient hiveMetaStoreClient;
    private final MetadataServiceClient metadataServiceClient;

    /**
     * Construct a HiveMetaStoreBridge.
     * @param baseUrl metadata service url
     */
    public HiveMetaStoreBridge(String baseUrl) throws Exception {
        hiveMetaStoreClient = createHiveMetaStoreClient();
        metadataServiceClient = new MetadataServiceClient(baseUrl);
    }

    private HiveMetaStoreClient createHiveMetaStoreClient() throws Exception {
        HiveConf conf = new HiveConf();
        return new HiveMetaStoreClient(conf);
    }

    public void importHiveMetadata() throws Exception {
        LOG.info("Importing hive metadata");
        importDatabases();
    }

    private void importDatabases() throws Exception {
        List<String> databases = hiveMetaStoreClient.getAllDatabases();
        for (String databaseName : databases) {
            importDatabase(databaseName);
        }
    }

    private void importDatabase(String databaseName) throws Exception {
        LOG.info("Importing objects from databaseName : " + databaseName);

        Database hiveDB = hiveMetaStoreClient.getDatabase(databaseName);

        Referenceable dbRef = new Referenceable(HiveDataTypes.HIVE_DB.name());
        dbRef.set("name", hiveDB.getName());
        dbRef.set("description", hiveDB.getDescription());
        dbRef.set("locationUri", hiveDB.getLocationUri());
        dbRef.set("parameters", hiveDB.getParameters());
        dbRef.set("ownerName", hiveDB.getOwnerName());
        dbRef.set("ownerType", hiveDB.getOwnerType().getValue());

        Referenceable databaseReferenceable = createInstance(dbRef);

        importTables(databaseName, databaseReferenceable);
    }

    private Referenceable createInstance(Referenceable referenceable) throws Exception {
        String typeName = referenceable.getTypeName();
        LOG.debug("creating instance of type " + typeName);

        String entityJSON = InstanceSerialization.toJson(referenceable, true);
        LOG.debug("Submitting new entity= " + entityJSON);
        JSONObject jsonObject = metadataServiceClient.createEntity(entityJSON);
        String guid = jsonObject.getString(MetadataServiceClient.RESULTS);
        LOG.debug("created instance for type " + typeName + ", guid: " + guid);

        return new Referenceable(guid, referenceable.getTypeName(), referenceable.getValuesMap());
    }

    private void importTables(String databaseName, Referenceable databaseReferenceable) throws Exception {
        List<String> hiveTables = hiveMetaStoreClient.getAllTables(databaseName);

        for (String tableName : hiveTables) {
            importTable(databaseName, tableName, databaseReferenceable);
        }
    }

    private void importTable(String db, String tableName, Referenceable databaseReferenceable) throws Exception {
        LOG.info("Importing objects from " + db + "." + tableName);

        Table hiveTable = hiveMetaStoreClient.getTable(db, tableName);

        Referenceable tableRef = new Referenceable(HiveDataTypes.HIVE_TABLE.name());
        tableRef.set("tableName", hiveTable.getTableName());
        tableRef.set("owner", hiveTable.getOwner());
        tableRef.set("createTime", hiveTable.getCreateTime());
        tableRef.set("lastAccessTime", hiveTable.getLastAccessTime());
        tableRef.set("retention", hiveTable.getRetention());

        // add reference to the database
        tableRef.set("dbName", databaseReferenceable);

        // add reference to the StorageDescriptor
        StorageDescriptor storageDesc = hiveTable.getSd();
        Referenceable sdReferenceable = fillStorageDescStruct(storageDesc);
        tableRef.set("sd", sdReferenceable);

        // add reference to the Partition Keys
        List<Referenceable> partKeys = new ArrayList<>();
        Referenceable colRef;
        if (hiveTable.getPartitionKeysSize() > 0) {
            for (FieldSchema fs : hiveTable.getPartitionKeys()) {
                colRef = new Referenceable(HiveDataTypes.HIVE_COLUMN.name());
                colRef.set("name", fs.getName());
                colRef.set("type", fs.getType());
                colRef.set("comment", fs.getComment());
                Referenceable colRefTyped = createInstance(colRef);
                partKeys.add(colRefTyped);
            }

            tableRef.set("partitionKeys", partKeys);
        }

        tableRef.set("parameters", hiveTable.getParameters());

        if (hiveTable.isSetViewOriginalText()) {
            tableRef.set("viewOriginalText", hiveTable.getViewOriginalText());
        }

        if (hiveTable.isSetViewExpandedText()) {
            tableRef.set("viewExpandedText", hiveTable.getViewExpandedText());
        }

        tableRef.set("tableType", hiveTable.getTableType());
        tableRef.set("temporary", hiveTable.isTemporary());

        Referenceable tableReferenceable = createInstance(tableRef);

        // Import Partitions
        importPartitions(db, tableName, databaseReferenceable, tableReferenceable, sdReferenceable);

        // Import Indexes
        importIndexes(db, tableName, databaseReferenceable, tableRef);
    }

    private void importPartitions(String db, String table,
                                  Referenceable dbReferenceable,
                                  Referenceable tableReferenceable,
                                  Referenceable sdReferenceable) throws Exception {
        List<Partition> tableParts = hiveMetaStoreClient.listPartitions(db, table, Short.MAX_VALUE);

        if (tableParts.size() > 0) {
            for (Partition hivePart : tableParts) {
                importPartition(hivePart, dbReferenceable, tableReferenceable, sdReferenceable);
            }
        }
    }

    private Referenceable importPartition(Partition hivePart,
                                          Referenceable dbReferenceable,
                                          Referenceable tableReferenceable,
                                          Referenceable sdReferenceable) throws Exception {
        Referenceable partRef = new Referenceable(HiveDataTypes.HIVE_PARTITION.name());
        partRef.set("values", hivePart.getValues());
        partRef.set("dbName", dbReferenceable);
        partRef.set("tableName", tableReferenceable);
        partRef.set("createTime", hivePart.getCreateTime());
        partRef.set("lastAccessTime", hivePart.getLastAccessTime());

        // sdStruct = fillStorageDescStruct(hivePart.getSd());
        // Instead of creating copies of the sdstruct for partitions we are reusing existing
        // ones. Will fix to identify partitions with differing schema.
        partRef.set("sd", sdReferenceable);

        partRef.set("parameters", hivePart.getParameters());

        return createInstance(partRef);
    }

    private void importIndexes(String db, String table,
                               Referenceable dbReferenceable,
                               Referenceable tableReferenceable) throws Exception {
        List<Index> indexes = hiveMetaStoreClient.listIndexes(db, table, Short.MAX_VALUE);
        if (indexes.size() > 0) {
            for (Index index : indexes) {
                importIndex(index, dbReferenceable, tableReferenceable);
            }
        }
    }

    private void importIndex(Index index,
                             Referenceable dbReferenceable,
                             Referenceable tableReferenceable) throws Exception {
        Referenceable indexRef = new Referenceable(HiveDataTypes.HIVE_INDEX.name());

        indexRef.set("indexName", index.getIndexName());
        indexRef.set("indexHandlerClass", index.getIndexHandlerClass());
        indexRef.set("dbName", dbReferenceable);
        indexRef.set("createTime", index.getCreateTime());
        indexRef.set("lastAccessTime", index.getLastAccessTime());
        indexRef.set("origTableName", index.getOrigTableName());
        indexRef.set("indexTableName", index.getIndexTableName());

        Referenceable sdReferenceable = fillStorageDescStruct(index.getSd());
        indexRef.set("sd", sdReferenceable);

        indexRef.set("parameters", index.getParameters());

        tableReferenceable.set("deferredRebuild", index.isDeferredRebuild());

        createInstance(indexRef);
    }

    private Referenceable fillStorageDescStruct(StorageDescriptor storageDesc) throws Exception {
        LOG.debug("Filling storage descriptor information for " + storageDesc);

        Referenceable sdReferenceable = new Referenceable(HiveDataTypes.HIVE_STORAGEDESC.name());

        SerDeInfo serdeInfo = storageDesc.getSerdeInfo();
        LOG.debug("serdeInfo = " + serdeInfo);
        // SkewedInfo skewedInfo = storageDesc.getSkewedInfo();

        String serdeInfoName = HiveDataTypes.HIVE_SERDE.name();
        Struct serdeInfoStruct = new Struct(serdeInfoName);

        serdeInfoStruct.set("name", serdeInfo.getName());
        serdeInfoStruct.set("serializationLib", serdeInfo.getSerializationLib());
        serdeInfoStruct.set("parameters", serdeInfo.getParameters());

        sdReferenceable.set("serdeInfo", serdeInfoStruct);

        // Will need to revisit this after we fix typesystem.
        /*
        LOG.info("skewedInfo = " + skewedInfo);
        String skewedInfoName = HiveDataTypes.HIVE_SKEWEDINFO.name();
        Struct skewedInfoStruct = new Struct(skewedInfoName);
        if (skewedInfo.getSkewedColNames().size() > 0) {
            skewedInfoStruct.set("skewedColNames", skewedInfo.getSkewedColNames());
            skewedInfoStruct.set("skewedColValues", skewedInfo.getSkewedColValues());
            skewedInfoStruct.set("skewedColValueLocationMaps",
                    skewedInfo.getSkewedColValueLocationMaps());
            StructType skewedInfotype = (StructType) hiveTypeSystem.getDataType(skewedInfoName);
            ITypedStruct skewedInfoStructTyped =
                    skewedInfotype.convert(skewedInfoStruct, Multiplicity.OPTIONAL);
            sdStruct.set("skewedInfo", skewedInfoStructTyped);
        }
        */

        List<Referenceable> fieldsList = new ArrayList<>();
        Referenceable colReferenceable;
        for (FieldSchema fs : storageDesc.getCols()) {
            LOG.debug("Processing field " + fs);
            colReferenceable = new Referenceable(HiveDataTypes.HIVE_COLUMN.name());
            colReferenceable.set("name", fs.getName());
            colReferenceable.set("type", fs.getType());
            colReferenceable.set("comment", fs.getComment());

            fieldsList.add(createInstance(colReferenceable));
        }
        sdReferenceable.set("cols", fieldsList);

        List<Struct> sortColsStruct = new ArrayList<>();
        for (Order sortcol : storageDesc.getSortCols()) {
            String hiveOrderName = HiveDataTypes.HIVE_ORDER.name();
            Struct colStruct = new Struct(hiveOrderName);
            colStruct.set("col", sortcol.getCol());
            colStruct.set("order", sortcol.getOrder());

            sortColsStruct.add(colStruct);
        }
        if (sortColsStruct.size() > 0) {
            sdReferenceable.set("sortCols", sortColsStruct);
        }

        sdReferenceable.set("location", storageDesc.getLocation());
        sdReferenceable.set("inputFormat", storageDesc.getInputFormat());
        sdReferenceable.set("outputFormat", storageDesc.getOutputFormat());
        sdReferenceable.set("compressed", storageDesc.isCompressed());

        if (storageDesc.getBucketCols().size() > 0) {
            sdReferenceable.set("bucketCols", storageDesc.getBucketCols());
        }

        sdReferenceable.set("parameters", storageDesc.getParameters());
        sdReferenceable.set("storedAsSubDirectories", storageDesc.isStoredAsSubDirectories());

        return createInstance(sdReferenceable);
    }

    static String getServerUrl(String[] args) {
        String baseUrl = "http://localhost:21000";
        if (args.length > 0) {
            baseUrl = args[0];
        }

        return baseUrl;
    }

    public static void main(String[] argv) throws Exception {
        String baseUrl = getServerUrl(argv);
        HiveMetaStoreBridge hiveMetaStoreBridge = new HiveMetaStoreBridge(baseUrl);
        hiveMetaStoreBridge.importHiveMetadata();
    }
}
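For orientation, the new bridge is driven entirely by its main() method: getServerUrl() falls back to http://localhost:21000 when no argument is given, and importHiveMetadata() then walks every database, each database's tables, and each table's partitions and indexes. A minimal sketch of driving it programmatically (the wrapper class is hypothetical; the URL is just the default shown above):

    package org.apache.hadoop.metadata.hive.bridge;

    // Hypothetical driver, equivalent to HiveMetaStoreBridge.main() with an explicit URL.
    // Hive connection settings come from HiveConf on the classpath, exactly as in
    // createHiveMetaStoreClient() above.
    public class HiveBridgeRunner {
        public static void main(String[] args) throws Exception {
            HiveMetaStoreBridge bridge = new HiveMetaStoreBridge("http://localhost:21000");
            bridge.importHiveMetadata(); // databases -> tables -> partitions and indexes
        }
    }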
addons/hive-bridge/src/main/java/org/apache/hadoop/metadata/hive/model/HiveDataModelGenerator.java
@@ -127,9 +127,9 @@ public class HiveDataModelGenerator {
         };
         EnumTypeDefinition definition = new EnumTypeDefinition(
-                HiveDataTypes.HIVE_OBJECTTYPE.name(), values);
-        enumTypeDefinitionMap.put(HiveDataTypes.HIVE_OBJECTTYPE.name(), definition);
-        LOG.debug("Created definition for " + HiveDataTypes.HIVE_OBJECTTYPE.name());
+                HiveDataTypes.HIVE_OBJECT_TYPE.getName(), values);
+        enumTypeDefinitionMap.put(HiveDataTypes.HIVE_OBJECT_TYPE.getName(), definition);
+        LOG.debug("Created definition for " + HiveDataTypes.HIVE_OBJECT_TYPE.getName());
     }

     private void createHivePrincipalTypeEnum() throws MetadataException {

@@ -140,10 +140,10 @@ public class HiveDataModelGenerator {
         };
         EnumTypeDefinition definition = new EnumTypeDefinition(
-                HiveDataTypes.HIVE_PRINCIPALTYPE.name(), values);
-        enumTypeDefinitionMap.put(HiveDataTypes.HIVE_PRINCIPALTYPE.name(), definition);
-        LOG.debug("Created definition for " + HiveDataTypes.HIVE_PRINCIPALTYPE.name());
+                HiveDataTypes.HIVE_PRINCIPAL_TYPE.getName(), values);
+        enumTypeDefinitionMap.put(HiveDataTypes.HIVE_PRINCIPAL_TYPE.getName(), definition);
+        LOG.debug("Created definition for " + HiveDataTypes.HIVE_PRINCIPAL_TYPE.getName());
     }

     private void createFunctionTypeEnum() throws MetadataException {

@@ -152,9 +152,9 @@ public class HiveDataModelGenerator {
         };
         EnumTypeDefinition definition = new EnumTypeDefinition(
-                HiveDataTypes.HIVE_FUNCTIONTYPE.name(), values);
-        enumTypeDefinitionMap.put(HiveDataTypes.HIVE_FUNCTIONTYPE.name(), definition);
-        LOG.debug("Created definition for " + HiveDataTypes.HIVE_FUNCTIONTYPE.name());
+                HiveDataTypes.HIVE_FUNCTION_TYPE.getName(), values);
+        enumTypeDefinitionMap.put(HiveDataTypes.HIVE_FUNCTION_TYPE.getName(), definition);
+        LOG.debug("Created definition for " + HiveDataTypes.HIVE_FUNCTION_TYPE.getName());
     }

     private void createResourceTypeEnum() throws MetadataException {

@@ -164,9 +164,9 @@ public class HiveDataModelGenerator {
                 new EnumValue("ARCHIVE", 3),
         };
         EnumTypeDefinition definition = new EnumTypeDefinition(
-                HiveDataTypes.HIVE_RESOURCETYPE.name(), values);
-        enumTypeDefinitionMap.put(HiveDataTypes.HIVE_RESOURCETYPE.name(), definition);
-        LOG.debug("Created definition for " + HiveDataTypes.HIVE_RESOURCETYPE.name());
+                HiveDataTypes.HIVE_RESOURCE_TYPE.getName(), values);
+        enumTypeDefinitionMap.put(HiveDataTypes.HIVE_RESOURCE_TYPE.getName(), definition);
+        LOG.debug("Created definition for " + HiveDataTypes.HIVE_RESOURCE_TYPE.getName());
     }

     private void createSerDeStruct() throws MetadataException {

@@ -178,10 +178,10 @@ public class HiveDataModelGenerator {
                 new AttributeDefinition("parameters", STRING_MAP_TYPE.getName(),
                         Multiplicity.OPTIONAL, false, null),
         };
-        StructTypeDefinition definition = new StructTypeDefinition(HiveDataTypes.HIVE_SERDE.name(),
-                attributeDefinitions);
-        structTypeDefinitionMap.put(HiveDataTypes.HIVE_SERDE.name(), definition);
-        LOG.debug("Created definition for " + HiveDataTypes.HIVE_SERDE.name());
+        StructTypeDefinition definition = new StructTypeDefinition(HiveDataTypes.HIVE_SERDE.getName(),
+                attributeDefinitions);
+        structTypeDefinitionMap.put(HiveDataTypes.HIVE_SERDE.getName(), definition);
+        LOG.debug("Created definition for " + HiveDataTypes.HIVE_SERDE.getName());
     }

     /*
@@ -200,10 +200,10 @@ public class HiveDataModelGenerator {
                 Multiplicity.OPTIONAL, false, null),
         };
         StructTypeDefinition definition = new StructTypeDefinition(
-                DefinedTypes.HIVE_SKEWEDINFO.name(), attributeDefinitions);
-        structTypeDefinitionMap.put(DefinedTypes.HIVE_SKEWEDINFO.name(), definition);
-        LOG.debug("Created definition for " + DefinedTypes.HIVE_SKEWEDINFO.name());
+                DefinedTypes.HIVE_SKEWEDINFO.getName(), attributeDefinitions);
+        structTypeDefinitionMap.put(DefinedTypes.HIVE_SKEWEDINFO.getName(), definition);
+        LOG.debug("Created definition for " + DefinedTypes.HIVE_SKEWEDINFO.getName());
     }
     */

@@ -216,15 +216,15 @@ public class HiveDataModelGenerator {
         };
         StructTypeDefinition definition = new StructTypeDefinition(
-                HiveDataTypes.HIVE_ORDER.name(), attributeDefinitions);
-        structTypeDefinitionMap.put(HiveDataTypes.HIVE_ORDER.name(), definition);
-        LOG.debug("Created definition for " + HiveDataTypes.HIVE_ORDER.name());
+                HiveDataTypes.HIVE_ORDER.getName(), attributeDefinitions);
+        structTypeDefinitionMap.put(HiveDataTypes.HIVE_ORDER.getName(), definition);
+        LOG.debug("Created definition for " + HiveDataTypes.HIVE_ORDER.getName());
     }

     private void createStorageDescClass() throws MetadataException {
         AttributeDefinition[] attributeDefinitions = new AttributeDefinition[]{
                 new AttributeDefinition("cols",
-                        String.format("array<%s>", HiveDataTypes.HIVE_COLUMN.name()),
+                        String.format("array<%s>", HiveDataTypes.HIVE_COLUMN.getName()),
                         Multiplicity.COLLECTION, false, null),
                 new AttributeDefinition("location", DataTypes.STRING_TYPE.getName(),
                         Multiplicity.OPTIONAL, false, null),

@@ -236,41 +236,41 @@ public class HiveDataModelGenerator {
                         Multiplicity.REQUIRED, false, null),
                 new AttributeDefinition("numBuckets", DataTypes.INT_TYPE.getName(),
                         Multiplicity.OPTIONAL, false, null),
-                new AttributeDefinition("serdeInfo", HiveDataTypes.HIVE_SERDE.name(),
+                new AttributeDefinition("serdeInfo", HiveDataTypes.HIVE_SERDE.getName(),
                         Multiplicity.OPTIONAL, false, null),
                 new AttributeDefinition("bucketCols",
                         String.format("array<%s>", DataTypes.STRING_TYPE.getName()),
                         Multiplicity.OPTIONAL, false, null),
                 new AttributeDefinition("sortCols",
-                        String.format("array<%s>", HiveDataTypes.HIVE_ORDER.name()),
+                        String.format("array<%s>", HiveDataTypes.HIVE_ORDER.getName()),
                         Multiplicity.OPTIONAL, false, null),
                 new AttributeDefinition("parameters", STRING_MAP_TYPE.getName(),
                         Multiplicity.OPTIONAL, false, null),
-                //new AttributeDefinition("skewedInfo", DefinedTypes.HIVE_SKEWEDINFO.name(),
+                //new AttributeDefinition("skewedInfo", DefinedTypes.HIVE_SKEWEDINFO.getName(),
                 //        Multiplicity.OPTIONAL, false, null),
                 new AttributeDefinition("storedAsSubDirectories", DataTypes.BOOLEAN_TYPE.getName(),
                         Multiplicity.OPTIONAL, false, null),
         };
         HierarchicalTypeDefinition<ClassType> definition = new HierarchicalTypeDefinition<>(
-                ClassType.class, HiveDataTypes.HIVE_STORAGEDESC.name(),
+                ClassType.class, HiveDataTypes.HIVE_STORAGEDESC.getName(),
                 null, attributeDefinitions);
-        classTypeDefinitions.put(HiveDataTypes.HIVE_STORAGEDESC.name(), definition);
-        LOG.debug("Created definition for " + HiveDataTypes.HIVE_STORAGEDESC.name());
+        classTypeDefinitions.put(HiveDataTypes.HIVE_STORAGEDESC.getName(), definition);
+        LOG.debug("Created definition for " + HiveDataTypes.HIVE_STORAGEDESC.getName());
     }

     /** Revisit later after nested array types are handled by the typesystem **/
     private void createResourceUriStruct() throws MetadataException {
         AttributeDefinition[] attributeDefinitions = new AttributeDefinition[]{
-                new AttributeDefinition("resourceType", HiveDataTypes.HIVE_RESOURCETYPE.name(),
+                new AttributeDefinition("resourceType", HiveDataTypes.HIVE_RESOURCE_TYPE.getName(),
                         Multiplicity.REQUIRED, false, null),
                 new AttributeDefinition("uri", DataTypes.STRING_TYPE.getName(),
                         Multiplicity.REQUIRED, false, null),
         };
         StructTypeDefinition definition = new StructTypeDefinition(
-                HiveDataTypes.HIVE_RESOURCEURI.name(), attributeDefinitions);
-        structTypeDefinitionMap.put(HiveDataTypes.HIVE_RESOURCEURI.name(), definition);
-        LOG.debug("Created definition for " + HiveDataTypes.HIVE_RESOURCEURI.name());
+                HiveDataTypes.HIVE_RESOURCEURI.getName(), attributeDefinitions);
+        structTypeDefinitionMap.put(HiveDataTypes.HIVE_RESOURCEURI.getName(), definition);
+        LOG.debug("Created definition for " + HiveDataTypes.HIVE_RESOURCEURI.getName());
     }

     private void createDBClass() throws MetadataException {

@@ -285,15 +285,15 @@ public class HiveDataModelGenerator {
                         Multiplicity.OPTIONAL, false, null),
                 new AttributeDefinition("ownerName", DataTypes.STRING_TYPE.getName(),
                         Multiplicity.OPTIONAL, false, null),
-                new AttributeDefinition("ownerType", HiveDataTypes.HIVE_PRINCIPALTYPE.name(),
+                new AttributeDefinition("ownerType", HiveDataTypes.HIVE_PRINCIPAL_TYPE.getName(),
                         Multiplicity.OPTIONAL, false, null),
         };
         HierarchicalTypeDefinition<ClassType> definition = new HierarchicalTypeDefinition<>(
-                ClassType.class, HiveDataTypes.HIVE_DB.name(),
+                ClassType.class, HiveDataTypes.HIVE_DB.getName(),
                 null, attributeDefinitions);
-        classTypeDefinitions.put(HiveDataTypes.HIVE_DB.name(), definition);
-        LOG.debug("Created definition for " + HiveDataTypes.HIVE_DB.name());
+        classTypeDefinitions.put(HiveDataTypes.HIVE_DB.getName(), definition);
+        LOG.debug("Created definition for " + HiveDataTypes.HIVE_DB.getName());
     }

     private void createTypeClass() throws MetadataException {

@@ -305,21 +305,21 @@ public class HiveDataModelGenerator {
                 new AttributeDefinition("type2", DataTypes.STRING_TYPE.getName(),
                         Multiplicity.OPTIONAL, false, null),
                 new AttributeDefinition("fields",
-                        String.format("array<%s>", HiveDataTypes.HIVE_COLUMN.name()),
+                        String.format("array<%s>", HiveDataTypes.HIVE_COLUMN.getName()),
                         Multiplicity.OPTIONAL, false, null),
         };
         HierarchicalTypeDefinition<ClassType> definition = new HierarchicalTypeDefinition<>(
-                ClassType.class, HiveDataTypes.HIVE_TYPE.name(),
+                ClassType.class, HiveDataTypes.HIVE_TYPE.getName(),
                 null, attributeDefinitions);
-        classTypeDefinitions.put(HiveDataTypes.HIVE_TYPE.name(), definition);
-        LOG.debug("Created definition for " + HiveDataTypes.HIVE_TYPE.name());
+        classTypeDefinitions.put(HiveDataTypes.HIVE_TYPE.getName(), definition);
+        LOG.debug("Created definition for " + HiveDataTypes.HIVE_TYPE.getName());
     }

     private void createColumnClass() throws MetadataException {
         AttributeDefinition[] attributeDefinitions = new AttributeDefinition[]{
                 new AttributeDefinition("name", DataTypes.STRING_TYPE.getName(),
                         Multiplicity.REQUIRED, false, null),
-                //new AttributeDefinition("type", DefinedTypes.HIVE_TYPE.name(), Multiplicity
+                //new AttributeDefinition("type", DefinedTypes.HIVE_TYPE.getName(), Multiplicity
                 //        .REQUIRED, false, null),
                 new AttributeDefinition("type", DataTypes.STRING_TYPE.getName(),
                         Multiplicity.REQUIRED, false, null),

@@ -327,11 +327,11 @@ public class HiveDataModelGenerator {
                         Multiplicity.OPTIONAL, false, null),
         };
         HierarchicalTypeDefinition<ClassType> definition = new HierarchicalTypeDefinition<>(
-                ClassType.class, HiveDataTypes.HIVE_COLUMN.name(),
+                ClassType.class, HiveDataTypes.HIVE_COLUMN.getName(),
                 null, attributeDefinitions);
-        classTypeDefinitions.put(HiveDataTypes.HIVE_COLUMN.name(), definition);
-        LOG.debug("Created definition for " + HiveDataTypes.HIVE_COLUMN.name());
+        classTypeDefinitions.put(HiveDataTypes.HIVE_COLUMN.getName(), definition);
+        LOG.debug("Created definition for " + HiveDataTypes.HIVE_COLUMN.getName());
     }

     private void createPartitionClass() throws MetadataException {

@@ -339,36 +339,34 @@ public class HiveDataModelGenerator {
         AttributeDefinition[] attributeDefinitions = new AttributeDefinition[]{
                 new AttributeDefinition("values", DataTypes.STRING_TYPE.getName(),
                         Multiplicity.COLLECTION, false, null),
-                new AttributeDefinition("dbName", HiveDataTypes.HIVE_DB.name(),
+                new AttributeDefinition("dbName", HiveDataTypes.HIVE_DB.getName(),
                         Multiplicity.REQUIRED, false, null),
-                new AttributeDefinition("tableName", HiveDataTypes.HIVE_TABLE.name(),
+                new AttributeDefinition("tableName", HiveDataTypes.HIVE_TABLE.getName(),
                         Multiplicity.REQUIRED, false, null),
                 new AttributeDefinition("createTime", DataTypes.INT_TYPE.getName(),
                         Multiplicity.OPTIONAL, false, null),
                 new AttributeDefinition("lastAccessTime", DataTypes.INT_TYPE.getName(),
                         Multiplicity.OPTIONAL, false, null),
-                new AttributeDefinition("sd", HiveDataTypes.HIVE_STORAGEDESC.name(),
+                new AttributeDefinition("sd", HiveDataTypes.HIVE_STORAGEDESC.getName(),
                         Multiplicity.REQUIRED, false, null),
-                //new AttributeDefinition("columns", String.format("array<%s>", DefinedTypes
-                //        .HIVE_COLUMN.name()),
-                //        Multiplicity.COLLECTION, true, null),
+                //new AttributeDefinition("columns", String.format("array<%s>", DefinedTypes
+                //        .HIVE_COLUMN.getName()), Multiplicity.COLLECTION, true, null),
                 new AttributeDefinition("parameters", STRING_MAP_TYPE.getName(),
                         Multiplicity.OPTIONAL, false, null),
         };
         HierarchicalTypeDefinition<ClassType> definition = new HierarchicalTypeDefinition<>(
-                ClassType.class, HiveDataTypes.HIVE_PARTITION.name(),
+                ClassType.class, HiveDataTypes.HIVE_PARTITION.getName(),
                 null, attributeDefinitions);
-        classTypeDefinitions.put(HiveDataTypes.HIVE_PARTITION.name(), definition);
-        LOG.debug("Created definition for " + HiveDataTypes.HIVE_PARTITION.name());
+        classTypeDefinitions.put(HiveDataTypes.HIVE_PARTITION.getName(), definition);
+        LOG.debug("Created definition for " + HiveDataTypes.HIVE_PARTITION.getName());
     }

     private void createTableClass() throws MetadataException {
         AttributeDefinition[] attributeDefinitions = new AttributeDefinition[]{
                 new AttributeDefinition("tableName", DataTypes.STRING_TYPE.getName(),
                         Multiplicity.REQUIRED, false, null),
-                new AttributeDefinition("dbName", HiveDataTypes.HIVE_DB.name(),
+                new AttributeDefinition("dbName", HiveDataTypes.HIVE_DB.getName(),
                         Multiplicity.REQUIRED, false, null),
                 new AttributeDefinition("owner", DataTypes.STRING_TYPE.getName(),
                         Multiplicity.OPTIONAL, false, null),

@@ -378,13 +376,13 @@ public class HiveDataModelGenerator {
                         Multiplicity.OPTIONAL, false, null),
                 new AttributeDefinition("retention", DataTypes.INT_TYPE.getName(),
                         Multiplicity.OPTIONAL, false, null),
-                new AttributeDefinition("sd", HiveDataTypes.HIVE_STORAGEDESC.name(),
+                new AttributeDefinition("sd", HiveDataTypes.HIVE_STORAGEDESC.getName(),
                         Multiplicity.OPTIONAL, false, null),
                 new AttributeDefinition("partitionKeys",
-                        String.format("array<%s>", HiveDataTypes.HIVE_COLUMN.name()),
+                        String.format("array<%s>", HiveDataTypes.HIVE_COLUMN.getName()),
                         Multiplicity.OPTIONAL, false, null),
-                // new AttributeDefinition("columns", String.format("array<%s>", DefinedTypes
-                //        .HIVE_COLUMN.name()),
+                // new AttributeDefinition("columns", // todo - ask venkat
+                //        String.format("array<%s>", HiveDataTypes.HIVE_COLUMN.getName()),
                 //        Multiplicity.COLLECTION, true, null),
                 new AttributeDefinition("parameters", STRING_MAP_TYPE.getName(),
                         Multiplicity.OPTIONAL, false, null),

@@ -398,10 +396,10 @@ public class HiveDataModelGenerator {
                         Multiplicity.OPTIONAL, false, null),
         };
         HierarchicalTypeDefinition<ClassType> definition = new HierarchicalTypeDefinition<>(
-                ClassType.class, HiveDataTypes.HIVE_TABLE.name(),
+                ClassType.class, HiveDataTypes.HIVE_TABLE.getName(),
                 null, attributeDefinitions);
-        classTypeDefinitions.put(HiveDataTypes.HIVE_TABLE.name(), definition);
-        LOG.debug("Created definition for " + HiveDataTypes.HIVE_TABLE.name());
+        classTypeDefinitions.put(HiveDataTypes.HIVE_TABLE.getName(), definition);
+        LOG.debug("Created definition for " + HiveDataTypes.HIVE_TABLE.getName());
     }

     private void createIndexClass() throws MetadataException {

@@ -410,17 +408,17 @@ public class HiveDataModelGenerator {
                         Multiplicity.REQUIRED, false, null),
                 new AttributeDefinition("indexHandlerClass", DataTypes.STRING_TYPE.getName(),
                         Multiplicity.REQUIRED, false, null),
-                new AttributeDefinition("dbName", HiveDataTypes.HIVE_DB.name(),
+                new AttributeDefinition("dbName", HiveDataTypes.HIVE_DB.getName(),
                         Multiplicity.REQUIRED, false, null),
                 new AttributeDefinition("createTime", DataTypes.INT_TYPE.getName(),
                         Multiplicity.OPTIONAL, false, null),
                 new AttributeDefinition("lastAccessTime", DataTypes.INT_TYPE.getName(),
                         Multiplicity.OPTIONAL, false, null),
-                new AttributeDefinition("origTableName", HiveDataTypes.HIVE_TABLE.name(),
+                new AttributeDefinition("origTableName", HiveDataTypes.HIVE_TABLE.getName(),
                         Multiplicity.REQUIRED, false, null),
-                new AttributeDefinition("indexTableName", HiveDataTypes.HIVE_TABLE.name(),
+                new AttributeDefinition("indexTableName", HiveDataTypes.HIVE_TABLE.getName(),
                         Multiplicity.OPTIONAL, false, null),
-                new AttributeDefinition("sd", HiveDataTypes.HIVE_STORAGEDESC.name(),
+                new AttributeDefinition("sd", HiveDataTypes.HIVE_STORAGEDESC.getName(),
                         Multiplicity.REQUIRED, false, null),
                 new AttributeDefinition("parameters", STRING_MAP_TYPE.getName(),
                         Multiplicity.OPTIONAL, false, null),

@@ -429,37 +427,36 @@ public class HiveDataModelGenerator {
         };
         HierarchicalTypeDefinition<ClassType> definition = new HierarchicalTypeDefinition<>(
-                ClassType.class, HiveDataTypes.HIVE_INDEX.name(),
+                ClassType.class, HiveDataTypes.HIVE_INDEX.getName(),
                 null, attributeDefinitions);
-        classTypeDefinitions.put(HiveDataTypes.HIVE_INDEX.name(), definition);
-        LOG.debug("Created definition for " + HiveDataTypes.HIVE_INDEX.name());
+        classTypeDefinitions.put(HiveDataTypes.HIVE_INDEX.getName(), definition);
+        LOG.debug("Created definition for " + HiveDataTypes.HIVE_INDEX.getName());
     }

     private void createFunctionClass() throws MetadataException {
         AttributeDefinition[] attributeDefinitions = new AttributeDefinition[]{
                 new AttributeDefinition("functionName", DataTypes.STRING_TYPE.getName(),
                         Multiplicity.REQUIRED, false, null),
-                new AttributeDefinition("dbName", HiveDataTypes.HIVE_DB.name(),
+                new AttributeDefinition("dbName", HiveDataTypes.HIVE_DB.getName(),
                         Multiplicity.REQUIRED, false, null),
                 new AttributeDefinition("className", DataTypes.INT_TYPE.getName(),
                         Multiplicity.OPTIONAL, false, null),
                 new AttributeDefinition("ownerName", DataTypes.INT_TYPE.getName(),
                         Multiplicity.OPTIONAL, false, null),
-                new AttributeDefinition("ownerType", HiveDataTypes.HIVE_PRINCIPALTYPE.name(),
+                new AttributeDefinition("ownerType", HiveDataTypes.HIVE_PRINCIPAL_TYPE.getName(),
                         Multiplicity.REQUIRED, false, null),
                 new AttributeDefinition("createTime", DataTypes.INT_TYPE.getName(),
                         Multiplicity.REQUIRED, false, null),
-                new AttributeDefinition("functionType", HiveDataTypes.HIVE_FUNCTIONTYPE.name(),
+                new AttributeDefinition("functionType", HiveDataTypes.HIVE_FUNCTION_TYPE.getName(),
                         Multiplicity.REQUIRED, false, null),
-                new AttributeDefinition("resourceUris", HiveDataTypes.HIVE_RESOURCEURI.name(),
+                new AttributeDefinition("resourceUris", HiveDataTypes.HIVE_RESOURCEURI.getName(),
                         Multiplicity.COLLECTION, false, null),
         };
         HierarchicalTypeDefinition<ClassType> definition = new HierarchicalTypeDefinition<>(
-                ClassType.class, HiveDataTypes.HIVE_FUNCTION.name(), null, attributeDefinitions);
+                ClassType.class, HiveDataTypes.HIVE_FUNCTION.getName(), null, attributeDefinitions);
-        classTypeDefinitions.put(HiveDataTypes.HIVE_FUNCTION.name(), definition);
-        LOG.debug("Created definition for " + HiveDataTypes.HIVE_FUNCTION.name());
+        classTypeDefinitions.put(HiveDataTypes.HIVE_FUNCTION.getName(), definition);
+        LOG.debug("Created definition for " + HiveDataTypes.HIVE_FUNCTION.getName());
     }

     private void createRoleClass() throws MetadataException {

@@ -471,12 +468,11 @@ public class HiveDataModelGenerator {
                 new AttributeDefinition("ownerName", DataTypes.STRING_TYPE.getName(),
                         Multiplicity.REQUIRED, false, null),
         };
         HierarchicalTypeDefinition<ClassType> definition = new HierarchicalTypeDefinition<>(
-                ClassType.class, HiveDataTypes.HIVE_ROLE.name(), null, attributeDefinitions);
+                ClassType.class, HiveDataTypes.HIVE_ROLE.getName(), null, attributeDefinitions);
-        classTypeDefinitions.put(HiveDataTypes.HIVE_ROLE.name(), definition);
-        LOG.debug("Created definition for " + HiveDataTypes.HIVE_ROLE.name());
+        classTypeDefinitions.put(HiveDataTypes.HIVE_ROLE.getName(), definition);
+        LOG.debug("Created definition for " + HiveDataTypes.HIVE_ROLE.getName());
     }

     private void createProcessClass() throws MetadataException {

@@ -490,10 +486,10 @@ public class HiveDataModelGenerator {
                 new AttributeDefinition("userName", DataTypes.STRING_TYPE.getName(),
                         Multiplicity.REQUIRED, false, null),
                 new AttributeDefinition("sourceTableNames",
-                        String.format("array<%s>", HiveDataTypes.HIVE_TABLE.name()),
+                        String.format("array<%s>", HiveDataTypes.HIVE_TABLE.getName()),
                         Multiplicity.OPTIONAL, false, null),
                 new AttributeDefinition("targetTableNames",
-                        String.format("array<%s>", HiveDataTypes.HIVE_TABLE.name()),
+                        String.format("array<%s>", HiveDataTypes.HIVE_TABLE.getName()),
                         Multiplicity.OPTIONAL, false, null),
                 new AttributeDefinition("queryText", DataTypes.STRING_TYPE.getName(),
                         Multiplicity.REQUIRED, false, null),

@@ -506,9 +502,9 @@ public class HiveDataModelGenerator {
         };
         HierarchicalTypeDefinition<ClassType> definition = new HierarchicalTypeDefinition<>(
-                ClassType.class, HiveDataTypes.HIVE_PROCESS.name(),
+                ClassType.class, HiveDataTypes.HIVE_PROCESS.getName(),
                 null, attributeDefinitions);
-        classTypeDefinitions.put(HiveDataTypes.HIVE_PROCESS.name(), definition);
-        LOG.debug("Created definition for " + HiveDataTypes.HIVE_PROCESS.name());
+        classTypeDefinitions.put(HiveDataTypes.HIVE_PROCESS.getName(), definition);
+        LOG.debug("Created definition for " + HiveDataTypes.HIVE_PROCESS.getName());
     }

     public static void main(String[] args) throws Exception {
addons/hive-bridge/src/main/java/org/apache/hadoop/metadata/hive/model/HiveDataTypes.java
@@ -24,10 +24,10 @@ package org.apache.hadoop.metadata.hive.model;
 public enum HiveDataTypes {
     // Enums
-    HIVE_OBJECTTYPE,
-    HIVE_PRINCIPALTYPE,
-    HIVE_RESOURCETYPE,
-    HIVE_FUNCTIONTYPE,
+    HIVE_OBJECT_TYPE,
+    HIVE_PRINCIPAL_TYPE,
+    HIVE_RESOURCE_TYPE,
+    HIVE_FUNCTION_TYPE,

     // Structs
     HIVE_SERDE,

@@ -47,4 +47,9 @@ public enum HiveDataTypes {
     HIVE_TYPE,
     HIVE_PROCESS,
     // HIVE_VIEW,
     ;
+
+    public String getName() {
+        return name().toLowerCase();
+    }
 }
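The new getName() helper is what drives the wholesale renaming in hive-data-model.json below: type names registered with the service become the lowercased enum constants. A small sketch of the mapping (the values follow directly from name().toLowerCase()):

    // The registered type names are now the lowercased enum constants, matching
    // the renamed entries in src/main/examples/hive-data-model.json below.
    String db = HiveDataTypes.HIVE_DB.getName();                 // "hive_db"
    String owner = HiveDataTypes.HIVE_PRINCIPAL_TYPE.getName();  // "hive_principal_type"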
addons/hive-bridge/src/main/java/org/apache/hadoop/metadata/hivetypes/HiveImporter.java
@@ -46,6 +46,10 @@ import org.slf4j.LoggerFactory;
 import java.util.ArrayList;
 import java.util.List;

+/**
+ * todo - this needs to be removed.
+ */
+@Deprecated
 public class HiveImporter {

     private static final Logger LOG =

@@ -158,7 +162,7 @@ public class HiveImporter {
             LOG.debug("creating instance of type " + typeName + " dataType " + dataType);
             ITypedReferenceableInstance instance =
                     (ITypedReferenceableInstance) dataType.convert(ref, Multiplicity.OPTIONAL);
-            String guid = graphRepository.createEntity(instance, typeName);
+            String guid = graphRepository.createEntity(instance);
             System.out.println("creating instance of type " + typeName + " dataType " + dataType
                     + ", guid: " + guid);
src/main/examples/hive-data-model.json
 {
     "enumTypes":[
         {
-            "name": "HIVE_FUNCTIONTYPE",
-            "enumValues":[
-                {
-                    "value": "JAVA",
-                    "ordinal": 1
-                }
-            ]
-        },
-        {
-            "name": "HIVE_PRINCIPALTYPE",
-            "enumValues":[
-                {
-                    "value": "USER",
-                    "ordinal": 1
-                },
-                {
-                    "value": "ROLE",
-                    "ordinal": 2
-                },
-                {
-                    "value": "GROUP",
-                    "ordinal": 3
-                }
-            ]
-        },
-        {
-            "name": "HIVE_OBJECTTYPE",
+            "name": "hive_object_type",
             "enumValues":[
                 {
                     "value": "GLOBAL",

@@ -52,7 +26,7 @@
             ]
         },
         {
-            "name": "HIVE_RESOURCETYPE",
+            "name": "hive_resource_type",
             "enumValues":[
                 {
                     "value": "JAR",

@@ -67,15 +41,41 @@
                     "ordinal": 3
                 }
             ]
         },
+        {
+            "name": "hive_principal_type",
+            "enumValues":[
+                {
+                    "value": "USER",
+                    "ordinal": 1
+                },
+                {
+                    "value": "ROLE",
+                    "ordinal": 2
+                },
+                {
+                    "value": "GROUP",
+                    "ordinal": 3
+                }
+            ]
+        },
+        {
+            "name": "hive_function_type",
+            "enumValues":[
+                {
+                    "value": "JAVA",
+                    "ordinal": 1
+                }
+            ]
+        }
     ],
     "structTypes":[
         {
-            "typeName": "HIVE_RESOURCEURI",
+            "typeName": "hive_order",
             "attributeDefinitions":[
                 {
-                    "name": "resourceType",
-                    "dataTypeName": "HIVE_RESOURCETYPE",
+                    "name": "col",
+                    "dataTypeName": "string",
                     "multiplicity": "required",
                     "isComposite": false,
                     "isUnique": false,

@@ -83,8 +83,8 @@
                     "reverseAttributeName": null
                 },
                 {
-                    "name": "uri",
-                    "dataTypeName": "string",
+                    "name": "order",
+                    "dataTypeName": "int",
                     "multiplicity": "required",
                     "isComposite": false,
                     "isUnique": false,

@@ -94,11 +94,11 @@
             ]
         },
         {
-            "typeName": "HIVE_ORDER",
+            "typeName": "hive_resourceuri",
             "attributeDefinitions":[
                 {
-                    "name": "col",
-                    "dataTypeName": "string",
+                    "name": "resourceType",
+                    "dataTypeName": "hive_resource_type",
                     "multiplicity": "required",
                     "isComposite": false,
                     "isUnique": false,

@@ -106,8 +106,8 @@
                     "reverseAttributeName": null
                 },
                 {
-                    "name": "order",
-                    "dataTypeName": "int",
+                    "name": "uri",
+                    "dataTypeName": "string",
                     "multiplicity": "required",
                     "isComposite": false,
                     "isUnique": false,

@@ -117,7 +117,7 @@
             ]
         },
         {
-            "typeName": "HIVE_SERDE",
+            "typeName": "hive_serde",
             "attributeDefinitions":[
                 {
                     "name": "name",

@@ -158,10 +158,10 @@
             ],
             "hierarchicalMetaTypeName": "org.apache.hadoop.metadata.typesystem.types.ClassType",
-            "typeName": "HIVE_FUNCTION",
+            "typeName": "hive_process",
             "attributeDefinitions":[
                 {
-                    "name": "functionName",
+                    "name": "processName",
                     "dataTypeName": "string",
                     "multiplicity": "required",
                     "isComposite": false,

@@ -170,8 +170,8 @@
                     "reverseAttributeName": null
                 },
                 {
-                    "name": "dbName",
-                    "dataTypeName": "HIVE_DB",
+                    "name": "startTime",
+                    "dataTypeName": "int",
                     "multiplicity": "required",
                     "isComposite": false,
                     "isUnique": false,

@@ -179,8 +179,26 @@
                     "reverseAttributeName": null
                 },
                 {
-                    "name": "className",
+                    "name": "endTime",
                     "dataTypeName": "int",
                     "multiplicity": "required",
                     "isComposite": false,
                     "isUnique": false,
                     "isIndexable": true,
                     "reverseAttributeName": null
                 },
+                {
+                    "name": "userName",
+                    "dataTypeName": "string",
+                    "multiplicity": "required",
+                    "isComposite": false,
+                    "isUnique": false,
+                    "isIndexable": true,
+                    "reverseAttributeName": null
+                },
+                {
+                    "name": "sourceTableNames",
+                    "dataTypeName": "array<hive_table>",
+                    "multiplicity": "optional",
+                    "isComposite": false,
+                    "isUnique": false,

@@ -188,8 +206,8 @@
                     "reverseAttributeName": null
                 },
                 {
-                    "name": "ownerName",
-                    "dataTypeName": "int",
+                    "name": "targetTableNames",
+                    "dataTypeName": "array<hive_table>",
                     "multiplicity": "optional",
                     "isComposite": false,
                     "isUnique": false,

@@ -197,8 +215,8 @@
                     "reverseAttributeName": null
                 },
                 {
-                    "name": "ownerType",
-                    "dataTypeName": "HIVE_PRINCIPALTYPE",
+                    "name": "queryText",
+                    "dataTypeName": "string",
                     "multiplicity": "required",
                     "isComposite": false,
                     "isUnique": false,

@@ -206,8 +224,8 @@
                     "reverseAttributeName": null
                 },
                 {
-                    "name": "createTime",
-                    "dataTypeName": "int",
+                    "name": "queryPlan",
+                    "dataTypeName": "string",
                     "multiplicity": "required",
                     "isComposite": false,
                     "isUnique": false,

@@ -215,8 +233,8 @@
                     "reverseAttributeName": null
                 },
                 {
-                    "name": "functionType",
-                    "dataTypeName": "HIVE_FUNCTIONTYPE",
+                    "name": "queryId",
+                    "dataTypeName": "string",
                     "multiplicity": "required",
                     "isComposite": false,
                     "isUnique": false,

@@ -224,9 +242,9 @@
                     "reverseAttributeName": null
                 },
                 {
-                    "name": "resourceUris",
-                    "dataTypeName": "HIVE_RESOURCEURI",
-                    "multiplicity": "collection",
+                    "name": "queryGraph",
+                    "dataTypeName": "string",
+                    "multiplicity": "optional",
                     "isComposite": false,
                     "isUnique": false,
                     "isIndexable": true,

@@ -239,10 +257,10 @@
             ],
             "hierarchicalMetaTypeName": "org.apache.hadoop.metadata.typesystem.types.ClassType",
-            "typeName": "HIVE_PROCESS",
+            "typeName": "hive_function",
             "attributeDefinitions":[
                 {
-                    "name": "processName",
+                    "name": "functionName",
                     "dataTypeName": "string",
                     "multiplicity": "required",
                     "isComposite": false,

@@ -251,8 +269,8 @@
                     "reverseAttributeName": null
                 },
                 {
-                    "name": "startTime",
-                    "dataTypeName": "int",
+                    "name": "dbName",
+                    "dataTypeName": "hive_db",
                     "multiplicity": "required",
                     "isComposite": false,
                     "isUnique": false,

@@ -260,26 +278,8 @@
                     "reverseAttributeName": null
                 },
                 {
-                    "name": "endTime",
+                    "name": "className",
                     "dataTypeName": "int",
                     "multiplicity": "required",
                     "isComposite": false,
                     "isUnique": false,
                     "isIndexable": true,
                     "reverseAttributeName": null
                 },
-                {
-                    "name": "userName",
-                    "dataTypeName": "string",
-                    "multiplicity": "required",
-                    "isComposite": false,
-                    "isUnique": false,
-                    "isIndexable": true,
-                    "reverseAttributeName": null
-                },
-                {
-                    "name": "sourceTableNames",
-                    "dataTypeName": "array<HIVE_TABLE>",
-                    "multiplicity": "optional",
-                    "isComposite": false,
-                    "isUnique": false,

@@ -287,8 +287,8 @@
                     "reverseAttributeName": null
                 },
                 {
-                    "name": "targetTableNames",
-                    "dataTypeName": "array<HIVE_TABLE>",
+                    "name": "ownerName",
+                    "dataTypeName": "int",
                     "multiplicity": "optional",
                     "isComposite": false,
                     "isUnique": false,

@@ -296,8 +296,8 @@
                     "reverseAttributeName": null
                 },
                 {
-                    "name": "queryText",
-                    "dataTypeName": "string",
+                    "name": "ownerType",
+                    "dataTypeName": "hive_principal_type",
                     "multiplicity": "required",
                     "isComposite": false,
                     "isUnique": false,

@@ -305,8 +305,8 @@
                     "reverseAttributeName": null
                 },
                 {
-                    "name": "queryPlan",
-                    "dataTypeName": "string",
+                    "name": "createTime",
+                    "dataTypeName": "int",
                     "multiplicity": "required",
                     "isComposite": false,
                     "isUnique": false,

@@ -314,8 +314,8 @@
                     "reverseAttributeName": null
                 },
                 {
-                    "name": "queryId",
-                    "dataTypeName": "string",
+                    "name": "functionType",
+                    "dataTypeName": "hive_function_type",
                     "multiplicity": "required",
                     "isComposite": false,
                     "isUnique": false,

@@ -323,9 +323,9 @@
                     "reverseAttributeName": null
                 },
                 {
-                    "name": "queryGraph",
-                    "dataTypeName": "string",
-                    "multiplicity": "optional",
+                    "name": "resourceUris",
+                    "dataTypeName": "hive_resourceuri",
+                    "multiplicity": "collection",
                     "isComposite": false,
                     "isUnique": false,
                     "isIndexable": true,

@@ -338,20 +338,11 @@
             ],
             "hierarchicalMetaTypeName": "org.apache.hadoop.metadata.typesystem.types.ClassType",
-            "typeName": "HIVE_PARTITION",
+            "typeName": "hive_type",
             "attributeDefinitions":[
                 {
-                    "name": "values",
+                    "name": "name",
                     "dataTypeName": "string",
                     "multiplicity": "collection",
                     "isComposite": false,
                     "isUnique": false,
                     "isIndexable": true,
                     "reverseAttributeName": null
                 },
-                {
-                    "name": "dbName",
-                    "dataTypeName": "HIVE_DB",
-                    "multiplicity": "required",
-                    "isComposite": false,
-                    "isUnique": false,

@@ -359,17 +350,8 @@
                     "reverseAttributeName": null
                 },
-                {
-                    "name": "tableName",
-                    "dataTypeName": "HIVE_TABLE",
-                    "multiplicity": "required",
-                    "isComposite": false,
-                    "isUnique": false,
-                    "isIndexable": true,
-                    "reverseAttributeName": null
-                },
                 {
-                    "name": "createTime",
-                    "dataTypeName": "int",
+                    "name": "type1",
+                    "dataTypeName": "string",
                     "multiplicity": "optional",
                     "isComposite": false,
                     "isUnique": false,

@@ -377,8 +359,8 @@
                     "reverseAttributeName": null
                 },
                 {
-                    "name": "lastAccessTime",
-                    "dataTypeName": "int",
+                    "name": "type2",
+                    "dataTypeName": "string",
                     "multiplicity": "optional",
                     "isComposite": false,
                     "isUnique": false,

@@ -386,17 +368,8 @@
                     "reverseAttributeName": null
                 },
-                {
-                    "name": "sd",
-                    "dataTypeName": "HIVE_STORAGEDESC",
-                    "multiplicity": "required",
-                    "isComposite": false,
-                    "isUnique": false,
-                    "isIndexable": true,
-                    "reverseAttributeName": null
-                },
                 {
-                    "name": "parameters",
-                    "dataTypeName": "map<string,string>",
+                    "name": "fields",
+                    "dataTypeName": "array<hive_column>",
                     "multiplicity": "optional",
                     "isComposite": false,
                     "isUnique": false,

@@ -410,37 +383,28 @@
             ],
             "hierarchicalMetaTypeName": "org.apache.hadoop.metadata.typesystem.types.ClassType",
-            "typeName": "HIVE_STORAGEDESC",
+            "typeName": "hive_table",
             "attributeDefinitions":[
-                {
-                    "name": "cols",
-                    "dataTypeName": "array<HIVE_COLUMN>",
-                    "multiplicity": "collection",
-                    "isComposite": false,
-                    "isUnique": false,
-                    "isIndexable": true,
-                    "reverseAttributeName": null
-                },
                 {
-                    "name": "location",
+                    "name": "tableName",
                     "dataTypeName": "string",
-                    "multiplicity": "optional",
+                    "multiplicity": "required",
                     "isComposite": false,
                     "isUnique": false,
                     "isIndexable": true,
                     "reverseAttributeName": null
                 },
                 {
-                    "name": "inputFormat",
-                    "dataTypeName": "string",
-                    "multiplicity": "optional",
+                    "name": "dbName",
+                    "dataTypeName": "hive_db",
+                    "multiplicity": "required",
                     "isComposite": false,
                     "isUnique": false,
                     "isIndexable": true,
                     "reverseAttributeName": null
                 },
                 {
-                    "name": "outputFormat",
+                    "name": "owner",
                     "dataTypeName": "string",
                     "multiplicity": "optional",
                     "isComposite": false,

@@ -449,16 +413,16 @@
                     "reverseAttributeName": null
                 },
                 {
-                    "name": "compressed",
-                    "dataTypeName": "boolean",
-                    "multiplicity": "required",
+                    "name": "createTime",
+                    "dataTypeName": "int",
+                    "multiplicity": "optional",
                     "isComposite": false,
                     "isUnique": false,
                     "isIndexable": true,
                     "reverseAttributeName": null
                 },
                 {
-                    "name": "numBuckets",
+                    "name": "lastAccessTime",
                     "dataTypeName": "int",
                     "multiplicity": "optional",
                     "isComposite": false,

@@ -467,8 +431,8 @@
                     "reverseAttributeName": null
                 },
                 {
-                    "name": "serdeInfo",
-                    "dataTypeName": "HIVE_SERDE",
+                    "name": "retention",
+                    "dataTypeName": "int",
                     "multiplicity": "optional",
                     "isComposite": false,
                     "isUnique": false,

@@ -476,8 +440,8 @@
                     "reverseAttributeName": null
                 },
                 {
-                    "name": "bucketCols",
-                    "dataTypeName": "array<string>",
+                    "name": "sd",
+                    "dataTypeName": "hive_storagedesc",
                     "multiplicity": "optional",
                     "isComposite": false,
                     "isUnique": false,

@@ -485,8 +449,8 @@
                     "reverseAttributeName": null
                 },
                 {
-                    "name": "sortCols",
-                    "dataTypeName": "array<HIVE_ORDER>",
+                    "name": "partitionKeys",
+                    "dataTypeName": "array<hive_column>",
                     "multiplicity": "optional",
                     "isComposite": false,
                     "isUnique": false,

@@ -503,34 +467,16 @@
                     "reverseAttributeName": null
                 },
-                {
-                    "name": "storedAsSubDirectories",
-                    "dataTypeName": "boolean",
-                    "multiplicity": "optional",
-                    "isComposite": false,
-                    "isUnique": false,
-                    "isIndexable": true,
-                    "reverseAttributeName": null
-                }
-            ]
-        },
-        {
-            "superTypes":[
-            ],
-            "hierarchicalMetaTypeName": "org.apache.hadoop.metadata.typesystem.types.ClassType",
-            "typeName": "HIVE_TYPE",
-            "attributeDefinitions":[
                 {
-                    "name": "name",
+                    "name": "viewOriginalText",
                     "dataTypeName": "string",
-                    "multiplicity": "required",
+                    "multiplicity": "optional",
                     "isComposite": false,
                     "isUnique": false,
                     "isIndexable": true,
                     "reverseAttributeName": null
                 },
                 {
-                    "name": "type1",
+                    "name": "viewExpandedText",
                     "dataTypeName": "string",
                     "multiplicity": "optional",
                     "isComposite": false,

@@ -539,7 +485,7 @@
                     "reverseAttributeName": null
                 },
                 {
-                    "name": "type2",
+                    "name": "tableType",
                     "dataTypeName": "string",
                     "multiplicity": "optional",
                     "isComposite": false,

@@ -548,8 +494,8 @@
                     "reverseAttributeName": null
                 },
                 {
-                    "name": "fields",
-                    "dataTypeName": "array<HIVE_COLUMN>",
+                    "name": "temporary",
+                    "dataTypeName": "boolean",
                     "multiplicity": "optional",
                     "isComposite": false,
                     "isUnique": false,

@@ -563,11 +509,20 @@
             ],
             "hierarchicalMetaTypeName": "org.apache.hadoop.metadata.typesystem.types.ClassType",
-            "typeName": "HIVE_COLUMN",
+            "typeName": "hive_partition",
             "attributeDefinitions":[
                 {
-                    "name": "name",
+                    "name": "values",
                     "dataTypeName": "string",
                     "multiplicity": "collection",
                     "isComposite": false,
                     "isUnique": false,
                     "isIndexable": true,
                     "reverseAttributeName": null
                 },
+                {
+                    "name": "dbName",
+                    "dataTypeName": "hive_db",
+                    "multiplicity": "required",
+                    "isComposite": false,
+                    "isUnique": false,

@@ -575,8 +530,8 @@
                     "reverseAttributeName": null
                 },
                 {
-                    "name": "type",
-                    "dataTypeName": "string",
+                    "name": "tableName",
+                    "dataTypeName": "hive_table",
                     "multiplicity": "required",
                     "isComposite": false,
                     "isUnique": false,

@@ -584,35 +539,26 @@
                     "reverseAttributeName": null
                 },
                 {
-                    "name": "comment",
-                    "dataTypeName": "string",
+                    "name": "createTime",
+                    "dataTypeName": "int",
                     "multiplicity": "optional",
                     "isComposite": false,
                     "isUnique": false,
                     "isIndexable": true,
                     "reverseAttributeName": null
-                }
-            ]
-        },
-        {
-            "superTypes":[
-            ],
-            "hierarchicalMetaTypeName": "org.apache.hadoop.metadata.typesystem.types.ClassType",
-            "typeName": "HIVE_ROLE",
-            "attributeDefinitions":[
+                },
                 {
-                    "name": "roleName",
-                    "dataTypeName": "string",
-                    "multiplicity": "required",
+                    "name": "lastAccessTime",
+                    "dataTypeName": "int",
+                    "multiplicity": "optional",
                     "isComposite": false,
                     "isUnique": false,
                     "isIndexable": true,
                     "reverseAttributeName": null
                 },
                 {
-                    "name": "createTime",
-                    "dataTypeName": "int",
+                    "name": "sd",
+                    "dataTypeName": "hive_storagedesc",
                     "multiplicity": "required",
                     "isComposite": false,
                     "isUnique": false,

@@ -620,9 +566,9 @@
                     "reverseAttributeName": null
                 },
                 {
-                    "name": "ownerName",
-                    "dataTypeName": "string",
-                    "multiplicity": "required",
+                    "name": "parameters",
+                    "dataTypeName": "map<string,string>",
+                    "multiplicity": "optional",
                     "isComposite": false,
                     "isUnique": false,
                     "isIndexable": true,

@@ -635,28 +581,19 @@
             ],
             "hierarchicalMetaTypeName": "org.apache.hadoop.metadata.typesystem.types.ClassType",
-            "typeName": "HIVE_TABLE",
+            "typeName": "hive_storagedesc",
             "attributeDefinitions":[
-                {
-                    "name": "tableName",
-                    "dataTypeName": "string",
-                    "multiplicity": "required",
-                    "isComposite": false,
-                    "isUnique": false,
-                    "isIndexable": true,
-                    "reverseAttributeName": null
-                },
                 {
-                    "name": "dbName",
-                    "dataTypeName": "HIVE_DB",
-                    "multiplicity": "required",
+                    "name": "cols",
+                    "dataTypeName": "array<hive_column>",
+                    "multiplicity": "collection",
                     "isComposite": false,
                     "isUnique": false,
                     "isIndexable": true,
                     "reverseAttributeName": null
                 },
                 {
-                    "name": "owner",
+                    "name": "location",
                     "dataTypeName": "string",
                     "multiplicity": "optional",
                     "isComposite": false,

@@ -665,17 +602,8 @@
                     "reverseAttributeName": null
                 },
-                {
-                    "name": "createTime",
-                    "dataTypeName": "int",
-                    "multiplicity": "optional",
-                    "isComposite": false,
-                    "isUnique": false,
-                    "isIndexable": true,
-                    "reverseAttributeName": null
-                },
                 {
-                    "name": "lastAccessTime",
-                    "dataTypeName": "int",
+                    "name": "inputFormat",
+                    "dataTypeName": "string",
                     "multiplicity": "optional",
                     "isComposite": false,
                     "isUnique": false,

@@ -683,8 +611,8 @@
                     "reverseAttributeName": null
                 },
                 {
-                    "name": "retention",
-                    "dataTypeName": "int",
+                    "name": "outputFormat",
+                    "dataTypeName": "string",
                     "multiplicity": "optional",
                     "isComposite": false,
                     "isUnique": false,

@@ -692,17 +620,17 @@
                     "reverseAttributeName": null
                 },
                 {
-                    "name": "sd",
-                    "dataTypeName": "HIVE_STORAGEDESC",
-                    "multiplicity": "optional",
+                    "name": "compressed",
+                    "dataTypeName": "boolean",
+                    "multiplicity": "required",
                     "isComposite": false,
                     "isUnique": false,
                     "isIndexable": true,
                     "reverseAttributeName": null
                 },
                 {
-                    "name": "partitionKeys",
-                    "dataTypeName": "array<HIVE_COLUMN>",
+                    "name": "numBuckets",
+                    "dataTypeName": "int",
                     "multiplicity": "optional",
                     "isComposite": false,
                     "isUnique": false,

@@ -710,8 +638,8 @@
                     "reverseAttributeName": null
                 },
                 {
-                    "name": "parameters",
-                    "dataTypeName": "map<string,string>",
+                    "name": "serdeInfo",
+                    "dataTypeName": "hive_serde",
                     "multiplicity": "optional",
                     "isComposite": false,
                     "isUnique": false,

@@ -719,8 +647,8 @@
                     "reverseAttributeName": null
                 },
                 {
-                    "name": "viewOriginalText",
-                    "dataTypeName": "string",
+                    "name": "bucketCols",
+                    "dataTypeName": "array<string>",
                     "multiplicity": "optional",
                     "isComposite": false,
                     "isUnique": false,

@@ -728,8 +656,8 @@
                     "reverseAttributeName": null
                 },
                 {
-                    "name": "viewExpandedText",
-                    "dataTypeName": "string",
+                    "name": "sortCols",
+                    "dataTypeName": "array<hive_order>",
                     "multiplicity": "optional",
                     "isComposite": false,
                     "isUnique": false,

@@ -737,8 +665,8 @@
                     "reverseAttributeName": null
                 },
                 {
-                    "name": "tableType",
-                    "dataTypeName": "string",
+                    "name": "parameters",
+                    "dataTypeName": "map<string,string>",
                     "multiplicity": "optional",
                     "isComposite": false,
                     "isUnique": false,

@@ -746,7 +674,7 @@
                     "reverseAttributeName": null
                 },
                 {
-                    "name": "temporary",
+                    "name": "storedAsSubDirectories",
                     "dataTypeName": "boolean",
                     "multiplicity": "optional",
                     "isComposite": false,

@@ -761,7 +689,7 @@
             ],
             "hierarchicalMetaTypeName": "org.apache.hadoop.metadata.typesystem.types.ClassType",
-            "typeName": "HIVE_INDEX",
+            "typeName": "hive_index",
             "attributeDefinitions":[
                 {
                     "name": "indexName",

@@ -783,7 +711,7 @@
                 },
                 {
                     "name": "dbName",
-                    "dataTypeName": "HIVE_DB",
+                    "dataTypeName": "hive_db",
                     "multiplicity": "required",
                     "isComposite": false,
                     "isUnique": false,

@@ -810,7 +738,7 @@
                 },
                 {
                     "name": "origTableName",
-                    "dataTypeName": "HIVE_TABLE",
+                    "dataTypeName": "hive_table",
                     "multiplicity": "required",
                     "isComposite": false,
                     "isUnique": false,

@@ -819,7 +747,7 @@
                 },
                 {
                     "name": "indexTableName",
-                    "dataTypeName": "HIVE_TABLE",
+                    "dataTypeName": "hive_table",
                     "multiplicity": "optional",
                     "isComposite": false,
                     "isUnique": false,

@@ -828,7 +756,7 @@
                 },
                 {
                     "name": "sd",
-                    "dataTypeName": "HIVE_STORAGEDESC",
+                    "dataTypeName": "hive_storagedesc",
                     "multiplicity": "required",
                     "isComposite": false,
                     "isUnique": false,

@@ -860,7 +788,43 @@
             ],
             "hierarchicalMetaTypeName": "org.apache.hadoop.metadata.typesystem.types.ClassType",
-            "typeName": "HIVE_DB",
+            "typeName": "hive_role",
             "attributeDefinitions":[
+                {
+                    "name": "roleName",
+                    "dataTypeName": "string",
+                    "multiplicity": "required",
+                    "isComposite": false,
+                    "isUnique": false,
+                    "isIndexable": true,
+                    "reverseAttributeName": null
+                },
+                {
+                    "name": "createTime",
+                    "dataTypeName": "int",
+                    "multiplicity": "required",
+                    "isComposite": false,
+                    "isUnique": false,
+                    "isIndexable": true,
+                    "reverseAttributeName": null
+                },
+                {
+                    "name": "ownerName",
+                    "dataTypeName": "string",
+                    "multiplicity": "required",
+                    "isComposite": false,
+                    "isUnique": false,
+                    "isIndexable": true,
+                    "reverseAttributeName": null
+                }
+            ]
+        },
+        {
+            "superTypes":[
+            ],
+            "hierarchicalMetaTypeName": "org.apache.hadoop.metadata.typesystem.types.ClassType",
+            "typeName": "hive_db",
+            "attributeDefinitions":[
                 {
                     "name": "name",

@@ -909,7 +873,43 @@
                 },
                 {
                     "name": "ownerType",
-                    "dataTypeName": "HIVE_PRINCIPALTYPE",
+                    "dataTypeName": "hive_principal_type",
                     "multiplicity": "optional",
                     "isComposite": false,
                     "isUnique": false,
                     "isIndexable": true,
                     "reverseAttributeName": null
                 }
             ]
         },
+        {
+            "superTypes":[
+            ],
+            "hierarchicalMetaTypeName": "org.apache.hadoop.metadata.typesystem.types.ClassType",
+            "typeName": "hive_column",
+            "attributeDefinitions":[
+                {
+                    "name": "name",
+                    "dataTypeName": "string",
+                    "multiplicity": "required",
+                    "isComposite": false,
+                    "isUnique": false,
+                    "isIndexable": true,
+                    "reverseAttributeName": null
+                },
+                {
+                    "name": "type",
+                    "dataTypeName": "string",
+                    "multiplicity": "required",
+                    "isComposite": false,
+                    "isUnique": false,
+                    "isIndexable": true,
+                    "reverseAttributeName": null
+                },
+                {
+                    "name": "comment",
+                    "dataTypeName": "string",
+                    "multiplicity": "optional",
+                    "isComposite": false,
+                    "isUnique": false,

@@ -919,4 +919,4 @@
             ]
         }
     ]
-}
\ No newline at end of file
+}