Skip to content
Projects
Groups
Snippets
Help
This project
Loading...
Sign in / Register
Toggle navigation
A
atlas
Project
Overview
Details
Activity
Cycle Analytics
Repository
Repository
Files
Commits
Branches
Tags
Contributors
Graph
Compare
Charts
Issues
0
Issues
0
List
Board
Labels
Milestones
Merge Requests
0
Merge Requests
0
CI / CD
CI / CD
Pipelines
Jobs
Schedules
Charts
Wiki
Wiki
Snippets
Snippets
Members
Members
Collapse sidebar
Close sidebar
Activity
Graph
Charts
Create a new issue
Jobs
Commits
Issue Boards
Open sidebar
dataplatform
atlas
Commits
40826d14
Commit
40826d14
authored
May 24, 2015
by
Shwetha GS
Browse files
Options
Browse Files
Download
Email Patches
Plain Diff
added cluster name attribute to hive db
parent
114fa269
Expand all
Show whitespace changes
Inline
Side-by-side
Showing
9 changed files
with
61 additions
and
121 deletions
+61
-121
pom.xml
addons/hive-bridge/pom.xml
+1
-1
HiveMetaStoreBridge.java
...ache/hadoop/metadata/hive/bridge/HiveMetaStoreBridge.java
+25
-35
HiveHook.java
...n/java/org/apache/hadoop/metadata/hive/hook/HiveHook.java
+0
-0
HiveDataModelGenerator.java
...he/hadoop/metadata/hive/model/HiveDataModelGenerator.java
+2
-2
Bridge-Hive.twiki
addons/hive-bridge/src/site/twiki/Bridge-Hive.twiki
+9
-1
HiveHookIT.java
...java/org/apache/hadoop/metadata/hive/hook/HiveHookIT.java
+22
-13
SSLAndKerberosHiveHookIT.java
...e/hadoop/metadata/hive/hook/SSLAndKerberosHiveHookIT.java
+0
-34
SSLHiveHookIT.java
...a/org/apache/hadoop/metadata/hive/hook/SSLHiveHookIT.java
+0
-34
EnumType.java
...org/apache/hadoop/metadata/typesystem/types/EnumType.java
+2
-1
No files found.
addons/hive-bridge/pom.xml
View file @
40826d14
...
...
@@ -33,7 +33,7 @@
<packaging>
jar
</packaging>
<properties>
<hive.version>
1.
1
.0
</hive.version>
<hive.version>
1.
2
.0
</hive.version>
<calcite.version>
0.9.2-incubating
</calcite.version>
<hadoop.version>
2.6.0
</hadoop.version>
</properties>
...
...
addons/hive-bridge/src/main/java/org/apache/hadoop/metadata/hive/bridge/HiveMetaStoreBridge.java
View file @
40826d14
...
...
@@ -32,13 +32,10 @@ import org.apache.hadoop.hive.ql.metadata.Table;
import
org.apache.hadoop.metadata.MetadataServiceClient
;
import
org.apache.hadoop.metadata.hive.model.HiveDataModelGenerator
;
import
org.apache.hadoop.metadata.hive.model.HiveDataTypes
;
import
org.apache.hadoop.metadata.typesystem.ITypedReferenceableInstance
;
import
org.apache.hadoop.metadata.typesystem.Referenceable
;
import
org.apache.hadoop.metadata.typesystem.Struct
;
import
org.apache.hadoop.metadata.typesystem.json.InstanceSerialization
;
import
org.apache.hadoop.metadata.typesystem.json.Serialization
;
import
org.apache.hadoop.metadata.typesystem.persistence.Id
;
import
org.apache.hadoop.metadata.typesystem.types.TypeSystem
;
import
org.codehaus.jettison.json.JSONArray
;
import
org.codehaus.jettison.json.JSONException
;
import
org.codehaus.jettison.json.JSONObject
;
...
...
@@ -55,20 +52,9 @@ import java.util.Set;
*/
public
class
HiveMetaStoreBridge
{
private
static
final
String
DEFAULT_DGI_URL
=
"http://localhost:21000/"
;
public
static
class
Pair
<
S
,
T
>
{
public
S
first
;
public
T
second
;
public
Pair
(
S
first
,
T
second
)
{
this
.
first
=
first
;
this
.
second
=
second
;
}
public
static
<
S
,
T
>
Pair
of
(
S
first
,
T
second
)
{
return
new
Pair
(
first
,
second
);
}
}
public
static
final
String
HIVE_CLUSTER_NAME
=
"hive.cluster.name"
;
public
static
final
String
DEFAULT_CLUSTER_NAME
=
"primary"
;
private
final
String
clusterName
;
public
static
final
String
DGI_URL_PROPERTY
=
"hive.hook.dgi.url"
;
...
...
@@ -82,6 +68,7 @@ public class HiveMetaStoreBridge {
* @param hiveConf
*/
public
HiveMetaStoreBridge
(
HiveConf
hiveConf
)
throws
Exception
{
clusterName
=
hiveConf
.
get
(
HIVE_CLUSTER_NAME
,
DEFAULT_CLUSTER_NAME
);
hiveClient
=
Hive
.
get
(
hiveConf
);
metadataServiceClient
=
new
MetadataServiceClient
(
hiveConf
.
get
(
DGI_URL_PROPERTY
,
DEFAULT_DGI_URL
));
}
...
...
@@ -107,16 +94,20 @@ public class HiveMetaStoreBridge {
/**
* Gets reference for the database
*
* @param dbName database name
*
* @param databaseName
* @param clusterName cluster name
* @return Reference for database if exists, else null
* @throws Exception
*/
private
Referenceable
getDatabaseReference
(
String
d
b
Name
)
throws
Exception
{
LOG
.
debug
(
"Getting reference for database {}"
,
d
b
Name
);
private
Referenceable
getDatabaseReference
(
String
d
atabaseName
,
String
cluster
Name
)
throws
Exception
{
LOG
.
debug
(
"Getting reference for database {}"
,
d
atabase
Name
);
String
typeName
=
HiveDataTypes
.
HIVE_DB
.
getName
();
MetadataServiceClient
dgiClient
=
getMetadataServiceClient
();
JSONArray
results
=
dgiClient
.
rawSearch
(
typeName
,
"name"
,
dbName
);
String
dslQuery
=
String
.
format
(
"%s where name = '%s' and clusterName = '%s'"
,
HiveDataTypes
.
HIVE_DB
.
getName
(),
databaseName
,
clusterName
);
JSONArray
results
=
dgiClient
.
searchByDSL
(
dslQuery
);
if
(
results
.
length
()
==
0
)
{
return
null
;
}
else
{
...
...
@@ -126,13 +117,14 @@ public class HiveMetaStoreBridge {
}
public
Referenceable
registerDatabase
(
String
databaseName
)
throws
Exception
{
Referenceable
dbRef
=
getDatabaseReference
(
databaseName
);
Referenceable
dbRef
=
getDatabaseReference
(
databaseName
,
clusterName
);
if
(
dbRef
==
null
)
{
LOG
.
info
(
"Importing objects from databaseName : "
+
databaseName
);
Database
hiveDB
=
hiveClient
.
getDatabase
(
databaseName
);
dbRef
=
new
Referenceable
(
HiveDataTypes
.
HIVE_DB
.
getName
());
dbRef
.
set
(
"name"
,
hiveDB
.
getName
());
dbRef
.
set
(
"clusterName"
,
clusterName
);
dbRef
.
set
(
"description"
,
hiveDB
.
getDescription
());
dbRef
.
set
(
"locationUri"
,
hiveDB
.
getLocationUri
());
dbRef
.
set
(
"parameters"
,
hiveDB
.
getParameters
());
...
...
@@ -168,7 +160,7 @@ public class HiveMetaStoreBridge {
Referenceable
tableReferenceable
=
registerTable
(
databaseReferenceable
,
databaseName
,
tableName
);
// Import Partitions
Referenceable
sdReferenceable
=
getSDForTable
(
database
Referenceabl
e
,
tableName
);
Referenceable
sdReferenceable
=
getSDForTable
(
database
Nam
e
,
tableName
);
importPartitions
(
databaseName
,
tableName
,
databaseReferenceable
,
tableReferenceable
,
sdReferenceable
);
// Import Indexes
...
...
@@ -179,28 +171,26 @@ public class HiveMetaStoreBridge {
/**
* Gets reference for the table
*
* @param db
Ref
* @param db
Name
* @param tableName table name
* @return table reference if exists, else null
* @throws Exception
*/
private
Referenceable
getTableReference
(
Referenceable
dbRef
,
String
tableName
)
throws
Exception
{
LOG
.
debug
(
"Getting reference for table {}.{}"
,
db
Ref
,
tableName
);
private
Referenceable
getTableReference
(
String
dbName
,
String
tableName
)
throws
Exception
{
LOG
.
debug
(
"Getting reference for table {}.{}"
,
db
Name
,
tableName
);
String
typeName
=
HiveDataTypes
.
HIVE_TABLE
.
getName
();
MetadataServiceClient
dgiClient
=
getMetadataServiceClient
();
//todo DSL support for reference doesn't work. is the usage right?
// String query = String.format("%s where dbName = \"%s\" and tableName = \"%s\"", typeName, dbRef.getId().id,
// tableName);
String
query
=
String
.
format
(
"%s where name = \"%s\""
,
typeName
,
tableName
);
String
query
=
String
.
format
(
"%s where name = '%s', dbName where name = '%s' and clusterName = '%s'"
,
HiveDataTypes
.
HIVE_TABLE
.
getName
(),
tableName
,
dbName
,
clusterName
);
JSONArray
results
=
dgiClient
.
searchByDSL
(
query
);
if
(
results
.
length
()
==
0
)
{
return
null
;
}
else
{
//There should be just one instance with the given name
String
guid
=
getGuidFromDSLResponse
(
results
.
getJSONObject
(
0
));
LOG
.
debug
(
"Got reference for table {}.{} = {}"
,
db
Ref
,
tableName
,
guid
);
LOG
.
debug
(
"Got reference for table {}.{} = {}"
,
db
Name
,
tableName
,
guid
);
return
new
Referenceable
(
guid
,
typeName
,
null
);
}
}
...
...
@@ -209,10 +199,10 @@ public class HiveMetaStoreBridge {
return
jsonObject
.
getJSONObject
(
"$id$"
).
getString
(
"id"
);
}
private
Referenceable
getSDForTable
(
Referenceable
dbRef
,
String
tableName
)
throws
Exception
{
Referenceable
tableRef
=
getTableReference
(
db
Ref
,
tableName
);
private
Referenceable
getSDForTable
(
String
dbName
,
String
tableName
)
throws
Exception
{
Referenceable
tableRef
=
getTableReference
(
db
Name
,
tableName
);
if
(
tableRef
==
null
)
{
throw
new
IllegalArgumentException
(
"Table "
+
db
Ref
+
"."
+
tableName
+
" doesn't exist"
);
throw
new
IllegalArgumentException
(
"Table "
+
db
Name
+
"."
+
tableName
+
" doesn't exist"
);
}
MetadataServiceClient
dgiClient
=
getMetadataServiceClient
();
...
...
@@ -228,7 +218,7 @@ public class HiveMetaStoreBridge {
public
Referenceable
registerTable
(
Referenceable
dbReference
,
String
dbName
,
String
tableName
)
throws
Exception
{
LOG
.
info
(
"Attempting to register table ["
+
tableName
+
"]"
);
Referenceable
tableRef
=
getTableReference
(
db
Referenc
e
,
tableName
);
Referenceable
tableRef
=
getTableReference
(
db
Nam
e
,
tableName
);
if
(
tableRef
==
null
)
{
LOG
.
info
(
"Importing objects from "
+
dbName
+
"."
+
tableName
);
...
...
addons/hive-bridge/src/main/java/org/apache/hadoop/metadata/hive/hook/HiveHook.java
View file @
40826d14
This diff is collapsed.
Click to expand it.
addons/hive-bridge/src/main/java/org/apache/hadoop/metadata/hive/model/HiveDataModelGenerator.java
View file @
40826d14
...
...
@@ -280,6 +280,8 @@ public class HiveDataModelGenerator {
AttributeDefinition
[]
attributeDefinitions
=
new
AttributeDefinition
[]{
new
AttributeDefinition
(
"name"
,
DataTypes
.
STRING_TYPE
.
getName
(),
Multiplicity
.
REQUIRED
,
false
,
null
),
new
AttributeDefinition
(
"clusterName"
,
DataTypes
.
STRING_TYPE
.
getName
(),
Multiplicity
.
REQUIRED
,
false
,
null
),
new
AttributeDefinition
(
"description"
,
DataTypes
.
STRING_TYPE
.
getName
(),
Multiplicity
.
OPTIONAL
,
false
,
null
),
new
AttributeDefinition
(
"locationUri"
,
DataTypes
.
STRING_TYPE
.
getName
(),
...
...
@@ -322,8 +324,6 @@ public class HiveDataModelGenerator {
AttributeDefinition
[]
attributeDefinitions
=
new
AttributeDefinition
[]{
new
AttributeDefinition
(
"name"
,
DataTypes
.
STRING_TYPE
.
getName
(),
Multiplicity
.
REQUIRED
,
false
,
null
),
//new AttributeDefinition("type", DefinedTypes.HIVE_TYPE.getName(), Multiplicity
// .REQUIRED, false, null),
new
AttributeDefinition
(
"type"
,
DataTypes
.
STRING_TYPE
.
getName
(),
Multiplicity
.
REQUIRED
,
false
,
null
),
new
AttributeDefinition
(
"comment"
,
DataTypes
.
STRING_TYPE
.
getName
(),
...
...
addons/hive-bridge/src/site/twiki/Bridge-Hive.twiki
View file @
40826d14
...
...
@@ -29,6 +29,10 @@ hive conf directory:
<name>hive.hook.dgi.url</name>
<value>http://localhost:21000/</value>
</property>
<property>
<name>hive.cluster.name</name>
<value>primary</value>
</property>
</verbatim>
Usage: <dgi package>/bin/import-hive.sh. The logs are in <dgi package>/logs/import-hive.log
...
...
@@ -44,12 +48,16 @@ The hook submits the request to a thread pool executor to avoid blocking the com
<value>org.apache.hadoop.metadata.hive.hook.HiveHook</value>
</property>
</verbatim>
* Add the following propert
y
in hive-site.xml with the DGI endpoint for your set-up
* Add the following propert
ies
in hive-site.xml with the DGI endpoint for your set-up
<verbatim>
<property>
<name>hive.hook.dgi.url</name>
<value>http://localhost:21000/</value>
</property>
<property>
<name>hive.cluster.name</name>
<value>primary</value>
</property>
</verbatim>
* Add 'export HIVE_AUX_JARS_PATH=<dgi package>/hook/hive' in hive-env.sh
...
...
addons/hive-bridge/src/test/java/org/apache/hadoop/metadata/hive/hook/HiveHookIT.java
View file @
40826d14
...
...
@@ -24,16 +24,15 @@ import org.apache.hadoop.hive.ql.Driver;
import
org.apache.hadoop.hive.ql.session.SessionState
;
import
org.apache.hadoop.metadata.MetadataServiceClient
;
import
org.apache.hadoop.metadata.hive.bridge.HiveMetaStoreBridge
;
import
org.apache.hadoop.metadata.hive.model.HiveDataModelGenerator
;
import
org.apache.hadoop.metadata.hive.model.HiveDataTypes
;
import
org.codehaus.jettison.json.JSONArray
;
import
org.codehaus.jettison.json.JSONObject
;
import
org.testng.Assert
;
import
org.testng.annotations.BeforeClass
;
import
org.testng.annotations.Test
;
public
class
HiveHookIT
{
private
static
final
String
DGI_URL
=
"http://localhost:21000/"
;
private
static
final
String
CLUSTER_NAME
=
"test"
;
private
Driver
driver
;
private
MetadataServiceClient
dgiCLient
;
private
SessionState
ss
;
...
...
@@ -59,6 +58,7 @@ public class HiveHookIT {
hiveConf
.
set
(
HiveMetaStoreBridge
.
DGI_URL_PROPERTY
,
DGI_URL
);
hiveConf
.
set
(
"javax.jdo.option.ConnectionURL"
,
"jdbc:derby:./target/metastore_db;create=true"
);
hiveConf
.
set
(
"hive.hook.dgi.synchronous"
,
"true"
);
hiveConf
.
set
(
HiveMetaStoreBridge
.
HIVE_CLUSTER_NAME
,
CLUSTER_NAME
);
return
hiveConf
;
}
...
...
@@ -82,11 +82,11 @@ public class HiveHookIT {
String
tableName
=
"table"
+
RandomStringUtils
.
randomAlphanumeric
(
5
).
toLowerCase
();
runCommand
(
"create table "
+
dbName
+
"."
+
tableName
+
"(id int, name string)"
);
assertTableIsRegistered
(
tableName
);
assertTableIsRegistered
(
dbName
,
tableName
);
tableName
=
"table"
+
RandomStringUtils
.
randomAlphanumeric
(
5
).
toLowerCase
();
runCommand
(
"create table "
+
tableName
+
"(id int, name string)"
);
assertTableIsRegistered
(
tableName
);
assertTableIsRegistered
(
"default"
,
tableName
);
//Create table where database doesn't exist, will create database instance as well
assertDatabaseIsRegistered
(
"default"
);
...
...
@@ -97,24 +97,33 @@ public class HiveHookIT {
String
tableName
=
"table"
+
RandomStringUtils
.
randomAlphanumeric
(
5
).
toLowerCase
();
runCommand
(
"create table "
+
tableName
+
"(id int, name string)"
);
String
new
TableName
=
"table"
+
RandomStringUtils
.
randomAlphanumeric
(
5
).
toLowerCase
();
String
query
=
"create table "
+
new
TableName
+
" as select * from "
+
tableName
;
String
ctas
TableName
=
"table"
+
RandomStringUtils
.
randomAlphanumeric
(
5
).
toLowerCase
();
String
query
=
"create table "
+
ctas
TableName
+
" as select * from "
+
tableName
;
runCommand
(
query
);
assertTableIsRegistered
(
new
TableName
);
assert
InstanceIsRegistered
(
HiveDataTypes
.
HIVE_PROCESS
.
getName
(),
"queryText"
,
query
);
assertTableIsRegistered
(
"default"
,
ctas
TableName
);
assert
ProcessIsRegistered
(
query
);
}
private
void
assertTableIsRegistered
(
String
tableName
)
throws
Exception
{
assertInstanceIsRegistered
(
HiveDataTypes
.
HIVE_TABLE
.
getName
(),
"name"
,
tableName
);
private
void
assertProcessIsRegistered
(
String
queryStr
)
throws
Exception
{
String
dslQuery
=
String
.
format
(
"%s where queryText = '%s'"
,
HiveDataTypes
.
HIVE_PROCESS
.
getName
(),
queryStr
);
assertInstanceIsRegistered
(
dslQuery
);
}
private
void
assertTableIsRegistered
(
String
dbName
,
String
tableName
)
throws
Exception
{
String
query
=
String
.
format
(
"%s where name = '%s', dbName where name = '%s' and clusterName = '%s'"
,
HiveDataTypes
.
HIVE_TABLE
.
getName
(),
tableName
,
dbName
,
CLUSTER_NAME
);
assertInstanceIsRegistered
(
query
);
}
private
void
assertDatabaseIsRegistered
(
String
dbName
)
throws
Exception
{
assertInstanceIsRegistered
(
HiveDataTypes
.
HIVE_DB
.
getName
(),
"name"
,
dbName
);
String
query
=
String
.
format
(
"%s where name = '%s' and clusterName = '%s'"
,
HiveDataTypes
.
HIVE_DB
.
getName
(),
dbName
,
CLUSTER_NAME
);
assertInstanceIsRegistered
(
query
);
}
private
void
assertInstanceIsRegistered
(
String
typeName
,
String
colName
,
String
colValue
)
throws
Exception
{
JSONArray
results
=
dgiCLient
.
rawSearch
(
typeName
,
colName
,
colValue
);
private
void
assertInstanceIsRegistered
(
String
dslQuery
)
throws
Exception
{
JSONArray
results
=
dgiCLient
.
searchByDSL
(
dslQuery
);
Assert
.
assertEquals
(
results
.
length
(),
1
);
}
}
addons/hive-bridge/src/test/java/org/apache/hadoop/metadata/hive/hook/SSLAndKerberosHiveHookIT.java
View file @
40826d14
...
...
@@ -205,40 +205,6 @@ public class SSLAndKerberosHiveHookIT extends BaseSSLAndKerberosTest {
assertDatabaseIsRegistered
(
dbName
);
}
@Test
public
void
testCreateTable
()
throws
Exception
{
String
dbName
=
"db"
+
RandomStringUtils
.
randomAlphanumeric
(
5
).
toLowerCase
();
runCommand
(
"create database "
+
dbName
);
String
tableName
=
"table"
+
RandomStringUtils
.
randomAlphanumeric
(
5
).
toLowerCase
();
runCommand
(
"create table "
+
dbName
+
"."
+
tableName
+
"(id int, name string)"
);
assertTableIsRegistered
(
tableName
);
tableName
=
"table"
+
RandomStringUtils
.
randomAlphanumeric
(
5
).
toLowerCase
();
runCommand
(
"create table "
+
tableName
+
"(id int, name string)"
);
assertTableIsRegistered
(
tableName
);
//Create table where database doesn't exist, will create database instance as well
assertDatabaseIsRegistered
(
"default"
);
}
@Test
public
void
testCTAS
()
throws
Exception
{
String
tableName
=
"table"
+
RandomStringUtils
.
randomAlphanumeric
(
5
).
toLowerCase
();
runCommand
(
"create table "
+
tableName
+
"(id int, name string)"
);
String
newTableName
=
"table"
+
RandomStringUtils
.
randomAlphanumeric
(
5
).
toLowerCase
();
String
query
=
"create table "
+
newTableName
+
" as select * from "
+
tableName
;
runCommand
(
query
);
assertTableIsRegistered
(
newTableName
);
assertInstanceIsRegistered
(
HiveDataTypes
.
HIVE_PROCESS
.
getName
(),
"queryText"
,
query
);
}
private
void
assertTableIsRegistered
(
String
tableName
)
throws
Exception
{
assertInstanceIsRegistered
(
HiveDataTypes
.
HIVE_TABLE
.
getName
(),
"name"
,
tableName
);
}
private
void
assertDatabaseIsRegistered
(
String
dbName
)
throws
Exception
{
assertInstanceIsRegistered
(
HiveDataTypes
.
HIVE_DB
.
getName
(),
"name"
,
dbName
);
}
...
...
addons/hive-bridge/src/test/java/org/apache/hadoop/metadata/hive/hook/SSLHiveHookIT.java
View file @
40826d14
...
...
@@ -208,40 +208,6 @@ public class SSLHiveHookIT {
assertDatabaseIsRegistered
(
dbName
);
}
@Test
public
void
testCreateTable
()
throws
Exception
{
String
dbName
=
"db"
+
RandomStringUtils
.
randomAlphanumeric
(
5
).
toLowerCase
();
runCommand
(
"create database "
+
dbName
);
String
tableName
=
"table"
+
RandomStringUtils
.
randomAlphanumeric
(
5
).
toLowerCase
();
runCommand
(
"create table "
+
dbName
+
"."
+
tableName
+
"(id int, name string)"
);
assertTableIsRegistered
(
tableName
);
tableName
=
"table"
+
RandomStringUtils
.
randomAlphanumeric
(
5
).
toLowerCase
();
runCommand
(
"create table "
+
tableName
+
"(id int, name string)"
);
assertTableIsRegistered
(
tableName
);
//Create table where database doesn't exist, will create database instance as well
assertDatabaseIsRegistered
(
"default"
);
}
@Test
public
void
testCTAS
()
throws
Exception
{
String
tableName
=
"table"
+
RandomStringUtils
.
randomAlphanumeric
(
5
).
toLowerCase
();
runCommand
(
"create table "
+
tableName
+
"(id int, name string)"
);
String
newTableName
=
"table"
+
RandomStringUtils
.
randomAlphanumeric
(
5
).
toLowerCase
();
String
query
=
"create table "
+
newTableName
+
" as select * from "
+
tableName
;
runCommand
(
query
);
assertTableIsRegistered
(
newTableName
);
assertInstanceIsRegistered
(
HiveDataTypes
.
HIVE_PROCESS
.
getName
(),
"queryText"
,
query
);
}
private
void
assertTableIsRegistered
(
String
tableName
)
throws
Exception
{
assertInstanceIsRegistered
(
HiveDataTypes
.
HIVE_TABLE
.
getName
(),
"name"
,
tableName
);
}
private
void
assertDatabaseIsRegistered
(
String
dbName
)
throws
Exception
{
assertInstanceIsRegistered
(
HiveDataTypes
.
HIVE_DB
.
getName
(),
"name"
,
dbName
);
}
...
...
typesystem/src/main/java/org/apache/hadoop/metadata/typesystem/types/EnumType.java
View file @
40826d14
...
...
@@ -21,6 +21,7 @@ package org.apache.hadoop.metadata.typesystem.types;
import
com.google.common.collect.ImmutableCollection
;
import
com.google.common.collect.ImmutableMap
;
import
org.apache.hadoop.metadata.MetadataException
;
import
scala.math.BigInt
;
public
class
EnumType
extends
AbstractDataType
<
EnumValue
>
{
...
...
@@ -54,7 +55,7 @@ public class EnumType extends AbstractDataType<EnumValue> {
EnumValue
e
=
null
;
if
(
val
instanceof
EnumValue
)
{
e
=
valueMap
.
get
(((
EnumValue
)
val
).
value
);
}
else
if
(
val
instanceof
Integer
)
{
}
else
if
(
val
instanceof
Integer
||
val
instanceof
BigInt
)
{
e
=
ordinalMap
.
get
(
val
);
}
else
if
(
val
instanceof
String
)
{
e
=
valueMap
.
get
(
val
);
...
...
Write
Preview
Markdown
is supported
0%
Try again
or
attach a new file
Attach a file
Cancel
You are about to add
0
people
to the discussion. Proceed with caution.
Finish editing this message first!
Cancel
Please
register
or
sign in
to comment