dataplatform / atlas · Commits

Commit 113313e2, authored May 26, 2015 by Suma S
Merge pull request #101 from hortonworks/master

Merging from master to DAL

Parents: cee73f6c, 63fd0a25
Showing 14 changed files with 146 additions and 420 deletions (+146, -420).
Changed files:

  addons/hive-bridge/pom.xml                                          +1   -1
  ...ache/hadoop/metadata/hive/bridge/HiveMetaStoreBridge.java        +25  -35
  ...n/java/org/apache/hadoop/metadata/hive/hook/HiveHook.java        +61  -293
  ...he/hadoop/metadata/hive/model/HiveDataModelGenerator.java        +2   -2
  addons/hive-bridge/src/site/twiki/Bridge-Hive.twiki                 +9   -1
  ...java/org/apache/hadoop/metadata/hive/hook/HiveHookIT.java        +22  -13
  ...e/hadoop/metadata/hive/hook/SSLAndKerberosHiveHookIT.java        +0   -34
  ...a/org/apache/hadoop/metadata/hive/hook/SSLHiveHookIT.java        +0   -34
  ...ava/org/apache/hadoop/metadata/MetadataServiceClient.java        +1   -0
  ...p/metadata/repository/typestore/GraphBackedTypeStore.java        +3   -3
  ...org/apache/hadoop/metadata/typesystem/types/EnumType.java        +2   -1
  .../hadoop/metadata/typesystem/json/TypesSerialization.scala        +1   -1
  ...in/java/org/apache/hadoop/metadata/web/util/Servlets.java        +16  -2
  ...hadoop/metadata/web/resources/EntityJerseyResourceIT.java        +3   -0
addons/hive-bridge/pom.xml

@@ -33,7 +33,7 @@
     <packaging>jar</packaging>

     <properties>
-        <hive.version>1.1.0</hive.version>
+        <hive.version>1.2.0</hive.version>
         <calcite.version>0.9.2-incubating</calcite.version>
         <hadoop.version>2.6.0</hadoop.version>
     </properties>
addons/hive-bridge/src/main/java/org/apache/hadoop/metadata/hive/bridge/HiveMetaStoreBridge.java

@@ -32,13 +32,10 @@ import org.apache.hadoop.hive.ql.metadata.Table;
 import org.apache.hadoop.metadata.MetadataServiceClient;
 import org.apache.hadoop.metadata.hive.model.HiveDataModelGenerator;
 import org.apache.hadoop.metadata.hive.model.HiveDataTypes;
 import org.apache.hadoop.metadata.typesystem.ITypedReferenceableInstance;
 import org.apache.hadoop.metadata.typesystem.Referenceable;
 import org.apache.hadoop.metadata.typesystem.Struct;
 import org.apache.hadoop.metadata.typesystem.json.InstanceSerialization;
 import org.apache.hadoop.metadata.typesystem.json.Serialization;
 import org.apache.hadoop.metadata.typesystem.persistence.Id;
 import org.apache.hadoop.metadata.typesystem.types.TypeSystem;
 import org.codehaus.jettison.json.JSONArray;
 import org.codehaus.jettison.json.JSONException;
 import org.codehaus.jettison.json.JSONObject;

@@ -55,20 +52,9 @@ import java.util.Set;
  */
 public class HiveMetaStoreBridge {
     private static final String DEFAULT_DGI_URL = "http://localhost:21000/";

-    public static class Pair<S, T> {
-        public S first;
-        public T second;
-
-        public Pair(S first, T second) {
-            this.first = first;
-            this.second = second;
-        }
-
-        public static <S, T> Pair of(S first, T second) {
-            return new Pair(first, second);
-        }
-    }
+    public static final String HIVE_CLUSTER_NAME = "hive.cluster.name";
+    public static final String DEFAULT_CLUSTER_NAME = "primary";
+    private final String clusterName;

     public static final String DGI_URL_PROPERTY = "hive.hook.dgi.url";

@@ -82,6 +68,7 @@ public class HiveMetaStoreBridge {
      * @param hiveConf
      */
     public HiveMetaStoreBridge(HiveConf hiveConf) throws Exception {
+        clusterName = hiveConf.get(HIVE_CLUSTER_NAME, DEFAULT_CLUSTER_NAME);
         hiveClient = Hive.get(hiveConf);
         metadataServiceClient = new MetadataServiceClient(hiveConf.get(DGI_URL_PROPERTY, DEFAULT_DGI_URL));
     }

@@ -107,16 +94,20 @@ public class HiveMetaStoreBridge {
     /**
      * Gets reference for the database
      *
-     * @param dbName database name
+     * @param databaseName
+     * @param clusterName cluster name
      * @return Reference for database if exists, else null
      * @throws Exception
      */
-    private Referenceable getDatabaseReference(String dbName) throws Exception {
-        LOG.debug("Getting reference for database {}", dbName);
+    private Referenceable getDatabaseReference(String databaseName, String clusterName) throws Exception {
+        LOG.debug("Getting reference for database {}", databaseName);

         String typeName = HiveDataTypes.HIVE_DB.getName();
         MetadataServiceClient dgiClient = getMetadataServiceClient();
-        JSONArray results = dgiClient.rawSearch(typeName, "name", dbName);
+        String dslQuery = String.format("%s where name = '%s' and clusterName = '%s'",
+                HiveDataTypes.HIVE_DB.getName(), databaseName, clusterName);
+        JSONArray results = dgiClient.searchByDSL(dslQuery);

         if (results.length() == 0) {
             return null;
         } else {

@@ -126,13 +117,14 @@ public class HiveMetaStoreBridge {
     }

     public Referenceable registerDatabase(String databaseName) throws Exception {
-        Referenceable dbRef = getDatabaseReference(databaseName);
+        Referenceable dbRef = getDatabaseReference(databaseName, clusterName);
         if (dbRef == null) {
             LOG.info("Importing objects from databaseName : " + databaseName);
             Database hiveDB = hiveClient.getDatabase(databaseName);

             dbRef = new Referenceable(HiveDataTypes.HIVE_DB.getName());
             dbRef.set("name", hiveDB.getName());
+            dbRef.set("clusterName", clusterName);
             dbRef.set("description", hiveDB.getDescription());
             dbRef.set("locationUri", hiveDB.getLocationUri());
             dbRef.set("parameters", hiveDB.getParameters());

@@ -168,7 +160,7 @@ public class HiveMetaStoreBridge {
         Referenceable tableReferenceable = registerTable(databaseReferenceable, databaseName, tableName);

         // Import Partitions
-        Referenceable sdReferenceable = getSDForTable(databaseReferenceable, tableName);
+        Referenceable sdReferenceable = getSDForTable(databaseName, tableName);
         importPartitions(databaseName, tableName, databaseReferenceable, tableReferenceable, sdReferenceable);

         // Import Indexes

@@ -179,28 +171,26 @@ public class HiveMetaStoreBridge {
     /**
      * Gets reference for the table
      *
-     * @param dbRef
+     * @param dbName
      * @param tableName table name
      * @return table reference if exists, else null
      * @throws Exception
      */
-    private Referenceable getTableReference(Referenceable dbRef, String tableName) throws Exception {
-        LOG.debug("Getting reference for table {}.{}", dbRef, tableName);
+    private Referenceable getTableReference(String dbName, String tableName) throws Exception {
+        LOG.debug("Getting reference for table {}.{}", dbName, tableName);

         String typeName = HiveDataTypes.HIVE_TABLE.getName();
         MetadataServiceClient dgiClient = getMetadataServiceClient();

         //todo DSL support for reference doesn't work. is the usage right?
         // String query = String.format("%s where dbName = \"%s\" and tableName = \"%s\"", typeName, dbRef.getId().id, tableName);
-        String query = String.format("%s where name = \"%s\"", typeName, tableName);
+        String query = String.format("%s where name = '%s', dbName where name = '%s' and clusterName = '%s'",
+                HiveDataTypes.HIVE_TABLE.getName(), tableName, dbName, clusterName);
         JSONArray results = dgiClient.searchByDSL(query);
         if (results.length() == 0) {
             return null;
         } else {
             //There should be just one instance with the given name
             String guid = getGuidFromDSLResponse(results.getJSONObject(0));
-            LOG.debug("Got reference for table {}.{} = {}", dbRef, tableName, guid);
+            LOG.debug("Got reference for table {}.{} = {}", dbName, tableName, guid);
             return new Referenceable(guid, typeName, null);
         }
     }

@@ -209,10 +199,10 @@ public class HiveMetaStoreBridge {
         return jsonObject.getJSONObject("$id$").getString("id");
     }

-    private Referenceable getSDForTable(Referenceable dbRef, String tableName) throws Exception {
-        Referenceable tableRef = getTableReference(dbRef, tableName);
+    private Referenceable getSDForTable(String dbName, String tableName) throws Exception {
+        Referenceable tableRef = getTableReference(dbName, tableName);
         if (tableRef == null) {
-            throw new IllegalArgumentException("Table " + dbRef + "." + tableName + " doesn't exist");
+            throw new IllegalArgumentException("Table " + dbName + "." + tableName + " doesn't exist");
         }

         MetadataServiceClient dgiClient = getMetadataServiceClient();

@@ -228,7 +218,7 @@ public class HiveMetaStoreBridge {
     public Referenceable registerTable(Referenceable dbReference, String dbName, String tableName) throws Exception {
         LOG.info("Attempting to register table [" + tableName + "]");
-        Referenceable tableRef = getTableReference(dbReference, tableName);
+        Referenceable tableRef = getTableReference(dbName, tableName);
         if (tableRef == null) {
             LOG.info("Importing objects from " + dbName + "." + tableName);
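To make the DSL-based lookups above concrete, here is a hedged sketch of an equivalent standalone client call; the endpoint, database and table names are made up, and the literal type names stand in for HiveDataTypes.HIVE_DB.getName() / HiveDataTypes.HIVE_TABLE.getName():

    import org.apache.hadoop.metadata.MetadataServiceClient;
    import org.codehaus.jettison.json.JSONArray;

    public class DslLookupSketch {
        public static void main(String[] args) throws Exception {
            // hypothetical endpoint, same form as DEFAULT_DGI_URL above
            MetadataServiceClient client = new MetadataServiceClient("http://localhost:21000/");

            // database lookup is now scoped by cluster, as in getDatabaseReference(...)
            String dbQuery = String.format("%s where name = '%s' and clusterName = '%s'",
                    "hive_db", "default", "primary");
            JSONArray dbs = client.searchByDSL(dbQuery);

            // table lookup follows the same shape, as in getTableReference(...)
            String tableQuery = String.format("%s where name = '%s', dbName where name = '%s' and clusterName = '%s'",
                    "hive_table", "sales", "default", "primary");
            JSONArray tables = client.searchByDSL(tableQuery);

            System.out.println(dbs.length() + " db match(es), " + tables.length() + " table match(es)");
        }
    }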
addons/hive-bridge/src/main/java/org/apache/hadoop/metadata/hive/hook/HiveHook.java

@@ -37,7 +37,6 @@ package org.apache.hadoop.metadata.hive.hook;
 import com.google.common.util.concurrent.ThreadFactoryBuilder;
-import org.antlr.runtime.tree.Tree;
 import org.apache.hadoop.hive.conf.HiveConf;
 import org.apache.hadoop.hive.ql.QueryPlan;
 import org.apache.hadoop.hive.ql.exec.ExplainTask;

@@ -48,33 +47,16 @@ import org.apache.hadoop.hive.ql.hooks.HookContext;
 import org.apache.hadoop.hive.ql.hooks.ReadEntity;
 import org.apache.hadoop.hive.ql.hooks.WriteEntity;
 import org.apache.hadoop.hive.ql.metadata.Table;
 import org.apache.hadoop.hive.ql.parse.ASTNode;
 import org.apache.hadoop.hive.ql.parse.BaseSemanticAnalyzer;
 import org.apache.hadoop.hive.ql.parse.HiveParser;
 import org.apache.hadoop.hive.ql.parse.HiveSemanticAnalyzerHook;
 import org.apache.hadoop.hive.ql.parse.HiveSemanticAnalyzerHookContext;
 import org.apache.hadoop.hive.ql.parse.ParseDriver;
 import org.apache.hadoop.hive.ql.parse.SemanticException;
 import org.apache.hadoop.hive.ql.plan.ExplainWork;
 import org.apache.hadoop.hive.ql.plan.HiveOperation;
 import org.apache.hadoop.metadata.MetadataServiceClient;
 import org.apache.hadoop.metadata.hive.bridge.HiveMetaStoreBridge;
 import org.apache.hadoop.metadata.hive.model.HiveDataTypes;
 import org.apache.hadoop.metadata.typesystem.Referenceable;
 import org.codehaus.jettison.json.JSONArray;
 import org.codehaus.jettison.json.JSONObject;
 import org.json.JSONObject;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 import java.io.BufferedWriter;
 import java.io.File;
 import java.io.FileWriter;
 import java.io.Serializable;
 import java.util.ArrayList;
 import java.util.HashMap;
 import java.util.HashSet;
 import java.util.List;
 import java.util.Map;
 import java.util.Set;
 import java.util.concurrent.ExecutorService;
 import java.util.concurrent.LinkedBlockingQueue;

@@ -84,7 +66,7 @@ import java.util.concurrent.TimeUnit;
 /**
  * DgiHook sends lineage information to the DgiSever.
  */
-public class HiveHook implements ExecuteWithHookContext, HiveSemanticAnalyzerHook {
+public class HiveHook implements ExecuteWithHookContext {

     private static final Logger LOG = LoggerFactory.getLogger(HiveHook.class);

@@ -115,17 +97,12 @@
         int maxThreads = hiveConf.getInt(MAX_THREADS, maxThreadsDefault);
         long keepAliveTime = hiveConf.getLong(KEEP_ALIVE_TIME, keepAliveTimeDefault);

-        executor = new ThreadPoolExecutor(minThreads, maxThreads, keepAliveTime, TimeUnit.MILLISECONDS,
-                new LinkedBlockingQueue<Runnable>(),
-                new ThreadFactoryBuilder()
-                        .setDaemon(true)
-                        .setNameFormat("DGI Logger %d")
-                        .build());
+        executor = new ThreadPoolExecutor(minThreads, maxThreads, keepAliveTime, TimeUnit.MILLISECONDS,
+                new LinkedBlockingQueue<Runnable>(),
+                new ThreadFactoryBuilder().setDaemon(true).setNameFormat("DGI Logger %d").build());

         try {
-            Runtime.getRuntime().addShutdownHook(new Thread() {
+            Runtime.getRuntime().addShutdownHook(new Thread() {
                 @Override
                 public void run() {
                     try {

@@ -137,8 +114,7 @@
                     }
                     // shutdown client
                 }
-            }
-            );
+            });
         } catch (IllegalStateException is) {
             LOG.info("Attempting to send msg while shutdown in progress.");
         }

@@ -146,6 +122,19 @@
         LOG.info("Created DGI Hook");
     }

+    class HiveEvent {
+        public HiveConf conf;
+
+        public Set<ReadEntity> inputs;
+        public Set<WriteEntity> outputs;
+
+        public String user;
+        public HiveOperation operation;
+        public QueryPlan queryPlan;
+        public HookContext.HookType hookType;
+        public JSONObject jsonPlan;
+    }
+
     @Override
     public void run(final HookContext hookContext) throws Exception {
         if (executor == null) {

@@ -154,44 +143,53 @@
         }

         // clone to avoid concurrent access
+        final HiveEvent event = new HiveEvent();
         final HiveConf conf = new HiveConf(hookContext.getConf());
         boolean debug = conf.get("hive.hook.dgi.synchronous", "false").equals("true");

+        event.conf = conf;
+        event.inputs = hookContext.getInputs();
+        event.outputs = hookContext.getOutputs();
+        event.user = hookContext.getUserName() == null ? hookContext.getUgi().getUserName() : hookContext.getUserName();
+        event.operation = HiveOperation.valueOf(hookContext.getOperationName());
+        event.queryPlan = hookContext.getQueryPlan();
+        event.hookType = hookContext.getHookType();
+        //todo throws NPE
+        // event.jsonPlan = getQueryPlan(event);
+        event.jsonPlan = new JSONObject();

         if (debug) {
-            fireAndForget(hookContext, conf);
+            fireAndForget(event);
         } else {
-            executor.submit(new Runnable() {
+            executor.submit(new Runnable() {
                 @Override
                 public void run() {
                     try {
-                        fireAndForget(hookContext, conf);
+                        fireAndForget(event);
                     } catch (Throwable e) {
                         LOG.info("DGI hook failed", e);
                     }
                 }
-            }
-            );
+            });
         }
     }

-    private void fireAndForget(HookContext hookContext, HiveConf conf) throws Exception {
-        assert hookContext.getHookType() == HookContext.HookType.POST_EXEC_HOOK : "Non-POST_EXEC_HOOK not supported!";
+    private void fireAndForget(HiveEvent event) throws Exception {
+        assert event.hookType == HookContext.HookType.POST_EXEC_HOOK : "Non-POST_EXEC_HOOK not supported!";

-        LOG.info("Entered DGI hook for hook type {} operation {}", hookContext.getHookType(), hookContext.getOperationName());
-        HiveOperation operation = HiveOperation.valueOf(hookContext.getOperationName());
-        HiveMetaStoreBridge dgiBridge = new HiveMetaStoreBridge(conf);
+        LOG.info("Entered DGI hook for hook type {} operation {}", event.hookType, event.operation);
+        HiveMetaStoreBridge dgiBridge = new HiveMetaStoreBridge(event.conf);

         if (!typesRegistered) {
             dgiBridge.registerHiveDataModel();
             typesRegistered = true;
         }

-        switch (operation) {
+        switch (event.operation) {
         case CREATEDATABASE:
-            Set<WriteEntity> outputs = hookContext.getOutputs();
+            Set<WriteEntity> outputs = event.outputs;
             for (WriteEntity entity : outputs) {
                 if (entity.getType() == Entity.Type.DATABASE) {
                     dgiBridge.registerDatabase(entity.getDatabase().getName());

@@ -200,7 +198,7 @@
             break;

         case CREATETABLE:
-            outputs = hookContext.getOutputs();
+            outputs = event.outputs;
             for (WriteEntity entity : outputs) {
                 if (entity.getType() == Entity.Type.TABLE) {

@@ -214,41 +212,35 @@
             break;

         case CREATETABLE_AS_SELECT:
-            registerCTAS(dgiBridge, hookContext, conf);
+            registerCTAS(dgiBridge, event);
             break;

         default:
         }
     }

-    private void registerCTAS(HiveMetaStoreBridge dgiBridge, HookContext hookContext, HiveConf conf) throws Exception {
-        Set<ReadEntity> inputs = hookContext.getInputs();
-        Set<WriteEntity> outputs = hookContext.getOutputs();
+    private void registerCTAS(HiveMetaStoreBridge dgiBridge, HiveEvent event) throws Exception {
+        Set<ReadEntity> inputs = event.inputs;
+        Set<WriteEntity> outputs = event.outputs;

         //Even explain CTAS has operation name as CREATETABLE_AS_SELECT
         if (inputs.isEmpty() && outputs.isEmpty()) {
             LOG.info("Explain statement. Skipping...");
         }

-        //todo hookContext.getUserName() is null in hdp sandbox 2.2.4
-        String user = hookContext.getUserName() == null ? System.getProperty("user.name") : hookContext.getUserName();
-        HiveOperation operation = HiveOperation.valueOf(hookContext.getOperationName());
-        String queryId = null;
-        String queryStr = null;
-        long queryStartTime = 0;
-
-        QueryPlan plan = hookContext.getQueryPlan();
-        if (plan != null) {
-            queryId = plan.getQueryId();
-            queryStr = plan.getQueryString();
-            queryStartTime = plan.getQueryStartTime();
-        }
+        if (event.queryPlan == null) {
+            LOG.info("Query plan is missing. Skipping...");
+        }
+
+        String queryId = event.queryPlan.getQueryId();
+        String queryStr = event.queryPlan.getQueryStr();
+        long queryStartTime = event.queryPlan.getQueryStartTime();

         LOG.debug("Registering CTAS query: {}", queryStr);
         Referenceable processReferenceable = new Referenceable(HiveDataTypes.HIVE_PROCESS.getName());
-        processReferenceable.set("name", operation.getOperationName());
+        processReferenceable.set("name", event.operation.getOperationName());
         processReferenceable.set("startTime", queryStartTime);
-        processReferenceable.set("userName", user);
+        processReferenceable.set("userName", event.user);
         List<Referenceable> source = new ArrayList<>();
         for (ReadEntity readEntity : inputs) {
             if (readEntity.getTyp() == Entity.Type.TABLE) {

@@ -269,7 +261,7 @@
         processReferenceable.set("outputTables", target);
         processReferenceable.set("queryText", queryStr);
         processReferenceable.set("queryId", queryId);
-        processReferenceable.set("queryPlan", getQueryPlan(hookContext, conf));
+        processReferenceable.set("queryPlan", event.jsonPlan.toString());
         processReferenceable.set("endTime", System.currentTimeMillis());

         //TODO set

@@ -278,234 +270,10 @@
     }

-    private String getQueryPlan(HookContext hookContext, HiveConf conf) throws Exception {
-        //We need to somehow get the sem associated with the plan and use it here.
-        MySemanticAnaylzer sem = new MySemanticAnaylzer(conf);
-        QueryPlan queryPlan = hookContext.getQueryPlan();
-        sem.setInputs(queryPlan.getInputs());
-        ExplainWork ew = new ExplainWork(null, null, queryPlan.getRootTasks(), queryPlan.getFetchTask(), null,
-                sem, false, true, false, false, false);
-        ExplainTask explain = new ExplainTask();
-        explain.initialize(conf, queryPlan, null);
-        org.json.JSONObject explainPlan = explain.getJSONPlan(null, ew);
-        return explainPlan.toString();
-    }
+    private JSONObject getQueryPlan(HiveEvent event) throws Exception {
+        ExplainTask explain = new ExplainTask();
+        explain.initialize(event.conf, event.queryPlan, null);
+        List<Task<?>> rootTasks = event.queryPlan.getRootTasks();
+        return explain.getJSONPlan(null, null, rootTasks, event.queryPlan.getFetchTask(), true, false, false);
+    }

-    private void analyzeHiveParseTree(ASTNode ast) {
-        String astStr = ast.dump();
-        Tree tab = ast.getChild(0);
-        String fullTableName;
-        boolean isExternal = false;
-        boolean isTemporary = false;
-        String inputFormat = null;
-        String outputFormat = null;
-        String serde = null;
-        String storageHandler = null;
-        String likeTableName = null;
-        String comment = null;
-        Tree ctasNode = null;
-        Tree rowFormatNode = null;
-        String location = null;
-        Map<String, String> serdeProps = new HashMap<>();
-
-        try {
-            BufferedWriter fw = new BufferedWriter(new FileWriter(new File("/tmp/dgi/", "ASTDump"), true));
-            fw.write("Full AST Dump" + astStr);
-
-            switch (ast.getToken().getType()) {
-            case HiveParser.TOK_CREATETABLE:
-                if (tab.getType() != HiveParser.TOK_TABNAME
-                        || (tab.getChildCount() != 1 && tab.getChildCount() != 2)) {
-                    LOG.error("Ignoring malformed Create table statement");
-                }
-                if (tab.getChildCount() == 2) {
-                    String dbName = BaseSemanticAnalyzer.unescapeIdentifier(tab.getChild(0).getText());
-                    String tableName = BaseSemanticAnalyzer.unescapeIdentifier(tab.getChild(1).getText());
-                    fullTableName = dbName + "." + tableName;
-                } else {
-                    fullTableName = BaseSemanticAnalyzer.unescapeIdentifier(tab.getChild(0).getText());
-                }
-                LOG.info("Creating table " + fullTableName);
-                int numCh = ast.getChildCount();
-                for (int num = 1; num < numCh; num++) {
-                    ASTNode child = (ASTNode) ast.getChild(num);
-                    // Handle storage format
-                    switch (child.getToken().getType()) {
-                    case HiveParser.TOK_TABLEFILEFORMAT:
-                        if (child.getChildCount() < 2) {
-                            throw new SemanticException("Incomplete specification of File Format. "
-                                    + "You must provide InputFormat, OutputFormat.");
-                        }
-                        inputFormat = BaseSemanticAnalyzer.unescapeSQLString(child.getChild(0).getText());
-                        outputFormat = BaseSemanticAnalyzer.unescapeSQLString(child.getChild(1).getText());
-                        if (child.getChildCount() == 3) {
-                            serde = BaseSemanticAnalyzer.unescapeSQLString(child.getChild(2).getText());
-                        }
-                        break;
-                    case HiveParser.TOK_STORAGEHANDLER:
-                        storageHandler = BaseSemanticAnalyzer.unescapeSQLString(child.getChild(0).getText());
-                        if (child.getChildCount() == 2) {
-                            BaseSemanticAnalyzer.readProps((ASTNode) (child.getChild(1).getChild(0)), serdeProps);
-                        }
-                        break;
-                    case HiveParser.TOK_FILEFORMAT_GENERIC:
-                        ASTNode grandChild = (ASTNode) child.getChild(0);
-                        String name = (grandChild == null ? "" : grandChild.getText()).trim().toUpperCase();
-                        if (name.isEmpty()) {
-                            LOG.error("File format in STORED AS clause is empty");
-                            break;
-                        }
-                        break;
-                    }
-
-                    switch (child.getToken().getType()) {
-                    case HiveParser.KW_EXTERNAL:
-                        isExternal = true;
-                        break;
-                    case HiveParser.KW_TEMPORARY:
-                        isTemporary = true;
-                        break;
-                    case HiveParser.TOK_LIKETABLE:
-                        if (child.getChildCount() > 0) {
-                            likeTableName = BaseSemanticAnalyzer.getUnescapedName((ASTNode) child.getChild(0));
-                        }
-                        break;
-                    case HiveParser.TOK_QUERY:
-                        ctasNode = child;
-                        break;
-                    case HiveParser.TOK_TABLECOMMENT:
-                        comment = BaseSemanticAnalyzer.unescapeSQLString(child.getChild(0).getText());
-                        break;
-                    case HiveParser.TOK_TABLEPARTCOLS:
-                    case HiveParser.TOK_TABCOLLIST:
-                    case HiveParser.TOK_ALTERTABLE_BUCKETS:
-                        break;
-                    case HiveParser.TOK_TABLEROWFORMAT:
-                        rowFormatNode = child;
-                        break;
-                    case HiveParser.TOK_TABLELOCATION:
-                        location = BaseSemanticAnalyzer.unescapeSQLString(child.getChild(0).getText());
-                        break;
-                    case HiveParser.TOK_TABLEPROPERTIES:
-                        break;
-                    case HiveParser.TOK_TABLESERIALIZER:
-                        child = (ASTNode) child.getChild(0);
-                        serde = BaseSemanticAnalyzer.unescapeSQLString(child.getChild(0).getText());
-                        break;
-                    case HiveParser.TOK_TABLESKEWED:
-                        break;
-                    default:
-                        throw new AssertionError("Unknown token: " + child.getToken());
-                    }
-                }
-                StringBuilder sb = new StringBuilder(1024);
-                sb.append("Full table name: ").append(fullTableName).append('\n');
-                sb.append("\tisTemporary: ").append(isTemporary).append('\n');
-                sb.append("\tIsExternal: ").append(isExternal).append('\n');
-                if (inputFormat != null) {
-                    sb.append("\tinputFormat: ").append(inputFormat).append('\n');
-                }
-                if (outputFormat != null) {
-                    sb.append("\toutputFormat: ").append(outputFormat).append('\n');
-                }
-                if (serde != null) {
-                    sb.append("\tserde: ").append(serde).append('\n');
-                }
-                if (storageHandler != null) {
-                    sb.append("\tstorageHandler: ").append(storageHandler).append('\n');
-                }
-                if (likeTableName != null) {
-                    sb.append("\tlikeTableName: ").append(likeTableName);
-                }
-                if (comment != null) {
-                    sb.append("\tcomment: ").append(comment);
-                }
-                if (location != null) {
-                    sb.append("\tlocation: ").append(location);
-                }
-                if (ctasNode != null) {
-                    sb.append("\tctasNode: ").append(((ASTNode) ctasNode).dump());
-                }
-                if (rowFormatNode != null) {
-                    sb.append("\trowFormatNode: ").append(((ASTNode) rowFormatNode).dump());
-                }
-                fw.write(sb.toString());
-            }
-            fw.flush();
-            fw.close();
-        } catch (Exception e) {
-            LOG.error("Unable to log logical plan to file", e);
-        }
-    }
-
-    private void parseQuery(String sqlText) throws Exception {
-        ParseDriver parseDriver = new ParseDriver();
-        ASTNode node = parseDriver.parse(sqlText);
-        analyzeHiveParseTree(node);
-    }
-
-    /**
-     * This is an attempt to use the parser. Sematnic issues are not handled here.
-     * <p/>
-     * Trying to recompile the query runs into some issues in the preExec
-     * hook but we need to make sure all the semantic issues are handled. May be we should save the AST in the
-     * Semantic analyzer and have it available in the preExec hook so that we walk with it freely.
-     *
-     * @param context
-     * @param ast
-     * @return
-     * @throws SemanticException
-     */
-    @Override
-    public ASTNode preAnalyze(HiveSemanticAnalyzerHookContext context, ASTNode ast) throws SemanticException {
-        analyzeHiveParseTree(ast);
-        return ast;
-    }
-
-    @Override
-    public void postAnalyze(HiveSemanticAnalyzerHookContext context, List<Task<? extends Serializable>> rootTasks)
-            throws SemanticException {
-    }
-
-    private class MySemanticAnaylzer extends BaseSemanticAnalyzer {
-
-        public MySemanticAnaylzer(HiveConf conf) throws SemanticException {
-            super(conf);
-        }
-
-        public void analyzeInternal(ASTNode ast) throws SemanticException {
-            throw new RuntimeException("Not implemented");
-        }
-
-        public void setInputs(HashSet<ReadEntity> inputs) {
-            this.inputs = inputs;
-        }
-    }
 }
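The core of the HiveHook change is that the asynchronous worker no longer touches HookContext directly; everything it needs is copied into a HiveEvent on the calling thread first. A minimal, self-contained sketch of that snapshot-then-submit pattern, using generic stand-in types rather than the project's classes:

    import java.util.concurrent.ExecutorService;
    import java.util.concurrent.Executors;

    public class SnapshotThenSubmit {
        // stand-in for HookContext: mutable state owned by the caller
        static class Context { String operation; String user; }

        // stand-in for HiveEvent: an immutable copy handed to the worker
        static class Event {
            final String operation; final String user;
            Event(Context ctx) { this.operation = ctx.operation; this.user = ctx.user; }
        }

        public static void main(String[] args) {
            ExecutorService executor = Executors.newSingleThreadExecutor();
            Context ctx = new Context();
            ctx.operation = "CREATETABLE_AS_SELECT";
            ctx.user = "hive";

            // copy everything the async task needs before the caller's state can change
            final Event event = new Event(ctx);
            executor.submit(() -> System.out.println(event.user + " ran " + event.operation));
            executor.shutdown();
        }
    }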
addons/hive-bridge/src/main/java/org/apache/hadoop/metadata/hive/model/HiveDataModelGenerator.java

@@ -280,6 +280,8 @@ public class HiveDataModelGenerator {
         AttributeDefinition[] attributeDefinitions = new AttributeDefinition[]{
                 new AttributeDefinition("name", DataTypes.STRING_TYPE.getName(), Multiplicity.REQUIRED, false, null),
+                new AttributeDefinition("clusterName", DataTypes.STRING_TYPE.getName(), Multiplicity.REQUIRED, false, null),
                 new AttributeDefinition("description", DataTypes.STRING_TYPE.getName(), Multiplicity.OPTIONAL, false, null),
                 new AttributeDefinition("locationUri", DataTypes.STRING_TYPE.getName(),

@@ -322,8 +324,6 @@ public class HiveDataModelGenerator {
         AttributeDefinition[] attributeDefinitions = new AttributeDefinition[]{
                 new AttributeDefinition("name", DataTypes.STRING_TYPE.getName(), Multiplicity.REQUIRED, false, null),
-                //new AttributeDefinition("type", DefinedTypes.HIVE_TYPE.getName(), Multiplicity
-                // .REQUIRED, false, null),
                 new AttributeDefinition("type", DataTypes.STRING_TYPE.getName(), Multiplicity.REQUIRED, false, null),
                 new AttributeDefinition("comment", DataTypes.STRING_TYPE.getName(),
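The clusterName field is declared with the same AttributeDefinition constructor used throughout this generator. As a hedged sketch (the package paths are assumed from this repository's layout, and the "owner" attribute is purely hypothetical), an equivalent required string attribute would look like this:

    import org.apache.hadoop.metadata.typesystem.types.AttributeDefinition;
    import org.apache.hadoop.metadata.typesystem.types.DataTypes;
    import org.apache.hadoop.metadata.typesystem.types.Multiplicity;

    public class AttributeSketch {
        public static void main(String[] args) {
            // hypothetical attribute, mirroring the clusterName definition added above:
            // name, string type, required, non-composite, no reverse attribute
            AttributeDefinition ownerAttr = new AttributeDefinition("owner",
                    DataTypes.STRING_TYPE.getName(), Multiplicity.REQUIRED, false, null);
            System.out.println("defined attribute: owner");
        }
    }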
addons/hive-bridge/src/site/twiki/Bridge-Hive.twiki

@@ -29,6 +29,10 @@ hive conf directory:
     <name>hive.hook.dgi.url</name>
     <value>http://localhost:21000/</value>
   </property>
+  <property>
+    <name>hive.cluster.name</name>
+    <value>primary</value>
+  </property>
 </verbatim>
 Usage: <dgi package>/bin/import-hive.sh. The logs are in <dgi package>/logs/import-hive.log

@@ -44,12 +48,16 @@ The hook submits the request to a thread pool executor to avoid blocking the com
     <value>org.apache.hadoop.metadata.hive.hook.HiveHook</value>
   </property>
 </verbatim>
-   * Add the following property in hive-ste.xml with the DGI endpoint for your set-up
+   * Add the following properties in hive-ste.xml with the DGI endpoint for your set-up
 <verbatim>
   <property>
     <name>hive.hook.dgi.url</name>
     <value>http://localhost:21000/</value>
   </property>
+  <property>
+    <name>hive.cluster.name</name>
+    <value>primary</value>
+  </property>
 </verbatim>
    * Add 'export HIVE_AUX_JARS_PATH=<dgi package>/hook/hive' in hive-env.sh
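For orientation, a rough sketch of how these two properties are read on the Java side; the keys and defaults mirror the constants this commit adds to HiveMetaStoreBridge, but the snippet itself is illustrative rather than part of the project:

    import org.apache.hadoop.hive.conf.HiveConf;

    public class HookConfigSketch {
        public static void main(String[] args) {
            HiveConf hiveConf = new HiveConf();
            // falls back to the documented defaults when hive-site.xml does not set the properties
            String dgiUrl = hiveConf.get("hive.hook.dgi.url", "http://localhost:21000/");
            String clusterName = hiveConf.get("hive.cluster.name", "primary");
            System.out.println("DGI endpoint: " + dgiUrl + ", cluster: " + clusterName);
        }
    }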
addons/hive-bridge/src/test/java/org/apache/hadoop/metadata/hive/hook/HiveHookIT.java

@@ -24,16 +24,15 @@ import org.apache.hadoop.hive.ql.Driver;
 import org.apache.hadoop.hive.ql.session.SessionState;
 import org.apache.hadoop.metadata.MetadataServiceClient;
 import org.apache.hadoop.metadata.hive.bridge.HiveMetaStoreBridge;
 import org.apache.hadoop.metadata.hive.model.HiveDataModelGenerator;
 import org.apache.hadoop.metadata.hive.model.HiveDataTypes;
 import org.codehaus.jettison.json.JSONArray;
 import org.codehaus.jettison.json.JSONObject;
 import org.testng.Assert;
 import org.testng.annotations.BeforeClass;
 import org.testng.annotations.Test;

 public class HiveHookIT {
     private static final String DGI_URL = "http://localhost:21000/";
+    private static final String CLUSTER_NAME = "test";
     private Driver driver;
     private MetadataServiceClient dgiCLient;
     private SessionState ss;

@@ -59,6 +58,7 @@ public class HiveHookIT {
         hiveConf.set(HiveMetaStoreBridge.DGI_URL_PROPERTY, DGI_URL);
         hiveConf.set("javax.jdo.option.ConnectionURL", "jdbc:derby:./target/metastore_db;create=true");
         hiveConf.set("hive.hook.dgi.synchronous", "true");
+        hiveConf.set(HiveMetaStoreBridge.HIVE_CLUSTER_NAME, CLUSTER_NAME);
         return hiveConf;
     }

@@ -82,11 +82,11 @@ public class HiveHookIT {
         String tableName = "table" + RandomStringUtils.randomAlphanumeric(5).toLowerCase();
         runCommand("create table " + dbName + "." + tableName + "(id int, name string)");
-        assertTableIsRegistered(tableName);
+        assertTableIsRegistered(dbName, tableName);

         tableName = "table" + RandomStringUtils.randomAlphanumeric(5).toLowerCase();
         runCommand("create table " + tableName + "(id int, name string)");
-        assertTableIsRegistered(tableName);
+        assertTableIsRegistered("default", tableName);

         //Create table where database doesn't exist, will create database instance as well
         assertDatabaseIsRegistered("default");

@@ -97,24 +97,33 @@ public class HiveHookIT {
         String tableName = "table" + RandomStringUtils.randomAlphanumeric(5).toLowerCase();
         runCommand("create table " + tableName + "(id int, name string)");

-        String newTableName = "table" + RandomStringUtils.randomAlphanumeric(5).toLowerCase();
-        String query = "create table " + newTableName + " as select * from " + tableName;
+        String ctasTableName = "table" + RandomStringUtils.randomAlphanumeric(5).toLowerCase();
+        String query = "create table " + ctasTableName + " as select * from " + tableName;
         runCommand(query);

-        assertTableIsRegistered(newTableName);
-        assertInstanceIsRegistered(HiveDataTypes.HIVE_PROCESS.getName(), "queryText", query);
+        assertTableIsRegistered("default", ctasTableName);
+        assertProcessIsRegistered(query);
     }

-    private void assertTableIsRegistered(String tableName) throws Exception {
-        assertInstanceIsRegistered(HiveDataTypes.HIVE_TABLE.getName(), "name", tableName);
+    private void assertProcessIsRegistered(String queryStr) throws Exception {
+        String dslQuery = String.format("%s where queryText = '%s'", HiveDataTypes.HIVE_PROCESS.getName(), queryStr);
+        assertInstanceIsRegistered(dslQuery);
     }

+    private void assertTableIsRegistered(String dbName, String tableName) throws Exception {
+        String query = String.format("%s where name = '%s', dbName where name = '%s' and clusterName = '%s'",
+                HiveDataTypes.HIVE_TABLE.getName(), tableName, dbName, CLUSTER_NAME);
+        assertInstanceIsRegistered(query);
+    }
+
     private void assertDatabaseIsRegistered(String dbName) throws Exception {
-        assertInstanceIsRegistered(HiveDataTypes.HIVE_DB.getName(), "name", dbName);
+        String query = String.format("%s where name = '%s' and clusterName = '%s'",
+                HiveDataTypes.HIVE_DB.getName(), dbName, CLUSTER_NAME);
+        assertInstanceIsRegistered(query);
     }

-    private void assertInstanceIsRegistered(String typeName, String colName, String colValue) throws Exception {
-        JSONArray results = dgiCLient.rawSearch(typeName, colName, colValue);
+    private void assertInstanceIsRegistered(String dslQuery) throws Exception {
+        JSONArray results = dgiCLient.searchByDSL(dslQuery);
         Assert.assertEquals(results.length(), 1);
     }
 }
addons/hive-bridge/src/test/java/org/apache/hadoop/metadata/hive/hook/SSLAndKerberosHiveHookIT.java

@@ -205,40 +205,6 @@ public class SSLAndKerberosHiveHookIT extends BaseSSLAndKerberosTest {
         assertDatabaseIsRegistered(dbName);
     }

-    @Test
-    public void testCreateTable() throws Exception {
-        String dbName = "db" + RandomStringUtils.randomAlphanumeric(5).toLowerCase();
-        runCommand("create database " + dbName);
-
-        String tableName = "table" + RandomStringUtils.randomAlphanumeric(5).toLowerCase();
-        runCommand("create table " + dbName + "." + tableName + "(id int, name string)");
-        assertTableIsRegistered(tableName);
-
-        tableName = "table" + RandomStringUtils.randomAlphanumeric(5).toLowerCase();
-        runCommand("create table " + tableName + "(id int, name string)");
-        assertTableIsRegistered(tableName);
-
-        //Create table where database doesn't exist, will create database instance as well
-        assertDatabaseIsRegistered("default");
-    }
-
-    @Test
-    public void testCTAS() throws Exception {
-        String tableName = "table" + RandomStringUtils.randomAlphanumeric(5).toLowerCase();
-        runCommand("create table " + tableName + "(id int, name string)");
-
-        String newTableName = "table" + RandomStringUtils.randomAlphanumeric(5).toLowerCase();
-        String query = "create table " + newTableName + " as select * from " + tableName;
-        runCommand(query);
-
-        assertTableIsRegistered(newTableName);
-        assertInstanceIsRegistered(HiveDataTypes.HIVE_PROCESS.getName(), "queryText", query);
-    }
-
-    private void assertTableIsRegistered(String tableName) throws Exception {
-        assertInstanceIsRegistered(HiveDataTypes.HIVE_TABLE.getName(), "name", tableName);
-    }
-
-    private void assertDatabaseIsRegistered(String dbName) throws Exception {
-        assertInstanceIsRegistered(HiveDataTypes.HIVE_DB.getName(), "name", dbName);
-    }
addons/hive-bridge/src/test/java/org/apache/hadoop/metadata/hive/hook/SSLHiveHookIT.java

@@ -208,40 +208,6 @@ public class SSLHiveHookIT {
         assertDatabaseIsRegistered(dbName);
     }

-    @Test
-    public void testCreateTable() throws Exception {
-        String dbName = "db" + RandomStringUtils.randomAlphanumeric(5).toLowerCase();
-        runCommand("create database " + dbName);
-
-        String tableName = "table" + RandomStringUtils.randomAlphanumeric(5).toLowerCase();
-        runCommand("create table " + dbName + "." + tableName + "(id int, name string)");
-        assertTableIsRegistered(tableName);
-
-        tableName = "table" + RandomStringUtils.randomAlphanumeric(5).toLowerCase();
-        runCommand("create table " + tableName + "(id int, name string)");
-        assertTableIsRegistered(tableName);
-
-        //Create table where database doesn't exist, will create database instance as well
-        assertDatabaseIsRegistered("default");
-    }
-
-    @Test
-    public void testCTAS() throws Exception {
-        String tableName = "table" + RandomStringUtils.randomAlphanumeric(5).toLowerCase();
-        runCommand("create table " + tableName + "(id int, name string)");
-
-        String newTableName = "table" + RandomStringUtils.randomAlphanumeric(5).toLowerCase();
-        String query = "create table " + newTableName + " as select * from " + tableName;
-        runCommand(query);
-
-        assertTableIsRegistered(newTableName);
-        assertInstanceIsRegistered(HiveDataTypes.HIVE_PROCESS.getName(), "queryText", query);
-    }
-
-    private void assertTableIsRegistered(String tableName) throws Exception {
-        assertInstanceIsRegistered(HiveDataTypes.HIVE_TABLE.getName(), "name", tableName);
-    }
-
-    private void assertDatabaseIsRegistered(String dbName) throws Exception {
-        assertInstanceIsRegistered(HiveDataTypes.HIVE_DB.getName(), "name", dbName);
-    }
client/src/main/java/org/apache/hadoop/metadata/MetadataServiceClient.java

@@ -55,6 +55,7 @@ public class MetadataServiceClient {
     public static final String DEFINITION = "definition";
     public static final String ERROR = "error";
+    public static final String STACKTRACE = "stackTrace";
     public static final String REQUEST_ID = "requestId";
     public static final String RESULTS = "results";
     public static final String COUNT = "count";
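These constants name the fields of the JSON error envelope that the web layer returns (see the Servlets.java and EntityJerseyResourceIT.java changes below). A rough sketch of reading them on the client side, using a fabricated payload for illustration:

    import org.codehaus.jettison.json.JSONObject;

    public class ErrorFieldsSketch {
        public static void main(String[] args) throws Exception {
            // fabricated example of an error response body
            String body = "{\"error\": \"type hive_db not found\", \"stackTrace\": \"...\", \"requestId\": \"42\"}";
            JSONObject response = new JSONObject(body);
            System.out.println("error:      " + response.getString("error"));
            System.out.println("stackTrace: " + response.getString("stackTrace"));
            System.out.println("requestId:  " + response.getString("requestId"));
        }
    }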
repository/src/main/java/org/apache/hadoop/metadata/repository/typestore/GraphBackedTypeStore.java

@@ -53,8 +53,8 @@ import java.util.Iterator;
 import java.util.List;

 public class GraphBackedTypeStore implements ITypeStore {
-    public static final String VERTEX_TYPE = "typeSystem";
-    private static final String PROPERTY_PREFIX = "type.";
+    public static final String VERTEX_TYPE = Constants.INTERNAL_PROPERTY_KEY_PREFIX + "typeSystem";
+    private static final String PROPERTY_PREFIX = Constants.INTERNAL_PROPERTY_KEY_PREFIX + "type.";
     public static final String SUPERTYPE_EDGE_LABEL = PROPERTY_PREFIX + ".supertype";
     public static final String SUBTYPE_EDGE_LABEL = PROPERTY_PREFIX + ".subtype";

@@ -188,7 +188,7 @@ public class GraphBackedTypeStore implements ITypeStore {
             break;

         default:
-            throw new IllegalArgumentException("Unhandled type category " + attrDataType.getTypeCategory());
+            throw new IllegalArgumentException("Attribute cannot reference instances of type : " + attrDataType.getTypeCategory());
         }

         for (IDataType attrType : attrDataTypes) {
typesystem/src/main/java/org/apache/hadoop/metadata/typesystem/types/EnumType.java

@@ -21,6 +21,7 @@ package org.apache.hadoop.metadata.typesystem.types;
 import com.google.common.collect.ImmutableCollection;
 import com.google.common.collect.ImmutableMap;
 import org.apache.hadoop.metadata.MetadataException;
+import scala.math.BigInt;

 public class EnumType extends AbstractDataType<EnumValue> {

@@ -54,7 +55,7 @@ public class EnumType extends AbstractDataType<EnumValue> {
         EnumValue e = null;
         if (val instanceof EnumValue) {
             e = valueMap.get(((EnumValue) val).value);
-        } else if (val instanceof Integer) {
+        } else if (val instanceof Integer || val instanceof BigInt) {
             e = ordinalMap.get(val);
         } else if (val instanceof String) {
             e = valueMap.get(val);
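The added BigInt branch presumably covers enum ordinals that arrive through the Scala/JSON deserialization path boxed as scala.math.BigInt rather than java.lang.Integer, which the old Integer-only check would miss. A minimal standalone illustration of that dispatch (it needs the Scala library on the classpath, which the typesystem module already uses; the map contents are made up):

    import java.math.BigInteger;
    import java.util.HashMap;
    import java.util.Map;

    public class OrdinalDispatchSketch {
        public static void main(String[] args) {
            Map<Object, String> ordinalMap = new HashMap<>();
            ordinalMap.put(1, "ACTIVE"); // made-up enum value

            // how a deserialized ordinal may be boxed on the Scala side
            Object val = new scala.math.BigInt(BigInteger.valueOf(1));
            boolean oldCheck = val instanceof Integer;                                      // false
            boolean newCheck = val instanceof Integer || val instanceof scala.math.BigInt;  // true
            System.out.println("old check: " + oldCheck + ", new check: " + newCheck);
        }
    }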
typesystem/src/main/scala/org/apache/hadoop/metadata/typesystem/json/TypesSerialization.scala

@@ -126,7 +126,7 @@ object TypesSerialization {
     private def convertAttributeInfoToAttributeDef(aInfo: AttributeInfo) = {
         new AttributeDefinition(aInfo.name, aInfo.dataType().getName, aInfo.multiplicity,
-            aInfo.isComposite, aInfo.reverseAttributeName)
+            aInfo.isComposite, aInfo.isUnique, aInfo.isIndexable, aInfo.reverseAttributeName)
     }

     private def convertEnumTypeToEnumTypeDef(et: EnumType) = {
webapp/src/main/java/org/apache/hadoop/metadata/web/util/Servlets.java

@@ -30,6 +30,7 @@ import javax.servlet.http.HttpServletRequest;
 import javax.ws.rs.core.MediaType;
 import javax.ws.rs.core.Response;
 import java.io.IOException;
 import java.io.PrintWriter;
 import java.io.StringWriter;

 /**

@@ -98,7 +99,20 @@ public final class Servlets {
     }

     public static Response getErrorResponse(Throwable e, Response.Status status) {
-        return getErrorResponse(e.getMessage(), status);
+        Response response = getErrorResponse(e.getMessage(), status);
+        JSONObject responseJson = (JSONObject) response.getEntity();
+        try {
+            responseJson.put(MetadataServiceClient.STACKTRACE, printStackTrace(e));
+        } catch (JSONException e1) {
+            LOG.warn("Could not construct error Json rensponse", e1);
+        }
+        return response;
+    }
+
+    private static String printStackTrace(Throwable t) {
+        StringWriter sw = new StringWriter();
+        t.printStackTrace(new PrintWriter(sw));
+        return sw.toString();
     }

     public static Response getErrorResponse(String message, Response.Status status) {

@@ -108,7 +122,7 @@ public final class Servlets {
             errorJson.put(MetadataServiceClient.ERROR, errorEntity);
             errorEntity = errorJson;
         } catch (JSONException jsonE) {
-            LOG.warn("Could not construct error Json rensponse");
+            LOG.warn("Could not construct error Json rensponse", jsonE);
         }
         return Response.status(status)
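A hedged usage sketch of the updated overload: handing it a Throwable now produces a response whose JSON entity carries both the error message and the serialized stack trace (the exception and status chosen here are arbitrary examples):

    import javax.ws.rs.core.Response;
    import org.apache.hadoop.metadata.web.util.Servlets;
    import org.codehaus.jettison.json.JSONObject;

    public class ErrorResponseSketch {
        public static void main(String[] args) {
            Response response = Servlets.getErrorResponse(
                    new IllegalArgumentException("boom"), Response.Status.BAD_REQUEST);
            // the entity built by getErrorResponse(String, Status) is a JSONObject;
            // the Throwable overload above now adds the "stackTrace" field to it
            JSONObject entity = (JSONObject) response.getEntity();
            System.out.println(entity);
        }
    }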
webapp/src/test/java/org/apache/hadoop/metadata/web/resources/EntityJerseyResourceIT.java

@@ -199,6 +199,7 @@ public class EntityJerseyResourceIT extends BaseResourceIT {
         JSONObject response = new JSONObject(responseAsString);
         Assert.assertNotNull(response.get(MetadataServiceClient.ERROR));
+        Assert.assertNotNull(response.get(MetadataServiceClient.STACKTRACE));
     }

     @Test(dependsOnMethods = "testSubmitEntity")

@@ -238,6 +239,7 @@ public class EntityJerseyResourceIT extends BaseResourceIT {
         JSONObject response = new JSONObject(responseAsString);
         Assert.assertNotNull(response.get(MetadataServiceClient.ERROR));
+        Assert.assertNotNull(response.get(MetadataServiceClient.STACKTRACE));
     }

     @Test

@@ -395,6 +397,7 @@ public class EntityJerseyResourceIT extends BaseResourceIT {
         JSONObject response = new JSONObject(responseAsString);
         Assert.assertNotNull(response.get(MetadataServiceClient.ERROR));
+        Assert.assertNotNull(response.get(MetadataServiceClient.STACKTRACE));
     }

     private void createHiveTypes() throws Exception {