Skip to content
Projects
Groups
Snippets
Help
This project
Loading...
Sign in / Register
Toggle navigation
A
atlas
Project
Overview
Details
Activity
Cycle Analytics
Repository
Repository
Files
Commits
Branches
Tags
Contributors
Graph
Compare
Charts
Issues
0
Issues
0
List
Board
Labels
Milestones
Merge Requests
0
Merge Requests
0
CI / CD
CI / CD
Pipelines
Jobs
Schedules
Charts
Wiki
Wiki
Snippets
Snippets
Members
Members
Collapse sidebar
Close sidebar
Activity
Graph
Charts
Create a new issue
Jobs
Commits
Issue Boards
Open sidebar
dataplatform
atlas
Commits
255fa079
Commit
255fa079
authored
Jan 14, 2015
by
Venkatesh Seetharam
Browse files
Options
Browse Files
Download
Email Patches
Plain Diff
ISSUE-28 Add types resource for submit. Contributed by Venkatesh Seetharam
parent
29d3299d
Hide whitespace changes
Inline
Side-by-side
Showing
19 changed files
with
508 additions
and
155 deletions
+508
-155
Service.java
...main/java/org/apache/hadoop/metadata/service/Service.java
+2
-0
RepositoryMetadataModule.java
.../org/apache/hadoop/metadata/RepositoryMetadataModule.java
+12
-6
TypesChangeListener.java
.../apache/hadoop/metadata/listener/TypesChangeListener.java
+48
-0
DefaultMetadataService.java
...ache/hadoop/metadata/services/DefaultMetadataService.java
+97
-14
GraphBackedMetadataRepository.java
...doop/metadata/services/GraphBackedMetadataRepository.java
+15
-18
MetadataService.java
.../org/apache/hadoop/metadata/services/MetadataService.java
+6
-1
indexer.properties
repository/src/main/resources/indexer.properties
+0
-0
GraphBackedMetadataRepositoryTest.java
.../metadata/services/GraphBackedMetadataRepositoryTest.java
+5
-4
application.properties
repository/src/test/resources/application.properties
+13
-0
pom.xml
webapp/pom.xml
+2
-0
EntityResource.java
.../apache/hadoop/metadata/web/resources/EntityResource.java
+22
-80
TypesResource.java
...g/apache/hadoop/metadata/web/resources/TypesResource.java
+75
-20
Servlets.java
...in/java/org/apache/hadoop/metadata/web/util/Servlets.java
+9
-0
application.properties
webapp/src/main/resources/application.properties
+41
-0
web.xml
webapp/src/main/webapp/WEB-INF/web.xml
+0
-3
BaseResourceIT.java
.../apache/hadoop/metadata/web/resources/BaseResourceIT.java
+25
-0
EntityJerseyResourceIT.java
...hadoop/metadata/web/resources/EntityJerseyResourceIT.java
+5
-4
RexsterGraphJerseyResourceIT.java
.../metadata/web/resources/RexsterGraphJerseyResourceIT.java
+1
-5
TypesJerseyResourceIT.java
.../hadoop/metadata/web/resources/TypesJerseyResourceIT.java
+130
-0
No files found.
common/src/main/java/org/apache/hadoop/metadata/service/Service.java
View file @
255fa079
...
...
@@ -24,6 +24,8 @@ import java.io.IOException;
/**
* Service interface that's initialized at startup.
*/
//todo: needs to be removed, as it serves no purpose now with Guice
@Deprecated
public
interface
Service
extends
Closeable
{
/**
...
...
repository/src/main/java/org/apache/hadoop/metadata/RepositoryMetadataModule.java
View file @
255fa079
...
...
@@ -24,18 +24,18 @@
*/
package
org
.
apache
.
hadoop
.
metadata
;
import
com.google.inject.Scopes
;
import
com.google.inject.throwingproviders.ThrowingProviderBinder
;
import
com.thinkaurelius.titan.core.TitanGraph
;
import
org.apache.hadoop.metadata.services.DefaultMetadataService
;
import
org.apache.hadoop.metadata.services.GraphBackedMetadataRepository
;
import
org.apache.hadoop.metadata.services.GraphProvider
;
import
org.apache.hadoop.metadata.services.GraphService
;
import
org.apache.hadoop.metadata.services.GraphServiceConfigurator
;
import
org.apache.hadoop.metadata.services.MetadataRepository
;
import
org.apache.hadoop.metadata.services.MetadataService
;
import
org.apache.hadoop.metadata.services.TitanGraphProvider
;
import
org.apache.hadoop.metadata.storage.IRepository
;
import
org.apache.hadoop.metadata.storage.memory.MemRepository
;
import
com.google.inject.Scopes
;
import
com.google.inject.throwingproviders.ThrowingProviderBinder
;
import
com.thinkaurelius.titan.core.TitanGraph
;
/**
* Guice module for Repository module.
...
...
@@ -46,6 +46,7 @@ public class RepositoryMetadataModule extends com.google.inject.AbstractModule {
private
Class
<?
extends
GraphService
>
graphServiceClass
;
// MetadataRepositoryService implementation class
private
Class
<?
extends
MetadataRepository
>
metadataRepoClass
;
private
Class
<?
extends
MetadataService
>
metadataService
;
public
RepositoryMetadataModule
()
{
GraphServiceConfigurator
gsp
=
new
GraphServiceConfigurator
();
...
...
@@ -53,6 +54,7 @@ public class RepositoryMetadataModule extends com.google.inject.AbstractModule {
// get the impl classes for the repo and the graph service
this
.
graphServiceClass
=
gsp
.
getImplClass
();
this
.
metadataRepoClass
=
GraphBackedMetadataRepository
.
class
;
this
.
metadataService
=
DefaultMetadataService
.
class
;
}
protected
void
configure
()
{
...
...
@@ -66,7 +68,11 @@ public class RepositoryMetadataModule extends com.google.inject.AbstractModule {
// bind the MetadataRepositoryService interface to an implementation
bind
(
MetadataRepository
.
class
).
to
(
metadataRepoClass
);
// bind the GraphService interface to an implementation
bind
(
GraphService
.
class
).
to
(
graphServiceClass
);
// bind the MetadataService interface to an implementation
bind
(
MetadataService
.
class
).
to
(
metadataService
);
}
}
repository/src/main/java/org/apache/hadoop/metadata/listener/TypesChangeListener.java
0 → 100644
View file @
255fa079
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package
org
.
apache
.
hadoop
.
metadata
.
listener
;
import
org.apache.hadoop.metadata.MetadataException
;
import
org.apache.hadoop.metadata.types.IDataType
;
/**
* Types change notification listener.
*/
public
interface
TypesChangeListener
{
/**
* This is upon adding a new type to Store.
*
* @param typeName type name
* @param dataType data type
* @throws MetadataException
*/
void
onAdd
(
String
typeName
,
IDataType
dataType
)
throws
MetadataException
;
/**
* This is upon removing an existing type from the Store.
*
* @param typeName type name
* @throws MetadataException
*/
// void onRemove(String typeName) throws MetadataException;
// This is upon updating an existing type to the store
// void onChange() throws MetadataException;
}
repository/src/main/java/org/apache/hadoop/metadata/services/DefaultMetadataService.java
View file @
255fa079
...
...
@@ -18,26 +18,39 @@
package
org
.
apache
.
hadoop
.
metadata
.
services
;
import
java.io.IOException
;
import
java.util.List
;
import
javax.inject.Inject
;
import
com.google.common.base.Preconditions
;
import
org.apache.hadoop.metadata.ITypedReferenceableInstance
;
import
org.apache.hadoop.metadata.MetadataException
;
import
org.apache.hadoop.metadata.TypesDef
;
import
org.apache.hadoop.metadata.json.Serialization
$
;
import
org.apache.hadoop.metadata.json.TypesSerialization
;
import
org.apache.hadoop.metadata.listener.TypesChangeListener
;
import
org.apache.hadoop.metadata.types.IDataType
;
import
org.apache.hadoop.metadata.types.TypeSystem
;
import
org.codehaus.jettison.json.JSONException
;
import
org.codehaus.jettison.json.JSONObject
;
import
org.json.simple.JSONValue
;
import
org.json.simple.parser.ParseException
;
import
org.slf4j.Logger
;
import
org.slf4j.LoggerFactory
;
import
javax.inject.Inject
;
import
java.io.IOException
;
import
java.util.LinkedHashSet
;
import
java.util.List
;
import
java.util.Map
;
import
java.util.Set
;
public
class
DefaultMetadataService
implements
MetadataService
{
private
static
final
Logger
LOG
=
LoggerFactory
.
getLogger
(
DefaultMetadataService
.
class
);
private
final
Set
<
TypesChangeListener
>
typesChangeListeners
=
new
LinkedHashSet
<>();
private
final
TypeSystem
typeSystem
;
private
final
MetadataRepository
repository
;
@Inject
DefaultMetadataService
(
MetadataRepository
repository
)
throws
MetadataException
{
this
.
typeSystem
=
new
TypeSystem
();
...
...
@@ -53,8 +66,46 @@ public class DefaultMetadataService implements MetadataService {
* @return a unique id for this type
*/
@Override
public
String
createType
(
String
typeName
,
String
typeDefinition
)
throws
MetadataException
{
return
null
;
public
JSONObject
createType
(
String
typeName
,
String
typeDefinition
)
throws
MetadataException
{
try
{
validate
(
typeName
,
typeDefinition
);
TypesDef
typesDef
=
TypesSerialization
.
fromJson
(
typeDefinition
);
Map
<
String
,
IDataType
>
typesAdded
=
typeSystem
.
defineTypes
(
typesDef
);
onAdd
(
typesAdded
);
JSONObject
response
=
new
JSONObject
();
for
(
Map
.
Entry
<
String
,
IDataType
>
entry
:
typesAdded
.
entrySet
())
{
response
.
put
(
entry
.
getKey
(),
entry
.
getValue
().
getName
());
}
return
response
;
}
catch
(
ParseException
e
)
{
throw
new
MetadataException
(
"validation failed for: "
+
typeName
);
}
catch
(
JSONException
e
)
{
throw
new
MetadataException
(
"Unable to create response for: "
+
typeName
);
}
}
private
void
validate
(
String
typeName
,
String
typeDefinition
)
throws
ParseException
,
MetadataException
{
Preconditions
.
checkNotNull
(
typeName
,
"type name cannot be null"
);
Preconditions
.
checkNotNull
(
typeDefinition
,
"type definition cannot be null"
);
JSONValue
.
parseWithException
(
typeDefinition
);
// verify if the type already exists
String
existingTypeDefinition
=
null
;
try
{
existingTypeDefinition
=
getTypeDefinition
(
typeName
);
}
catch
(
MetadataException
ignore
)
{
// do nothing
}
if
(
existingTypeDefinition
!=
null
)
{
throw
new
MetadataException
(
"type is already defined for : "
+
typeName
);
}
}
/**
...
...
@@ -65,7 +116,8 @@ public class DefaultMetadataService implements MetadataService {
*/
@Override
public
String
getTypeDefinition
(
String
typeName
)
throws
MetadataException
{
return
null
;
final
IDataType
dataType
=
typeSystem
.
getDataType
(
IDataType
.
class
,
typeName
);
return
TypesSerialization
.
toJson
(
typeSystem
,
dataType
.
getName
());
}
/**
...
...
@@ -75,7 +127,7 @@ public class DefaultMetadataService implements MetadataService {
*/
@Override
public
List
<
String
>
getTypeNamesList
()
throws
MetadataException
{
return
null
;
return
typeSystem
.
getTypeNames
()
;
}
/**
...
...
@@ -88,9 +140,21 @@ public class DefaultMetadataService implements MetadataService {
@Override
public
String
createEntity
(
String
entityType
,
String
entityDefinition
)
throws
MetadataException
{
ITypedReferenceableInstance
entityInstance
=
Serialization
$
.
MODULE
$
.
fromJson
(
entityDefinition
);
return
repository
.
createEntity
(
entityInstance
,
entityType
);
try
{
validateEntity
(
entityDefinition
,
entityType
);
ITypedReferenceableInstance
entityInstance
=
Serialization
$
.
MODULE
$
.
fromJson
(
entityDefinition
);
return
repository
.
createEntity
(
entityInstance
,
entityType
);
}
catch
(
ParseException
e
)
{
throw
new
MetadataException
(
"validation failed for: "
+
entityType
);
}
}
private
void
validateEntity
(
String
entity
,
String
entityType
)
throws
ParseException
{
Preconditions
.
checkNotNull
(
entity
,
"entity cannot be null"
);
Preconditions
.
checkNotNull
(
entityType
,
"entity type cannot be null"
);
JSONValue
.
parseWithException
(
entity
);
}
/**
...
...
@@ -101,7 +165,9 @@ public class DefaultMetadataService implements MetadataService {
*/
@Override
public
String
getEntityDefinition
(
String
guid
)
throws
MetadataException
{
return
null
;
final
ITypedReferenceableInstance
instance
=
repository
.
getEntityDefinition
(
guid
);
return
Serialization
$
.
MODULE
$
.
toJson
(
instance
);
}
/**
...
...
@@ -129,6 +195,22 @@ public class DefaultMetadataService implements MetadataService {
throw
new
UnsupportedOperationException
();
}
private
void
onAdd
(
Map
<
String
,
IDataType
>
typesAdded
)
throws
MetadataException
{
for
(
TypesChangeListener
listener
:
typesChangeListeners
)
{
for
(
Map
.
Entry
<
String
,
IDataType
>
entry
:
typesAdded
.
entrySet
())
{
listener
.
onAdd
(
entry
.
getKey
(),
entry
.
getValue
());
}
}
}
public
void
registerListener
(
TypesChangeListener
listener
)
{
typesChangeListeners
.
add
(
listener
);
}
public
void
unregisterListener
(
TypesChangeListener
listener
)
{
typesChangeListeners
.
remove
(
listener
);
}
/**
* Starts the service. This method blocks until the service has completely started.
*
...
...
@@ -143,6 +225,7 @@ public class DefaultMetadataService implements MetadataService {
*/
@Override
public
void
stop
()
{
// do nothing
}
/**
...
...
repository/src/main/java/org/apache/hadoop/metadata/services/GraphBackedMetadataRepository.java
View file @
255fa079
...
...
@@ -18,18 +18,10 @@
package
org
.
apache
.
hadoop
.
metadata
.
services
;
import
java.io.IOException
;
import
java.util.ArrayList
;
import
java.util.Collections
;
import
java.util.HashMap
;
import
java.util.List
;
import
java.util.Map
;
import
java.util.UUID
;
import
java.util.concurrent.ConcurrentHashMap
;
import
java.util.concurrent.atomic.AtomicInteger
;
import
javax.inject.Inject
;
import
com.tinkerpop.blueprints.Direction
;
import
com.tinkerpop.blueprints.Edge
;
import
com.tinkerpop.blueprints.TransactionalGraph
;
import
com.tinkerpop.blueprints.Vertex
;
import
org.apache.hadoop.metadata.IReferenceableInstance
;
import
org.apache.hadoop.metadata.ITypedInstance
;
import
org.apache.hadoop.metadata.ITypedReferenceableInstance
;
...
...
@@ -48,10 +40,16 @@ import org.apache.hadoop.metadata.types.TypeSystem;
import
org.slf4j.Logger
;
import
org.slf4j.LoggerFactory
;
import
com.tinkerpop.blueprints.Direction
;
import
com.tinkerpop.blueprints.Edge
;
import
com.tinkerpop.blueprints.TransactionalGraph
;
import
com.tinkerpop.blueprints.Vertex
;
import
java.io.IOException
;
import
java.util.ArrayList
;
import
java.util.Collections
;
import
java.util.HashMap
;
import
java.util.List
;
import
java.util.Map
;
import
java.util.UUID
;
import
java.util.concurrent.ConcurrentHashMap
;
import
java.util.concurrent.atomic.AtomicInteger
;
import
javax.inject.Inject
;
/**
* An implementation backed by a Graph database provided
...
...
@@ -61,7 +59,6 @@ public class GraphBackedMetadataRepository implements MetadataRepository {
private
static
final
Logger
LOG
=
LoggerFactory
.
getLogger
(
GraphBackedMetadataRepository
.
class
);
public
static
final
String
NAME
=
GraphBackedMetadataRepository
.
class
.
getSimpleName
();
private
static
final
String
GUID_PROPERTY_KEY
=
"guid"
;
private
static
final
String
TIMESTAMP_PROPERTY_KEY
=
"timestamp"
;
...
...
@@ -71,7 +68,7 @@ public class GraphBackedMetadataRepository implements MetadataRepository {
private
final
AtomicInteger
ID_SEQ
=
new
AtomicInteger
(
0
);
// private ConcurrentHashMap<String, ITypedReferenceableInstance> types;
// todo: remove this
private
final
ConcurrentHashMap
<
String
,
ITypedReferenceableInstance
>
instances
;
private
final
GraphService
graphService
;
...
...
repository/src/main/java/org/apache/hadoop/metadata/services/MetadataService.java
View file @
255fa079
...
...
@@ -20,9 +20,13 @@ package org.apache.hadoop.metadata.services;
import
org.apache.hadoop.metadata.MetadataException
;
import
org.apache.hadoop.metadata.service.Service
;
import
org.codehaus.jettison.json.JSONObject
;
import
java.util.List
;
/**
* Metadata service.
*/
public
interface
MetadataService
extends
Service
{
/**
...
...
@@ -33,7 +37,8 @@ public interface MetadataService extends Service {
* @param typeDefinition definition as json
* @return a unique id for this type
*/
String
createType
(
String
typeName
,
String
typeDefinition
)
throws
MetadataException
;
JSONObject
createType
(
String
typeName
,
String
typeDefinition
)
throws
MetadataException
;
/**
* Return the definition for the given type.
...
...
repository/src/
test
/resources/indexer.properties
→
repository/src/
main
/resources/indexer.properties
View file @
255fa079
File moved
repository/src/test/java/org/apache/hadoop/metadata/services/GraphBackedMetadataRepositoryTest.java
View file @
255fa079
...
...
@@ -30,6 +30,7 @@ import com.tinkerpop.blueprints.Direction;
import
com.tinkerpop.blueprints.Edge
;
import
com.tinkerpop.blueprints.Vertex
;
@Test
(
enabled
=
false
)
public
class
GraphBackedMetadataRepositoryTest
extends
RepositoryModuleBaseTest
{
private
static
final
String
ENTITY_TYPE
=
"hive-table"
;
...
...
@@ -58,7 +59,7 @@ public class GraphBackedMetadataRepositoryTest extends RepositoryModuleBaseTest
public
void
tearDown
()
throws
Exception
{
}
@Test
@Test
(
enabled
=
false
)
public
void
testSubmitEntity
()
throws
Exception
{
Referenceable
hrDept
=
createDeptEg1
(
ts
);
ClassType
deptType
=
ts
.
getDataType
(
ClassType
.
class
,
"Department"
);
...
...
@@ -81,19 +82,19 @@ public class GraphBackedMetadataRepositoryTest extends RepositoryModuleBaseTest
}
}
@Test
(
dependsOnMethods
=
"testSubmitEntity"
)
@Test
(
dependsOnMethods
=
"testSubmitEntity"
,
enabled
=
false
)
public
void
testGetEntityDefinition
()
throws
Exception
{
ITypedReferenceableInstance
entity
=
repositoryService
.
getEntityDefinition
(
guid
);
Assert
.
assertNotNull
(
entity
);
}
@Test
@Test
(
enabled
=
false
)
public
void
testGetEntityDefinitionNonExistent
()
throws
Exception
{
ITypedReferenceableInstance
entity
=
repositoryService
.
getEntityDefinition
(
"blah"
);
Assert
.
assertNull
(
entity
);
}
@Test
@Test
(
enabled
=
false
)
public
void
testGetEntityList
()
throws
Exception
{
List
<
String
>
entityList
=
repositoryService
.
getEntityList
(
ENTITY_TYPE
);
Assert
.
assertNotNull
(
entityList
);
...
...
repository/src/test/resources/application.properties
View file @
255fa079
...
...
@@ -19,4 +19,17 @@
# GraphService implementation
metadata.graph.impl.class
=
org.apache.hadoop.metadata.services.TitanGraphService
# Graph implementation
#metadata.graph.blueprints.graph=com.thinkaurelius.titan.core.TitanFactory
# Graph Storage
metadata.graph.storage.backend
=
inmemory
# Graph Search Index
metadata.graph.index.search.backend
=
elasticsearch
metadata.graph.index.search.directory
=
target/data/es
metadata.graph.index.search.elasticsearch.client-only
=
false
metadata.graph.index.search.elasticsearch.local-mode
=
true
metadata.enableTLS
=
false
webapp/pom.xml
View file @
255fa079
...
...
@@ -243,6 +243,7 @@
<configuration>
<skip>
${skipITs}
</skip>
<!--only skip int tests -->
<connectors>
<!--
<connector implementation="org.mortbay.jetty.security.SslSocketConnector">
<port>21443</port>
<maxIdleTime>60000</maxIdleTime>
...
...
@@ -250,6 +251,7 @@
<keyPassword>metadata-passwd</keyPassword>
<password>metadata-passwd</password>
</connector>
-->
<connector
implementation=
"org.mortbay.jetty.nio.SelectChannelConnector"
>
<port>
21000
</port>
<maxIdleTime>
60000
</maxIdleTime>
...
...
webapp/src/main/java/org/apache/hadoop/metadata/web/resources/EntityResource.java
View file @
255fa079
...
...
@@ -18,14 +18,17 @@
package
org
.
apache
.
hadoop
.
metadata
.
web
.
resources
;
import
java.io.IOException
;
import
java.io.StringWriter
;
import
com.google.common.base.Preconditions
;
import
org.apache.hadoop.metadata.services.MetadataService
;
import
org.apache.hadoop.metadata.web.util.Servlets
;
import
org.codehaus.jettison.json.JSONObject
;
import
org.slf4j.Logger
;
import
org.slf4j.LoggerFactory
;
import
javax.inject.Inject
;
import
javax.inject.Singleton
;
import
javax.servlet.http.HttpServletRequest
;
import
javax.ws.rs.Consumes
;
import
javax.ws.rs.DELETE
;
import
javax.ws.rs.DefaultValue
;
import
javax.ws.rs.GET
;
import
javax.ws.rs.POST
;
...
...
@@ -38,18 +41,6 @@ import javax.ws.rs.core.Context;
import
javax.ws.rs.core.MediaType
;
import
javax.ws.rs.core.Response
;
import
org.apache.commons.io.IOUtils
;
import
org.apache.hadoop.metadata.MetadataException
;
import
org.apache.hadoop.metadata.services.MetadataService
;
import
org.apache.hadoop.metadata.web.util.Servlets
;
import
org.codehaus.jettison.json.JSONObject
;
import
org.json.simple.JSONValue
;
import
org.json.simple.parser.ParseException
;
import
org.slf4j.Logger
;
import
org.slf4j.LoggerFactory
;
import
com.google.common.base.Preconditions
;
/**
* Entity management operations as REST API.
*
...
...
@@ -68,7 +59,7 @@ public class EntityResource {
* Created by the Guice ServletModule and injected with the
* configured MetadataService.
*
* @param metadataService
* @param metadataService
metadata service handle
*/
@Inject
public
EntityResource
(
MetadataService
metadataService
)
{
...
...
@@ -82,13 +73,13 @@ public class EntityResource {
public
Response
submit
(
@Context
HttpServletRequest
request
,
@PathParam
(
"entityType"
)
final
String
entityType
)
{
try
{
final
String
entity
=
getEntity
(
request
,
entityType
);
final
String
entity
=
Servlets
.
getRequestPayload
(
request
);
System
.
out
.
println
(
"entity = "
+
entity
);
validateEntity
(
entity
,
entityType
);
final
String
guid
=
metadataService
.
createEntity
(
entity
,
entityType
);
JSONObject
response
=
new
JSONObject
();
response
.
put
(
"GUID"
,
guid
);
response
.
put
(
"requestId"
,
Thread
.
currentThread
().
getName
());
return
Response
.
ok
(
response
).
build
();
}
catch
(
Exception
e
)
{
...
...
@@ -97,19 +88,6 @@ public class EntityResource {
}
}
private
String
getEntity
(
HttpServletRequest
request
,
String
entityType
)
throws
IOException
{
StringWriter
writer
=
new
StringWriter
();
IOUtils
.
copy
(
request
.
getInputStream
(),
writer
);
return
writer
.
toString
();
}
private
void
validateEntity
(
String
entity
,
String
entityType
)
throws
ParseException
{
Preconditions
.
checkNotNull
(
entity
,
"entity cannot be null"
);
Preconditions
.
checkNotNull
(
entityType
,
"entity type cannot be null"
);
JSONValue
.
parseWithException
(
entity
);
}
@GET
@Path
(
"definition/{guid}"
)
@Produces
(
MediaType
.
APPLICATION_JSON
)
...
...
@@ -118,10 +96,19 @@ public class EntityResource {
try
{
final
String
entityDefinition
=
metadataService
.
getEntityDefinition
(
guid
);
return
(
entityDefinition
==
null
)
?
Response
.
status
(
Response
.
Status
.
NOT_FOUND
).
build
()
:
Response
.
ok
(
entityDefinition
).
build
();
}
catch
(
MetadataException
e
)
{
JSONObject
response
=
new
JSONObject
();
response
.
put
(
"requestId"
,
Thread
.
currentThread
().
getName
());
Response
.
Status
status
=
Response
.
Status
.
NOT_FOUND
;
if
(
entityDefinition
!=
null
)
{
response
.
put
(
"definition"
,
entityDefinition
);
status
=
Response
.
Status
.
OK
;
}
return
Response
.
status
(
status
).
entity
(
response
).
build
();
}
catch
(
Exception
e
)
{
LOG
.
error
(
"Action failed: {}\nError: {}"
,
Response
.
Status
.
INTERNAL_SERVER_ERROR
,
e
.
getMessage
());
throw
new
WebApplicationException
(
e
,
Response
...
...
@@ -148,49 +135,4 @@ public class EntityResource {
@QueryParam
(
"numResults"
)
Integer
resultsPerPage
)
{
return
Response
.
ok
().
build
();
}
@POST
@Path
(
"validate/{entityType}"
)
@Consumes
(
MediaType
.
APPLICATION_JSON
)
@Produces
(
MediaType
.
APPLICATION_JSON
)
public
Response
validate
(
@Context
HttpServletRequest
request
,
@PathParam
(
"entityType"
)
String
entityType
)
{
return
Response
.
ok
().
build
();
}
@DELETE
@Path
(
"delete/{entityType}/{entityName}"
)
@Produces
(
MediaType
.
APPLICATION_JSON
)
public
Response
delete
(
@Context
HttpServletRequest
request
,
@PathParam
(
"entityType"
)
final
String
entityType
,
@PathParam
(
"entityName"
)
final
String
entityName
)
{
return
Response
.
ok
().
build
();
}
@POST
@Path
(
"update/{entityType}/{entityName}"
)
@Consumes
(
MediaType
.
APPLICATION_JSON
)
@Produces
(
MediaType
.
APPLICATION_JSON
)
public
Response
update
(
@Context
HttpServletRequest
request
,
@PathParam
(
"entityType"
)
final
String
entityType
,
@PathParam
(
"entityName"
)
final
String
entityName
)
{
return
Response
.
ok
().
build
();
}
@GET
@Path
(
"status/{entityType}/{entityName}"
)
@Produces
(
MediaType
.
APPLICATION_JSON
)
public
Response
getStatus
(
@PathParam
(
"entityType"
)
String
entityType
,
@PathParam
(
"entityName"
)
String
entityName
)
{
return
Response
.
ok
().
build
();
}
@GET
@Path
(
"dependencies/{entityType}/{entityName}"
)
@Produces
(
MediaType
.
APPLICATION_JSON
)
public
Response
getDependencies
(
@PathParam
(
"entityType"
)
String
entityType
,
@PathParam
(
"entityName"
)
String
entityName
)
{
return
Response
.
ok
().
build
();
}
}
webapp/src/main/java/org/apache/hadoop/metadata/web/resources/TypesResource.java
View file @
255fa079
...
...
@@ -18,12 +18,27 @@
package
org
.
apache
.
hadoop
.
metadata
.
web
.
resources
;
import
org.apache.hadoop.metadata.services.MetadataService
;
import
org.apache.hadoop.metadata.web.util.Servlets
;
import
org.codehaus.jettison.json.JSONArray
;
import
org.codehaus.jettison.json.JSONObject
;
import
org.slf4j.Logger
;
import
org.slf4j.LoggerFactory
;
import
javax.inject.Inject
;
import
javax.inject.Singleton
;
import
javax.servlet.http.HttpServletRequest
;
import
javax.ws.rs.*
;
import
javax.ws.rs.Consumes
;
import
javax.ws.rs.GET
;
import
javax.ws.rs.POST
;
import
javax.ws.rs.Path
;
import
javax.ws.rs.PathParam
;
import
javax.ws.rs.Produces
;
import
javax.ws.rs.WebApplicationException
;
import
javax.ws.rs.core.Context
;
import
javax.ws.rs.core.MediaType
;
import
javax.ws.rs.core.Response
;
import
java.util.List
;
/**
* This class provides RESTful API for Types.
...
...
@@ -35,37 +50,77 @@ import javax.ws.rs.core.Response;
@Singleton
public
class
TypesResource
{
private
static
final
Logger
LOG
=
LoggerFactory
.
getLogger
(
EntityResource
.
class
);
private
final
MetadataService
metadataService
;
@Inject
public
TypesResource
(
MetadataService
metadataService
)
{
this
.
metadataService
=
metadataService
;
}
@POST
@Path
(
"submit/{type}"
)
@Path
(
"submit/{type
Name
}"
)
@Consumes
(
MediaType
.
APPLICATION_JSON
)
@Produces
(
MediaType
.
APPLICATION_JSON
)
public
Response
submit
(
@Context
HttpServletRequest
request
,
@PathParam
(
"type"
)
String
type
)
{
return
Response
.
ok
().
build
();
@PathParam
(
"typeName"
)
String
typeName
)
{
try
{
final
String
typeDefinition
=
Servlets
.
getRequestPayload
(
request
);
LOG
.
debug
(
"creating type {} with definition {} "
,
typeName
,
typeDefinition
);
JSONObject
typesAdded
=
metadataService
.
createType
(
typeName
,
typeDefinition
);
JSONObject
response
=
new
JSONObject
();
response
.
put
(
"typeName"
,
typeName
);
response
.
put
(
"types"
,
typesAdded
);
response
.
put
(
"requestId"
,
Thread
.
currentThread
().
getName
());
return
Response
.
ok
(
response
).
build
();
}
catch
(
Exception
e
)
{
LOG
.
error
(
"Unable to persist entity object"
,
e
);
throw
new
WebApplicationException
(
Servlets
.
getErrorResponse
(
e
,
Response
.
Status
.
BAD_REQUEST
));
}
}
@GET
@Path
(
"definition/{type}"
)
@Path
(
"definition/{type
Name
}"
)
@Produces
(
MediaType
.
APPLICATION_JSON
)
public
Response
getDefinition
(
@Context
HttpServletRequest
request
,
@PathParam
(
"type
"
)
String
typ
e
)
{
return
Response
.
ok
().
build
();
}
@PathParam
(
"type
Name"
)
String
typeNam
e
)
{
try
{
final
String
typeDefinition
=
metadataService
.
getTypeDefinition
(
typeName
);
@DELETE
@Path
(
"delete/{type}"
)
@Produces
(
MediaType
.
APPLICATION_JSON
)
public
Response
delete
(
@Context
HttpServletRequest
request
,
@PathParam
(
"type"
)
String
type
)
{
// todo - should this be supported?
return
Response
.
status
(
Response
.
Status
.
BAD_REQUEST
).
build
();
JSONObject
response
=
new
JSONObject
();
response
.
put
(
"typeName"
,
typeName
);
response
.
put
(
"definition"
,
typeDefinition
);
response
.
put
(
"requestId"
,
Thread
.
currentThread
().
getName
());
return
Response
.
ok
(
response
).
build
();
}
catch
(
Exception
e
)
{
LOG
.
error
(
"Unable to persist entity object"
,
e
);
throw
new
WebApplicationException
(
Servlets
.
getErrorResponse
(
e
,
Response
.
Status
.
BAD_REQUEST
));
}
}
@
POS
T
@Path
(
"
update/{type}
"
)
@
GE
T
@Path
(
"
list
"
)
@Produces
(
MediaType
.
APPLICATION_JSON
)
public
Response
update
(
@Context
HttpServletRequest
request
,
@PathParam
(
"type"
)
String
type
)
{
return
Response
.
ok
().
build
();
public
Response
getTypeNames
(
@Context
HttpServletRequest
request
)
{
try
{
final
List
<
String
>
typeNamesList
=
metadataService
.
getTypeNamesList
();
JSONObject
response
=
new
JSONObject
();
response
.
put
(
"list"
,
new
JSONArray
(
typeNamesList
));
response
.
put
(
"requestId"
,
Thread
.
currentThread
().
getName
());
return
Response
.
ok
(
response
).
build
();
}
catch
(
Exception
e
)
{
LOG
.
error
(
"Unable to persist entity object"
,
e
);
throw
new
WebApplicationException
(
Servlets
.
getErrorResponse
(
e
,
Response
.
Status
.
BAD_REQUEST
));
}
}
}
webapp/src/main/java/org/apache/hadoop/metadata/web/util/Servlets.java
View file @
255fa079
...
...
@@ -18,12 +18,15 @@
package
org
.
apache
.
hadoop
.
metadata
.
web
.
util
;
import
org.apache.commons.io.IOUtils
;
import
org.apache.commons.lang.StringUtils
;
import
org.codehaus.jettison.json.JSONObject
;
import
javax.servlet.http.HttpServletRequest
;
import
javax.ws.rs.core.MediaType
;
import
javax.ws.rs.core.Response
;
import
java.io.IOException
;
import
java.io.StringWriter
;
/**
* Utility functions for dealing with servlets.
...
...
@@ -102,4 +105,10 @@ public final class Servlets {
.
type
(
MediaType
.
APPLICATION_JSON
)
.
build
();
}
public
static
String
getRequestPayload
(
HttpServletRequest
request
)
throws
IOException
{
StringWriter
writer
=
new
StringWriter
();
IOUtils
.
copy
(
request
.
getInputStream
(),
writer
);
return
writer
.
toString
();
}
}
webapp/src/main/resources/application.properties
0 → 100644
View file @
255fa079
#
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
######### Graph Database Configs #########
# Graph implementation
#metadata.graph.blueprints.graph=com.thinkaurelius.titan.core.TitanFactory
# Graph Storage
metadata.graph.storage.backend
=
berkeleyje
metadata.graph.storage.directory
=
target/data/berkeley
# Graph Search Index
metadata.graph.index.search.backend
=
elasticsearch
metadata.graph.index.search.directory
=
target/data/es
metadata.graph.index.search.elasticsearch.client-only
=
false
metadata.graph.index.search.elasticsearch.local-mode
=
true
######### Graph Database Configs #########
######### Security Properties #########
# SSL config
metadata.enableTLS
=
false
######### Security Properties #########
webapp/src/main/webapp/WEB-INF/web.xml
View file @
255fa079
...
...
@@ -40,9 +40,6 @@
</filter-mapping>
<listener>
<listener-class>
org.apache.hadoop.metadata.web.listeners.ApplicationStartupListener
</listener-class>
</listener>
<listener>
<listener-class>
org.apache.hadoop.metadata.web.listeners.GuiceServletConfig
</listener-class>
</listener>
</web-app>
webapp/src/test/java/org/apache/hadoop/metadata/web/resources/BaseResourceIT.java
View file @
255fa079
package
org
.
apache
.
hadoop
.
metadata
.
web
.
resources
;
import
com.google.common.collect.ImmutableList
;
import
com.sun.jersey.api.client.Client
;
import
com.sun.jersey.api.client.WebResource
;
import
com.sun.jersey.api.client.config.DefaultClientConfig
;
import
org.apache.hadoop.metadata.types.AttributeDefinition
;
import
org.apache.hadoop.metadata.types.ClassType
;
import
org.apache.hadoop.metadata.types.HierarchicalTypeDefinition
;
import
org.apache.hadoop.metadata.types.IDataType
;
import
org.apache.hadoop.metadata.types.Multiplicity
;
import
org.apache.hadoop.metadata.types.TraitType
;
import
org.testng.annotations.BeforeClass
;
import
javax.ws.rs.core.UriBuilder
;
...
...
@@ -21,4 +28,22 @@ public class BaseResourceIT {
service
=
client
.
resource
(
UriBuilder
.
fromUri
(
baseUrl
).
build
());
}
protected
AttributeDefinition
createRequiredAttrDef
(
String
name
,
IDataType
dataType
)
{
return
new
AttributeDefinition
(
name
,
dataType
.
getName
(),
Multiplicity
.
REQUIRED
,
false
,
null
);
}
@SuppressWarnings
(
"unchecked"
)
protected
HierarchicalTypeDefinition
<
TraitType
>
createTraitTypeDef
(
String
name
,
ImmutableList
<
String
>
superTypes
,
AttributeDefinition
...
attrDefs
)
{
return
new
HierarchicalTypeDefinition
(
TraitType
.
class
,
name
,
superTypes
,
attrDefs
);
}
@SuppressWarnings
(
"unchecked"
)
protected
HierarchicalTypeDefinition
<
ClassType
>
createClassTypeDef
(
String
name
,
ImmutableList
<
String
>
superTypes
,
AttributeDefinition
...
attrDefs
)
{
return
new
HierarchicalTypeDefinition
(
ClassType
.
class
,
name
,
superTypes
,
attrDefs
);
}
}
webapp/src/test/java/org/apache/hadoop/metadata/web/resources/EntityJerseyResourceIT.java
View file @
255fa079
...
...
@@ -38,6 +38,7 @@ import com.sun.jersey.api.client.WebResource;
/**
* Integration tests for Entity Jersey Resource.
*/
@Test
(
enabled
=
false
)
public
class
EntityJerseyResourceIT
extends
BaseResourceIT
{
private
static
final
String
ENTITY_NAME
=
"clicks-table"
;
...
...
@@ -45,7 +46,7 @@ public class EntityJerseyResourceIT extends BaseResourceIT {
private
static
final
String
DATABASE_NAME
=
"ads"
;
private
static
final
String
TABLE_NAME
=
"clicks-table"
;
@Test
@Test
(
enabled
=
false
)
public
void
testSubmitEntity
()
{
String
entityStream
=
getTestEntityJSON
();
JsonParser
parser
=
new
JsonParser
();
...
...
@@ -73,7 +74,7 @@ public class EntityJerseyResourceIT extends BaseResourceIT {
}
}
@Test
(
dependsOnMethods
=
"testSubmitEntity"
)
@Test
(
dependsOnMethods
=
"testSubmitEntity"
,
enabled
=
false
)
public
void
testGetEntityDefinition
()
{
WebResource
resource
=
service
.
path
(
"api/metadata/entities/definition"
)
...
...
@@ -98,7 +99,7 @@ public class EntityJerseyResourceIT extends BaseResourceIT {
return
JSONValue
.
toJSONString
(
props
);
}
@Test
@Test
(
enabled
=
false
)
public
void
testGetInvalidEntityDefinition
()
{
WebResource
resource
=
service
.
path
(
"api/metadata/entities/definition"
)
...
...
@@ -114,7 +115,7 @@ public class EntityJerseyResourceIT extends BaseResourceIT {
System
.
out
.
println
(
"response = "
+
response
);
}
@Test
(
dependsOnMethods
=
"testSubmitEntity"
)
@Test
(
dependsOnMethods
=
"testSubmitEntity"
,
enabled
=
false
)
public
void
testGetEntityList
()
{
ClientResponse
clientResponse
=
service
.
path
(
"api/metadata/entities/list/"
)
...
...
webapp/src/test/java/org/apache/hadoop/metadata/web/resources/RexsterGraphJerseyResourceIT.java
View file @
255fa079
...
...
@@ -12,6 +12,7 @@ import javax.ws.rs.core.Response;
/**
* Integration tests for Rexster Graph Jersey Resource.
*/
@Test
public
class
RexsterGraphJerseyResourceIT
extends
BaseResourceIT
{
@Test
(
enabled
=
false
)
...
...
@@ -31,7 +32,6 @@ public class RexsterGraphJerseyResourceIT extends BaseResourceIT {
Assert
.
assertNotNull
(
response
);
}
@Test
public
void
testGetVertexWithInvalidId
()
throws
Exception
{
WebResource
resource
=
service
.
path
(
"api/metadata/graph/vertices/blah"
);
...
...
@@ -43,22 +43,18 @@ public class RexsterGraphJerseyResourceIT extends BaseResourceIT {
Assert
.
assertEquals
(
clientResponse
.
getStatus
(),
Response
.
Status
.
NOT_FOUND
.
getStatusCode
());
}
@Test
public
void
testGetVertexProperties
()
throws
Exception
{
}
@Test
public
void
testGetVertices
()
throws
Exception
{
}
@Test
public
void
testGetVertexEdges
()
throws
Exception
{
}
@Test
public
void
testGetEdge
()
throws
Exception
{
}
...
...
webapp/src/test/java/org/apache/hadoop/metadata/web/resources/TypesJerseyResourceIT.java
0 → 100644
View file @
255fa079
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package
org
.
apache
.
hadoop
.
metadata
.
web
.
resources
;
import
com.google.common.collect.ImmutableList
;
import
com.sun.jersey.api.client.ClientResponse
;
import
com.sun.jersey.api.client.WebResource
;
import
org.apache.hadoop.metadata.MetadataException
;
import
org.apache.hadoop.metadata.json.TypesSerialization
;
import
org.apache.hadoop.metadata.types.AttributeDefinition
;
import
org.apache.hadoop.metadata.types.ClassType
;
import
org.apache.hadoop.metadata.types.DataTypes
;
import
org.apache.hadoop.metadata.types.HierarchicalTypeDefinition
;
import
org.apache.hadoop.metadata.types.Multiplicity
;
import
org.apache.hadoop.metadata.types.StructTypeDefinition
;
import
org.apache.hadoop.metadata.types.TraitType
;
import
org.apache.hadoop.metadata.types.TypeSystem
;
import
org.testng.Assert
;
import
org.testng.annotations.AfterClass
;
import
org.testng.annotations.BeforeClass
;
import
org.testng.annotations.Test
;
import
javax.ws.rs.HttpMethod
;
import
javax.ws.rs.core.MediaType
;
import
javax.ws.rs.core.Response
;
import
java.util.ArrayList
;
import
java.util.List
;
/**
* Integration test for types jersey resource.
*/
public
class
TypesJerseyResourceIT
extends
BaseResourceIT
{
private
TypeSystem
typeSystem
;
private
List
<
HierarchicalTypeDefinition
>
typeDefinitions
;
@BeforeClass
public
void
setUp
()
throws
Exception
{
super
.
setUp
();
typeSystem
=
new
TypeSystem
();
typeDefinitions
=
createHiveTypes
();
}
@AfterClass
public
void
tearDown
()
throws
Exception
{
typeDefinitions
.
clear
();
}
@Test
public
void
testSubmit
()
throws
Exception
{
for
(
HierarchicalTypeDefinition
typeDefinition
:
typeDefinitions
)
{
String
typesAsJSON
=
TypesSerialization
.
toJson
(
typeSystem
,
typeDefinition
.
typeName
);
System
.
out
.
println
(
"typesAsJSON = "
+
typesAsJSON
);
WebResource
resource
=
service
.
path
(
"api/metadata/types/submit"
)
.
path
(
typeDefinition
.
typeName
);
ClientResponse
clientResponse
=
resource
.
accept
(
MediaType
.
APPLICATION_JSON
)
.
type
(
MediaType
.
APPLICATION_JSON
)
.
method
(
HttpMethod
.
POST
,
ClientResponse
.
class
,
typesAsJSON
);
Assert
.
assertEquals
(
clientResponse
.
getStatus
(),
Response
.
Status
.
OK
.
getStatusCode
());
String
response
=
clientResponse
.
getEntity
(
String
.
class
);
Assert
.
assertNotNull
(
response
);
}
}
@Test
public
void
testGetDefinition
()
throws
Exception
{
}
@Test
public
void
testGetTypeNames
()
throws
Exception
{
}
private
List
<
HierarchicalTypeDefinition
>
createHiveTypes
()
throws
MetadataException
{
ArrayList
<
HierarchicalTypeDefinition
>
typeDefinitions
=
new
ArrayList
<>();
HierarchicalTypeDefinition
<
ClassType
>
databaseTypeDefinition
=
createClassTypeDef
(
"database"
,
ImmutableList
.<
String
>
of
(),
createRequiredAttrDef
(
"name"
,
DataTypes
.
STRING_TYPE
),
createRequiredAttrDef
(
"description"
,
DataTypes
.
STRING_TYPE
));
typeDefinitions
.
add
(
databaseTypeDefinition
);
HierarchicalTypeDefinition
<
ClassType
>
tableTypeDefinition
=
createClassTypeDef
(
"table"
,
ImmutableList
.<
String
>
of
(),
createRequiredAttrDef
(
"name"
,
DataTypes
.
STRING_TYPE
),
createRequiredAttrDef
(
"description"
,
DataTypes
.
STRING_TYPE
),
createRequiredAttrDef
(
"type"
,
DataTypes
.
STRING_TYPE
),
new
AttributeDefinition
(
"database"
,
"database"
,
Multiplicity
.
REQUIRED
,
false
,
"database"
));
typeDefinitions
.
add
(
tableTypeDefinition
);
HierarchicalTypeDefinition
<
TraitType
>
fetlTypeDefinition
=
createTraitTypeDef
(
"fetl"
,
ImmutableList
.<
String
>
of
(),
createRequiredAttrDef
(
"level"
,
DataTypes
.
INT_TYPE
));
typeDefinitions
.
add
(
fetlTypeDefinition
);
typeSystem
.
defineTypes
(
ImmutableList
.<
StructTypeDefinition
>
of
(),
ImmutableList
.
of
(
fetlTypeDefinition
),
ImmutableList
.
of
(
databaseTypeDefinition
,
tableTypeDefinition
));
return
typeDefinitions
;
}
}
\ No newline at end of file
Write
Preview
Markdown
is supported
0%
Try again
or
attach a new file
Attach a file
Cancel
You are about to add
0
people
to the discussion. Proceed with caution.
Finish editing this message first!
Cancel
Please
register
or
sign in
to comment