dataplatform / atlas · Commits

Commit 9d01934b, authored Mar 03, 2015 by Venkatesh Seetharam
BUG-32591 Integrate Search DSL with Graph Repository. Contributed by Venkatesh Seetharam
Parent: 8e5d4827
Showing 13 changed files with 577 additions and 212 deletions (+577 -212)
pom.xml                                                                                          +1   -9
RepositoryMetadataModule.java         .../org/apache/hadoop/metadata/RepositoryMetadataModule.java   +0   -7
DiscoveryException.java               .../apache/hadoop/metadata/discovery/DiscoveryException.java   +74  -0
DiscoveryService.java                 ...rg/apache/hadoop/metadata/discovery/DiscoveryService.java   +14  -5
GraphBackedDiscoveryService.java      ...adoop/metadata/discovery/GraphBackedDiscoveryService.java   +163 -116
MetadataRepository.java               ...apache/hadoop/metadata/repository/MetadataRepository.java   +2   -0
Constants.java                        ...rg/apache/hadoop/metadata/repository/graph/Constants.java   +13  -11
GraphBackedMetadataRepository.java    ...adata/repository/graph/GraphBackedMetadataRepository.java   +73  -17
GraphBackedSearchIndexer.java         ...p/metadata/repository/graph/GraphBackedSearchIndexer.java   +2   -1
DefaultGraphPersistenceStrategy.java  ...doop/metadata/search/DefaultGraphPersistenceStrategy.java   +182 -0
GraphBackedDiscoveryServiceTest.java  ...p/metadata/discovery/GraphBackedDiscoveryServiceTest.java   +50  -24
graph.properties                      repository/src/test/resources/graph.properties                 +3   -1
EntityJerseyResourceIT.java           ...hadoop/metadata/web/resources/EntityJerseyResourceIT.java   +0   -21
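Taken together, the change lets a DSL query be answered straight from the Titan-backed repository. A minimal caller-level sketch of what that looks like, modelled on GraphBackedDiscoveryServiceTest further down (the class name here is hypothetical, not part of the commit):

    import javax.inject.Inject;

    import org.apache.hadoop.metadata.RepositoryMetadataModule;
    import org.apache.hadoop.metadata.discovery.GraphBackedDiscoveryService;
    import org.testng.annotations.Guice;
    import org.testng.annotations.Test;

    // Hypothetical illustration; mirrors the test added in this commit.
    @Guice(modules = RepositoryMetadataModule.class)
    public class SearchDslSketch {

        @Inject
        private GraphBackedDiscoveryService discoveryService;

        @Test
        public void queryByDsl() throws Exception {
            // searchByDSL parses the DSL, validates it, translates it to Gremlin
            // and evaluates it against the graph, returning JSON.
            String jsonResults = discoveryService.searchByDSL("from Department");
            System.out.println("results = " + jsonResults);
        }
    }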
pom.xml  View file @ 9d01934b

...
@@ -309,12 +309,6 @@
            <version>${falcon.version}</version>
        </dependency>
        <dependency>
            <groupId>com.sun.jersey</groupId>
            <artifactId>jersey-client</artifactId>
            <version>1.9</version>
        </dependency>
        <!-- Logging -->
        <dependency>
            <groupId>org.slf4j</groupId>
...
@@ -701,9 +695,7 @@
                <artifactId>maven-surefire-plugin</artifactId>
                <version>2.7.2</version>
                <configuration>
                    <systemPropertyVariables>
                        <tapestry.execution-mode>Qa</tapestry.execution-mode>
                    </systemPropertyVariables>
                    <skipTests>true</skipTests>
                </configuration>
            </plugin>
...
repository/src/main/java/org/apache/hadoop/metadata/RepositoryMetadataModule.java  View file @ 9d01934b

...
@@ -16,15 +16,8 @@
 * limitations under the License.
 */
/*
 * Created by IntelliJ IDEA.
 * User: seetharam
 * Date: 12/1/14
 * Time: 2:21 PM
 */
package org.apache.hadoop.metadata;

import com.google.inject.Scopes;
import com.google.inject.throwingproviders.ThrowingProviderBinder;
import com.thinkaurelius.titan.core.TitanGraph;
...
repository/src/main/java/org/apache/hadoop/metadata/discovery/DiscoveryException.java  0 → 100644  View file @ 9d01934b

/**
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements. See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership. The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License. You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.hadoop.metadata.discovery;

import org.apache.hadoop.metadata.MetadataException;

import java.security.PrivilegedActionException;

public class DiscoveryException extends MetadataException {

    /**
     * Constructs a new exception with the specified detail message. The
     * cause is not initialized, and may subsequently be initialized by
     * a call to {@link #initCause}.
     *
     * @param message the detail message. The detail message is saved for
     *                later retrieval by the {@link #getMessage()} method.
     */
    public DiscoveryException(String message) {
        super(message);
    }

    /**
     * Constructs a new exception with the specified detail message and
     * cause. <p>Note that the detail message associated with
     * {@code cause} is <i>not</i> automatically incorporated in
     * this exception's detail message.
     *
     * @param message the detail message (which is saved for later retrieval
     *                by the {@link #getMessage()} method).
     * @param cause   the cause (which is saved for later retrieval by the
     *                {@link #getCause()} method). (A <tt>null</tt> value is
     *                permitted, and indicates that the cause is nonexistent or
     *                unknown.)
     * @since 1.4
     */
    public DiscoveryException(String message, Throwable cause) {
        super(message, cause);
    }

    /**
     * Constructs a new exception with the specified cause and a detail
     * message of <tt>(cause==null ? null : cause.toString())</tt> (which
     * typically contains the class and detail message of <tt>cause</tt>).
     * This constructor is useful for exceptions that are little more than
     * wrappers for other throwables (for example, {@link
     * PrivilegedActionException}).
     *
     * @param cause the cause (which is saved for later retrieval by the
     *              {@link #getCause()} method). (A <tt>null</tt> value is
     *              permitted, and indicates that the cause is nonexistent or
     *              unknown.)
     * @since 1.4
     */
    public DiscoveryException(Throwable cause) {
        super(cause);
    }
}
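The new exception is mostly used to rewrap lower-level failures at the discovery layer. A minimal sketch of that pattern (runGremlin is a hypothetical stand-in for engine.eval as used below in GraphBackedDiscoveryService):

    // Sketch only: surface engine-level failures as the discovery-layer checked exception,
    // mirroring the catch block added to GraphBackedDiscoveryService.searchByGremlin.
    Object searchOrWrap(String gremlinQuery) throws DiscoveryException {
        try {
            return runGremlin(gremlinQuery);   // hypothetical stand-in for engine.eval(query, bindings)
        } catch (javax.script.ScriptException se) {
            throw new DiscoveryException(se);
        }
    }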
repository/src/main/java/org/apache/hadoop/metadata/discovery/DiscoveryService.java  View file @ 9d01934b

...
@@ -18,7 +18,6 @@
package org.apache.hadoop.metadata.discovery;

import org.apache.hadoop.metadata.MetadataException;
import org.codehaus.jettison.json.JSONObject;

import java.util.HashMap;
...
@@ -32,28 +31,38 @@ import java.util.Set;
public interface DiscoveryService {

    /**
     * Search using query DSL.
     *
     * @param dslQuery query in DSL format.
     * @return JSON representing the type and results.
     */
    String searchByDSL(String dslQuery) throws DiscoveryException;

    /**
     * Assumes the User is familiar with the persistence structure of the Repository.
     * The given query is run uninterpreted against the underlying Graph Store.
     * The results are returned as a List of Rows. each row is a Map of Key,Value pairs.
     *
     * @param gremlinQuery query in gremlin dsl format
     * @return List of Maps
     * @throws org.apache.hadoop.metadata.discovery.DiscoveryException
     */
-    List<Map<String, String>> searchByGremlin(String gremlinQuery) throws MetadataException;
+    List<Map<String, String>> searchByGremlin(String gremlinQuery) throws DiscoveryException;

    /**
     * Simple direct graph search and depth traversal.
     * @param searchText is plain text
     * @param prop is the Vertex property to search.
     */
    Map<String, HashMap<String, JSONObject>> textSearch(String searchText, int depth, String prop);

    /**
     * Simple graph walker for search interface, which allows following of specific edges only.
     * @param edgesToFollow is a comma-separated-list of edges to follow.
     */
    Map<String, HashMap<String, JSONObject>> relationshipWalk(String guid, int depth, String edgesToFollow);

    /**
     * Return a Set of indexed properties in the graph.
...
...
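For orientation, a sketch of how a caller might consume the two search entry points declared above (not part of the commit; assumes an injected DiscoveryService, and that result keys follow the "<Type>.<attribute>" property convention used elsewhere in this change):

    import java.util.List;
    import java.util.Map;

    // Sketch only: run a DSL query and a raw Gremlin query through the interface.
    void sampleSearches(DiscoveryService discoveryService) throws DiscoveryException {
        // DSL entry point: returns JSON with the matched type and result rows.
        String json = discoveryService.searchByDSL("from Department");
        System.out.println(json);

        // Gremlin entry point: each row is a Map of property name -> string value.
        List<Map<String, String>> rows =
                discoveryService.searchByGremlin("g.V.filter{it.typeName == 'Person'}.toList()");
        for (Map<String, String> row : rows) {
            System.out.println(row.get("Person.name"));   // assumption: keys like "Person.name"
        }
    }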
repository/src/main/java/org/apache/hadoop/metadata/discovery/GraphBackedDiscoveryService.java  View file @ 9d01934b

...
@@ -6,9 +6,9 @@
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License. You may obtain a copy of the License at
- *
+ * <p/>
 * http://www.apache.org/licenses/LICENSE-2.0
- *
+ * <p/>
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
...
...
@@ -26,13 +26,22 @@ import com.tinkerpop.blueprints.Edge;
import com.tinkerpop.blueprints.GraphQuery;
import com.tinkerpop.blueprints.Vertex;
import org.apache.commons.collections.iterators.IteratorChain;
import org.apache.hadoop.metadata.MetadataException;
import org.apache.hadoop.metadata.query.Expressions;
import org.apache.hadoop.metadata.query.GremlinEvaluator;
import org.apache.hadoop.metadata.query.GremlinQuery;
import org.apache.hadoop.metadata.query.GremlinQueryResult;
import org.apache.hadoop.metadata.query.GremlinTranslator;
import org.apache.hadoop.metadata.query.QueryParser;
import org.apache.hadoop.metadata.query.QueryProcessor;
import org.apache.hadoop.metadata.repository.MetadataRepository;
import org.apache.hadoop.metadata.repository.graph.GraphHelper;
import org.apache.hadoop.metadata.repository.graph.TitanGraphService;
-import org.apache.hadoop.metadata.storage.RepositoryException;
+import org.apache.hadoop.metadata.search.DefaultGraphPersistenceStrategy;
import org.codehaus.jettison.json.JSONObject;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import scala.util.Either;
import scala.util.parsing.combinator.Parsers;

import javax.inject.Inject;
import javax.script.Bindings;
...
...
@@ -51,81 +60,107 @@ public class GraphBackedDiscoveryService implements DiscoveryService {

    private static final Logger LOG = LoggerFactory.getLogger(GraphBackedDiscoveryService.class);

    private final TitanGraph titanGraph;
    private final DefaultGraphPersistenceStrategy graphPersistenceStrategy;

    @Inject
-    GraphBackedDiscoveryService(TitanGraphService graphService) throws MetadataException {
+    GraphBackedDiscoveryService(TitanGraphService graphService,
+                                MetadataRepository metadataRepository) throws DiscoveryException {
        this.titanGraph = graphService.getTitanGraph();
+        this.graphPersistenceStrategy = new DefaultGraphPersistenceStrategy(metadataRepository);
    }

(Removed from this position: the old private static searchWalker(...) helper, which is re-added in reworked form near the end of the file. Added in its place:)

    /**
     * Search using query DSL.
     *
     * @param dslQuery query in DSL format.
     * @return JSON representing the type and results.
     */
    @Override
    public String searchByDSL(String dslQuery) throws DiscoveryException {
        QueryParser queryParser = new QueryParser();
        Either<Parsers.NoSuccess, Expressions.Expression> either = queryParser.apply(dslQuery);
        if (either.isRight()) {
            Expressions.Expression expression = either.right().get();
            GremlinQueryResult queryResult = evaluate(expression);
            return queryResult.toJson();
        }

        throw new DiscoveryException("Invalid expression : " + dslQuery);
    }

    private GremlinQueryResult evaluate(Expressions.Expression expression) {
        Expressions.Expression validatedExpression = QueryProcessor.validate(expression);
        GremlinQuery gremlinQuery =
                new GremlinTranslator(validatedExpression, graphPersistenceStrategy).translate();
        System.out.println("---------------------");
        System.out.println("Query = " + validatedExpression);
        System.out.println("Expression Tree = " + validatedExpression.treeString());
        System.out.println("Gremlin Query = " + gremlinQuery.queryStr());
        System.out.println("---------------------");
        return new GremlinEvaluator(gremlinQuery, graphPersistenceStrategy, titanGraph).evaluate();
    }

    /**
     * Assumes the User is familiar with the persistence structure of the Repository.
     * The given query is run uninterpreted against the underlying Graph Store.
     * The results are returned as a List of Rows. each row is a Map of Key,Value pairs.
     *
     * @param gremlinQuery query in gremlin dsl format
     * @return List of Maps
     * @throws org.apache.hadoop.metadata.discovery.DiscoveryException
     */
    @Override
    public List<Map<String, String>> searchByGremlin(String gremlinQuery) throws DiscoveryException {
        ScriptEngineManager manager = new ScriptEngineManager();
        ScriptEngine engine = manager.getEngineByName("gremlin-groovy");
        Bindings bindings = engine.createBindings();
        bindings.put("g", titanGraph);

        try {
            Object o = engine.eval(gremlinQuery, bindings);
            if (!(o instanceof List)) {
                throw new DiscoveryException(
                        String.format("Cannot process gremlin result %s", o.toString()));
            }

            List l = (List) o;
            List<Map<String, String>> result = new ArrayList<>();
            for (Object r : l) {

                Map<String, String> oRow = new HashMap<>();
                if (r instanceof Map) {
                    @SuppressWarnings("unchecked")
                    Map<Object, Object> iRow = (Map) r;
                    for (Map.Entry e : iRow.entrySet()) {
                        Object k = e.getKey();
                        Object v = e.getValue();
                        oRow.put(k.toString(), v.toString());
                    }
                } else if (r instanceof TitanVertex) {
                    Iterable<TitanProperty> ps = ((TitanVertex) r).getProperties();
                    for (TitanProperty tP : ps) {
                        String pName = tP.getPropertyKey().getName();
                        Object pValue = ((TitanVertex) r).getProperty(pName);
                        if (pValue != null) {
                            oRow.put(pName, pValue.toString());
                        }
                    }
                } else if (r instanceof String) {
                    oRow.put("", r.toString());
                } else {
                    throw new DiscoveryException(
                            String.format("Cannot process gremlin result %s", o.toString()));
                }

                result.add(oRow);
            }
            return result;
        } catch (ScriptException se) {
            throw new DiscoveryException(se);
        }
    }

    /*
     * Simple direct graph search and depth traversal.
...
...
@@ -133,14 +168,14 @@ public class GraphBackedDiscoveryService implements DiscoveryService {
     * @param prop is the Vertex property to search.
     */
    @Override
    public Map<String, HashMap<String, JSONObject>> textSearch(String searchText,
                                                                int depth, String prop) {
        HashMap<String, HashMap<String, JSONObject>> result = new HashMap<>();

        // HashMaps, which contain sub JOSN Objects to be relayed back to the parent.
        HashMap<String, JSONObject> vertices = new HashMap<>();
        HashMap<String, JSONObject> edges = new HashMap<>();

        /* todo: Later - when we allow search limitation by "type".
        ArrayList<String> typesList = new ArrayList<String>();
...
...
@@ -150,15 +185,17 @@ public class GraphBackedDiscoveryService implements DiscoveryService {
            if (typesList.contains(s)) {
                LOG.error("Specifyed type is not a member of the Type System= {}", s);
                throw new WebApplicationException(
                    Servlets.getErrorResponse("Invalid type specified in query.", Response
                            .Status.INTERNAL_SERVER_ERROR));
            }
            typesList.add(s);
        }*/

        int resultCount = 0;
        //for (Result<Vertex> v: g.indexQuery(Constants.VERTEX_INDEX, "v." + prop + ":(" +
        // searchText + ")").vertices()) {
        for (Vertex v : ((GraphQuery) titanGraph.query().has(prop, searchText)).vertices()) {
            //searchWalker(v.getElement(), depth, 0, edges, vertices, null);
            searchWalker(v, depth, 0, edges, vertices, null);
...
...
@@ -166,10 +203,10 @@ public class GraphBackedDiscoveryService implements DiscoveryService {
        }

        LOG.debug("Search for {} returned {} results.", searchText, resultCount);

        result.put("vertices", vertices);
        result.put("edges", edges);

        return result;
    }
...
...
@@ -179,14 +216,14 @@ public class GraphBackedDiscoveryService implements DiscoveryService {
     * @param edgesToFollow is a comma-separated-list of edges to follow.
     */
    @Override
    public Map<String, HashMap<String, JSONObject>> relationshipWalk(String guid, int depth,
                                                                     String edgesToFollow) {
        HashMap<String, HashMap<String, JSONObject>> result = new HashMap<>();

        // HashMaps, which contain sub JOSN Objects to be relayed back to the parent.
        HashMap<String, JSONObject> vertices = new HashMap<>();
        HashMap<String, JSONObject> edges = new HashMap<>();

        // Get the Vertex with the specified GUID.
        Vertex v = GraphHelper.findVertexByGUID(titanGraph, guid);
...
...
@@ -199,69 +236,79 @@ public class GraphBackedDiscoveryService implements DiscoveryService {
        }

        result.put("vertices", vertices);
        result.put("edges", edges);

        return result;
    }

(Removed from this position: the old searchByGremlin(...) implementation, which declared throws MetadataException and wrapped failures in RepositoryException; the reworked version throwing DiscoveryException now appears earlier in the file. Added here instead is the relocated searchWalker helper:)

    private static void searchWalker(Vertex vtx, final int max, int counter,
                                     HashMap<String, JSONObject> e,
                                     HashMap<String, JSONObject> v,
                                     String edgesToFollow) {
        counter++;
        if (counter <= max) {
            Map<String, String> jsonVertexMap = new HashMap<>();
            Iterator<Edge> edgeIterator;

            // If we're doing a lineage traversal, only follow the edges specified by the query.
            // Otherwise return them all.
            if (edgesToFollow != null) {
                IteratorChain ic = new IteratorChain();

                for (String iterateOn : edgesToFollow.split(",")) {
                    ic.addIterator(vtx.query().labels(iterateOn).edges().iterator());
                }

                edgeIterator = ic;
            } else {
                edgeIterator = vtx.query().edges().iterator();
            }

            //Iterator<Edge> edgeIterator = vtx.query().labels("Fathered").edges().iterator();
            jsonVertexMap.put("HasRelationships",
                    ((Boolean) edgeIterator.hasNext()).toString());

            for (String pKey : vtx.getPropertyKeys()) {
                jsonVertexMap.put(pKey, vtx.getProperty(pKey).toString());
            }

            // Add to the Vertex map.
            v.put(vtx.getId().toString(), new JSONObject(jsonVertexMap));

            // Follow this Vertex's edges if this isn't the last level of depth
            if (counter < max) {
                while (edgeIterator != null && edgeIterator.hasNext()) {
                    Edge edge = edgeIterator.next();
                    String label = edge.getLabel();

                    Map<String, String> jsonEdgeMap = new HashMap<>();
                    String tail = edge.getVertex(Direction.OUT).getId().toString();
                    String head = edge.getVertex(Direction.IN).getId().toString();

                    jsonEdgeMap.put("tail", tail);
                    jsonEdgeMap.put("head", head);
                    jsonEdgeMap.put("label", label);

                    Direction d;
                    if (tail.equals(vtx.getId().toString())) {
                        d = Direction.IN;
                    } else {
                        d = Direction.OUT;
                    }

                    /* If we want an Edge's property keys, uncomment here. Or we can parameterize it.
                       Code is here now for reference/memory-jogging.
                    for (String pKey: edge.getPropertyKeys()) {
                        jsonEdgeMap.put(pKey, edge.getProperty(pKey).toString());
                    }
                    */

                    e.put(edge.getId().toString(), new JSONObject(jsonEdgeMap));

                    searchWalker(edge.getVertex(d), max, counter, e, v, edgesToFollow);
                }
            }
        }
    }
...
...
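textSearch and relationshipWalk both return a map with two sub-maps keyed "vertices" and "edges" (see the result.put calls above). A minimal consumer sketch (not from the commit; the "Fathered" edge label is only an example borrowed from a comment in the code):

    import java.util.HashMap;
    import java.util.Map;

    import org.codehaus.jettison.json.JSONObject;

    // Sketch only: walk relationships from a known GUID and print the collected vertices.
    void printWalk(DiscoveryService discoveryService, String guid) {
        Map<String, HashMap<String, JSONObject>> walk =
                discoveryService.relationshipWalk(guid, 3, "Fathered");
        for (Map.Entry<String, JSONObject> vertex : walk.get("vertices").entrySet()) {
            System.out.println(vertex.getKey() + " -> " + vertex.getValue());
        }
    }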
repository/src/main/java/org/apache/hadoop/metadata/repository/MetadataRepository.java  View file @ 9d01934b

...
@@ -30,6 +30,8 @@ import java.util.List;
 */
public interface MetadataRepository extends Service {

    String getTypeAttributeName();

    String createEntity(IReferenceableInstance entity, String entityType) throws RepositoryException;
...
...
repository/src/main/java/org/apache/hadoop/metadata/repository/graph/Constants.java  View file @ 9d01934b

...
@@ -18,33 +18,35 @@
package org.apache.hadoop.metadata.repository.graph;

-final class Constants {
+public final class Constants {

    private Constants() {
    }

-    static final String GUID_PROPERTY_KEY = "guid";
-    static final String GUID_INDEX = "guid_index";
+    public static final String GUID_PROPERTY_KEY = "guid";
+    public static final String GUID_INDEX = "guid_index";

-    static final String ENTITY_TYPE_PROPERTY_KEY = "type";
-    static final String ENTITY_TYPE_INDEX = "type_index";
+    public static final String ENTITY_TYPE_PROPERTY_KEY = "type";
+    public static final String ENTITY_TYPE_INDEX = "type_index";

-    static final String VERSION_PROPERTY_KEY = "version";
-    static final String TIMESTAMP_PROPERTY_KEY = "timestamp";
+    public static final String VERSION_PROPERTY_KEY = "version";
+    public static final String TIMESTAMP_PROPERTY_KEY = "timestamp";

+    public static final String TRAIT_NAMES_PROPERTY_KEY = "traits";

    /**
     * search backing index name.
     */
-    static final String BACKING_INDEX = "search";
-    static final String INDEX_NAME = "metadata";
+    public static final String BACKING_INDEX = "search";
+    public static final String INDEX_NAME = "metadata";

    /**
     * search backing index name for vertex keys.
     */
-    static final String VERTEX_INDEX = "vertex_index";
+    public static final String VERTEX_INDEX = "vertex_index";

    /**
     * search backing index name for edge labels.
     */
-    static final String EDGE_INDEX = "edge_index";
+    public static final String EDGE_INDEX = "edge_index";
}
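These keys become part of the repository's public surface because the new search layer reads them directly. A minimal sketch mirroring getIdFromVertex in GraphBackedMetadataRepository (assumes a TitanVertex for an existing entity):

    import com.thinkaurelius.titan.core.TitanVertex;

    import org.apache.hadoop.metadata.repository.graph.Constants;
    import org.apache.hadoop.metadata.storage.Id;

    // Sketch only: read an entity's identity from its vertex via the now-public constants.
    Id idOf(String dataTypeName, TitanVertex vertex) {
        String guid = vertex.getProperty(Constants.GUID_PROPERTY_KEY);
        Integer version = vertex.getProperty(Constants.VERSION_PROPERTY_KEY);
        return new Id(guid, version, dataTypeName);
    }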
repository/src/main/java/org/apache/hadoop/metadata/repository/graph/GraphBackedMetadataRepository.java  View file @ 9d01934b

...
@@ -51,6 +51,7 @@ import java.io.IOException;
import java.math.BigDecimal;
import java.math.BigInteger;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
import java.util.HashMap;
import java.util.Iterator;
...
@@ -87,6 +88,10 @@ public class GraphBackedMetadataRepository implements MetadataRepository {
        this.titanGraph = ((TitanGraphService) graphService).getTitanGraph();
    }

    public GraphToTypedInstanceMapper getGraphToInstanceMapper() {
        return graphToInstanceMapper;
    }

    /**
     * Starts the service. This method blocks until the service has completely started.
     *
...
@@ -117,6 +122,23 @@ public class GraphBackedMetadataRepository implements MetadataRepository {
    }

    @Override
    public String getTypeAttributeName() {
        return Constants.ENTITY_TYPE_PROPERTY_KEY;
    }

    public String getTraitLabel(IDataType<?> dataType, String traitName) {
        return dataType.getName() + "." + traitName;
    }

    public String getFieldNameInVertex(IDataType<?> dataType, AttributeInfo aInfo) {
        return dataType.getName() + "." + aInfo.name;
    }

    public String getEdgeLabel(IDataType<?> dataType, AttributeInfo aInfo) {
        return dataType.getName() + "." + aInfo.name;
    }

    @Override
    public String createEntity(IReferenceableInstance typedInstance,
                               String typeName) throws RepositoryException {
        LOG.info("adding entity={} type={}", typedInstance, typeName);
...
@@ -172,6 +194,20 @@ public class GraphBackedMetadataRepository implements MetadataRepository {
        return entityList;
    }

    public Id getIdFromVertex(String dataTypeName, TitanVertex vertex) {
        return new Id(vertex.<String>getProperty(Constants.GUID_PROPERTY_KEY),
                vertex.<Integer>getProperty(Constants.VERSION_PROPERTY_KEY), dataTypeName);
    }

    public List<String> getTraitNames(TitanVertex vertex) {
        final String traitNames = vertex.getProperty(Constants.TRAIT_NAMES_PROPERTY_KEY);
        return traitNames == null
                ? Collections.<String>emptyList()
                : Arrays.asList(traitNames.split(","));
    }

    private final class EntityProcessor implements ObjectGraphWalker.NodeProcessor {

        public final Map<Id, Id> idToNewIdMap;
...
@@ -308,7 +344,7 @@ public class GraphBackedMetadataRepository implements MetadataRepository {
        for (String traitName : typedInstance.getTraits()) {
            LOG.debug("mapping trait {}", traitName);
-            ((TitanVertex) instanceVertex).addProperty("traits", traitName);
+            ((TitanVertex) instanceVertex).addProperty(Constants.TRAIT_NAMES_PROPERTY_KEY, traitName);
            ITypedStruct traitInstance = (ITypedStruct) typedInstance.getTrait(traitName);

            // add the attributes for the trait instance
...
@@ -581,20 +617,23 @@ public class GraphBackedMetadataRepository implements MetadataRepository {
        }
    }

-    private final class GraphToTypedInstanceMapper {
+    public final class GraphToTypedInstanceMapper {

-        private ITypedReferenceableInstance mapGraphToTypedInstance(String guid,
+        public ITypedReferenceableInstance mapGraphToTypedInstance(String guid,
                                                                    Vertex instanceVertex)
            throws MetadataException {
            LOG.debug("Mapping graph root vertex {} to typed instance for guid {}",
                    instanceVertex, guid);
            String typeName = instanceVertex.getProperty(Constants.ENTITY_TYPE_PROPERTY_KEY);
            List<String> traits = new ArrayList<>();
-            for (TitanProperty property : ((TitanVertex) instanceVertex).getProperties("traits")) {
+            for (TitanProperty property : ((TitanVertex) instanceVertex)
+                    .getProperties(Constants.TRAIT_NAMES_PROPERTY_KEY)) {
                traits.add((String) property.getValue());
            }

-            Id id = new Id(guid, instanceVertex.<Integer>getProperty("version"), typeName);
+            Id id = new Id(guid,
+                    instanceVertex.<Integer>getProperty(Constants.VERSION_PROPERTY_KEY), typeName);
            LOG.debug("Created id {} for instance type {}", id, typeName);

            ClassType classType = typeSystem.getDataType(ClassType.class, typeName);
...
@@ -607,7 +646,7 @@ public class GraphBackedMetadataRepository implements MetadataRepository {
            return typedInstance;
        }

-        private void mapVertexToInstanceTraits(Vertex instanceVertex,
+        public void mapVertexToInstanceTraits(Vertex instanceVertex,
                                               ITypedReferenceableInstance typedInstance,
                                               List<String> traits) throws MetadataException {
            for (String traitName : traits) {
...
@@ -618,11 +657,19 @@ public class GraphBackedMetadataRepository implements MetadataRepository {
            }
        }

-        private void mapVertexToInstance(Vertex instanceVertex, ITypedInstance typedInstance,
+        public void mapVertexToInstance(Vertex instanceVertex, ITypedInstance typedInstance,
                                         Map<String, AttributeInfo> fields) throws MetadataException {
            LOG.debug("Mapping vertex {} to instance {} for fields",
                    instanceVertex, typedInstance.getTypeName(), fields);
            for (AttributeInfo attributeInfo : fields.values()) {
                mapVertexToAttribute(instanceVertex, typedInstance, attributeInfo);
            }
        }

        public void mapVertexToAttribute(Vertex instanceVertex, ITypedInstance typedInstance,
                                         AttributeInfo attributeInfo) throws MetadataException {
            LOG.debug("mapping attributeInfo = " + attributeInfo);
            final IDataType dataType = attributeInfo.dataType();
            final String vertexPropertyName =
...
@@ -630,7 +677,7 @@ public class GraphBackedMetadataRepository implements MetadataRepository {
            switch (dataType.getTypeCategory()) {
                case PRIMITIVE:
-                    mapVertexToInstance(instanceVertex, typedInstance, attributeInfo);
+                    mapVertexToPrimitive(instanceVertex, typedInstance, attributeInfo);
                    break;
                // add only if vertex has this attribute
                case ENUM:
...
@@ -655,7 +702,8 @@ public class GraphBackedMetadataRepository implements MetadataRepository {
                    break;

                case CLASS:
                    String relationshipLabel = typedInstance.getTypeName() + "." + attributeInfo.name;
                    Object idOrInstance = mapClassReferenceToVertex(instanceVertex, attributeInfo,
                            relationshipLabel, attributeInfo.dataType());
                    typedInstance.set(attributeInfo.name, idOrInstance);
...
@@ -665,9 +713,8 @@ public class GraphBackedMetadataRepository implements MetadataRepository {
                    break;
            }
        }

-        private Object mapClassReferenceToVertex(Vertex instanceVertex,
+        public Object mapClassReferenceToVertex(Vertex instanceVertex,
                                                 AttributeInfo attributeInfo,
                                                 String relationshipLabel,
                                                 IDataType dataType) throws MetadataException {
...
@@ -697,7 +744,7 @@ public class GraphBackedMetadataRepository implements MetadataRepository {
        }

        @SuppressWarnings("unchecked")
-        private void mapVertexToArrayInstance(Vertex instanceVertex, ITypedInstance typedInstance,
+        public void mapVertexToArrayInstance(Vertex instanceVertex, ITypedInstance typedInstance,
                                             AttributeInfo attributeInfo) throws MetadataException {
            LOG.debug("mapping vertex {} to array {}", instanceVertex, attributeInfo.name);
            String propertyName = typedInstance.getTypeName() + "." + attributeInfo.name;
...
@@ -717,10 +764,12 @@ public class GraphBackedMetadataRepository implements MetadataRepository {
            typedInstance.set(attributeInfo.name, values);
        }

-        private Object mapVertexToCollectionEntry(Vertex instanceVertex,
+        public Object mapVertexToCollectionEntry(Vertex instanceVertex,
                                                 AttributeInfo attributeInfo,
                                                 IDataType elementType,
                                                 String propertyNameWithSuffix) throws MetadataException {
            switch (elementType.getTypeCategory()) {
                case PRIMITIVE:
                    return instanceVertex.getProperty(propertyNameWithSuffix);
...
@@ -830,7 +879,14 @@ public class GraphBackedMetadataRepository implements MetadataRepository {
                TraitType traitType) throws MetadataException {
            ITypedStruct traitInstance = (ITypedStruct) typedInstance.getTrait(traitName);
-            String relationshipLabel = typedInstance.getTypeName() + "." + traitName;
+            mapVertexToTraitInstance(instanceVertex, typedInstance.getTypeName(),
+                    traitName, traitType, traitInstance);
+        }
+
+        public void mapVertexToTraitInstance(Vertex instanceVertex, String typeName,
+                                             String traitName, TraitType traitType,
+                                             ITypedStruct traitInstance) throws MetadataException {
+            String relationshipLabel = typeName + "." + traitName;
            LOG.debug("Finding edge for {} -> label {} ", instanceVertex, relationshipLabel);
            for (Edge edge : instanceVertex.getEdges(Direction.OUT, relationshipLabel)) {
                final Vertex traitInstanceVertex = edge.getVertex(Direction.IN);
...
@@ -844,7 +900,7 @@ public class GraphBackedMetadataRepository implements MetadataRepository {
            }
        }

-        private void mapVertexToInstance(Vertex instanceVertex,
+        private void mapVertexToPrimitive(Vertex instanceVertex,
                                         ITypedInstance typedInstance,
                                         AttributeInfo attributeInfo) throws MetadataException {
            LOG.debug("Adding primitive {} from vertex {}", attributeInfo, instanceVertex);
...
...
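The helper methods added above pin down the naming convention the DSL translator relies on: vertex property names, edge labels and trait labels are all "<typeName>.<attributeOrTraitName>". A self-contained sketch of that convention (illustrative only; the type and attribute names are examples):

    // Illustrative only: the "<typeName>.<attribute>" convention used by
    // getTraitLabel / getFieldNameInVertex / getEdgeLabel in this commit.
    public final class LabelConventionSketch {
        static String label(String typeName, String attributeOrTraitName) {
            return typeName + "." + attributeOrTraitName;
        }

        public static void main(String[] args) {
            System.out.println(label("Person", "name"));           // Person.name (vertex property)
            System.out.println(label("Department", "employees"));  // Department.employees (edge label)
        }
    }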
repository/src/main/java/org/apache/hadoop/metadata/repository/graph/GraphBackedSearchIndexer.java  View file @ 9d01934b

...
@@ -116,7 +116,8 @@ public class GraphBackedSearchIndexer implements SearchIndexer {
            case ENUM:
            case ARRAY:
            case MAP:
-                // do nothing since these are NOT types
+                // do nothing since these are only attributes
+                // and not types like structs, traits or classes
                break;

            case STRUCT:
...
...
repository/src/main/java/org/apache/hadoop/metadata/search/DefaultGraphPersistenceStrategy.java  0 → 100644  View file @ 9d01934b

/**
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements. See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership. The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License. You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.hadoop.metadata.search;

import com.thinkaurelius.titan.core.TitanVertex;
import org.apache.hadoop.metadata.ITypedReferenceableInstance;
import org.apache.hadoop.metadata.ITypedStruct;
import org.apache.hadoop.metadata.MetadataException;
import org.apache.hadoop.metadata.query.Expressions;
import org.apache.hadoop.metadata.query.GraphPersistenceStrategies;
import org.apache.hadoop.metadata.query.TypeUtils;
import org.apache.hadoop.metadata.repository.MetadataRepository;
import org.apache.hadoop.metadata.repository.graph.Constants;
import org.apache.hadoop.metadata.repository.graph.GraphBackedMetadataRepository;
import org.apache.hadoop.metadata.storage.Id;
import org.apache.hadoop.metadata.types.AttributeInfo;
import org.apache.hadoop.metadata.types.IDataType;
import org.apache.hadoop.metadata.types.Multiplicity;
import org.apache.hadoop.metadata.types.StructType;
import org.apache.hadoop.metadata.types.TraitType;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import java.util.List;

/**
 * Default implementation of GraphPersistenceStrategy.
 */
public class DefaultGraphPersistenceStrategy implements GraphPersistenceStrategies {

    private static final Logger LOG = LoggerFactory.getLogger(DefaultGraphPersistenceStrategy.class);

    private final GraphBackedMetadataRepository metadataRepository;

    public DefaultGraphPersistenceStrategy(MetadataRepository metadataRepository) {
        this.metadataRepository = (GraphBackedMetadataRepository) metadataRepository;
    }

    @Override
    public String typeAttributeName() {
        return metadataRepository.getTypeAttributeName();
    }

    @Override
    public String edgeLabel(IDataType<?> dataType, AttributeInfo aInfo) {
        return metadataRepository.getEdgeLabel(dataType, aInfo);
    }

    @Override
    public String traitLabel(IDataType<?> dataType, String traitName) {
        return metadataRepository.getTraitLabel(dataType, traitName);
    }

    @Override
    public String fieldNameInVertex(IDataType<?> dataType, AttributeInfo aInfo) {
        return metadataRepository.getFieldNameInVertex(dataType, aInfo);
    }

    @Override
    public List<String> traitNames(TitanVertex vertex) {
        return metadataRepository.getTraitNames(vertex);
    }

    @Override
    public String fieldPrefixInSelect() {
        return "it";
    }

    @Override
    public Id getIdFromVertex(String dataTypeName, TitanVertex vertex) {
        return metadataRepository.getIdFromVertex(dataTypeName, vertex);
    }

    @Override
    public <U> U constructInstance(IDataType<U> dataType, Object value) {
        try {
            switch (dataType.getTypeCategory()) {
                case PRIMITIVE:
                case ENUM:
                    return dataType.convert(value, Multiplicity.OPTIONAL);

                case ARRAY:
                    // todo
                    break;

                case MAP:
                    // todo
                    break;

                case STRUCT:
                    TitanVertex structVertex = (TitanVertex) value;
                    StructType structType = (StructType) dataType;
                    ITypedStruct structInstance = structType.createInstance();
                    metadataRepository.getGraphToInstanceMapper().mapVertexToInstance(
                            structVertex, structInstance, structType.fieldMapping().fields);
                    return dataType.convert(structInstance, Multiplicity.OPTIONAL);

                case TRAIT:
                    TitanVertex traitVertex = (TitanVertex) value;
                    TraitType traitType = (TraitType) dataType;
                    ITypedStruct traitInstance = traitType.createInstance();
                    // todo - this is not right, we should load the Instance associated with this
                    // trait. for now just loading the trait struct.
                    // metadataRepository.getGraphToInstanceMapper().mapVertexToTraitInstance(
                    //        traitVertex, dataType.getName(), , traitType, traitInstance);
                    metadataRepository.getGraphToInstanceMapper().mapVertexToInstance(
                            traitVertex, traitInstance, traitType.fieldMapping().fields);
                    break;

                case CLASS:
                    TitanVertex classVertex = (TitanVertex) value;
                    ITypedReferenceableInstance classInstance =
                            metadataRepository.getGraphToInstanceMapper().mapGraphToTypedInstance(
                                    classVertex.<String>getProperty(Constants.GUID_PROPERTY_KEY),
                                    classVertex);
                    return dataType.convert(classInstance, Multiplicity.OPTIONAL);

                default:
                    throw new UnsupportedOperationException(
                            "Load for type " + dataType + "is not supported");
            }
        } catch (MetadataException e) {
            LOG.error("error while constructing an instance", e);
        }

        return null;
    }

    @Override
    public String edgeLabel(TypeUtils.FieldInfo fInfo) {
        return fInfo.reverseDataType() == null
                ? edgeLabel(fInfo.dataType(), fInfo.attrInfo())
                : edgeLabel(fInfo.reverseDataType(), fInfo.attrInfo());
    }

    @Override
    public String gremlinCompOp(Expressions.ComparisonExpression op) {
        switch (op.symbol()) {
            case "=":
                return "T.eq";

            case "!=":
                return "T.neq";

            case ">":
                return "T.gt";

            case ">=":
                return "T.gte";

            case "<":
                return "T.lt";

            case "<=":
                return "T.lte";

            default:
                throw new RuntimeException(("Comparison operator not supported in Gremlin: " + op));
        }
    }

    @Override
    public String loopObjectExpression(IDataType<?> dataType) {
        return "{it.object." + typeAttributeName() + " == '" + dataType.getName() + "'}";
    }
}
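To make the translation concrete, a small worked example of the strings this strategy hands to the Gremlin translator (the surrounding class is hypothetical; the values follow directly from the code above, assuming typeAttributeName() resolves to Constants.ENTITY_TYPE_PROPERTY_KEY, i.e. "type"):

    // Illustrative only: reproduce the Gremlin fragments the strategy emits.
    public final class GremlinFragmentSketch {
        public static void main(String[] args) {
            String typeAttributeName = "type";   // assumption: value of Constants.ENTITY_TYPE_PROPERTY_KEY
            String typeName = "Person";
            // loopObjectExpression(dataType) builds a type filter closure:
            String loop = "{it.object." + typeAttributeName + " == '" + typeName + "'}";
            System.out.println(loop);            // {it.object.type == 'Person'}
            // gremlinCompOp maps DSL comparison symbols, e.g. "=" -> T.eq, "<=" -> T.lte.
            System.out.println("'=' -> T.eq, '<=' -> T.lte");
        }
    }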
repository/src/test/java/org/apache/hadoop/metadata/discovery/GraphBackedDiscoveryServiceTest.java  View file @ 9d01934b

...
@@ -26,8 +26,11 @@ import org.apache.hadoop.metadata.repository.graph.GraphBackedMetadataRepository
import org.apache.hadoop.metadata.types.ClassType;
import org.apache.hadoop.metadata.types.Multiplicity;
import org.apache.hadoop.metadata.types.TypeSystem;
-import org.testng.annotations.AfterMethod;
-import org.testng.annotations.BeforeMethod;
+import org.codehaus.jettison.json.JSONArray;
+import org.codehaus.jettison.json.JSONObject;
+import org.testng.Assert;
+import org.testng.annotations.AfterClass;
+import org.testng.annotations.BeforeClass;
import org.testng.annotations.Guice;
import org.testng.annotations.Test;
...
@@ -36,32 +39,66 @@ import javax.inject.Inject;
@Guice(modules = RepositoryMetadataModule.class)
public class GraphBackedDiscoveryServiceTest {

    private TypeSystem typeSystem;

    @Inject
    private GraphBackedMetadataRepository repositoryService;

    @Inject
    private GraphBackedDiscoveryService discoveryService;

(The previous per-method @BeforeMethod setUp and @AfterMethod tearDown, which only fetched and reset the TypeSystem, are replaced by the class-level lifecycle below; testRawSearch1 also moves after the new DSL tests.)

    @BeforeClass
    public void setUp() throws Exception {
        TypeSystem typeSystem = TypeSystem.getInstance();
        typeSystem.reset();

        TestUtils.defineDeptEmployeeTypes(typeSystem);

        Referenceable hrDept = TestUtils.createDeptEg1(typeSystem);
        ClassType deptType = typeSystem.getDataType(ClassType.class, "Department");
        ITypedReferenceableInstance hrDept2 = deptType.convert(hrDept, Multiplicity.REQUIRED);

        repositoryService.createEntity(hrDept2, "Department");
    }

    @AfterClass
    public void tearDown() throws Exception {
        TypeSystem.getInstance().reset();
    }

    @Test
    public void testSearchByDSL() throws Exception {
        String dslQuery = "from Department";
        String jsonResults = discoveryService.searchByDSL(dslQuery);
        Assert.assertNotNull(jsonResults);

        JSONObject results = new JSONObject(jsonResults);
        Assert.assertEquals(results.length(), 3);
        System.out.println("results = " + results);

        Object query = results.get("query");
        Assert.assertNotNull(query);

        JSONObject dataType = results.getJSONObject("dataType");
        Assert.assertNotNull(dataType);

        String typeName = dataType.getString("typeName");
        Assert.assertNotNull(typeName);
        Assert.assertEquals(typeName, "Department");

        JSONArray rows = results.getJSONArray("rows");
        Assert.assertNotNull(rows);
        Assert.assertEquals(rows.length(), 1);
    }

    @Test(expectedExceptions = Throwable.class)
    public void testSearchByDSLBadQuery() throws Exception {
        String dslQuery = "from blah";
        discoveryService.searchByDSL(dslQuery);
        Assert.fail();
    }

    @Test
    public void testRawSearch1() throws Exception {
        // Query for all Vertices in Graph
        Object r = discoveryService.searchByGremlin("g.V.toList()");
        System.out.println("search result = " + r);
...
@@ -74,14 +111,4 @@ public class GraphBackedDiscoveryServiceTest {
        r = discoveryService.searchByGremlin(
                "g.V.filter{it.typeName == 'Person'}.'Person.name'.toList()");
        System.out.println("search result = " + r);
    }
-
-    @Test
-    public void testTextSearch() throws Exception {
-    }
-
-    @Test
-    public void testRelationshipWalk() throws Exception {
-    }
}
\ No newline at end of file
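The assertions above pin down the response envelope of searchByDSL: a JSON object with three members, "query", "dataType" (which carries "typeName") and "rows". A minimal sketch of reading it (assumes the jettison JSONObject used throughout this commit):

    import org.codehaus.jettison.json.JSONArray;
    import org.codehaus.jettison.json.JSONObject;

    // Sketch only: unpack the three top-level members asserted in testSearchByDSL.
    void printDslResult(String jsonResults) throws Exception {
        JSONObject results = new JSONObject(jsonResults);
        System.out.println("query    = " + results.get("query"));
        System.out.println("typeName = " + results.getJSONObject("dataType").getString("typeName"));
        JSONArray rows = results.getJSONArray("rows");
        System.out.println("rows     = " + rows.length());
    }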
repository/src/test/resources/graph.properties  View file @ 9d01934b

...
@@ -19,5 +19,7 @@
storage.backend=inmemory

# Graph Search Index
-index.search.backend=lucene
+index.search.backend=elasticsearch
index.search.directory=target/data/es
index.search.elasticsearch.client-only=false
index.search.elasticsearch.local-mode=true
webapp/src/test/java/org/apache/hadoop/metadata/web/resources/EntityJerseyResourceIT.java  View file @ 9d01934b

...
@@ -39,7 +39,6 @@ import org.apache.hadoop.metadata.types.Multiplicity;
import org.apache.hadoop.metadata.types.StructTypeDefinition;
import org.apache.hadoop.metadata.types.TraitType;
import org.codehaus.jettison.json.JSONArray;
import org.codehaus.jettison.json.JSONException;
import org.codehaus.jettison.json.JSONObject;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
...
@@ -304,26 +303,6 @@ public class EntityJerseyResourceIT extends BaseResourceIT {
        sumbitType(typesAsJSON, TABLE_TYPE);
    }

-    private void sumbitType(String typesAsJSON, String type) throws JSONException {
-        WebResource resource = service
-                .path("api/metadata/types/submit")
-                .path(type);
-
-        ClientResponse clientResponse = resource
-                .accept(MediaType.APPLICATION_JSON)
-                .type(MediaType.APPLICATION_JSON)
-                .method(HttpMethod.POST, ClientResponse.class, typesAsJSON);
-        Assert.assertEquals(clientResponse.getStatus(), Response.Status.OK.getStatusCode());
-
-        String responseAsString = clientResponse.getEntity(String.class);
-        Assert.assertNotNull(responseAsString);
-
-        JSONObject response = new JSONObject(responseAsString);
-        Assert.assertEquals(response.get("typeName"), type);
-        Assert.assertNotNull(response.get("types"));
-        Assert.assertNotNull(response.get("requestId"));
-    }

    private ITypedReferenceableInstance createHiveTableInstance() throws Exception {
        Referenceable databaseInstance = new Referenceable(DATABASE_TYPE);
        databaseInstance.set("name", DATABASE_NAME);
...
...