Commit 085d5c86 by Shwetha GS

ATLAS-347 Atlas search APIs should allow pagination of results (shwethags)

parent 67acb9d6
@@ -53,7 +53,6 @@ import org.apache.hadoop.hive.ql.metadata.Table;
 import org.apache.hadoop.hive.ql.session.SessionState;
 import org.apache.hadoop.security.UserGroupInformation;
 import org.codehaus.jettison.json.JSONArray;
-import org.codehaus.jettison.json.JSONException;
 import org.codehaus.jettison.json.JSONObject;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
@@ -218,7 +217,7 @@ public class HiveMetaStoreBridge {
     private Referenceable getEntityReferenceFromDSL(String typeName, String dslQuery) throws Exception {
         AtlasClient dgiClient = getAtlasClient();
-        JSONArray results = dgiClient.searchByDSL(dslQuery);
+        JSONArray results = dgiClient.searchByDSL(dslQuery, 1, 0);
         if (results.length() == 0) {
             return null;
         } else {
@@ -501,17 +500,6 @@ public class HiveMetaStoreBridge {
         atlasClient.updateEntity(referenceable.getId().id, referenceable);
     }
 
-    private Referenceable getEntityReferenceFromGremlin(String typeName, String gremlinQuery)
-            throws AtlasServiceException, JSONException {
-        AtlasClient client = getAtlasClient();
-        JSONArray results = client.searchByGremlin(gremlinQuery);
-        if (results.length() == 0) {
-            return null;
-        }
-        String guid = results.getJSONObject(0).getString(SEARCH_ENTRY_GUID_ATTR);
-        return new Referenceable(guid, typeName, null);
-    }
-
     public Referenceable fillStorageDesc(StorageDescriptor storageDesc, String tableQualifiedName,
                                          String sdQualifiedName, Id tableId) throws Exception {
         LOG.debug("Filling storage descriptor information for " + storageDesc);
......
@@ -32,7 +32,6 @@ import org.apache.hadoop.hive.ql.metadata.Table;
 import org.apache.hadoop.mapred.TextInputFormat;
 import org.codehaus.jettison.json.JSONArray;
 import org.codehaus.jettison.json.JSONException;
-import org.codehaus.jettison.json.JSONObject;
 import org.mockito.ArgumentMatcher;
 import org.mockito.Mock;
 import org.mockito.MockitoAnnotations;
@@ -41,7 +40,6 @@ import org.testng.annotations.BeforeMethod;
 import org.testng.annotations.Test;
 import scala.actors.threadpool.Arrays;
 
-import java.net.SocketTimeoutException;
 import java.util.ArrayList;
 import java.util.List;
@@ -98,12 +96,12 @@ public class HiveMetaStoreBridgeTest {
         // return existing table
         when(atlasClient.searchByDSL(HiveMetaStoreBridge.getTableDSLQuery(CLUSTER_NAME, TEST_DB_NAME, TEST_TABLE_NAME,
-                HiveDataTypes.HIVE_TABLE.getName(), false))).thenReturn(
+                HiveDataTypes.HIVE_TABLE.getName(), false), 1, 0)).thenReturn(
                 getEntityReference("82e06b34-9151-4023-aa9d-b82103a50e77"));
         when(atlasClient.getEntity("82e06b34-9151-4023-aa9d-b82103a50e77")).thenReturn(createTableReference());
         String processQualifiedName = HiveMetaStoreBridge.getTableQualifiedName(CLUSTER_NAME, hiveTables.get(0));
         when(atlasClient.searchByDSL(HiveMetaStoreBridge.getProcessDSLQuery(HiveDataTypes.HIVE_PROCESS.getName(),
-                processQualifiedName))).thenReturn(getEntityReference("82e06b34-9151-4023-aa9d-b82103a50e77"));
+                processQualifiedName), 1, 0)).thenReturn(getEntityReference("82e06b34-9151-4023-aa9d-b82103a50e77"));
 
         HiveMetaStoreBridge bridge = new HiveMetaStoreBridge(CLUSTER_NAME, hiveClient, atlasClient);
         bridge.importHiveMetadata(true);
@@ -117,7 +115,7 @@ public class HiveMetaStoreBridgeTest {
     private void returnExistingDatabase(String databaseName, AtlasClient atlasClient, String clusterName)
             throws AtlasServiceException, JSONException {
         when(atlasClient.searchByDSL(HiveMetaStoreBridge.getDatabaseDSLQuery(clusterName, databaseName,
-                HiveDataTypes.HIVE_DB.getName()))).thenReturn(
+                HiveDataTypes.HIVE_DB.getName()), 1, 0)).thenReturn(
                 getEntityReference("72e06b34-9151-4023-aa9d-b82103a50e76"));
     }
@@ -147,12 +145,11 @@ public class HiveMetaStoreBridgeTest {
         returnExistingDatabase(TEST_DB_NAME, atlasClient, CLUSTER_NAME);
 
         when(atlasClient.searchByDSL(HiveMetaStoreBridge.getTableDSLQuery(CLUSTER_NAME, TEST_DB_NAME,
-                TEST_TABLE_NAME,
-                HiveDataTypes.HIVE_TABLE.getName(), false))).thenReturn(
+                TEST_TABLE_NAME, HiveDataTypes.HIVE_TABLE.getName(), false), 1, 0)).thenReturn(
                 getEntityReference("82e06b34-9151-4023-aa9d-b82103a50e77"));
         String processQualifiedName = HiveMetaStoreBridge.getTableQualifiedName(CLUSTER_NAME, hiveTable);
         when(atlasClient.searchByDSL(HiveMetaStoreBridge.getProcessDSLQuery(HiveDataTypes.HIVE_PROCESS.getName(),
-                processQualifiedName))).thenReturn(getEntityReference("82e06b34-9151-4023-aa9d-b82103a50e77"));
+                processQualifiedName), 1, 0)).thenReturn(getEntityReference("82e06b34-9151-4023-aa9d-b82103a50e77"));
         when(atlasClient.getEntity("82e06b34-9151-4023-aa9d-b82103a50e77")).thenReturn(createTableReference());
 
         Partition partition = mock(Partition.class);
@@ -180,13 +177,12 @@ public class HiveMetaStoreBridgeTest {
         when(hiveClient.getTable(TEST_DB_NAME, TEST_TABLE_NAME)).thenThrow(new RuntimeException("Timeout while reading data from hive metastore"));
 
         when(atlasClient.searchByDSL(HiveMetaStoreBridge.getTableDSLQuery(CLUSTER_NAME, TEST_DB_NAME,
-                table2Name,
-                HiveDataTypes.HIVE_TABLE.getName(), false))).thenReturn(
+                table2Name, HiveDataTypes.HIVE_TABLE.getName(), false), 1, 0)).thenReturn(
                 getEntityReference("82e06b34-9151-4023-aa9d-b82103a50e77"));
         when(atlasClient.getEntity("82e06b34-9151-4023-aa9d-b82103a50e77")).thenReturn(createTableReference());
         String processQualifiedName = HiveMetaStoreBridge.getTableQualifiedName(CLUSTER_NAME, hiveTables.get(1));
         when(atlasClient.searchByDSL(HiveMetaStoreBridge.getProcessDSLQuery(HiveDataTypes.HIVE_PROCESS.getName(),
-                processQualifiedName))).thenReturn(getEntityReference("82e06b34-9151-4023-aa9d-b82103a50e77"));
+                processQualifiedName), 1, 0)).thenReturn(getEntityReference("82e06b34-9151-4023-aa9d-b82103a50e77"));
 
         HiveMetaStoreBridge bridge = new HiveMetaStoreBridge(CLUSTER_NAME, hiveClient, atlasClient);
         try {
@@ -206,13 +202,12 @@ public class HiveMetaStoreBridgeTest {
         when(hiveClient.getTable(TEST_DB_NAME, TEST_TABLE_NAME)).thenThrow(new RuntimeException("Timeout while reading data from hive metastore"));
 
         when(atlasClient.searchByDSL(HiveMetaStoreBridge.getTableDSLQuery(CLUSTER_NAME, TEST_DB_NAME,
-                table2Name,
-                HiveDataTypes.HIVE_TABLE.getName(), false))).thenReturn(
+                table2Name, HiveDataTypes.HIVE_TABLE.getName(), false), 10, 0)).thenReturn(
                 getEntityReference("82e06b34-9151-4023-aa9d-b82103a50e77"));
         when(atlasClient.getEntity("82e06b34-9151-4023-aa9d-b82103a50e77")).thenReturn(createTableReference());
         String processQualifiedName = HiveMetaStoreBridge.getTableQualifiedName(CLUSTER_NAME, hiveTables.get(1));
         when(atlasClient.searchByDSL(HiveMetaStoreBridge.getProcessDSLQuery(HiveDataTypes.HIVE_PROCESS.getName(),
-                processQualifiedName))).thenReturn(getEntityReference("82e06b34-9151-4023-aa9d-b82103a50e77"));
+                processQualifiedName), 10, 0)).thenReturn(getEntityReference("82e06b34-9151-4023-aa9d-b82103a50e77"));
 
         HiveMetaStoreBridge bridge = new HiveMetaStoreBridge(CLUSTER_NAME, hiveClient, atlasClient);
         try {
@@ -255,6 +250,4 @@ public class HiveMetaStoreBridgeTest {
             return attrValue.equals(((Referenceable) o).get(attrName));
         }
     }
 }
@@ -131,12 +131,12 @@ public class SqoopHookIT {
         waitFor(MAX_WAIT_TIME, new Predicate() {
             @Override
             public boolean evaluate() throws Exception {
-                JSONArray results = atlasClient.search(query);
+                JSONArray results = atlasClient.search(query, 10, 0);
                 return results.length() > 0;
             }
         });
 
-        JSONArray results = atlasClient.search(query);
+        JSONArray results = atlasClient.search(query, 10, 0);
         JSONObject row = results.getJSONObject(0).getJSONObject("t");
         return row.getString("id");
......
@@ -139,7 +139,7 @@ public class StormAtlasHookIT {
         String query = String.format("from %s where name = \"%s\"",
                 StormDataTypes.STORM_TOPOLOGY.getName(), TOPOLOGY_NAME);
-        JSONArray results = atlasClient.search(query);
+        JSONArray results = atlasClient.search(query, 10, 0);
         JSONObject row = results.getJSONObject(0);
         return row.has("$id$") ? row.getJSONObject("$id$").getString("id"): null;
......
@@ -101,6 +101,8 @@ public class AtlasClient {
     public static final String URI_TRAITS = "traits";
 
     public static final String QUERY = "query";
+    public static final String LIMIT = "limit";
+    public static final String OFFSET = "offset";
     public static final String QUERY_TYPE = "queryType";
     public static final String ATTRIBUTE_NAME = "property";
     public static final String ATTRIBUTE_VALUE = "value";
@@ -479,7 +481,6 @@ public class AtlasClient {
         //Search operations
         SEARCH(BASE_URI + URI_SEARCH, HttpMethod.GET, Response.Status.OK),
         SEARCH_DSL(BASE_URI + URI_SEARCH + "/dsl", HttpMethod.GET, Response.Status.OK),
-        SEARCH_GREMLIN(BASE_URI + URI_SEARCH + "/gremlin", HttpMethod.GET, Response.Status.OK),
         SEARCH_FULL_TEXT(BASE_URI + URI_SEARCH + "/fulltext", HttpMethod.GET, Response.Status.OK),
 
         //Lineage operations based on dataset name
@@ -981,17 +982,21 @@ public class AtlasClient {
     }
 
     /**
-     * Search using gremlin/dsl/full text
+     * Search using dsl/full text
      * @param searchQuery
-     * @return
+     * @param limit number of rows to be returned in the result, used for pagination. maxlimit > limit > 0. -1 maps to atlas.search.defaultlimit property value
+     * @param offset offset to the results returned, used for pagination. offset >= 0. -1 maps to offset 0
+     * @return Query results
      * @throws AtlasServiceException
      */
-    public JSONArray search(final String searchQuery) throws AtlasServiceException {
+    public JSONArray search(final String searchQuery, final int limit, final int offset) throws AtlasServiceException {
         JSONObject result = callAPIWithRetries(API.SEARCH, null, new ResourceCreator() {
             @Override
             public WebResource createResource() {
                 WebResource resource = getResource(API.SEARCH);
                 resource = resource.queryParam(QUERY, searchQuery);
+                resource = resource.queryParam(LIMIT, String.valueOf(limit));
+                resource = resource.queryParam(OFFSET, String.valueOf(offset));
                 return resource;
             }
         });
@@ -1006,39 +1011,20 @@ public class AtlasClient {
     /**
      * Search given query DSL
     * @param query DSL query
+     * @param limit number of rows to be returned in the result, used for pagination. maxlimit > limit > 0. -1 maps to atlas.search.defaultlimit property value
+     * @param offset offset to the results returned, used for pagination. offset >= 0. -1 maps to offset 0
      * @return result json object
      * @throws AtlasServiceException
      */
-    public JSONArray searchByDSL(final String query) throws AtlasServiceException {
+    public JSONArray searchByDSL(final String query, final int limit, final int offset) throws AtlasServiceException {
         LOG.debug("DSL query: {}", query);
         JSONObject result = callAPIWithRetries(API.SEARCH_DSL, null, new ResourceCreator() {
             @Override
             public WebResource createResource() {
                 WebResource resource = getResource(API.SEARCH_DSL);
                 resource = resource.queryParam(QUERY, query);
-                return resource;
-            }
-        });
-        try {
-            return result.getJSONArray(RESULTS);
-        } catch (JSONException e) {
-            throw new AtlasServiceException(e);
-        }
-    }
-
-    /**
-     * Search given gremlin query
-     * @param gremlinQuery Gremlin query
-     * @return result json object
-     * @throws AtlasServiceException
-     */
-    public JSONArray searchByGremlin(final String gremlinQuery) throws AtlasServiceException {
-        LOG.debug("Gremlin query: " + gremlinQuery);
-        JSONObject result = callAPIWithRetries(API.SEARCH_GREMLIN, null, new ResourceCreator() {
-            @Override
-            public WebResource createResource() {
-                WebResource resource = getResource(API.SEARCH_GREMLIN);
-                resource = resource.queryParam(QUERY, gremlinQuery);
+                resource = resource.queryParam(LIMIT, String.valueOf(limit));
+                resource = resource.queryParam(OFFSET, String.valueOf(offset));
                 return resource;
             }
         });
@@ -1052,15 +1038,20 @@ public class AtlasClient {
     /**
      * Search given full text search
      * @param query Query
+     * @param limit number of rows to be returned in the result, used for pagination. maxlimit > limit > 0. -1 maps to atlas.search.defaultlimit property value
+     * @param offset offset to the results returned, used for pagination. offset >= 0. -1 maps to offset 0
+     * NOTE: Pagination is not implemented currently for full text search, so limit and offset are not used
     * @return result json object
      * @throws AtlasServiceException
      */
-    public JSONObject searchByFullText(final String query) throws AtlasServiceException {
+    public JSONObject searchByFullText(final String query, final int limit, final int offset) throws AtlasServiceException {
         return callAPIWithRetries(API.SEARCH_FULL_TEXT, null, new ResourceCreator() {
             @Override
             public WebResource createResource() {
                 WebResource resource = getResource(API.SEARCH_FULL_TEXT);
                 resource = resource.queryParam(QUERY, query);
+                resource = resource.queryParam(LIMIT, String.valueOf(limit));
+                resource = resource.queryParam(OFFSET, String.valueOf(offset));
                 return resource;
             }
         });
......
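With all three client methods now taking limit and offset, a caller can page through results by stepping the offset. A minimal usage sketch, assuming an Atlas server at localhost:21000 and the signatures introduced above (the query string and page size are illustrative):

import org.apache.atlas.AtlasClient;
import org.apache.atlas.AtlasServiceException;
import org.codehaus.jettison.json.JSONArray;

public class PagedDslSearch {
    public static void main(String[] args) throws AtlasServiceException {
        AtlasClient client = new AtlasClient("http://localhost:21000");
        int pageSize = 100;   // must stay within atlas.search.maxlimit
        int offset = 0;

        while (true) {
            // Fetch one page of DSL results starting at the current offset
            JSONArray page = client.searchByDSL("hive_table", pageSize, offset);
            System.out.println("fetched " + page.length() + " rows at offset " + offset);
            if (page.length() < pageSize) {
                break;   // a short page means the result set is exhausted
            }
            offset += pageSize;
        }
    }
}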
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.atlas;
import org.apache.commons.configuration.Configuration;
/**
* Utility for reading properties in atlas-application.properties.
*/
public final class AtlasProperties {
private static final Configuration APPLICATION_PROPERTIES;
private AtlasProperties() { }
static {
try {
APPLICATION_PROPERTIES = ApplicationProperties.get();
} catch (AtlasException e) {
throw new RuntimeException(e);
}
}
/**
* Enum that encapsulated each property name and its default value.
*/
public enum AtlasProperty {
SEARCH_MAX_LIMIT("atlas.search.maxlimit", 10000),
SEARCH_DEFAULT_LIMIT("atlas.search.defaultlimit", 100);
private final String propertyName;
private final Object defaultValue;
AtlasProperty(String propertyName, Object defaultValue) {
this.propertyName = propertyName;
this.defaultValue = defaultValue;
}
}
public static <T> T getProperty(AtlasProperty property) {
Object value = APPLICATION_PROPERTIES.getProperty(property.propertyName);
if (value == null) {
return (T) property.defaultValue;
} else {
return (T) value;
}
}
}
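A short usage sketch of the new AtlasProperties utility, mirroring how DataSetLineageService consumes it further down in this change (the wrapping class here is hypothetical):

import org.apache.atlas.AtlasProperties;

public class SearchLimitDefaults {
    public static void main(String[] args) {
        // Each enum constant carries its default, returned when the property is absent
        int maxLimit = AtlasProperties.getProperty(AtlasProperties.AtlasProperty.SEARCH_MAX_LIMIT);
        int defaultLimit = AtlasProperties.getProperty(AtlasProperties.AtlasProperty.SEARCH_DEFAULT_LIMIT);
        System.out.println("maxlimit=" + maxLimit + ", defaultlimit=" + defaultLimit);
    }
}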
@@ -158,7 +158,7 @@ public final class ParamChecker {
      * @param maxValue
      * @param name
      */
-    public static void lessThan(short value, short maxValue, String name) {
+    public static void lessThan(long value, long maxValue, String name) {
         if (value <= 0) {
             throw new IllegalArgumentException(name + " should be > 0, current value " + value);
         }
@@ -166,4 +166,10 @@ public final class ParamChecker {
             throw new IllegalArgumentException(name + " should be <= " + maxValue + ", current value " + value);
         }
     }
+
+    public static void greaterThan(long value, long minValue, String name) {
+        if (value <= minValue) {
+            throw new IllegalArgumentException(name + " should be > " + minValue + ", current value " + value);
+        }
+    }
 }
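The widened lessThan plus the new greaterThan together bound a pagination request. A hedged sketch of how a caller might combine them, assuming the ParamChecker above is on the classpath (the class and method names here are illustrative, not actual webapp code):

public class SearchParamValidator {
    // lessThan enforces 0 < limit <= maxLimit; greaterThan enforces offset > -1, i.e. offset >= 0
    public static void validateSearchParams(long limit, long offset, long maxLimit) {
        ParamChecker.lessThan(limit, maxLimit, "limit");
        ParamChecker.greaterThan(offset, -1, "offset");
    }
}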
@@ -134,6 +134,18 @@ atlas.lineage.hive.table.schema.query=hive_table where name=?, columns
 </verbatim>
 
+---++ Search Configs
+Search APIs (DSL and full text search) support pagination and have optional limit and offset arguments. The following configs are related to search pagination:
+
+<verbatim>
+# Default limit used when limit is not specified in API
+atlas.search.defaultlimit=100
+
+# Maximum limit allowed in API. Limits maximum results that can be fetched to make sure the atlas server doesn't run out of memory
+atlas.search.maxlimit=10000
+</verbatim>
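For example, fetching the second page of 10 DSL results passes the pagination arguments as query parameters. An illustrative request, using the search endpoint path defined in AtlasClient elsewhere in this change:
<verbatim>
GET http://localhost:21000/api/atlas/discovery/search/dsl?query=from+hive_table&limit=10&offset=10
</verbatim>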
 ---++ Notification Configs
 Refer http://kafka.apache.org/documentation.html#configuration for Kafka configuration. All Kafka configs should be prefixed with 'atlas.kafka.'
......
@@ -11,9 +11,9 @@ The grammar for the DSL is below.
 <verbatim>
 queryWithPath: query ~ opt(WITHPATH)
 
-query: rep1sep(singleQuery, opt(COMMA))
+query: querySrc ~ opt(loopExpression) ~ opt(selectClause) ~ opt(orderby) ~ opt(limitOffset)
 
-singleQuery: singleQrySrc ~ opt(loopExpression) ~ opt(selectClause) ~ opt(orderby) ~ opt(limitOffset)
+querySrc: rep1sep(singleQrySrc, opt(COMMA))
 
 singleQrySrc = FROM ~ fromSrc ~ opt(WHERE) ~ opt(expr ^? notIdExpression) |
     WHERE ~ (expr ^? notIdExpression) |
@@ -22,7 +22,7 @@ singleQrySrc = FROM ~ fromSrc ~ opt(WHERE) ~ opt(expr ^? notIdExpression) |
 fromSrc: identifier ~ AS ~ alias | identifier
 
-orderby: ORDERBY ~ order ~ opt (sortOrder)
+orderby: ORDERBY ~ expr ~ opt (sortOrder)
 
 limitOffset: LIMIT ~ lmt ~ opt (offset)
@@ -87,24 +87,30 @@ Language Notes:
    * The _!WithPath_ clause can be used with transitive closure queries to retrieve the Path that
      connects the two related Entities. (We also provide a higher level interface for Closure Queries
      see scaladoc for 'org.apache.atlas.query.ClosureQuery')
-   * ORDERBY is optional. Orderby clause should be specified in single quote ('). When order by clause is specified case insensitive sorting is done in ascending order.
-     For sorting in descending order specify 'DESC' after order by clause. If no order by is specified then no default sorting is applied.
+   * ORDERBY is optional. When order by clause is specified, case insensitive sorting is done based on the column specified.
+     For sorting in descending order specify 'DESC' after order by clause. If no order by is specified, then no default sorting is applied.
    * LIMIT is optional. It limits the maximum number of objects to be fetched starting from specified optional offset. If no offset is specified count starts from beginning.
    * There are couple of Predicate functions different from SQL:
      * _is_ or _isa_ can be used to filter Entities that have a particular Trait.
      * _has_ can be used to filter Entities that have a value for a particular Attribute.
-   * When querying for a space delimited multiple-word identifier, it need to be enclosed within
-     backquote (`)
+   * Any identifiers or constants with special characters(space,$,",{,}) should be enclosed within backquote (`)
 ---+++ DSL Examples
+For the model,
+Asset - attributes name, owner, description
+DB - supertype Asset - attributes clusterName, parameters, comment
+Column - extends Asset - attributes type, comment
+Table - supertype Asset - db, columns, parameters, comment
+Traits - PII, Log Data
+
+DSL queries:
    * from DB
    * DB where name="Reporting" select name, owner
-   * DB where name="Reporting" select name, owner orderby 'name'
+   * DB where name="Reporting" select name, owner orderby name
    * DB where name="Reporting" select name limit 10
    * DB where name="Reporting" select name, owner limit 10 offset 0
-   * DB where name="Reporting" select name, owner orderby 'name' limit 10 offset 5
-   * DB where name="Reporting" select name, owner orderby 'name' desc limit 10 offset 5
+   * DB where name="Reporting" select name, owner orderby name limit 10 offset 5
+   * DB where name="Reporting" select name, owner orderby name desc limit 10 offset 5
    * DB has name
    * DB is !JdbcAccess
    * Column where Column isa PII
@@ -112,7 +118,6 @@ Language Notes:
    * Table where name="sales_fact", columns as column select column.name, column.dataType, column.comment
    * `Log Data`
 
 ---++ Full-text Search
 Atlas also exposes a lucene style full-text search capability.
\ No newline at end of file
@@ -6,6 +6,7 @@ INCOMPATIBLE CHANGES:
 
 ALL CHANGES:
+ATLAS-347 Atlas search APIs should allow pagination of results (shwethags)
 ATLAS-639 Exception for lineage request (svimal2106 via shwethags)
 ATLAS-1022 Update typesystem wiki with details (yhemanth via shwethags)
 ATLAS-1021 Update Atlas architecture wiki (yhemanth via sumasai)
......
@@ -22,12 +22,14 @@ import com.thinkaurelius.titan.core.TitanGraph;
 import org.apache.atlas.ApplicationProperties;
 import org.apache.atlas.AtlasClient;
 import org.apache.atlas.AtlasException;
+import org.apache.atlas.AtlasProperties;
 import org.apache.atlas.GraphTransaction;
 import org.apache.atlas.discovery.graph.DefaultGraphPersistenceStrategy;
 import org.apache.atlas.discovery.graph.GraphBackedDiscoveryService;
 import org.apache.atlas.query.GremlinQueryResult;
 import org.apache.atlas.query.InputLineageClosureQuery;
 import org.apache.atlas.query.OutputLineageClosureQuery;
+import org.apache.atlas.query.QueryParams;
 import org.apache.atlas.repository.MetadataRepository;
 import org.apache.atlas.repository.graph.GraphProvider;
 import org.apache.atlas.typesystem.exception.EntityNotFoundException;
@@ -173,7 +175,8 @@ public class DataSetLineageService implements LineageService {
     private String getSchemaForId(String typeName, String guid) throws DiscoveryException {
         final String schemaQuery =
                 String.format(propertiesConf.getString(DATASET_SCHEMA_QUERY_PREFIX + typeName), guid);
-        return discoveryService.searchByDSL(schemaQuery);
+        int limit = AtlasProperties.getProperty(AtlasProperties.AtlasProperty.SEARCH_MAX_LIMIT);
+        return discoveryService.searchByDSL(schemaQuery, new QueryParams(limit, 0));
     }
 
     @Override
@@ -192,7 +195,7 @@ public class DataSetLineageService implements LineageService {
      */
     private ReferenceableInstance validateDatasetNameExists(String datasetName) throws AtlasException {
         final String tableExistsQuery = String.format(DATASET_NAME_EXISTS_QUERY, datasetName);
-        GremlinQueryResult queryResult = discoveryService.evaluate(tableExistsQuery);
+        GremlinQueryResult queryResult = discoveryService.evaluate(tableExistsQuery, new QueryParams(1, 0));
         if (!(queryResult.rows().length() > 0)) {
             throw new EntityNotFoundException(datasetName + " does not exist");
         }
@@ -207,7 +210,7 @@ public class DataSetLineageService implements LineageService {
      */
     private String validateDatasetExists(String guid) throws AtlasException {
         final String datasetExistsQuery = String.format(DATASET_EXISTS_QUERY, guid);
-        GremlinQueryResult queryResult = discoveryService.evaluate(datasetExistsQuery);
+        GremlinQueryResult queryResult = discoveryService.evaluate(datasetExistsQuery, new QueryParams(1, 0));
         if (!(queryResult.rows().length() > 0)) {
             throw new EntityNotFoundException("Dataset with guid = " + guid + " does not exist");
         }
......
@@ -18,6 +18,8 @@
 package org.apache.atlas.discovery;
 
+import org.apache.atlas.query.QueryParams;
+
 import java.util.List;
 import java.util.Map;
@@ -27,17 +29,22 @@ import java.util.Map;
 public interface DiscoveryService {
 
     /**
-     * Full text search
+     * Searches using Full text query
+     * @param query query string
+     * @param queryParams Default query parameters like limit, offset
+     * @return results json
+     * @throws DiscoveryException
      */
-    String searchByFullText(String query) throws DiscoveryException;
+    String searchByFullText(String query, QueryParams queryParams) throws DiscoveryException;
 
     /**
-     * Search using query DSL.
-     *
-     * @param dslQuery query in DSL format.
-     * @return JSON representing the type and results.
+     * Searches using DSL query
+     * @param dslQuery query string
+     * @param queryParams Default query parameters like limit, offset
+     * @return results json
+     * @throws DiscoveryException
      */
-    String searchByDSL(String dslQuery) throws DiscoveryException;
+    String searchByDSL(String dslQuery, QueryParams queryParams) throws DiscoveryException;
 
     /**
      * Assumes the User is familiar with the persistence structure of the Repository.
......
@@ -142,7 +142,10 @@ public class DefaultGraphPersistenceStrategy implements GraphPersistenceStrategi
                     if (dataType.getName().equals(idType.getName())) {
                         structInstance.set(idType.typeNameAttrName(), GraphHelper.getProperty(structVertex, typeAttributeName()));
                         structInstance.set(idType.idAttrName(), GraphHelper.getProperty(structVertex, idAttributeName()));
-                        structInstance.set(idType.stateAttrName(), GraphHelper.getProperty(structVertex, stateAttributeName()));
+                        String stateValue = GraphHelper.getProperty(structVertex, stateAttributeName());
+                        if (stateValue != null) {
+                            structInstance.set(idType.stateAttrName(), stateValue);
+                        }
                     } else {
                         metadataRepository.getGraphToInstanceMapper()
                             .mapVertexToInstance(structVertex, structInstance, structType.fieldMapping().fields);
......
@@ -32,6 +32,7 @@ import org.apache.atlas.query.GremlinEvaluator;
 import org.apache.atlas.query.GremlinQuery;
 import org.apache.atlas.query.GremlinQueryResult;
 import org.apache.atlas.query.GremlinTranslator;
+import org.apache.atlas.query.QueryParams;
 import org.apache.atlas.query.QueryParser;
 import org.apache.atlas.query.QueryProcessor;
 import org.apache.atlas.repository.Constants;
@@ -83,8 +84,8 @@ public class GraphBackedDiscoveryService implements DiscoveryService {
     // .html#query-string-syntax for query syntax
     @Override
     @GraphTransaction
-    public String searchByFullText(String query) throws DiscoveryException {
-        String graphQuery = String.format("v.%s:(%s)", Constants.ENTITY_TEXT_PROPERTY_KEY, query);
+    public String searchByFullText(String query, QueryParams queryParams) throws DiscoveryException {
+        String graphQuery = String.format("v.\"%s\":(%s)", Constants.ENTITY_TEXT_PROPERTY_KEY, query);
         LOG.debug("Full text query: {}", graphQuery);
         Iterator<TitanIndexQuery.Result<Vertex>> results =
                 titanGraph.indexQuery(Constants.FULLTEXT_INDEX, graphQuery).vertices().iterator();
@@ -112,27 +113,20 @@ public class GraphBackedDiscoveryService implements DiscoveryService {
         return response.toString();
     }
 
-    /**
-     * Search using query DSL.
-     *
-     * @param dslQuery query in DSL format.
-     * @return JSON representing the type and results.
-     */
     @Override
     @GraphTransaction
-    public String searchByDSL(String dslQuery) throws DiscoveryException {
-        LOG.info("Executing dsl query={}", dslQuery);
-        GremlinQueryResult queryResult = evaluate(dslQuery);
+    public String searchByDSL(String dslQuery, QueryParams queryParams) throws DiscoveryException {
+        GremlinQueryResult queryResult = evaluate(dslQuery, queryParams);
         return queryResult.toJson();
     }
 
-    public GremlinQueryResult evaluate(String dslQuery) throws DiscoveryException {
+    public GremlinQueryResult evaluate(String dslQuery, QueryParams queryParams) throws DiscoveryException {
         LOG.info("Executing dsl query={}", dslQuery);
         try {
-            Either<Parsers.NoSuccess, Expressions.Expression> either = QueryParser.apply(dslQuery);
+            Either<Parsers.NoSuccess, Expressions.Expression> either = QueryParser.apply(dslQuery, queryParams);
             if (either.isRight()) {
                 Expressions.Expression expression = either.right().get();
-                return evaluate(expression);
+                return evaluate(dslQuery, expression);
             } else {
                 throw new DiscoveryException("Invalid expression : " + dslQuery + ". " + either.left());
             }
@@ -141,8 +135,16 @@ public class GraphBackedDiscoveryService implements DiscoveryService {
         }
     }
 
-    public GremlinQueryResult evaluate(Expressions.Expression expression) {
+    private GremlinQueryResult evaluate(String dslQuery, Expressions.Expression expression) {
         Expressions.Expression validatedExpression = QueryProcessor.validate(expression);
+
+        //If the final limit is 0, don't launch the query, return with 0 rows
+        if (validatedExpression instanceof Expressions.LimitExpression
+                && ((Expressions.LimitExpression) validatedExpression).limit().rawValue() == 0) {
+            return new GremlinQueryResult(dslQuery, validatedExpression.dataType(),
+                    scala.collection.immutable.List.empty());
+        }
+
         GremlinQuery gremlinQuery = new GremlinTranslator(validatedExpression, graphPersistenceStrategy).translate();
         LOG.debug("Query = {}", validatedExpression);
         LOG.debug("Expression Tree = {}", validatedExpression.treeString());
@@ -176,40 +178,42 @@ public class GraphBackedDiscoveryService implements DiscoveryService {
         }
     }
 
-    private List<Map<String, String>> extractResult(Object o) throws DiscoveryException {
-        if (!(o instanceof List)) {
-            throw new DiscoveryException(String.format("Cannot process result %s", o.toString()));
-        }
-        List l = (List) o;
-        List<Map<String, String>> result = new ArrayList<>();
-        for (Object r : l) {
-            Map<String, String> oRow = new HashMap<>();
-            if (r instanceof Map) {
-                @SuppressWarnings("unchecked") Map<Object, Object> iRow = (Map) r;
-                for (Map.Entry e : iRow.entrySet()) {
-                    Object k = e.getKey();
-                    Object v = e.getValue();
-                    oRow.put(k.toString(), v.toString());
-                }
-            } else if (r instanceof TitanVertex) {
-                Iterable<TitanProperty> ps = ((TitanVertex) r).getProperties();
-                for (TitanProperty tP : ps) {
-                    String pName = tP.getPropertyKey().getName();
-                    Object pValue = ((TitanVertex) r).getProperty(pName);
-                    if (pValue != null) {
-                        oRow.put(pName, pValue.toString());
-                    }
-                }
-            } else if (r instanceof String) {
-                oRow.put("", r.toString());
-            } else {
-                throw new DiscoveryException(String.format("Cannot process result %s", o.toString()));
-            }
-            result.add(oRow);
-        }
-        return result;
-    }
+    private List<Map<String, String>> extractResult(final Object o) throws DiscoveryException {
+        List<Map<String, String>> result = new ArrayList<>();
+        if (o instanceof List) {
+            List l = (List) o;
+            for (Object r : l) {
+                Map<String, String> oRow = new HashMap<>();
+                if (r instanceof Map) {
+                    @SuppressWarnings("unchecked") Map<Object, Object> iRow = (Map) r;
+                    for (Map.Entry e : iRow.entrySet()) {
+                        Object k = e.getKey();
+                        Object v = e.getValue();
+                        oRow.put(k.toString(), v.toString());
+                    }
+                } else if (r instanceof TitanVertex) {
+                    Iterable<TitanProperty> ps = ((TitanVertex) r).getProperties();
+                    for (TitanProperty tP : ps) {
+                        String pName = tP.getPropertyKey().getName();
+                        Object pValue = ((TitanVertex) r).getProperty(pName);
+                        if (pValue != null) {
+                            oRow.put(pName, pValue.toString());
+                        }
+                    }
+                } else if (r instanceof String) {
+                    oRow.put("", r.toString());
+                } else {
+                    throw new DiscoveryException(String.format("Cannot process result %s", o.toString()));
+                }
+                result.add(oRow);
+            }
+        } else {
+            result.add(new HashMap<String, String>() {{
+                put("result", o.toString());
+            }});
+        }
+        return result;
+    }
......
@@ -18,11 +18,8 @@
 package org.apache.atlas.query
 
-import java.util
-
 import com.google.common.collect.ImmutableCollection
 import org.apache.atlas.AtlasException
-import org.apache.atlas.typesystem.ITypedInstance
 import org.apache.atlas.typesystem.types.DataTypes.{ArrayType, PrimitiveType, TypeCategory}
 import org.apache.atlas.typesystem.types._
@@ -35,15 +32,15 @@ object Expressions {
         extends AtlasException(message, cause, enableSuppression, writableStackTrace) {
 
         def this(e: Expression, message: String) {
-            this(e, message, null, false, false)
+            this(e, message, null, false, true)
         }
 
         def this(e: Expression, message: String, cause: Throwable) {
-            this(e, message, cause, false, false)
+            this(e, message, cause, false, true)
         }
 
         def this(e: Expression, cause: Throwable) {
-            this(e, null, cause, false, false)
+            this(e, null, cause, false, true)
         }
 
         override def getMessage: String = {
@@ -333,7 +330,7 @@ object Expressions {
         def limit(lmt: Literal[Integer], offset : Literal[Integer]) = new LimitExpression(this, lmt, offset)
 
-        def order(odr: String, asc: Boolean) = new OrderExpression(this, odr, asc)
+        def order(odr: Expression, asc: Boolean) = new OrderExpression(this, odr, asc)
     }
 
     trait BinaryNode {
@@ -775,9 +772,9 @@ object Expressions {
         override def toString = s"$child withPath"
     }
 
     case class LimitExpression(child: Expression, limit: Literal[Integer], offset: Literal[Integer]) extends Expression with UnaryNode {
 
         override def toString = s"$child limit $limit offset $offset "
 
         lazy val dataType = {
             if (!resolved) {
@@ -788,9 +785,9 @@
         }
     }
 
-    case class OrderExpression(child: Expression, odr: String, asc: Boolean) extends Expression with UnaryNode {
+    case class OrderExpression(child: Expression, odr: Expression, asc: Boolean) extends Expression with UnaryNode {
 
-        override def toString = s"$child order $odr asc $asc"
+        override def toString = s"$child orderby $odr asc $asc"
 
         lazy val dataType = {
             if (!resolved) {
......
@@ -317,13 +317,20 @@ class GremlinTranslator(expr: Expression,
             s"${genQuery(child, inSelect)}.path"
         }
         case order@OrderExpression(child, odr, asc) => {
+            var orderExpression = odr
+            if(odr.isInstanceOf[BackReference]) { orderExpression = odr.asInstanceOf[BackReference].reference }
+            else if (odr.isInstanceOf[AliasExpression]) { orderExpression = odr.asInstanceOf[AliasExpression].child}
+
+            val orderbyProperty = genQuery(orderExpression, false)
+            val bProperty = s"it.b.$orderbyProperty"
+            val aProperty = s"it.a.$orderbyProperty"
+            val aCondition = s"($aProperty != null ? $aProperty.toLowerCase(): $aProperty)"
+            val bCondition = s"($bProperty != null ? $bProperty.toLowerCase(): $bProperty)"
+
             var orderby = ""
             asc match {
                 //builds a closure comparison function based on provided order by clause in DSL. This will be used to sort the results by gremlin order pipe.
                 //Ordering is case insensitive.
-                case false=> orderby = s"order{(it.b.getProperty('$odr') !=null ? it.b.getProperty('$odr').toLowerCase(): it.b.getProperty('$odr')) <=> (it.a.getProperty('$odr') != null ? it.a.getProperty('$odr').toLowerCase(): it.a.getProperty('$odr'))}"//descending
-                case _ => orderby = s"order{(it.a.getProperty('$odr') != null ? it.a.getProperty('$odr').toLowerCase(): it.a.getProperty('$odr')) <=> (it.b.getProperty('$odr') !=null ? it.b.getProperty('$odr').toLowerCase(): it.b.getProperty('$odr'))}"
+                case false=> orderby = s"order{$bCondition <=> $aCondition}"//descending
+                case _ => orderby = s"order{$aCondition <=> $bCondition}"
             }
             s"""${genQuery(child, inSelect)}.$orderby"""
         }
@@ -410,7 +417,7 @@ class GremlinTranslator(expr: Expression,
         e1 = e1.transformUp(traitClauseWithInstanceForTop(e1))
 
         //Following code extracts the select expressions from expression tree.
         val se = SelectExpressionHelper.extractSelectExpression(e1)
         if (se.isDefined)
         {
......
@@ -93,7 +93,7 @@ trait ExpressionUtils {
         input.limit(lmt, offset)
     }
 
-    def order(input: Expression, odr: String, asc: Boolean) = {
+    def order(input: Expression, odr: Expression, asc: Boolean) = {
         input.order(odr, asc)
     }
@@ -118,6 +118,9 @@ trait ExpressionUtils {
         sngQuery2.transformUp(replaceIdWithField(leftSrcId, snglQuery1.field(leftSrcId.name)))
     }
 }
+
+case class QueryParams(limit: Int, offset: Int)
 /**
  * Query parser is used to parse the DSL query. It uses scala PackratParsers and pattern matching to extract the expressions.
  * It builds up an expression tree.
@@ -134,7 +137,12 @@ object QueryParser extends StandardTokenParsers with QueryKeywords with Expressi
     override val lexical = new QueryLexer(queryreservedWords, querydelims)
-    def apply(input: String): Either[NoSuccess, Expression] = synchronized {
+    /**
+     * @param input query string
+     * @param queryParams query parameters that contains limit and offset
+     * @return
+     */
+    def apply(input: String)(implicit queryParams: QueryParams = null): Either[NoSuccess, Expression] = synchronized {
         phrase(queryWithPath)(new lexical.Scanner(input)) match {
             case Success(r, x) => Right(r)
             case f@Failure(m, x) => Left(f)
@@ -142,23 +150,21 @@ object QueryParser extends StandardTokenParsers with QueryKeywords with Expressi
         }
     }
-    def queryWithPath = query ~ opt(WITHPATH) ^^ {
+    import scala.math._
+
+    def queryWithPath(implicit queryParams: QueryParams) = query ~ opt(WITHPATH) ^^ {
         case q ~ None => q
         case q ~ p => q.path()
     }
-    def query: Parser[Expression] = rep1sep(singleQuery, opt(COMMA)) ^^ { l => l match {
-        case h :: Nil => h
-        case h :: t => t.foldLeft(h)(merge(_, _))
-      }
-    }
-
     /**
      * A singleQuery can have the following forms:
      * 1. SrcQuery [select] [orderby desc] [Limit x offset y] -> source query followed by optional select statement followed by optional order by followed by optional limit
      * eg: Select "hive_db where hive_db has name orderby 'hive_db.owner' limit 2 offset 1"
-     * @return
+     *
+     * @return
     */
-    def singleQuery = singleQrySrc ~ opt(loopExpression) ~ opt(selectClause) ~ opt(orderby) ~ opt(limitOffset) ^^ {
+    def query(implicit queryParams: QueryParams) = querySrc ~ opt(loopExpression) ~ opt(selectClause) ~ opt(orderby) ~ opt(limitOffset) ^^ {
         case s ~ l ~ sel ~ odr ~ lmtoff => {
             var expressiontree = s
             if (l.isDefined) //Note: The order of if statements is important.
@@ -169,18 +175,30 @@ object QueryParser extends StandardTokenParsers with QueryKeywords with Expressi
             {
                 expressiontree = order(expressiontree, odr.get._1, odr.get._2)
             }
-            if (lmtoff.isDefined)
-            {
-                expressiontree = limit(expressiontree, int (lmtoff.get._1), int (lmtoff.get._2))
-            }
             if (sel.isDefined)
             {
                 expressiontree = select(expressiontree, sel.get)
             }
+            if (queryParams != null && lmtoff.isDefined)
+            {
+                val mylimit = int(min(queryParams.limit, max(lmtoff.get._1 - queryParams.offset, 0)))
+                val myoffset = int(queryParams.offset + lmtoff.get._2)
+                expressiontree = limit(expressiontree, mylimit, myoffset)
+            } else if(lmtoff.isDefined) {
+                expressiontree = limit(expressiontree, int(lmtoff.get._1), int(lmtoff.get._2))
+            } else if(queryParams != null) {
+                expressiontree = limit(expressiontree, int(queryParams.limit), int(queryParams.offset))
+            }
             expressiontree
         }
     }
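The arithmetic above merges the REST-level QueryParams with a limit/offset written in the query itself: the effective offset is the sum of both offsets, and the effective limit is the API limit clipped so it never exceeds what the query's own limit leaves after the API offset. A worked sketch of the same computation (Java used here purely for illustration; effectiveWindow is a hypothetical helper, not part of the patch):

class EffectiveWindow {
    // Mirrors: min(queryParams.limit, max(queryLimit - queryParams.offset, 0))
    //     and: queryParams.offset + queryOffset
    static int[] compute(int queryLimit, int queryOffset, int apiLimit, int apiOffset) {
        int limit = Math.min(apiLimit, Math.max(queryLimit - apiOffset, 0));
        int offset = apiOffset + queryOffset;
        return new int[]{limit, offset};
    }
}

// e.g. a query "... limit 10 offset 5" called with QueryParams(3, 4):
// compute(10, 5, 3, 4) -> {3, 9}, i.e. three rows starting at row 9.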
+    def querySrc: Parser[Expression] = rep1sep(singleQrySrc, opt(COMMA)) ^^ { l => l match {
+        case h :: Nil => h
+        case h :: t => t.foldLeft(h)(merge(_, _))
+      }
+    }
     /**
      * A SingleQuerySrc can have the following forms:
      * 1. FROM id [WHERE] [expr] -> from optionally followed by a filter
@@ -218,14 +236,14 @@ object QueryParser extends StandardTokenParsers with QueryKeywords with Expressi
     def fromSrc = identifier ~ AS ~ alias ^^ { case s ~ a ~ al => s.as(al)} |
         identifier
 
-    def orderby = ORDERBY ~ order ~ opt (asce) ^^ {
+    def orderby = ORDERBY ~ expr ~ opt (asce) ^^ {
         case o ~ odr ~ None => (odr, true)
         case o ~ odr ~ asc => (odr, asc.get)
     }
 
-    def limitOffset = LIMIT ~ lmt ~ opt (offset) ^^ {
-        case l ~ lt ~ None => (lt, 0)
-        case l ~ lt ~ of => (lt, of.get)
+    def limitOffset: Parser[(Int, Int)] = LIMIT ~ lmt ~ opt (offset) ^^ {
+        case l ~ lt ~ None => (lt.toInt, 0)
+        case l ~ lt ~ of => (lt.toInt, of.get.toInt)
     }
 
     def offset = OFFSET ~ ofset ^^ {
@@ -237,7 +255,7 @@ object QueryParser extends StandardTokenParsers with QueryKeywords with Expressi
         case _ => true
     }
 
-    def loopExpression: Parser[(Expression, Option[Literal[Integer]], Option[String])] =
+    def loopExpression(implicit queryParams: QueryParams): Parser[(Expression, Option[Literal[Integer]], Option[String])] =
         LOOP ~ (LPAREN ~> query <~ RPAREN) ~ opt(intConstant <~ TIMES) ~ opt(AS ~> alias) ^^ {
             case l ~ e ~ None ~ a => (e, None, a)
             case l ~ e ~ Some(i) ~ a => (e, Some(int(i)), a)
@@ -297,8 +315,6 @@ object QueryParser extends StandardTokenParsers with QueryKeywords with Expressi
     def ofset = intConstant
 
-    def order = ident | stringLit
-
     def asc = ident | stringLit
 
     def literal = booleanConstant ^^ {
......
@@ -97,7 +97,7 @@ class Resolver(srcExpr: Option[Expression] = None, aliases: Map[String, Expressi
}
case order@OrderExpression(child, odr, asc) => {
val r = new Resolver(Some(child), child.namedExpressions)
-return new OrderExpression(child.transformUp(r), odr, asc)
return new OrderExpression(child, odr.transformUp(r), asc)
}
case x => x
}
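The one-line Resolver change above is easy to miss: the old code resolved the child and left the order-by attribute untouched, while the new code keeps the child as-is and resolves the order-by expression itself against the child's named expressions. That is presumably what lets the bare orderby name / orderby owner forms in the reworked test data below bind to select aliases.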
......
@@ -22,7 +22,7 @@ import java.util
import java.util.concurrent.atomic.AtomicInteger
import org.apache.atlas.AtlasException
-import org.apache.atlas.query.Expressions.{PathExpression, SelectExpression}
import org.apache.atlas.query.Expressions.{LimitExpression, PathExpression, SelectExpression}
import org.apache.atlas.repository.Constants
import org.apache.atlas.typesystem.types.DataTypes.{ArrayType, PrimitiveType, TypeCategory}
import org.apache.atlas.typesystem.types._
@@ -80,7 +80,7 @@ object TypeUtils {
val resultAttr = new AttributeDefinition(resultAttrName, resultType.getName, Multiplicity.REQUIRED, false, null)
val typName = s"${TEMP_STRUCT_NAME_PREFIX}${tempStructCounter.getAndIncrement}"
val m : java.util.HashMap[String, IDataType[_]] = new util.HashMap[String, IDataType[_]]()
-if ( pE.child.isInstanceOf[SelectExpression]) {
if (pE.child.isInstanceOf[SelectExpression] || pE.child.isInstanceOf[LimitExpression]) {
m.put(pE.child.dataType.getName, pE.child.dataType)
}
typSystem.defineQueryResultType(typName, m, pathAttr, resultAttr);
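The widened instanceof check reads as a direct consequence of pagination: once limits are pushed into the expression tree, the child of a PathExpression can be a LimitExpression rather than a SelectExpression, and its result type still has to be registered in the temporary struct.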
......
@@ -23,6 +23,7 @@ import org.apache.atlas.AtlasClient;
import org.apache.atlas.AtlasException;
import org.apache.atlas.BaseRepositoryTest;
import org.apache.atlas.RepositoryMetadataModule;
import org.apache.atlas.query.QueryParams;
import org.apache.atlas.typesystem.ITypedReferenceableInstance;
import org.apache.atlas.typesystem.Referenceable;
import org.apache.atlas.typesystem.Struct;
@@ -117,7 +118,7 @@ public class DataSetLineageServiceTest extends BaseRepositoryTest {
@Test(dataProvider = "dslQueriesProvider")
public void testSearchByDSLQueries(String dslQuery) throws Exception {
System.out.println("Executing dslQuery = " + dslQuery);
-String jsonResults = discoveryService.searchByDSL(dslQuery);
String jsonResults = discoveryService.searchByDSL(dslQuery, new QueryParams(100, 0));
assertNotNull(jsonResults);
JSONObject results = new JSONObject(jsonResults);
......
@@ -25,6 +25,7 @@ import org.apache.atlas.RepositoryMetadataModule;
import org.apache.atlas.RequestContext;
import org.apache.atlas.TestUtils;
import org.apache.atlas.discovery.graph.GraphBackedDiscoveryService;
import org.apache.atlas.query.QueryParams;
import org.apache.atlas.repository.Constants;
import org.apache.atlas.repository.MetadataRepository;
import org.apache.atlas.repository.graph.GraphBackedSearchIndexer;
@@ -72,6 +73,7 @@ public class GraphBackedDiscoveryServiceTest extends BaseRepositoryTest {
@Inject
private GraphBackedDiscoveryService discoveryService;
private QueryParams queryParams = new QueryParams(40, 0);
@BeforeClass
public void setUp() throws Exception {
@@ -128,12 +130,16 @@ public class GraphBackedDiscoveryServiceTest extends BaseRepositoryTest {
super.tearDown();
}
private String searchByDSL(String dslQuery) throws Exception {
return discoveryService.searchByDSL(dslQuery, queryParams);
}
@Test
public void testSearchBySystemProperties() throws Exception {
//system property in select
String dslQuery = "from Department select __guid";
-String jsonResults = discoveryService.searchByDSL(dslQuery);
String jsonResults = searchByDSL(dslQuery);
assertNotNull(jsonResults);
JSONObject results = new JSONObject(jsonResults);
@@ -147,7 +153,7 @@ public class GraphBackedDiscoveryServiceTest extends BaseRepositoryTest {
//system property in where clause
String guid = rows.getJSONObject(0).getString("__guid");
dslQuery = "Department where __guid = '" + guid + "' and __state = 'ACTIVE'";
-jsonResults = discoveryService.searchByDSL(dslQuery);
jsonResults = searchByDSL(dslQuery);
assertNotNull(jsonResults);
results = new JSONObject(jsonResults);
@@ -162,7 +168,7 @@ public class GraphBackedDiscoveryServiceTest extends BaseRepositoryTest {
public void testSearchByDSLReturnsEntity() throws Exception {
String dslQuery = "from Department";
-String jsonResults = discoveryService.searchByDSL(dslQuery);
String jsonResults = searchByDSL(dslQuery);
assertNotNull(jsonResults);
JSONObject results = new JSONObject(jsonResults);
@@ -190,8 +196,7 @@ public class GraphBackedDiscoveryServiceTest extends BaseRepositoryTest {
@Test(expectedExceptions = Throwable.class)
public void testSearchByDSLBadQuery() throws Exception {
String dslQuery = "from blah";
-discoveryService.searchByDSL(dslQuery);
searchByDSL(dslQuery);
Assert.fail();
}
@@ -383,6 +388,30 @@ public class GraphBackedDiscoveryServiceTest extends BaseRepositoryTest {
};
}
@DataProvider(name = "dslExplicitLimitQueriesProvider")
private Object[][] createDSLQueriesWithExplicitLimit() {
return new Object[][]{
{"hive_column", 37, 40, 0},//with higher limit all rows returned
{"hive_column limit 10", 10, 50, 0},//lower limit in query
{"hive_column select hive_column.name limit 10", 5, 5, 0},//lower limit in query param
{"hive_column select hive_column.name withPath", 20, 20, 0},//limit only in params
//with offset, only remaining rows returned
{"hive_column select hive_column.name limit 40 withPath", 17, 40, 20},
//with higher offset, no rows returned
{"hive_column select hive_column.name limit 40 withPath", 0, 40, 40},
//offset used from query
{"hive_column select hive_column.name limit 40 offset 10", 27, 40, 0},
//offsets in query and parameter added up
{"hive_column select hive_column.name limit 40 offset 10", 17, 40, 10},
//works with where clause
{"hive_db where name = 'Reporting' limit 10 offset 0", 1, 40, 0},
//works with joins
{"hive_db, hive_table where db.name = 'Reporting' limit 10", 1, 1, 0},
{"hive_column limit 25", 5, 10, 20}, //last page should return records limited by limit in query
{"hive_column limit 25", 0, 10, 30}, //offset > limit returns 0 rows
};
}
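The expected counts above follow from the combination rule in the parser (final limit = min(API limit, max(query limit - API offset, 0)); final offset = API offset + query offset) applied to the 37 hive_column instances: for {"hive_column select hive_column.name limit 40 withPath", 17, 40, 20} the effective window is limit min(40, max(40 - 20, 0)) = 20 at offset 20, leaving 37 - 20 = 17 rows; for {"hive_column limit 25", 0, 10, 30} the effective limit collapses to min(10, max(25 - 30, 0)) = 0, hence no rows.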
@DataProvider(name = "dslLimitQueriesProvider")
private Object[][] createDSLQueriesWithLimit() {
return new Object[][]{
@@ -390,6 +419,7 @@ public class GraphBackedDiscoveryServiceTest extends BaseRepositoryTest {
{"hive_column select hive_column.name limit 10 ", 10},
{"hive_column select hive_column.name withPath", 37},
{"hive_column select hive_column.name limit 10 withPath", 10},
{"from hive_db", 3},
{"from hive_db limit 2", 2},
{"from hive_db limit 2 offset 0", 2},
@@ -543,60 +573,60 @@ public class GraphBackedDiscoveryServiceTest extends BaseRepositoryTest {
// {"hive_db hive_table orderby 'hive_db.owner'", 8, "owner", isAscending},
// {"hive_db hive_table orderby 'hive_db.owner' limit 5", 5, "owner", isAscending},
// {"hive_db hive_table orderby 'hive_db.owner' limit 5 offset 5", 3, "owner", isAscending},
-{"hive_column select hive_column.name orderby 'hive_column.name' limit 10 withPath", 10, "name", isAscending},
{"hive_column select hive_column.name orderby name limit 10 withPath", 10, "name", isAscending},
-{"hive_column select hive_column.name orderby 'hive_column.name' asc limit 10 withPath", 10, "name", isAscending},
{"hive_column select hive_column.name orderby name asc limit 10 withPath", 10, "name", isAscending},
{"hive_column select hive_column.name orderby 'hive_column.name' desc limit 10 withPath", 10, "name", !isAscending},
-{"from hive_db orderby 'hive_db.owner' limit 3", 3, "owner", isAscending},
{"from hive_db orderby owner limit 3", 3, "owner", isAscending},
-{"hive_db where hive_db.name=\"Reporting\" orderby 'owner'", 1, "owner", isAscending},
{"hive_db where hive_db.name=\"Reporting\" orderby owner", 1, "owner", isAscending},
-{"hive_db where hive_db.name=\"Reporting\" orderby 'hive_db.owner' limit 10 ", 1, "owner", isAscending},
{"hive_db where hive_db.name=\"Reporting\" orderby owner limit 10", 1, "owner", isAscending},
-{"hive_db where hive_db.name=\"Reporting\" select name, owner orderby 'hive_db.name' ", 1, "name", isAscending},
{"hive_db where hive_db.name=\"Reporting\" select name, owner orderby name", 1, "name", isAscending},
{"hive_db has name orderby 'hive_db.owner' limit 10 offset 0", 3, "owner", isAscending},
-{"from hive_table orderby 'hive_table.owner'", 10, "owner", isAscending},
{"from hive_table orderby owner", 10, "owner", isAscending},
{"from hive_table orderby 'hive_table.owner' limit 8", 8, "owner", isAscending},
-{"hive_table orderby 'hive_table.owner'", 10, "owner", isAscending},
{"hive_table orderby owner", 10, "owner", isAscending},
-{"hive_table orderby 'hive_table.owner' limit 8", 8, "owner", isAscending},
{"hive_table orderby owner limit 8", 8, "owner", isAscending},
-{"hive_table orderby 'hive_table.owner' limit 8 offset 0", 8, "owner", isAscending},
{"hive_table orderby owner limit 8 offset 0", 8, "owner", isAscending},
{"hive_table orderby 'hive_table.owner' desc limit 8 offset 0", 8, "owner", !isAscending},
-{"hive_table isa Dimension orderby 'hive_table.owner'", 3, "owner", isAscending},//order not working
{"hive_table isa Dimension orderby owner", 3, "owner", isAscending},//order not working
-{"hive_table isa Dimension orderby 'hive_table.owner' limit 3", 3, "owner", isAscending},
{"hive_table isa Dimension orderby owner limit 3", 3, "owner", isAscending},
-{"hive_table isa Dimension orderby 'hive_table.owner' limit 3 offset 0", 3, "owner", isAscending},
{"hive_table isa Dimension orderby owner limit 3 offset 0", 3, "owner", isAscending},
{"hive_table isa Dimension orderby 'hive_table.owner' desc limit 3 offset 0", 3, "owner", !isAscending},
-{"hive_column where hive_column isa PII orderby 'hive_column.name'", 8, "name", isAscending},
{"hive_column where hive_column isa PII orderby name", 8, "name", isAscending},
-{"hive_column where hive_column isa PII orderby 'hive_column.name' limit 5", 5, "name", isAscending},
{"hive_column where hive_column isa PII orderby name limit 5", 5, "name", isAscending},
-{"hive_column where hive_column isa PII orderby 'hive_column.name' limit 5 offset 1", 5, "name", isAscending},
{"hive_column where hive_column isa PII orderby name limit 5 offset 1", 5, "name", isAscending},
{"hive_column where hive_column isa PII orderby 'hive_column.name' desc limit 5 offset 1", 5, "name", !isAscending},
-{"hive_column select hive_column.name orderby 'hive_column.name' ", 37, "hive_column.name", isAscending},
{"hive_column select hive_column.name orderby name", 37, "hive_column.name", isAscending},
-{"hive_column select hive_column.name orderby 'hive_column.name' limit 5", 5, "hive_column.name", isAscending},
{"hive_column select hive_column.name orderby name limit 5", 5, "hive_column.name", isAscending},
-{"hive_column select hive_column.name orderby 'hive_column.name' desc limit 5", 5, "hive_column.name", !isAscending},
{"hive_column select hive_column.name orderby name desc limit 5", 5, "hive_column.name", !isAscending},
{"hive_column select hive_column.name orderby 'hive_column.name' limit 5 offset 36", 1, "hive_column.name", isAscending},
-{"hive_column select name orderby 'hive_column.name'", 37, "name", isAscending},
{"hive_column select name orderby name", 37, "name", isAscending},
-{"hive_column select name orderby 'hive_column.name' limit 5", 5, "name", isAscending},
{"hive_column select name orderby name limit 5", 5, "name", isAscending},
{"hive_column select name orderby 'hive_column.name' desc", 37, "name", !isAscending},
-{"hive_column where hive_column.name=\"customer_id\" orderby 'hive_column.name'", 6, "name", isAscending},
{"hive_column where hive_column.name=\"customer_id\" orderby name", 6, "name", isAscending},
-{"hive_column where hive_column.name=\"customer_id\" orderby 'hive_column.name' limit 2", 2, "name", isAscending},
{"hive_column where hive_column.name=\"customer_id\" orderby name limit 2", 2, "name", isAscending},
{"hive_column where hive_column.name=\"customer_id\" orderby 'hive_column.name' limit 2 offset 1", 2, "name", isAscending},
-{"from hive_table select owner orderby 'hive_table.owner'", 10, "owner", isAscending},
{"from hive_table select owner orderby owner", 10, "owner", isAscending},
-{"from hive_table select owner orderby 'hive_table.owner' limit 5", 5, "owner", isAscending},
{"from hive_table select owner orderby owner limit 5", 5, "owner", isAscending},
-{"from hive_table select owner orderby 'hive_table.owner' desc limit 5", 5, "owner", !isAscending},
{"from hive_table select owner orderby owner desc limit 5", 5, "owner", !isAscending},
{"from hive_table select owner orderby 'hive_table.owner' limit 5 offset 5", 5, "owner", isAscending},
-{"hive_db where (name = \"Reporting\") orderby 'hive_db.name'", 1, "name", isAscending},
{"hive_db where (name = \"Reporting\") orderby name", 1, "name", isAscending},
{"hive_db where (name = \"Reporting\") orderby 'hive_db.name' limit 10", 1, "name", isAscending},
{"hive_db where (name = \"Reporting\") select name as _col_0, owner as _col_1 orderby '_col_1'", 1, "_col_1", isAscending}, //will it work
-{"hive_db where (name = \"Reporting\") select name as _col_0, owner as _col_1 orderby '_col_1' limit 10", 1, "_col_1", isAscending},
{"hive_db where (name = \"Reporting\") select name as _col_0, owner as _col_1 orderby owner limit 10", 1, "_col_1", isAscending},
-{"hive_db where hive_db has name orderby 'hive_db.owner'", 3, "owner", isAscending},
{"hive_db where hive_db has name orderby owner", 3, "owner", isAscending},
-{"hive_db where hive_db has name orderby 'hive_db.owner' limit 5", 3, "owner", isAscending},
{"hive_db where hive_db has name orderby owner limit 5", 3, "owner", isAscending},
-{"hive_db where hive_db has name orderby 'hive_db.owner' limit 2 offset 0", 2, "owner", isAscending},
{"hive_db where hive_db has name orderby owner limit 2 offset 0", 2, "owner", isAscending},
{"hive_db where hive_db has name orderby 'hive_db.owner' limit 2 offset 1", 2, "owner", isAscending},
{"hive_db where (name = \"Reporting\") select name as _col_0, (createTime + 1) as _col_1 orderby '_col_1'", 1, "_col_1", isAscending},
@@ -604,15 +634,15 @@ public class GraphBackedDiscoveryServiceTest extends BaseRepositoryTest {
{"hive_db where (name = \"Reporting\") select name as _col_0, (createTime + 1) as _col_1 orderby '_col_1' limit 10 offset 1", 0, "_col_1", isAscending},
{"hive_db where (name = \"Reporting\") select name as _col_0, (createTime + 1) as _col_1 orderby '_col_1' limit 10 offset 0", 1, "_col_1", isAscending},
-{"hive_table where (name = \"sales_fact\" and createTime > \"2014-01-01\" ) select name as _col_0, createTime as _col_1 orderby '_col_1' ", 1, "_col_1", isAscending},
{"hive_table where (name = \"sales_fact\" and createTime > \"2014-01-01\" ) select name as _col_0, createTime as _col_1 orderby createTime", 1, "_col_1", isAscending},
-{"hive_table where (name = \"sales_fact\" and createTime > \"2014-01-01\" ) select name as _col_0, createTime as _col_1 orderby '_col_1' limit 10 ", 1, "_col_1", isAscending},
{"hive_table where (name = \"sales_fact\" and createTime > \"2014-01-01\" ) select name as _col_0, createTime as _col_1 orderby createTime limit 10 ", 1, "_col_1", isAscending},
-{"hive_table where (name = \"sales_fact\" and createTime > \"2014-01-01\" ) select name as _col_0, createTime as _col_1 orderby '_col_1' limit 10 offset 0", 1, "_col_1", isAscending},
{"hive_table where (name = \"sales_fact\" and createTime > \"2014-01-01\" ) select name as _col_0, createTime as _col_1 orderby createTime limit 10 offset 0", 1, "_col_1", isAscending},
{"hive_table where (name = \"sales_fact\" and createTime > \"2014-01-01\" ) select name as _col_0, createTime as _col_1 orderby '_col_1' limit 10 offset 5", 0, "_col_1", isAscending},
-{"hive_table where (name = \"sales_fact\" and createTime >= \"2014-12-11T02:35:58.440Z\" ) select name as _col_0, createTime as _col_1 orderby '_col_0' ", 1, "_col_0", isAscending},
{"hive_table where (name = \"sales_fact\" and createTime >= \"2014-12-11T02:35:58.440Z\" ) select name as _col_0, createTime as _col_1 orderby name", 1, "_col_0", isAscending},
-{"hive_table where (name = \"sales_fact\" and createTime >= \"2014-12-11T02:35:58.440Z\" ) select name as _col_0, createTime as _col_1 orderby '_col_0' limit 10 offset 0", 1, "_col_0", isAscending},
{"hive_table where (name = \"sales_fact\" and createTime >= \"2014-12-11T02:35:58.440Z\" ) select name as _col_0, createTime as _col_1 orderby name limit 10 offset 0", 1, "_col_0", isAscending},
-{"hive_table where (name = \"sales_fact\" and createTime >= \"2014-12-11T02:35:58.440Z\" ) select name as _col_0, createTime as _col_1 orderby '_col_0' limit 10 offset 1", 0, "_col_0", isAscending},
{"hive_table where (name = \"sales_fact\" and createTime >= \"2014-12-11T02:35:58.440Z\" ) select name as _col_0, createTime as _col_1 orderby name limit 10 offset 1", 0, "_col_0", isAscending},
-{"hive_table where (name = \"sales_fact\" and createTime >= \"2014-12-11T02:35:58.440Z\" ) select name as _col_0, createTime as _col_1 orderby '_col_0' limit 10", 1, "_col_0", isAscending},
{"hive_table where (name = \"sales_fact\" and createTime >= \"2014-12-11T02:35:58.440Z\" ) select name as _col_0, createTime as _col_1 orderby name limit 10", 1, "_col_0", isAscending},
{"hive_table where (name = \"sales_fact\" and createTime >= \"2014-12-11T02:35:58.440Z\" ) select name as _col_0, createTime as _col_1 orderby '_col_0' limit 0 offset 1", 0, "_col_0", isAscending},
@@ -624,7 +654,7 @@ public class GraphBackedDiscoveryServiceTest extends BaseRepositoryTest {
@Test(dataProvider = "dslOrderByQueriesProvider")
public void testSearchByDSLQueriesWithOrderBy(String dslQuery, Integer expectedNumRows, String orderBy, boolean ascending) throws Exception {
System.out.println("Executing dslQuery = " + dslQuery);
-String jsonResults = discoveryService.searchByDSL(dslQuery);
String jsonResults = searchByDSL(dslQuery);
assertNotNull(jsonResults);
JSONObject results = new JSONObject(jsonResults);
@@ -677,17 +707,23 @@ public class GraphBackedDiscoveryServiceTest extends BaseRepositoryTest {
@Test(dataProvider = "dslQueriesProvider")
public void testSearchByDSLQueries(String dslQuery, Integer expectedNumRows) throws Exception {
-runQuery(dslQuery, expectedNumRows);
runQuery(dslQuery, expectedNumRows, 40, 0);
}
@Test(dataProvider = "comparisonQueriesProvider")
public void testDataTypeComparisonQueries(String dslQuery, Integer expectedNumRows) throws Exception {
-runQuery(dslQuery, expectedNumRows);
runQuery(dslQuery, expectedNumRows, 40, 0);
}
@Test(dataProvider = "dslExplicitLimitQueriesProvider")
public void testSearchByDSLQueriesWithExplicitLimit(String dslQuery, Integer expectedNumRows, int limit, int offset)
throws Exception {
runQuery(dslQuery, expectedNumRows, limit, offset);
}
-public void runQuery(String dslQuery, Integer expectedNumRows) throws Exception {
public void runQuery(String dslQuery, Integer expectedNumRows, int limitParam, int offsetParam) throws Exception {
System.out.println("Executing dslQuery = " + dslQuery);
-String jsonResults = discoveryService.searchByDSL(dslQuery);
String jsonResults = discoveryService.searchByDSL(dslQuery, new QueryParams(limitParam, offsetParam));
assertNotNull(jsonResults);
JSONObject results = new JSONObject(jsonResults);
@@ -710,9 +746,9 @@ public class GraphBackedDiscoveryServiceTest extends BaseRepositoryTest {
@Test(dataProvider = "dslLimitQueriesProvider")
public void testSearchByDSLQueriesWithLimit(String dslQuery, Integer expectedNumRows) throws Exception {
-runQuery(dslQuery, expectedNumRows);
runQuery(dslQuery, expectedNumRows, 40, 0);
}
@DataProvider(name = "invalidDslQueriesProvider")
private Object[][] createInvalidDSLQueries() {
return new String[][]{{"from Unknown"}, {"Unknown"}, {"Unknown is Blah"},};
@@ -721,7 +757,7 @@ public class GraphBackedDiscoveryServiceTest extends BaseRepositoryTest {
@Test(dataProvider = "invalidDslQueriesProvider", expectedExceptions = DiscoveryException.class)
public void testSearchByDSLInvalidQueries(String dslQuery) throws Exception {
System.out.println("Executing dslQuery = " + dslQuery);
-discoveryService.searchByDSL(dslQuery);
searchByDSL(dslQuery);
Assert.fail();
}
@@ -731,7 +767,7 @@ public class GraphBackedDiscoveryServiceTest extends BaseRepositoryTest {
createInstances();
String dslQuery = "from D where a = 1";
-String jsonResults = discoveryService.searchByDSL(dslQuery);
String jsonResults = searchByDSL(dslQuery);
assertNotNull(jsonResults);
JSONObject results = new JSONObject(jsonResults);
......
@@ -30,6 +30,7 @@ import org.apache.atlas.RepositoryMetadataModule;
import org.apache.atlas.RequestContext;
import org.apache.atlas.TestUtils;
import org.apache.atlas.discovery.graph.GraphBackedDiscoveryService;
import org.apache.atlas.query.QueryParams;
import org.apache.atlas.repository.Constants;
import org.apache.atlas.repository.RepositoryException;
import org.apache.atlas.typesystem.IStruct;
@@ -91,6 +92,7 @@ public class GraphBackedMetadataRepositoryTest {
private TypeSystem typeSystem;
private String guid;
private QueryParams queryParams = new QueryParams(100, 0);
@BeforeClass
public void setUp() throws Exception {
@@ -424,7 +426,7 @@ public class GraphBackedMetadataRepositoryTest {
public void testSearchByDSLQuery() throws Exception {
String dslQuery = "hive_database as PII";
System.out.println("Executing dslQuery = " + dslQuery);
-String jsonResults = discoveryService.searchByDSL(dslQuery);
String jsonResults = discoveryService.searchByDSL(dslQuery, queryParams);
Assert.assertNotNull(jsonResults);
JSONObject results = new JSONObject(jsonResults);
@@ -457,7 +459,7 @@ public class GraphBackedMetadataRepositoryTest {
public void testSearchByDSLWithInheritance() throws Exception {
String dslQuery = "Person where name = 'Jane'";
System.out.println("Executing dslQuery = " + dslQuery);
-String jsonResults = discoveryService.searchByDSL(dslQuery);
String jsonResults = discoveryService.searchByDSL(dslQuery, queryParams);
Assert.assertNotNull(jsonResults);
JSONObject results = new JSONObject(jsonResults);
@@ -488,7 +490,7 @@ public class GraphBackedMetadataRepositoryTest {
TestUtils.dumpGraph(graphProvider.get());
System.out.println("Executing dslQuery = " + dslQuery);
-String jsonResults = discoveryService.searchByDSL(dslQuery);
String jsonResults = discoveryService.searchByDSL(dslQuery, queryParams);
Assert.assertNotNull(jsonResults);
JSONObject results = new JSONObject(jsonResults);
@@ -522,7 +524,7 @@ public class GraphBackedMetadataRepositoryTest {
//person in hr department whose name is john
Thread.sleep(sleepInterval);
-String response = discoveryService.searchByFullText("john");
String response = discoveryService.searchByFullText("john", queryParams);
Assert.assertNotNull(response);
JSONArray results = new JSONArray(response);
Assert.assertEquals(results.length(), 1);
@@ -530,7 +532,7 @@ public class GraphBackedMetadataRepositoryTest {
Assert.assertEquals(row.get("typeName"), "Person");
//person in hr department who lives in santa clara
-response = discoveryService.searchByFullText("Jane AND santa AND clara");
response = discoveryService.searchByFullText("Jane AND santa AND clara", queryParams);
Assert.assertNotNull(response);
results = new JSONArray(response);
Assert.assertEquals(results.length(), 1);
@@ -538,7 +540,7 @@ public class GraphBackedMetadataRepositoryTest {
Assert.assertEquals(row.get("typeName"), "Manager");
//search for person in hr department whose name starts with john/jahn
-response = discoveryService.searchByFullText("hr AND (john OR jahn)");
response = discoveryService.searchByFullText("hr AND (john OR jahn)", queryParams);
Assert.assertNotNull(response);
results = new JSONArray(response);
Assert.assertEquals(results.length(), 1);
......
@@ -31,6 +31,7 @@ import org.apache.atlas.RequestContext;
import org.apache.atlas.TestUtils;
import org.apache.atlas.discovery.graph.GraphBackedDiscoveryService;
import org.apache.atlas.listener.EntityChangeListener;
import org.apache.atlas.query.QueryParams;
import org.apache.atlas.repository.audit.EntityAuditRepository;
import org.apache.atlas.repository.audit.HBaseBasedAuditRepository;
import org.apache.atlas.repository.audit.HBaseTestUtils;
@@ -230,8 +231,9 @@ public class DefaultMetadataServiceTest {
assertReferenceableEquals(instance, entity);
//Verify that search with reserved characters works - for string attribute
-String responseJson = discoveryService.searchByDSL(
-String.format("`%s` where `%s` = '%s'", typeDefinition.typeName, strAttrName, entity.get(strAttrName)));
String query =
String.format("`%s` where `%s` = '%s'", typeDefinition.typeName, strAttrName, entity.get(strAttrName));
String responseJson = discoveryService.searchByDSL(query, new QueryParams(1, 0));
JSONObject response = new JSONObject(responseJson);
assertEquals(response.getJSONArray("rows").length(), 1);
}
......
@@ -229,22 +229,17 @@ public class LocalAtlasClient extends AtlasClient {
}
@Override
-public JSONArray search(final String searchQuery) throws AtlasServiceException {
public JSONArray search(final String searchQuery, final int limit, final int offset) throws AtlasServiceException {
throw new IllegalStateException("Not supported in LocalAtlasClient");
}
@Override
-public JSONArray searchByDSL(final String query) throws AtlasServiceException {
public JSONArray searchByDSL(final String query, final int limit, final int offset) throws AtlasServiceException {
throw new IllegalStateException("Not supported in LocalAtlasClient");
}
@Override
-public JSONArray searchByGremlin(final String gremlinQuery) throws AtlasServiceException {
-throw new IllegalStateException("Not supported in LocalAtlasClient");
-}
-@Override
-public JSONObject searchByFullText(final String query) throws AtlasServiceException {
public JSONObject searchByFullText(final String query, final int limit, final int offset) throws AtlasServiceException {
throw new IllegalStateException("Not supported in LocalAtlasClient");
}
......
@@ -462,7 +462,7 @@ public class QuickStart {
private void search() throws Exception {
for (String dslQuery : getDSLQueries()) {
-JSONArray results = metadataServiceClient.search(dslQuery);
JSONArray results = metadataServiceClient.search(dslQuery, 10, 0);
if (results != null) {
System.out.println("query [" + dslQuery + "] returned [" + results.length() + "] rows");
} else {
......
@@ -20,10 +20,13 @@ package org.apache.atlas.web.resources;
import com.google.common.base.Preconditions;
import org.apache.atlas.AtlasClient;
-import org.apache.atlas.utils.AtlasPerfTracer;
import org.apache.atlas.AtlasProperties;
-import org.apache.atlas.utils.ParamChecker;
import org.apache.atlas.classification.InterfaceAudience;
import org.apache.atlas.discovery.DiscoveryException;
import org.apache.atlas.discovery.DiscoveryService;
import org.apache.atlas.query.QueryParams;
import org.apache.atlas.utils.AtlasPerfTracer;
import org.apache.atlas.utils.ParamChecker;
import org.apache.atlas.web.util.Servlets;
import org.codehaus.jettison.json.JSONArray;
import org.codehaus.jettison.json.JSONException;
@@ -34,6 +37,7 @@ import org.slf4j.LoggerFactory;
import javax.inject.Inject;
import javax.inject.Singleton;
import javax.ws.rs.Consumes;
import javax.ws.rs.DefaultValue;
import javax.ws.rs.GET;
import javax.ws.rs.Path;
import javax.ws.rs.Produces;
@@ -42,7 +46,6 @@ import javax.ws.rs.WebApplicationException;
import javax.ws.rs.core.Response;
import java.util.List;
import java.util.Map;
-import org.apache.hadoop.classification.InterfaceAudience;
/**
* Jersey Resource for metadata operations.
@@ -56,6 +59,7 @@ public class MetadataDiscoveryResource {
private static final String QUERY_TYPE_DSL = "dsl";
private static final String QUERY_TYPE_GREMLIN = "gremlin";
private static final String QUERY_TYPE_FULLTEXT = "full-text";
private static final String LIMIT_OFFSET_DEFAULT = "-1";
private final DiscoveryService discoveryService;
@@ -73,50 +77,49 @@ public class MetadataDiscoveryResource {
/**
* Search using a given query.
*
-* @param query search query in raw gremlin or DSL format falling back to full text.
* @param query search query in DSL format falling back to full text.
* @param limit number of rows to be returned in the result, used for pagination. maxlimit > limit > 0. -1 maps to atlas.search.defaultlimit property value
* @param offset offset to the results returned, used for pagination. offset >= 0. -1 maps to offset 0
* @return JSON representing the type and results.
*/
@GET
@Path("search")
@Consumes(Servlets.JSON_MEDIA_TYPE)
@Produces(Servlets.JSON_MEDIA_TYPE)
-public Response search(@QueryParam("query") String query) {
-JSONObject response;
public Response search(@QueryParam("query") String query,
                       @DefaultValue(LIMIT_OFFSET_DEFAULT) @QueryParam("limit") int limit,
                       @DefaultValue(LIMIT_OFFSET_DEFAULT) @QueryParam("offset") int offset) {
AtlasPerfTracer perf = null;
-try { // fall back to dsl
if (AtlasPerfTracer.isPerfTraceEnabled(PERF_LOG)) {
perf = AtlasPerfTracer.getPerfTracer(PERF_LOG, "MetadataDiscoveryResource.search(" + query + ")");
}
-ParamChecker.notEmpty(query, "query cannot be null");
-final String jsonResultStr = discoveryService.searchByDSL(query);
-response = new DSLJSONResponseBuilder().results(jsonResultStr).query(query).build();
-return Response.ok(response).build();
-} catch (IllegalArgumentException e) {
-LOG.error("Unable to get entity list for empty query", e);
-throw new WebApplicationException(Servlets.getErrorResponse(e, Response.Status.BAD_REQUEST));
-} catch (Throwable throwable) {
-LOG.error("Unable to get entity list for query {} using dsl", query, throwable);
-return searchUsingFullText(query);
-} finally {
-AtlasPerfTracer.log(perf);
-}
Response response = searchUsingQueryDSL(query, limit, offset);
if (response.getStatus() != Response.Status.OK.getStatusCode()) {
response = searchUsingFullText(query, limit, offset);
}
AtlasPerfTracer.log(perf);
return response;
}
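A hedged client-side sketch of the rewritten endpoint (the api/atlas/discovery base path and the Jersey service/Servlets helpers are the ones the integration tests below use; the exact wiring here is illustrative):

    // A well-formed DSL query is answered by searchUsingQueryDSL; a query that
    // fails DSL handling falls through to searchUsingFullText with the same limit/offset.
    WebResource resource = service.path("api/atlas/discovery/search")
            .queryParam("query", "from hive_db")
            .queryParam("limit", "10")
            .queryParam("offset", "0");
    ClientResponse clientResponse = resource.accept(Servlets.JSON_MEDIA_TYPE)
            .type(Servlets.JSON_MEDIA_TYPE)
            .method(HttpMethod.GET, ClientResponse.class);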
/**
* Search using query DSL format.
*
* @param dslQuery search query in DSL format.
* @param limit number of rows to be returned in the result, used for pagination. maxlimit > limit > 0. -1 maps to atlas.search.defaultlimit property value
* @param offset offset to the results returned, used for pagination. offset >= 0. -1 maps to offset 0
* Limit and offset in API are used in conjunction with limit and offset in DSL query
* Final limit = min(API limit, max(query limit - API offset, 0))
* Final offset = API offset + query offset
*
* @return JSON representing the type and results.
*/
@GET
@Path("search/dsl")
@Consumes(Servlets.JSON_MEDIA_TYPE)
@Produces(Servlets.JSON_MEDIA_TYPE)
-public Response searchUsingQueryDSL(@QueryParam("query") String dslQuery) {
public Response searchUsingQueryDSL(@QueryParam("query") String dslQuery,
                                    @DefaultValue(LIMIT_OFFSET_DEFAULT) @QueryParam("limit") int limit,
                                    @DefaultValue(LIMIT_OFFSET_DEFAULT) @QueryParam("offset") int offset) {
AtlasPerfTracer perf = null;
try {
if (AtlasPerfTracer.isPerfTraceEnabled(PERF_LOG)) {
@@ -124,7 +127,8 @@ public class MetadataDiscoveryResource {
}
ParamChecker.notEmpty(dslQuery, "dslQuery cannot be null");
-final String jsonResultStr = discoveryService.searchByDSL(dslQuery);
QueryParams queryParams = validateQueryParams(limit, offset);
final String jsonResultStr = discoveryService.searchByDSL(dslQuery, queryParams);
JSONObject response = new DSLJSONResponseBuilder().results(jsonResultStr).query(dslQuery).build();
@@ -140,6 +144,28 @@ public class MetadataDiscoveryResource {
}
}
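Reading the Final limit/offset rule from the javadoc above with concrete numbers: an API call with limit=10 and offset=5 against a query ending in limit 8 offset 2 executes with limit min(10, max(8 - 5, 0)) = 3 and offset 5 + 2 = 7.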
private QueryParams validateQueryParams(int limitParam, int offsetParam) {
int maxLimit = AtlasProperties.getProperty(AtlasProperties.AtlasProperty.SEARCH_MAX_LIMIT);
int defaultLimit = AtlasProperties.getProperty(AtlasProperties.AtlasProperty.SEARCH_DEFAULT_LIMIT);
int limit = defaultLimit;
boolean limitSet = (limitParam != Integer.valueOf(LIMIT_OFFSET_DEFAULT));
if (limitSet) {
ParamChecker.lessThan(limitParam, maxLimit, "limit");
ParamChecker.greaterThan(limitParam, 0, "limit");
limit = limitParam;
}
int offset = 0;
boolean offsetSet = (offsetParam != Integer.valueOf(LIMIT_OFFSET_DEFAULT));
if (offsetSet) {
ParamChecker.greaterThan(offsetParam, -1, "offset");
offset = offsetParam;
}
return new QueryParams(limit, offset);
}
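In effect: leaving limit or offset at the -1 sentinel falls back to the configured default limit and offset 0; an explicit limit must satisfy maxlimit > limit > 0 (per the javadoc above, backed by the atlas.search properties) and an explicit offset must be >= 0, with violations rejected by ParamChecker and surfacing as BAD_REQUEST in the integration tests below.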
/**
* Search using raw gremlin query format.
*
@@ -189,13 +215,18 @@ public class MetadataDiscoveryResource {
* Search using full text search.
*
* @param query search query.
* @param limit number of rows to be returned in the result, used for pagination. maxlimit > limit > 0. -1 maps to atlas.search.defaultlimit property value
* @param offset offset to the results returned, used for pagination. offset >= 0. -1 maps to offset 0
* NOTE: Pagination is not implemented currently for full text search, so limit and offset are not used
* @return JSON representing the type and results.
*/
@GET
@Path("search/fulltext")
@Consumes(Servlets.JSON_MEDIA_TYPE)
@Produces(Servlets.JSON_MEDIA_TYPE)
-public Response searchUsingFullText(@QueryParam("query") String query) {
public Response searchUsingFullText(@QueryParam("query") String query,
                                    @DefaultValue(LIMIT_OFFSET_DEFAULT) @QueryParam("limit") int limit,
                                    @DefaultValue(LIMIT_OFFSET_DEFAULT) @QueryParam("offset") int offset) {
AtlasPerfTracer perf = null;
try {
if (AtlasPerfTracer.isPerfTraceEnabled(PERF_LOG)) {
@@ -203,7 +234,8 @@ public class MetadataDiscoveryResource {
}
ParamChecker.notEmpty(query, "query cannot be null or empty");
-final String jsonResultStr = discoveryService.searchByFullText(query);
QueryParams queryParams = validateQueryParams(limit, offset);
final String jsonResultStr = discoveryService.searchByFullText(query, queryParams);
JSONArray rowsJsonArr = new JSONArray(jsonResultStr);
JSONObject response = new FullTextJSonResponseBuilder().results(rowsJsonArr).query(query).build();
......
@@ -72,8 +72,7 @@ public class NotificationHookConsumerIT extends BaseResourceIT {
waitFor(MAX_WAIT_TIME, new Predicate() {
@Override
public boolean evaluate() throws Exception {
-JSONArray results = serviceClient.searchByDSL(String.format("%s where name='%s'", DATABASE_TYPE,
-entity.get("name")));
JSONArray results = searchByDSL(String.format("%s where name='%s'", DATABASE_TYPE, entity.get("name")));
return results.length() == 1;
}
});
@@ -90,8 +89,7 @@ public class NotificationHookConsumerIT extends BaseResourceIT {
waitFor(MAX_WAIT_TIME, new Predicate() {
@Override
public boolean evaluate() throws Exception {
-JSONArray results = serviceClient.searchByDSL(String.format("%s where name='%s'", DATABASE_TYPE,
-entity.get("name")));
JSONArray results = searchByDSL(String.format("%s where name='%s'", DATABASE_TYPE, entity.get("name")));
return results.length() == 1;
}
});
@@ -146,14 +144,13 @@ public class NotificationHookConsumerIT extends BaseResourceIT {
waitFor(MAX_WAIT_TIME, new Predicate() {
@Override
public boolean evaluate() throws Exception {
-JSONArray results = serviceClient.searchByDSL(String.format("%s where name='%s'", DATABASE_TYPE,
-newName));
JSONArray results = searchByDSL(String.format("%s where name='%s'", DATABASE_TYPE, newName));
return results.length() == 1;
}
});
//no entity with the old qualified name
-JSONArray results = serviceClient.searchByDSL(String.format("%s where name='%s'", DATABASE_TYPE, dbName));
JSONArray results = searchByDSL(String.format("%s where name='%s'", DATABASE_TYPE, dbName));
assertEquals(results.length(), 0);
}
@@ -195,8 +192,7 @@ public class NotificationHookConsumerIT extends BaseResourceIT {
waitFor(MAX_WAIT_TIME, new Predicate() {
@Override
public boolean evaluate() throws Exception {
-JSONArray results = serviceClient.searchByDSL(String.format("%s where name='%s'", DATABASE_TYPE,
-dbName));
JSONArray results = searchByDSL(String.format("%s where name='%s'", DATABASE_TYPE, dbName));
return results.length() == 1;
}
});
......
@@ -54,6 +54,7 @@ import org.apache.atlas.utils.ParamChecker;
import org.apache.atlas.web.util.Servlets;
import org.apache.commons.configuration.Configuration;
import org.apache.commons.lang.RandomStringUtils;
import org.codehaus.jettison.json.JSONArray;
import org.codehaus.jettison.json.JSONObject;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
@@ -342,4 +343,8 @@ public abstract class BaseResourceIT {
}
};
}
protected JSONArray searchByDSL(String dslQuery) throws AtlasServiceException {
return serviceClient.searchByDSL(dslQuery, 10, 0);
}
}
@@ -178,8 +178,7 @@ public class EntityJerseyResourceIT extends BaseResourceIT {
}
});
-JSONArray results =
-serviceClient.searchByDSL(String.format("%s where name='%s'", DATABASE_TYPE, dbName));
JSONArray results = searchByDSL(String.format("%s where name='%s'", DATABASE_TYPE, dbName));
assertEquals(results.length(), 1);
//create entity again shouldn't create another instance with same unique attribute value
@@ -197,7 +196,7 @@ public class EntityJerseyResourceIT extends BaseResourceIT {
//expected timeout
}
-results = serviceClient.searchByDSL(String.format("%s where name='%s'", DATABASE_TYPE, dbName));
results = searchByDSL(String.format("%s where name='%s'", DATABASE_TYPE, dbName));
assertEquals(results.length(), 1);
//Test the same across references
@@ -208,7 +207,7 @@ public class EntityJerseyResourceIT extends BaseResourceIT {
table.set(AtlasClient.REFERENCEABLE_ATTRIBUTE_NAME, tableName);
serviceClient.createEntity(table);
-results = serviceClient.searchByDSL(String.format("%s where name='%s'", DATABASE_TYPE, dbName));
results = searchByDSL(String.format("%s where name='%s'", DATABASE_TYPE, dbName));
assertEquals(results.length(), 1);
}
......
@@ -24,6 +24,7 @@ import com.sun.jersey.api.client.ClientResponse;
import com.sun.jersey.api.client.WebResource;
import org.apache.atlas.AtlasClient;
import org.apache.atlas.AtlasServiceException;
import org.apache.atlas.typesystem.Referenceable;
import org.apache.atlas.typesystem.Struct;
import org.apache.atlas.typesystem.TypesDef;
@@ -47,11 +48,16 @@ import javax.ws.rs.core.Response;
import java.util.List;
import static org.testng.Assert.assertEquals;
import static org.testng.Assert.fail;
/**
* Search Integration Tests.
*/
public class MetadataDiscoveryJerseyResourceIT extends BaseResourceIT {
private String tagName;
@BeforeClass
public void setUp() throws Exception {
super.setUp();
@@ -67,7 +73,7 @@ public class MetadataDiscoveryJerseyResourceIT extends BaseResourceIT {
ClientResponse clientResponse = resource.accept(Servlets.JSON_MEDIA_TYPE).type(Servlets.JSON_MEDIA_TYPE)
.method(HttpMethod.GET, ClientResponse.class);
-Assert.assertEquals(clientResponse.getStatus(), Response.Status.OK.getStatusCode());
assertEquals(clientResponse.getStatus(), Response.Status.OK.getStatusCode());
String responseAsString = clientResponse.getEntity(String.class);
Assert.assertNotNull(responseAsString);
@@ -75,16 +81,71 @@ public class MetadataDiscoveryJerseyResourceIT extends BaseResourceIT {
JSONObject response = new JSONObject(responseAsString);
Assert.assertNotNull(response.get(AtlasClient.REQUEST_ID));
-Assert.assertEquals(response.getString("query"), dslQuery);
assertEquals(response.getString("query"), dslQuery);
-Assert.assertEquals(response.getString("queryType"), "dsl");
assertEquals(response.getString("queryType"), "dsl");
JSONArray results = response.getJSONArray(AtlasClient.RESULTS);
Assert.assertNotNull(results);
-Assert.assertEquals(results.length(), 1);
assertEquals(results.length(), 1);
int numRows = response.getInt(AtlasClient.COUNT);
-Assert.assertEquals(numRows, 1);
assertEquals(numRows, 1);
}
+
+    @Test
+    public void testSearchDSLLimits() throws Exception {
+        //create a second dsl_test_type instance so there are two results to page over
+        Referenceable entity = new Referenceable("dsl_test_type");
+        entity.set("name", randomString());
+        entity.set("description", randomString());
+        createInstance(entity);
+
+        //a search without the new limit and offset parameters should still work
+        String dslQuery = "from dsl_test_type";
+        WebResource resource = service.path("api/atlas/discovery/search/dsl").queryParam("query", dslQuery);
+        ClientResponse clientResponse = resource.accept(Servlets.JSON_MEDIA_TYPE).type(Servlets.JSON_MEDIA_TYPE)
+                .method(HttpMethod.GET, ClientResponse.class);
+        assertEquals(clientResponse.getStatus(), Response.Status.OK.getStatusCode());
+
+        //limit higher than the result count - all results returned
+        JSONArray results = serviceClient.searchByDSL(dslQuery, 10, 0);
+        assertEquals(results.length(), 2);
+
+        //limit and offset of -1 fall back to the defaults - all results returned
+        results = serviceClient.searchByDSL(dslQuery, -1, -1);
+        assertEquals(results.length(), 2);
+
+        //the limit parameter passed is used
+        results = serviceClient.searchByDSL(dslQuery, 1, 0);
+        assertEquals(results.length(), 1);
+
+        //the offset parameter passed is used
+        results = serviceClient.searchByDSL(dslQuery, 10, 1);
+        assertEquals(results.length(), 1);
+
+        //limit must be > 0
+        try {
+            serviceClient.searchByDSL(dslQuery, 0, 10);
+            fail("Expected BAD_REQUEST");
+        } catch (AtlasServiceException e) {
+            assertEquals(e.getStatus(), ClientResponse.Status.BAD_REQUEST, "Got " + e.getStatus());
+        }
+
+        //limit must not exceed the configured max limit
+        try {
+            serviceClient.searchByDSL(dslQuery, Integer.MAX_VALUE, 10);
+            fail("Expected BAD_REQUEST");
+        } catch (AtlasServiceException e) {
+            assertEquals(e.getStatus(), ClientResponse.Status.BAD_REQUEST, "Got " + e.getStatus());
+        }
+
+        //offset must be >= 0
+        try {
+            serviceClient.searchByDSL(dslQuery, 10, -2);
+            fail("Expected BAD_REQUEST");
+        } catch (AtlasServiceException e) {
+            assertEquals(e.getStatus(), ClientResponse.Status.BAD_REQUEST, "Got " + e.getStatus());
+        }
     }
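Taken together, the assertions above fix the contract of the new parameters: limit must be greater than zero and no larger than the configured maximum, offset must be non-negative, and -1 for either falls back to the server default. A caller that wants to walk a large result set can combine them into a paging loop; a sketch under those assumptions, reusing the dsl_test_type query from this test:

    // Illustration only: fetch results one page at a time. A page shorter than
    // pageSize signals that the last page has been reached.
    int pageSize = 10;   // must be > 0 and <= the configured max limit
    int offset = 0;      // must be >= 0
    JSONArray page;
    do {
        page = serviceClient.searchByDSL("from dsl_test_type", pageSize, offset);
        offset += page.length();          // advance past the rows just fetched
    } while (page.length() == pageSize);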
     @Test
@@ -94,7 +155,7 @@ public class MetadataDiscoveryJerseyResourceIT extends BaseResourceIT {
         ClientResponse clientResponse = resource.accept(Servlets.JSON_MEDIA_TYPE).type(Servlets.JSON_MEDIA_TYPE)
                 .method(HttpMethod.GET, ClientResponse.class);
-        Assert.assertEquals(clientResponse.getStatus(), Response.Status.BAD_REQUEST.getStatusCode());
+        assertEquals(clientResponse.getStatus(), Response.Status.BAD_REQUEST.getStatusCode());
     }
     @Test
@@ -104,7 +165,7 @@ public class MetadataDiscoveryJerseyResourceIT extends BaseResourceIT {
         ClientResponse clientResponse = resource.accept(Servlets.JSON_MEDIA_TYPE).type(Servlets.JSON_MEDIA_TYPE)
                 .method(HttpMethod.GET, ClientResponse.class);
-        Assert.assertEquals(clientResponse.getStatus(), Response.Status.OK.getStatusCode());
+        assertEquals(clientResponse.getStatus(), Response.Status.OK.getStatusCode());

         String responseAsString = clientResponse.getEntity(String.class);
         Assert.assertNotNull(responseAsString);
@@ -112,8 +173,8 @@ public class MetadataDiscoveryJerseyResourceIT extends BaseResourceIT {
         JSONObject response = new JSONObject(responseAsString);
         Assert.assertNotNull(response.get(AtlasClient.REQUEST_ID));

-        Assert.assertEquals(response.getString("query"), query);
-        Assert.assertEquals(response.getString("queryType"), "gremlin");
+        assertEquals(response.getString("query"), query);
+        assertEquals(response.getString("queryType"), "gremlin");
     }
     @Test
@@ -123,7 +184,7 @@ public class MetadataDiscoveryJerseyResourceIT extends BaseResourceIT {
         ClientResponse clientResponse = resource.accept(Servlets.JSON_MEDIA_TYPE).type(Servlets.JSON_MEDIA_TYPE)
                 .method(HttpMethod.GET, ClientResponse.class);
-        Assert.assertEquals(clientResponse.getStatus(), Response.Status.OK.getStatusCode());
+        assertEquals(clientResponse.getStatus(), Response.Status.OK.getStatusCode());

         String responseAsString = clientResponse.getEntity(String.class);
         Assert.assertNotNull(responseAsString);
@@ -131,29 +192,28 @@ public class MetadataDiscoveryJerseyResourceIT extends BaseResourceIT {
         JSONObject response = new JSONObject(responseAsString);
         Assert.assertNotNull(response.get(AtlasClient.REQUEST_ID));

-        Assert.assertEquals(response.getString("query"), query);
-        Assert.assertEquals(response.getString("queryType"), "dsl");
+        assertEquals(response.getString("query"), query);
+        assertEquals(response.getString("queryType"), "dsl");
     }
-    @Test(enabled = false)
+    @Test
     public void testSearchUsingFullText() throws Exception {
-        String query = "foundation_etl";
-        JSONObject response = serviceClient.searchByFullText(query);
+        JSONObject response = serviceClient.searchByFullText(tagName, 10, 0);
         Assert.assertNotNull(response.get(AtlasClient.REQUEST_ID));

-        Assert.assertEquals(response.getString("query"), query);
-        Assert.assertEquals(response.getString("queryType"), "full-text");
+        assertEquals(response.getString("query"), tagName);
+        assertEquals(response.getString("queryType"), "full-text");

         JSONArray results = response.getJSONArray(AtlasClient.RESULTS);
-        Assert.assertEquals(results.length(), 1);
+        assertEquals(results.length(), 1, "Results: " + results);

         JSONObject row = results.getJSONObject(0);
         Assert.assertNotNull(row.get("guid"));
-        Assert.assertEquals(row.getString("typeName"), "dsl_test_type");
+        assertEquals(row.getString("typeName"), "dsl_test_type");
         Assert.assertNotNull(row.get("score"));

         int numRows = response.getInt(AtlasClient.COUNT);
-        Assert.assertEquals(numRows, 1);
+        assertEquals(numRows, 1);
     }
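The re-enabled test above exercises pagination through the Java client; the raw HTTP form of the same call would presumably pass the values as query parameters, as testSearchDSLLimits does for the DSL endpoint. A sketch using the Jersey plumbing already in this class, with the parameter names limit and offset assumed rather than confirmed by this diff:

    // Assumed HTTP equivalent of serviceClient.searchByFullText(tagName, 10, 0).
    WebResource searchResource = service.path("api/atlas/discovery/search/fulltext")
            .queryParam("query", tagName)
            .queryParam("limit", "10")     // assumed parameter name
            .queryParam("offset", "0");    // assumed parameter name
    ClientResponse searchResponse = searchResource.accept(Servlets.JSON_MEDIA_TYPE)
            .type(Servlets.JSON_MEDIA_TYPE).method(HttpMethod.GET, ClientResponse.class);
    assertEquals(searchResponse.getStatus(), Response.Status.OK.getStatusCode());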
     private void createTypes() throws Exception {
@@ -165,37 +225,22 @@ public class MetadataDiscoveryJerseyResourceIT extends BaseResourceIT {
         HierarchicalTypeDefinition<TraitType> classificationTraitDefinition = TypesUtil
                 .createTraitTypeDef("Classification", ImmutableSet.<String>of(),
                         TypesUtil.createRequiredAttrDef("tag", DataTypes.STRING_TYPE));
-        HierarchicalTypeDefinition<TraitType> piiTrait =
-                TypesUtil.createTraitTypeDef("PII_TYPE", ImmutableSet.<String>of());
-        HierarchicalTypeDefinition<TraitType> phiTrait =
-                TypesUtil.createTraitTypeDef("PHI", ImmutableSet.<String>of());
-        HierarchicalTypeDefinition<TraitType> pciTrait =
-                TypesUtil.createTraitTypeDef("PCI", ImmutableSet.<String>of());
-        HierarchicalTypeDefinition<TraitType> soxTrait =
-                TypesUtil.createTraitTypeDef("SOX", ImmutableSet.<String>of());
-        HierarchicalTypeDefinition<TraitType> secTrait =
-                TypesUtil.createTraitTypeDef("SEC", ImmutableSet.<String>of());
-        HierarchicalTypeDefinition<TraitType> financeTrait =
-                TypesUtil.createTraitTypeDef("Finance", ImmutableSet.<String>of());
         TypesDef typesDef = TypesUtil.getTypesDef(ImmutableList.<EnumTypeDefinition>of(), ImmutableList.<StructTypeDefinition>of(),
-                ImmutableList
-                        .of(classificationTraitDefinition, piiTrait, phiTrait, pciTrait, soxTrait, secTrait,
-                                financeTrait), ImmutableList.of(dslTestTypeDefinition));
+                ImmutableList.of(classificationTraitDefinition), ImmutableList.of(dslTestTypeDefinition));
         createType(typesDef);
     }
     private Id createInstance() throws Exception {
-        Referenceable entityInstance =
-                new Referenceable("dsl_test_type", "Classification", "PII_TYPE", "PHI", "PCI", "SOX", "SEC", "Finance");
+        Referenceable entityInstance = new Referenceable("dsl_test_type", "Classification");
         entityInstance.set("name", "foo name");
         entityInstance.set("description", "bar description");

         Struct traitInstance = (Struct) entityInstance.getTrait("Classification");
-        traitInstance.set("tag", "foundation_etl");
+        tagName = randomString();
+        traitInstance.set("tag", tagName);

         List<String> traits = entityInstance.getTraits();
-        Assert.assertEquals(traits.size(), 7);
+        assertEquals(traits.size(), 1);

         return createInstance(entityInstance);
     }
...