Commit b65dd91c by Shwetha GS

ATLAS-713 Entity lineage based on entity id (shwethags)

parent 857561a3
......@@ -90,7 +90,8 @@ public class AtlasClient {
public static final String URI_ENTITY = "entities";
public static final String URI_ENTITY_AUDIT = "audit";
public static final String URI_SEARCH = "discovery/search";
public static final String URI_LINEAGE = "lineage/hive/table";
public static final String URI_NAME_LINEAGE = "lineage/hive/table";
public static final String URI_LINEAGE = "lineage/";
public static final String URI_TRAITS = "traits";
public static final String QUERY = "query";
......@@ -416,7 +417,12 @@ public class AtlasClient {
SEARCH_GREMLIN(BASE_URI + URI_SEARCH + "/gremlin", HttpMethod.GET, Response.Status.OK),
SEARCH_FULL_TEXT(BASE_URI + URI_SEARCH + "/fulltext", HttpMethod.GET, Response.Status.OK),
//Lineage operations
//Lineage operations based on dataset name
NAME_LINEAGE_INPUTS_GRAPH(BASE_URI + URI_NAME_LINEAGE, HttpMethod.GET, Response.Status.OK),
NAME_LINEAGE_OUTPUTS_GRAPH(BASE_URI + URI_NAME_LINEAGE, HttpMethod.GET, Response.Status.OK),
NAME_LINEAGE_SCHEMA(BASE_URI + URI_NAME_LINEAGE, HttpMethod.GET, Response.Status.OK),
//Lineage operations based on entity id of the dataset
LINEAGE_INPUTS_GRAPH(BASE_URI + URI_LINEAGE, HttpMethod.GET, Response.Status.OK),
LINEAGE_OUTPUTS_GRAPH(BASE_URI + URI_LINEAGE, HttpMethod.GET, Response.Status.OK),
LINEAGE_SCHEMA(BASE_URI + URI_LINEAGE, HttpMethod.GET, Response.Status.OK);
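
For orientation, a hypothetical sketch (not part of this commit) of how the two URI families resolve after this change; the table name and guid are made-up values, and BASE_URI is assumed to be "api/atlas/":

// Name-based lookup keeps the old dataset-name path:
String byName = "api/atlas/" + "lineage/hive/table" + "/sales_fact" + "/inputs/graph";
// -> api/atlas/lineage/hive/table/sales_fact/inputs/graph

// Id-based lookup uses the new URI_LINEAGE prefix plus the entity guid:
String byId = "api/atlas/" + "lineage/" + "9c9e9cb8-ffaf-4d13-8bb9-0a6c4d59f2a3" + "/inputs/graph";
// -> api/atlas/lineage/9c9e9cb8-ffaf-4d13-8bb9-0a6c4d59f2a3/inputs/graph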
......@@ -988,7 +994,7 @@ public class AtlasClient {
}
public JSONObject getInputGraph(String datasetName) throws AtlasServiceException {
JSONObject response = callAPI(API.LINEAGE_INPUTS_GRAPH, null, datasetName, "/inputs/graph");
JSONObject response = callAPI(API.NAME_LINEAGE_INPUTS_GRAPH, null, datasetName, "/inputs/graph");
try {
return response.getJSONObject(AtlasClient.RESULTS);
} catch (JSONException e) {
......@@ -997,7 +1003,34 @@ public class AtlasClient {
}
public JSONObject getOutputGraph(String datasetName) throws AtlasServiceException {
JSONObject response = callAPI(API.LINEAGE_OUTPUTS_GRAPH, null, datasetName, "/outputs/graph");
JSONObject response = callAPI(API.NAME_LINEAGE_OUTPUTS_GRAPH, null, datasetName, "/outputs/graph");
try {
return response.getJSONObject(AtlasClient.RESULTS);
} catch (JSONException e) {
throw new AtlasServiceException(e);
}
}
public JSONObject getInputGraphForEntity(String entityId) throws AtlasServiceException {
JSONObject response = callAPI(API.LINEAGE_INPUTS_GRAPH, null, entityId, "/inputs/graph");
try {
return response.getJSONObject(AtlasClient.RESULTS);
} catch (JSONException e) {
throw new AtlasServiceException(e);
}
}
public JSONObject getOutputGraphForEntity(String datasetId) throws AtlasServiceException {
JSONObject response = callAPI(API.LINEAGE_OUTPUTS_GRAPH, null, datasetId, "/outputs/graph");
try {
return response.getJSONObject(AtlasClient.RESULTS);
} catch (JSONException e) {
throw new AtlasServiceException(e);
}
}
public JSONObject getSchemaForEntity(String datasetId) throws AtlasServiceException {
JSONObject response = callAPI(API.LINEAGE_SCHEMA, null, datasetId, "/schema");
try {
return response.getJSONObject(AtlasClient.RESULTS);
} catch (JSONException e) {
......
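
A hedged usage sketch of the three new client methods; the server URL is an assumption, and the guid lookup mirrors the integration tests further down:

import org.apache.atlas.AtlasClient;
import org.codehaus.jettison.json.JSONObject;

public class LineageByIdExample {
    public static void main(String[] args) throws Exception {
        AtlasClient client = new AtlasClient("http://localhost:21000");
        // Resolve the entity id (guid) from the unique name attribute first
        String guid = client.getEntity("hive_table", "name", "sales_fact").getId()._getId();
        JSONObject inputs  = client.getInputGraphForEntity(guid);   // lineage/{guid}/inputs/graph
        JSONObject outputs = client.getOutputGraphForEntity(guid);  // lineage/{guid}/outputs/graph
        JSONObject schema  = client.getSchemaForEntity(guid);       // lineage/{guid}/schema
        System.out.println(inputs + "\n" + outputs + "\n" + schema);
    }
}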
......@@ -23,7 +23,7 @@ define(['require',
'use strict';
var VLineage = VBaseModel.extend({
urlRoot: Globals.baseURL + 'api/atlas/lineage/hive/table/assetName/outputs/graph',
urlRoot: Globals.baseURL + 'api/atlas/lineage/assetName/outputs/graph',
defaults: {},
......@@ -36,7 +36,7 @@ define(['require',
this.bindErrorEvents();
},
toString: function() {
return this.get('name');
return this.get('id');
},
}, {});
return VLineage;
......
......@@ -22,7 +22,7 @@ define(['require',
], function(require, Globals, VBaseModel) {
'use strict';
var VSchema = VBaseModel.extend({
urlRoot: Globals.baseURL + '/api/atlas/lineage/hive/table/log_fact_daily_mv/schema',
urlRoot: Globals.baseURL + '/api/atlas/lineage/log_fact_daily_mv/schema',
defaults: {},
......@@ -35,7 +35,7 @@ define(['require',
this.bindErrorEvents();
},
toString: function() {
return this.get('name');
return this.get('id');
},
}, {});
return VSchema;
......
......@@ -92,7 +92,7 @@ define(['require',
this.renderEntityDetailTableLayoutView();
this.renderTagTableLayoutView(tagGuid);
this.renderLineageLayoutView(tagGuid);
this.renderSchemaLayoutView();
this.renderSchemaLayoutView(tagGuid);
}, this);
},
onRender: function() {},
......@@ -120,17 +120,17 @@ define(['require',
require(['views/graph/LineageLayoutView'], function(LineageLayoutView) {
that.RLineageLayoutView.show(new LineageLayoutView({
globalVent: that.globalVent,
assetName: that.name,
assetName: tagGuid,
guid: tagGuid
}));
});
},
renderSchemaLayoutView: function() {
renderSchemaLayoutView: function(tagGuid) {
var that = this;
require(['views/schema/SchemaLayoutView'], function(SchemaLayoutView) {
that.RSchemaTableLayoutView.show(new SchemaLayoutView({
globalVent: that.globalVent,
name: that.name,
name: tagGuid,
vent: that.vent
}));
});
......
......@@ -56,8 +56,8 @@ define(['require',
this.inputCollection = new VLineageList();
this.outputCollection = new VLineageList();
this.entityModel = new VEntity();
this.inputCollection.url = "/api/atlas/lineage/hive/table/" + this.assetName + "/inputs/graph";
this.outputCollection.url = "/api/atlas/lineage/hive/table/" + this.assetName + "/outputs/graph";
this.inputCollection.url = "/api/atlas/lineage/" + this.assetName + "/inputs/graph";
this.outputCollection.url = "/api/atlas/lineage/" + this.assetName + "/outputs/graph";
this.bindEvents();
this.fetchGraphData();
this.data = {};
......
......@@ -73,7 +73,7 @@ define(['require',
initialize: function(options) {
_.extend(this, _.pick(options, 'globalVent', 'name', 'vent'));
this.schemaCollection = new VSchemaList([], {});
this.schemaCollection.url = "/api/atlas/lineage/hive/table/" + this.name + "/schema";
this.schemaCollection.url = "/api/atlas/lineage/" + this.name + "/schema";
this.commonTableOptions = {
collection: this.schemaCollection,
includeFilter: false,
......
......@@ -63,15 +63,9 @@ atlas.kafka.auto.commit.enable=false
######### Hive Lineage Configs #########
# This models reflects the base super types for Data and Process
#atlas.lineage.hive.table.type.name=DataSet
#atlas.lineage.hive.process.type.name=Process
#atlas.lineage.hive.process.inputs.name=inputs
#atlas.lineage.hive.process.outputs.name=outputs
## Schema
atlas.lineage.hive.table.schema.query.hive_table=hive_table where name='%s'\, columns
atlas.lineage.hive.table.schema.query.Table=Table where name='%s'\, columns
atlas.lineage.schema.query.hive_table=hive_table where __guid='%s'\, columns
atlas.lineage.schema.query.Table=Table where __guid='%s'\, columns
## Server port configuration
#atlas.server.http.port=21000
......
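
The schema query templates are now keyed by __guid instead of name. A sketch of how such a template presumably expands at runtime (the guid is illustrative; the comma is backslash-escaped only in the properties file):

String template = "hive_table where __guid='%s', columns";
String query = String.format(template, "9c9e9cb8-ffaf-4d13-8bb9-0a6c4d59f2a3");
// -> hive_table where __guid='9c9e9cb8-ffaf-4d13-8bb9-0a6c4d59f2a3', columns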
......@@ -21,6 +21,7 @@ ATLAS-409 Atlas will not import avro tables with schema read from a file (dosset
ATLAS-379 Create sqoop and falcon metadata addons (venkatnrangan,bvellanki,sowmyaramesh via shwethags)
ALL CHANGES:
ATLAS-713 Entity lineage based on entity id (shwethags)
ATLAS-736 UI - BUG :: displaying timestamp values for hive_db description (kevalbhatt18 via yhemanth)
ATLAS-784 Configure config.store.uri for Falcon hook IT (yhemanth)
ATLAS-645 FieldMapping.output() results in stack overflow when instances reference each other (dkantor via shwethags)
......
......@@ -26,7 +26,7 @@ import com.google.inject.throwingproviders.ThrowingProviderBinder;
import com.thinkaurelius.titan.core.TitanGraph;
import org.aopalliance.intercept.MethodInterceptor;
import org.apache.atlas.discovery.DiscoveryService;
import org.apache.atlas.discovery.HiveLineageService;
import org.apache.atlas.discovery.DataSetLineageService;
import org.apache.atlas.discovery.LineageService;
import org.apache.atlas.discovery.graph.GraphBackedDiscoveryService;
import org.apache.atlas.listener.EntityChangeListener;
......@@ -83,7 +83,7 @@ public class RepositoryMetadataModule extends com.google.inject.AbstractModule {
// bind the DiscoveryService interface to an implementation
bind(DiscoveryService.class).to(GraphBackedDiscoveryService.class).asEagerSingleton();
bind(LineageService.class).to(HiveLineageService.class).asEagerSingleton();
bind(LineageService.class).to(DataSetLineageService.class).asEagerSingleton();
bindAuditRepository(binder());
......
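
Because LineageService is bound to an implementation behind the interface, consumers need no changes; a minimal sketch (class name is hypothetical) of an injection site that now transparently receives DataSetLineageService:

import javax.inject.Inject;
import org.apache.atlas.discovery.LineageService;

public class LineageConsumer {
    private final LineageService lineageService;

    @Inject
    public LineageConsumer(LineageService lineageService) {
        this.lineageService = lineageService; // now a DataSetLineageService instance
    }
}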
......@@ -256,7 +256,8 @@ trait SingleInstanceClosureQuery[T] extends ClosureQuery {
* @param persistenceStrategy as needed to evaluate the Closure Query.
* @param g as needed to evaluate the Closure Query.
*/
case class HiveLineageQuery(tableTypeName : String,
case class InputLineageClosureQuery(tableTypeName : String,
attributeToSelectInstance : String,
tableName : String,
ctasTypeName : String,
ctasInputTableAttribute : String,
......@@ -270,7 +271,6 @@ case class HiveLineageQuery(tableTypeName : String,
val closureType : String = tableTypeName
val attributeToSelectInstance = "name"
val attributeTyp = DataTypes.STRING_TYPE
val instanceValue = tableName
......@@ -296,7 +296,8 @@ case class HiveLineageQuery(tableTypeName : String,
* @param persistenceStrategy as needed to evaluate the Closure Query.
* @param g as needed to evaluate the Closure Query.
*/
case class HiveWhereUsedQuery(tableTypeName : String,
case class OutputLineageClosureQuery(tableTypeName : String,
attributeToSelectInstance : String,
tableName : String,
ctasTypeName : String,
ctasInputTableAttribute : String,
......@@ -310,7 +311,6 @@ case class HiveWhereUsedQuery(tableTypeName : String,
val closureType : String = tableTypeName
val attributeToSelectInstance = "name"
val attributeTyp = DataTypes.STRING_TYPE
val instanceValue = tableName
......
......@@ -55,7 +55,7 @@ import java.util.List;
* Base Class to set up hive types and instances for tests
*/
@Guice(modules = RepositoryMetadataModule.class)
public class BaseHiveRepositoryTest {
public class BaseRepositoryTest {
@Inject
protected MetadataService metadataService;
......@@ -94,8 +94,8 @@ public class BaseHiveRepositoryTest {
metadataService.createType(typesAsJSON);
}
private static final String DATABASE_TYPE = "hive_db";
private static final String HIVE_TABLE_TYPE = "hive_table";
protected static final String DATABASE_TYPE = "hive_db";
protected static final String HIVE_TABLE_TYPE = "hive_table";
private static final String COLUMN_TYPE = "hive_column";
private static final String HIVE_PROCESS_TYPE = "hive_process";
private static final String STORAGE_DESC_TYPE = "StorageDesc";
......@@ -104,7 +104,8 @@ public class BaseHiveRepositoryTest {
TypesDef createTypeDefinitions() {
HierarchicalTypeDefinition<ClassType> dbClsDef = TypesUtil
.createClassTypeDef(DATABASE_TYPE, null, attrDef("name", DataTypes.STRING_TYPE),
.createClassTypeDef(DATABASE_TYPE, null,
TypesUtil.createUniqueRequiredAttrDef("name", DataTypes.STRING_TYPE),
attrDef("description", DataTypes.STRING_TYPE), attrDef("locationUri", DataTypes.STRING_TYPE),
attrDef("owner", DataTypes.STRING_TYPE), attrDef("createTime", DataTypes.LONG_TYPE));
......@@ -127,8 +128,7 @@ public class BaseHiveRepositoryTest {
attrDef("temporary", DataTypes.BOOLEAN_TYPE),
new AttributeDefinition("db", DATABASE_TYPE, Multiplicity.REQUIRED, false, null),
// todo - uncomment this, something is broken
new AttributeDefinition("sd", STORAGE_DESC_TYPE,
Multiplicity.REQUIRED, true, null),
new AttributeDefinition("sd", STORAGE_DESC_TYPE, Multiplicity.REQUIRED, true, null),
new AttributeDefinition("columns", DataTypes.arrayTypeName(COLUMN_TYPE),
Multiplicity.COLLECTION, true, null));
......@@ -285,7 +285,7 @@ public class BaseHiveRepositoryTest {
return createInstance(referenceable, clsType);
}
Referenceable storageDescriptor(String location, String inputFormat, String outputFormat, boolean compressed, List<Referenceable> columns)
protected Referenceable storageDescriptor(String location, String inputFormat, String outputFormat, boolean compressed, List<Referenceable> columns)
throws Exception {
Referenceable referenceable = new Referenceable(STORAGE_DESC_TYPE);
referenceable.set("location", location);
......@@ -297,7 +297,7 @@ public class BaseHiveRepositoryTest {
return referenceable;
}
Referenceable column(String name, String dataType, String comment, String... traitNames) throws Exception {
protected Referenceable column(String name, String dataType, String comment, String... traitNames) throws Exception {
Referenceable referenceable = new Referenceable(COLUMN_TYPE, traitNames);
referenceable.set("name", name);
referenceable.set("dataType", dataType);
......@@ -306,7 +306,7 @@ public class BaseHiveRepositoryTest {
return referenceable;
}
Id table(String name, String description, Id dbId, Referenceable sd, String owner, String tableType,
protected Id table(String name, String description, Id dbId, Referenceable sd, String owner, String tableType,
List<Referenceable> columns, String... traitNames) throws Exception {
Referenceable referenceable = new Referenceable(HIVE_TABLE_TYPE, traitNames);
referenceable.set("name", name);
......@@ -327,12 +327,12 @@ public class BaseHiveRepositoryTest {
return createInstance(referenceable, clsType);
}
Id loadProcess(String name, String description, String user, List<Id> inputTables, List<Id> outputTables,
protected Id loadProcess(String name, String description, String user, List<Id> inputTables, List<Id> outputTables,
String queryText, String queryPlan, String queryId, String queryGraph, String... traitNames)
throws Exception {
Referenceable referenceable = new Referenceable(HIVE_PROCESS_TYPE, traitNames);
referenceable.set(AtlasClient.NAME, name);
referenceable.set(AtlasClient.REFERENCEABLE_ATTRIBUTE_NAME, name);
referenceable.set("name", name);
referenceable.set("qualifiedName", name);
referenceable.set("description", description);
referenceable.set("user", user);
referenceable.set("startTime", System.currentTimeMillis());
......
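
With the fixture helpers widened from package-private to protected, test classes outside the original package can build instances directly. A hedged sketch using only the helpers shown above (class name and values are illustrative):

import com.google.common.collect.ImmutableList;
import org.apache.atlas.typesystem.Referenceable;

public class SalesFixtureTest extends BaseRepositoryTest {
    void createFixture() throws Exception {
        Referenceable col = column("customer_id", "int", "customer id");
        Referenceable sd = storageDescriptor("hdfs://host:8000/apps/warehouse/sales",
                "TextInputFormat", "TextOutputFormat", true, ImmutableList.of(col));
    }
}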
......@@ -20,7 +20,7 @@ package org.apache.atlas.discovery;
import com.google.common.collect.ImmutableSet;
import org.apache.atlas.BaseHiveRepositoryTest;
import org.apache.atlas.BaseRepositoryTest;
import org.apache.atlas.RepositoryMetadataModule;
import org.apache.atlas.RequestContext;
import org.apache.atlas.TestUtils;
......@@ -60,7 +60,7 @@ import static org.testng.Assert.assertEquals;
import static org.testng.Assert.assertNotNull;
@Guice(modules = RepositoryMetadataModule.class)
public class GraphBackedDiscoveryServiceTest extends BaseHiveRepositoryTest {
public class GraphBackedDiscoveryServiceTest extends BaseRepositoryTest {
@Inject
private MetadataRepository repositoryService;
......
......@@ -107,7 +107,7 @@ class GremlinTest2 extends BaseGremlinTest {
}
@Test def testHighLevelLineage {
val r = HiveLineageQuery("Table", "sales_fact_monthly_mv",
val r = InputLineageClosureQuery("Table", "name", "sales_fact_monthly_mv",
"LoadProcess",
"inputTables",
"outputTable",
......@@ -116,7 +116,7 @@ class GremlinTest2 extends BaseGremlinTest {
}
@Test def testHighLevelLineageReturnGraph {
val r = HiveLineageQuery("Table", "sales_fact_monthly_mv",
val r = InputLineageClosureQuery("Table", "name", "sales_fact_monthly_mv",
"LoadProcess",
"inputTables",
"outputTable",
......@@ -127,7 +127,7 @@ class GremlinTest2 extends BaseGremlinTest {
}
@Test def testHighLevelWhereUsed {
val r = HiveWhereUsedQuery("Table", "sales_fact",
val r = OutputLineageClosureQuery("Table", "name", "sales_fact",
"LoadProcess",
"inputTables",
"outputTable",
......@@ -136,7 +136,7 @@ class GremlinTest2 extends BaseGremlinTest {
}
@Test def testHighLevelWhereUsedReturnGraph {
val r = HiveWhereUsedQuery("Table", "sales_fact",
val r = OutputLineageClosureQuery("Table", "name", "sales_fact",
"LoadProcess",
"inputTables",
"outputTable",
......
......@@ -26,42 +26,50 @@ import org.apache.atlas.AtlasException;
public interface LineageService {
/**
* Return the lineage outputs for the given tableName.
* Return the lineage outputs graph for the given datasetName.
*
* @param tableName tableName
* @return Outputs as JSON
* @param datasetName datasetName
* @return Outputs Graph as JSON
*/
String getOutputs(String tableName) throws AtlasException;
String getOutputsGraph(String datasetName) throws AtlasException;
/**
* Return the lineage outputs graph for the given tableName.
* Return the lineage inputs graph for the given datasetName.
*
* @param tableName tableName
* @return Outputs Graph as JSON
* @param datasetName datasetName
* @return Inputs Graph as JSON
*/
String getOutputsGraph(String tableName) throws AtlasException;
String getInputsGraph(String datasetName) throws AtlasException;
/**
* Return the lineage inputs for the given tableName.
* Return the lineage inputs graph for the given entity id.
*
* @param tableName tableName
* @return Inputs as JSON
* @param guid entity id
* @return Inputs Graph as JSON
*/
String getInputs(String tableName) throws AtlasException;
String getInputsGraphForEntity(String guid) throws AtlasException;
/**
* Return the lineage inputs graph for the given tableName.
* Return the lineage inputs graph for the given entity id.
*
* @param tableName tableName
* @param guid entity id
* @return Inputs Graph as JSON
*/
String getInputsGraph(String tableName) throws AtlasException;
String getOutputsGraphForEntity(String guid) throws AtlasException;
/**
* Return the schema for the given datasetName.
*
* @param datasetName datasetName
* @return Schema as JSON
*/
String getSchema(String datasetName) throws AtlasException;
/**
* Return the schema for the given tableName.
* Return the schema for the given entity id.
*
* @param tableName tableName
* @param guid entity id
* @return Schema as JSON
*/
String getSchema(String tableName) throws AtlasException;
String getSchemaForEntity(String guid) throws AtlasException;
}
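
The interface now carries parallel name-based and guid-based method families; a small hedged sketch of the contrast (lineageService obtained via injection, as in the module binding above):

String byName = lineageService.getInputsGraph("sales_fact");       // lookup by dataset name
String byGuid = lineageService.getInputsGraphForEntity(guid);      // lookup by entity id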
......@@ -49,14 +49,8 @@ atlas.graph.index.search.solr.zookeeper-url=${solr.zk.address}
######### Hive Lineage Configs #########
# This models reflects the base super types for Data and Process
#atlas.lineage.hive.table.type.name=DataSet
#atlas.lineage.hive.process.type.name=Process
#atlas.lineage.hive.process.inputs.name=inputs
#atlas.lineage.hive.process.outputs.name=outputs
## Schema
atlas.lineage.hive.table.schema.query.hive_table=hive_table where name='%s'\, columns
atlas.lineage.schema.query.hive_table=hive_table where __guid='%s'\, columns
######### Notification Configs #########
atlas.notification.embedded=true
......
......@@ -19,10 +19,9 @@
package org.apache.atlas.web.resources;
import org.apache.atlas.AtlasClient;
import org.apache.atlas.typesystem.exception.EntityNotFoundException;
import org.apache.atlas.utils.ParamChecker;
import org.apache.atlas.discovery.DiscoveryException;
import org.apache.atlas.discovery.LineageService;
import org.apache.atlas.typesystem.exception.EntityNotFoundException;
import org.apache.atlas.web.util.Servlets;
import org.codehaus.jettison.json.JSONObject;
import org.slf4j.Logger;
......@@ -45,9 +44,9 @@ import javax.ws.rs.core.Response;
*/
@Path("lineage/hive")
@Singleton
public class HiveLineageResource {
public class DataSetLineageResource {
private static final Logger LOG = LoggerFactory.getLogger(HiveLineageResource.class);
private static final Logger LOG = LoggerFactory.getLogger(DataSetLineageResource.class);
private final LineageService lineageService;
......@@ -58,7 +57,7 @@ public class HiveLineageResource {
* @param lineageService lineage service handle
*/
@Inject
public HiveLineageResource(LineageService lineageService) {
public DataSetLineageResource(LineageService lineageService) {
this.lineageService = lineageService;
}
......@@ -75,7 +74,6 @@ public class HiveLineageResource {
LOG.info("Fetching lineage inputs graph for tableName={}", tableName);
try {
ParamChecker.notEmpty(tableName, "table name cannot be null");
final String jsonResult = lineageService.getInputsGraph(tableName);
JSONObject response = new JSONObject();
......@@ -109,7 +107,6 @@ public class HiveLineageResource {
LOG.info("Fetching lineage outputs graph for tableName={}", tableName);
try {
ParamChecker.notEmpty(tableName, "table name cannot be null");
final String jsonResult = lineageService.getOutputsGraph(tableName);
JSONObject response = new JSONObject();
......@@ -143,7 +140,6 @@ public class HiveLineageResource {
LOG.info("Fetching schema for tableName={}", tableName);
try {
ParamChecker.notEmpty(tableName, "table name cannot be null");
final String jsonResult = lineageService.getSchema(tableName);
JSONObject response = new JSONObject();
......
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.atlas.web.resources;
import org.apache.atlas.AtlasClient;
import org.apache.atlas.discovery.DiscoveryException;
import org.apache.atlas.discovery.LineageService;
import org.apache.atlas.typesystem.exception.EntityNotFoundException;
import org.apache.atlas.web.util.Servlets;
import org.codehaus.jettison.json.JSONObject;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import javax.inject.Inject;
import javax.inject.Singleton;
import javax.ws.rs.Consumes;
import javax.ws.rs.GET;
import javax.ws.rs.Path;
import javax.ws.rs.PathParam;
import javax.ws.rs.Produces;
import javax.ws.rs.WebApplicationException;
import javax.ws.rs.core.Response;
@Path("lineage")
@Singleton
public class LineageResource {
private static final Logger LOG = LoggerFactory.getLogger(LineageResource.class);
private final LineageService lineageService;
/**
* Created by the Guice ServletModule and injected with the
* configured LineageService.
*
* @param lineageService lineage service handle
*/
@Inject
public LineageResource(LineageService lineageService) {
this.lineageService = lineageService;
}
/**
* Returns input lineage graph for the given entity id.
* @param guid dataset entity id
* @return inputs lineage graph as JSON
*/
@GET
@Path("{guid}/inputs/graph")
@Consumes(Servlets.JSON_MEDIA_TYPE)
@Produces(Servlets.JSON_MEDIA_TYPE)
public Response inputsGraph(@PathParam("guid") String guid) {
LOG.info("Fetching lineage inputs graph for guid={}", guid);
try {
final String jsonResult = lineageService.getInputsGraphForEntity(guid);
JSONObject response = new JSONObject();
response.put(AtlasClient.REQUEST_ID, Servlets.getRequestId());
response.put(AtlasClient.RESULTS, new JSONObject(jsonResult));
return Response.ok(response).build();
} catch (EntityNotFoundException e) {
LOG.error("entity not found for guid={}", guid, e);
throw new WebApplicationException(Servlets.getErrorResponse(e, Response.Status.NOT_FOUND));
} catch (DiscoveryException | IllegalArgumentException e) {
LOG.error("Unable to get lineage inputs graph for entity guid={}", guid, e);
throw new WebApplicationException(Servlets.getErrorResponse(e, Response.Status.BAD_REQUEST));
} catch (Throwable e) {
LOG.error("Unable to get lineage inputs graph for entity guid={}", guid, e);
throw new WebApplicationException(Servlets.getErrorResponse(e, Response.Status.INTERNAL_SERVER_ERROR));
}
}
/**
* Returns the outputs graph for a given entity id.
*
* @param guid dataset entity id
*/
@GET
@Path("{guid}/outputs/graph")
@Consumes(Servlets.JSON_MEDIA_TYPE)
@Produces(Servlets.JSON_MEDIA_TYPE)
public Response outputsGraph(@PathParam("guid") String guid) {
LOG.info("Fetching lineage outputs graph for entity guid={}", guid);
try {
final String jsonResult = lineageService.getOutputsGraphForEntity(guid);
JSONObject response = new JSONObject();
response.put(AtlasClient.REQUEST_ID, Servlets.getRequestId());
response.put(AtlasClient.RESULTS, new JSONObject(jsonResult));
return Response.ok(response).build();
} catch (EntityNotFoundException e) {
LOG.error("table entity not found for {}", guid, e);
throw new WebApplicationException(Servlets.getErrorResponse(e, Response.Status.NOT_FOUND));
} catch (DiscoveryException | IllegalArgumentException e) {
LOG.error("Unable to get lineage outputs graph for entity guid={}", guid, e);
throw new WebApplicationException(Servlets.getErrorResponse(e, Response.Status.BAD_REQUEST));
} catch (Throwable e) {
LOG.error("Unable to get lineage outputs graph for entity guid={}", guid, e);
throw new WebApplicationException(Servlets.getErrorResponse(e, Response.Status.INTERNAL_SERVER_ERROR));
}
}
/**
* Returns the schema for the given dataset id.
*
* @param guid dataset entity id
*/
@GET
@Path("{guid}/schema")
@Consumes(Servlets.JSON_MEDIA_TYPE)
@Produces(Servlets.JSON_MEDIA_TYPE)
public Response schema(@PathParam("guid") String guid) {
LOG.info("Fetching schema for entity guid={}", guid);
try {
final String jsonResult = lineageService.getSchemaForEntity(guid);
JSONObject response = new JSONObject();
response.put(AtlasClient.REQUEST_ID, Servlets.getRequestId());
response.put(AtlasClient.RESULTS, new JSONObject(jsonResult));
return Response.ok(response).build();
} catch (EntityNotFoundException e) {
LOG.error("table entity not found for {}", guid, e);
throw new WebApplicationException(Servlets.getErrorResponse(e, Response.Status.NOT_FOUND));
} catch (DiscoveryException | IllegalArgumentException e) {
LOG.error("Unable to get schema for entity guid={}", guid, e);
throw new WebApplicationException(Servlets.getErrorResponse(e, Response.Status.BAD_REQUEST));
} catch (Throwable e) {
LOG.error("Unable to get schema for entity={}", guid, e);
throw new WebApplicationException(Servlets.getErrorResponse(e, Response.Status.INTERNAL_SERVER_ERROR));
}
}
}
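
A hedged sketch of exercising the new endpoint directly with the Jersey 1.x client (the same client style the integration tests below use); the base URL and guid variable are assumptions:

import com.sun.jersey.api.client.Client;
import com.sun.jersey.api.client.WebResource;

Client jersey = Client.create();
WebResource service = jersey.resource("http://localhost:21000/");
String json = service.path("api/atlas/lineage").path(guid).path("inputs/graph")
        .accept("application/json")
        .get(String.class);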
......@@ -38,7 +38,7 @@ import java.util.List;
/**
* Hive Lineage Integration Tests.
*/
public class HiveLineageJerseyResourceIT extends BaseResourceIT {
public class DataSetLineageJerseyResourceIT extends BaseResourceIT {
private static final String BASE_URI = "api/atlas/lineage/hive/table/";
private String salesFactTable;
......@@ -81,6 +81,22 @@ public class HiveLineageJerseyResourceIT extends BaseResourceIT {
}
@Test
public void testInputsGraphForEntity() throws Exception {
String tableId = serviceClient.getEntity(HIVE_TABLE_TYPE, "name", salesMonthlyTable).getId()._getId();
JSONObject results = serviceClient.getInputGraphForEntity(tableId);
Assert.assertNotNull(results);
JSONObject values = results.getJSONObject("values");
Assert.assertNotNull(values);
final JSONObject vertices = values.getJSONObject("vertices");
Assert.assertEquals(vertices.length(), 4);
final JSONObject edges = values.getJSONObject("edges");
Assert.assertEquals(edges.length(), 4);
}
@Test
public void testOutputsGraph() throws Exception {
WebResource resource = service.path(BASE_URI).path(salesFactTable).path("outputs").path("graph");
......@@ -109,6 +125,22 @@ public class HiveLineageJerseyResourceIT extends BaseResourceIT {
}
@Test
public void testOutputsGraphForEntity() throws Exception {
String tableId = serviceClient.getEntity(HIVE_TABLE_TYPE, "name", salesFactTable).getId()._getId();
JSONObject results = serviceClient.getOutputGraphForEntity(tableId);
Assert.assertNotNull(results);
JSONObject values = results.getJSONObject("values");
Assert.assertNotNull(values);
final JSONObject vertices = values.getJSONObject("vertices");
Assert.assertEquals(vertices.length(), 3);
final JSONObject edges = values.getJSONObject("edges");
Assert.assertEquals(edges.length(), 4);
}
@Test
public void testSchema() throws Exception {
WebResource resource = service.path(BASE_URI).path(salesFactTable).path("schema");
......@@ -139,6 +171,24 @@ public class HiveLineageJerseyResourceIT extends BaseResourceIT {
}
@Test
public void testSchemaForEntity() throws Exception {
String tableId = serviceClient.getEntity(HIVE_TABLE_TYPE, "name", salesFactTable).getId()._getId();
JSONObject results = serviceClient.getSchemaForEntity(tableId);
Assert.assertNotNull(results);
JSONArray rows = results.getJSONArray("rows");
Assert.assertEquals(rows.length(), 4);
for (int index = 0; index < rows.length(); index++) {
final JSONObject row = rows.getJSONObject(index);
Assert.assertNotNull(row.getString("name"));
Assert.assertNotNull(row.getString("comment"));
Assert.assertNotNull(row.getString("dataType"));
Assert.assertEquals(row.getString("$typeName$"), "hive_column");
}
}
@Test
public void testSchemaForEmptyTable() throws Exception {
WebResource resource = service.path(BASE_URI).path("").path("schema");
......@@ -184,8 +234,7 @@ public class HiveLineageJerseyResourceIT extends BaseResourceIT {
table("sales_fact_daily_mv" + randomString(), "sales fact daily materialized view", reportingDB,
"Joe BI", "MANAGED", salesFactColumns, "Metric");
String procName = "loadSalesDaily" + randomString();
loadProcess(procName, "John ETL", ImmutableList.of(salesFact, timeDim),
loadProcess("loadSalesDaily" + randomString(), "John ETL", ImmutableList.of(salesFact, timeDim),
ImmutableList.of(salesFactDaily), "create table as select ", "plan", "id", "graph", "ETL");
salesMonthlyTable = "sales_fact_monthly_mv" + randomString();
......@@ -238,8 +287,8 @@ public class HiveLineageJerseyResourceIT extends BaseResourceIT {
Id loadProcess(String name, String user, List<Id> inputTables, List<Id> outputTables, String queryText,
String queryPlan, String queryId, String queryGraph, String... traitNames) throws Exception {
Referenceable referenceable = new Referenceable(HIVE_PROCESS_TYPE, traitNames);
referenceable.set(AtlasClient.NAME, name);
referenceable.set(AtlasClient.REFERENCEABLE_ATTRIBUTE_NAME, name);
referenceable.set("name", name);
referenceable.set("qualifiedName", name);
referenceable.set("user", user);
referenceable.set("startTime", System.currentTimeMillis());
referenceable.set("endTime", System.currentTimeMillis() + 10000);
......