Commit 2bd124a0 by Ballistar13

Moved AEnitityBean to metadata-common

Fixed LineageBean & LineageBridge.
parent 555960a5
......@@ -32,7 +32,6 @@
<dependency>
<groupId>org.apache.hadoop.metadata</groupId>
<artifactId>metadata-bridge-hive</artifactId>
<version>0.1-incubating-SNAPSHOT</version>
</dependency>
<dependency>
<groupId>com.sun.jersey</groupId>
......
......@@ -3,7 +3,7 @@ package org.apache.hadoop.metadata.bridge.hivelineage;
import javax.inject.Inject;
import org.apache.hadoop.metadata.bridge.ABridge;
import org.apache.hadoop.metadata.bridge.hivelineage.hook.HiveLineageBean;
import org.apache.hadoop.metadata.bridge.hivelineage.hook.HiveLineage;
import org.apache.hadoop.metadata.repository.MetadataRepository;
public class HiveLineageBridge extends ABridge {
......@@ -11,7 +11,7 @@ public class HiveLineageBridge extends ABridge {
@Inject
public HiveLineageBridge(MetadataRepository mr) {
super(mr);
this.typeBeanClasses.add(HiveLineageBean.class);
this.typeBeanClasses.add(HiveLineage.class);
}
}
package org.apache.hadoop.metadata.bridge.hivelineage.hook;
import java.io.Serializable;
import java.util.List;
import java.util.ArrayList;
import org.apache.hadoop.metadata.bridge.AEnitityBean;
public class HiveLineageBean extends AEnitityBean implements Serializable {
public class HiveLineage extends AEnitityBean implements Serializable {
/**
*
......
......@@ -20,7 +20,7 @@ import javax.ws.rs.core.Context;
import javax.ws.rs.core.MediaType;
import org.apache.hadoop.metadata.bridge.hivelineage.HiveLineageBridge;
import org.apache.hadoop.metadata.bridge.hivelineage.hook.HiveLineageBean;
import org.apache.hadoop.metadata.bridge.hivelineage.hook.HiveLineage;
import org.apache.hadoop.metadata.storage.RepositoryException;
import com.google.gson.Gson;
......@@ -52,7 +52,7 @@ public class HiveLineageResource {
@Produces(MediaType.APPLICATION_JSON)
public JsonElement getById(@PathParam("id") String id) throws RepositoryException {
// get the lineage bean
HiveLineageBean hlb = bridge.get(id);
HiveLineage hlb = bridge.get(id);
// turn it into a JsonTree & return
return new Gson().toJsonTree(hlb);
}
......@@ -78,7 +78,7 @@ public class HiveLineageResource {
Reader reader = new InputStreamReader(request.getInputStream());
try {
// deserialize
HiveLineageBean bean = new Gson().fromJson(reader, HiveLineageBean.class);
HiveLineage bean = new Gson().fromJson(reader, HiveLineage.class);
String id = bridge.create(bean);
JsonObject jo = new JsonObject();
......
......@@ -11,7 +11,7 @@ import org.apache.commons.collections.IteratorUtils;
import org.apache.commons.io.IOUtils;
import org.apache.commons.lang.StringUtils;
import org.apache.hadoop.metadata.RepositoryMetadataModule;
import org.apache.hadoop.metadata.bridge.hivelineage.hook.HiveLineageBean;
import org.apache.hadoop.metadata.bridge.hivelineage.hook.HiveLineage;
import org.apache.hadoop.metadata.repository.MetadataRepository;
import org.apache.hadoop.metadata.storage.RepositoryException;
import org.testng.Assert;
......@@ -29,7 +29,7 @@ public class TestHiveLineageBridge {
MetadataRepository repo;
HiveLineageBridge bridge;
HiveLineageBean hlb;
HiveLineage hlb;
// the id of one.json in the repo (test #1)
String oneId;
......@@ -43,7 +43,7 @@ public class TestHiveLineageBridge {
FileInputStream fis = new FileInputStream("one.json");
List<String> lines = IOUtils.readLines(fis);
String json = StringUtils.join(lines, "");
hlb = new Gson().fromJson(json, HiveLineageBean.class);
hlb = new Gson().fromJson(json, HiveLineage.class);
}
@Test(priority = 1, enabled = false)
......@@ -57,7 +57,7 @@ public class TestHiveLineageBridge {
@Test(priority = 2, enabled = false)
public void testGet() throws RepositoryException, IOException {
HiveLineageBean bean = bridge.get(oneId);
HiveLineage bean = bridge.get(oneId);
Assert.assertEquals(hlb, bean);
}
......
......@@ -30,5 +30,9 @@
<version>0.13.1</version>
<scope>provided</scope>
</dependency>
<dependency>
<groupId>org.apache.hadoop.metadata</groupId>
<artifactId>metadata-common</artifactId>
</dependency>
</dependencies>
</project>
\ No newline at end of file
package org.apache.hadoop.metadata.bridge.hivelineage.hook;
import java.io.Serializable;
import java.util.List;
import java.util.ArrayList;
import org.apache.hadoop.metadata.bridge.AEnitityBean;
/**
 * Entity bean describing the lineage of a single Hive query: the query text,
 * timing/identity metadata, and the parsed structure (source tables, selected
 * columns, WHERE/GROUP BY/ORDER BY clauses, created columns).
 *
 * <p>Instances are populated by the Hive post-execution hook and are
 * (de)serialized with Gson, which binds directly to the public fields.
 *
 * <p>NOTE(review): the nested helper classes are non-static inner classes;
 * Gson cannot deserialize a non-static inner class by itself (it needs the
 * enclosing instance). They are left non-static here because making them
 * static would break any caller constructing them via
 * {@code outer.new SourceTables()} — confirm against the hook/parser code
 * before changing.
 */
public class HiveLineage extends AEnitityBean implements Serializable {

    private static final long serialVersionUID = 1L;

    // Query identity and execution metadata, populated by the Hive hook.
    public String queryId;
    public String hiveId;
    public String user;
    public String queryStartTime;
    public String queryEndTime;
    public String query;
    public String tableName;
    public String tableLocation;
    public boolean success;
    public boolean failed;
    public String executionEngine;

    // Parsed query structure. Package-private (as in the original) and
    // exposed through the accessors below; Gson still binds to the fields.
    ArrayList<SourceTables> sourceTables;
    ArrayList<QueryColumns> queryColumns;
    ArrayList<WhereClause> whereClause;
    ArrayList<CreateColumns> createColumns;
    ArrayList<GroupBy> groupBy;
    // ORDER BY entries reuse the GroupBy shape (table alias/name + column).
    ArrayList<GroupBy> orderBy;

    public String getQueryId() {
        return this.queryId;
    }

    public void setQueryId(String queryId) {
        this.queryId = queryId;
    }

    public String getExecutionEngine() {
        return this.executionEngine;
    }

    public void setExecutionEngine(String executionEngine) {
        this.executionEngine = executionEngine;
    }

    public String getHiveId() {
        return this.hiveId;
    }

    public void setHiveId(String hiveId) {
        this.hiveId = hiveId;
    }

    // Name kept as getSuccess (not isSuccess) for caller compatibility.
    public boolean getSuccess() {
        return this.success;
    }

    public void setSuccess(boolean success) {
        this.success = success;
    }

    // Name kept as getFailed (not isFailed) for caller compatibility.
    public boolean getFailed() {
        return this.failed;
    }

    public void setFailed(boolean failed) {
        this.failed = failed;
    }

    public String getTableName() {
        return this.tableName;
    }

    public void setTableName(String tableName) {
        this.tableName = tableName;
    }

    public String getTableLocation() {
        return this.tableLocation;
    }

    public void setTableLocation(String tableLocation) {
        this.tableLocation = tableLocation;
    }

    public String getUser() {
        return this.user;
    }

    public void setUser(String user) {
        this.user = user;
    }

    public String getQueryStartTime() {
        return this.queryStartTime;
    }

    public void setQueryStartTime(String queryStartTime) {
        this.queryStartTime = queryStartTime;
    }

    public String getQueryEndTime() {
        return this.queryEndTime;
    }

    public void setQueryEndTime(String queryEndTime) {
        this.queryEndTime = queryEndTime;
    }

    public String getQuery() {
        return this.query;
    }

    public void setQuery(String query) {
        this.query = query;
    }

    public ArrayList<SourceTables> getSourceTables() {
        return this.sourceTables;
    }

    public void setSourceTables(ArrayList<SourceTables> sourceTables) {
        this.sourceTables = sourceTables;
    }

    public ArrayList<QueryColumns> getQueryColumns() {
        return this.queryColumns;
    }

    public void setQueryColumns(ArrayList<QueryColumns> queryColumns) {
        this.queryColumns = queryColumns;
    }

    public ArrayList<WhereClause> getWhereClause() {
        return this.whereClause;
    }

    public void setWhereClause(ArrayList<WhereClause> whereClause) {
        this.whereClause = whereClause;
    }

    public ArrayList<GroupBy> getGroupBy() {
        return this.groupBy;
    }

    public void setGroupBy(ArrayList<GroupBy> groupBy) {
        this.groupBy = groupBy;
    }

    // FIX: the orderBy field previously had no accessors at all; added here
    // as a backward-compatible completion of the bean surface.
    public ArrayList<GroupBy> getOrderBy() {
        return this.orderBy;
    }

    public void setOrderBy(ArrayList<GroupBy> orderBy) {
        this.orderBy = orderBy;
    }

    public ArrayList<CreateColumns> getCreateColumns() {
        return this.createColumns;
    }

    public void setCreateColumns(ArrayList<CreateColumns> createColumns) {
        this.createColumns = createColumns;
    }

    /**
     * A table read by the query: its name, the alias used in the query text,
     * and the database it lives in.
     * FIX: now Serializable — the enclosing bean is Serializable and holds
     * lists of these, so serialization previously failed with
     * NotSerializableException.
     */
    public class SourceTables implements Serializable {
        private static final long serialVersionUID = 1L;

        public String tableName;
        public String tableAlias;
        public String databaseName;

        public String getTableName() {
            return this.tableName;
        }

        public void setTableName(String tableName) {
            this.tableName = tableName;
        }

        public String getTableAlias() {
            return this.tableAlias;
        }

        public void setTableAlias(String tableAlias) {
            this.tableAlias = tableAlias;
        }

        public String getDatabaseName() {
            return this.databaseName;
        }

        public void setDatabaseName(String databaseName) {
            this.databaseName = databaseName;
        }
    }

    /**
     * A column referenced in the SELECT list: owning table alias/name, column
     * name, its alias, and any function applied to it.
     * FIX: now Serializable (see SourceTables).
     */
    public class QueryColumns implements Serializable {
        private static final long serialVersionUID = 1L;

        public String tbAliasOrName;
        public String columnName;
        public String columnAlias;
        public String columnFunction;

        public String getTbAliasOrName() {
            return this.tbAliasOrName;
        }

        public void setTbAliasOrName(String tbAliasOrName) {
            this.tbAliasOrName = tbAliasOrName;
        }

        public String getColumnName() {
            return this.columnName;
        }

        public void setColumnName(String columnName) {
            this.columnName = columnName;
        }

        public String getColumnAlias() {
            return this.columnAlias;
        }

        public void setColumnAlias(String columnAlias) {
            this.columnAlias = columnAlias;
        }

        public String getColumnFunction() {
            return this.columnFunction;
        }

        public void setColumnFunction(String columnFunction) {
            this.columnFunction = columnFunction;
        }
    }

    /**
     * A GROUP BY (or ORDER BY — see the orderBy field) column reference.
     * FIX: now Serializable (see SourceTables).
     */
    public class GroupBy implements Serializable {
        private static final long serialVersionUID = 1L;

        public String tbAliasOrName;
        public String columnName;

        public String getTbAliasOrName() {
            return this.tbAliasOrName;
        }

        public void setTbAliasOrName(String tbAliasOrName) {
            this.tbAliasOrName = tbAliasOrName;
        }

        public String getColumnName() {
            return this.columnName;
        }

        public void setColumnName(String columnName) {
            this.columnName = columnName;
        }
    }

    /**
     * One predicate from the WHERE clause: table alias/name, column, operator,
     * comparison value, and the joining condition (AND/OR — presumably; TODO
     * confirm against the parser).
     * FIX: now Serializable (see SourceTables).
     */
    public class WhereClause implements Serializable {
        private static final long serialVersionUID = 1L;

        public String tbAliasOrName;
        public String columnCondition;
        public String columnName;
        public String columnOperator;
        public String columnValue;

        public String getColumnCondition() {
            return this.columnCondition;
        }

        public void setColumnCondition(String columnCondition) {
            this.columnCondition = columnCondition;
        }

        public String getTbAliasOrName() {
            return this.tbAliasOrName;
        }

        public void setTbAliasOrName(String tbAliasOrName) {
            this.tbAliasOrName = tbAliasOrName;
        }

        public String getColumnName() {
            return this.columnName;
        }

        public void setColumnName(String columnName) {
            this.columnName = columnName;
        }

        public String getColumnOperator() {
            return this.columnOperator;
        }

        public void setColumnOperator(String columnOperator) {
            this.columnOperator = columnOperator;
        }

        public String getColumnValue() {
            return this.columnValue;
        }

        public void setColumnValue(String columnValue) {
            this.columnValue = columnValue;
        }
    }

    /**
     * A column produced by a CREATE TABLE ... AS / CREATE statement: name and
     * declared type.
     * FIX: now Serializable (see SourceTables).
     */
    public class CreateColumns implements Serializable {
        private static final long serialVersionUID = 1L;

        public String columnName;
        public String columnType;

        public String getColumnName() {
            return this.columnName;
        }

        public void setColumnName(String columnName) {
            this.columnName = columnName;
        }

        public String getColumnType() {
            return this.columnType;
        }

        public void setColumnType(String columnType) {
            this.columnType = columnType;
        }
    }
}
......@@ -40,11 +40,11 @@ import org.apache.hadoop.hive.ql.parse.HiveParser;
import org.apache.hadoop.hive.ql.parse.ParseDriver;
import org.apache.hadoop.hive.ql.parse.ParseException;
import org.apache.hadoop.hive.ql.parse.SemanticException;
import org.apache.hadoop.metadata.bridge.hivelineage.hook.HiveLineageBean.CreateColumns;
import org.apache.hadoop.metadata.bridge.hivelineage.hook.HiveLineageBean.GroupBy;
import org.apache.hadoop.metadata.bridge.hivelineage.hook.HiveLineageBean.QueryColumns;
import org.apache.hadoop.metadata.bridge.hivelineage.hook.HiveLineageBean.SourceTables;
import org.apache.hadoop.metadata.bridge.hivelineage.hook.HiveLineageBean.WhereClause;
import org.apache.hadoop.metadata.bridge.hivelineage.hook.HiveLineage.CreateColumns;
import org.apache.hadoop.metadata.bridge.hivelineage.hook.HiveLineage.GroupBy;
import org.apache.hadoop.metadata.bridge.hivelineage.hook.HiveLineage.QueryColumns;
import org.apache.hadoop.metadata.bridge.hivelineage.hook.HiveLineage.SourceTables;
import org.apache.hadoop.metadata.bridge.hivelineage.hook.HiveLineage.WhereClause;
import org.apache.log4j.ConsoleAppender;
import org.apache.log4j.Level;
import org.apache.log4j.LogManager;
......@@ -65,7 +65,7 @@ public class HiveLineageInfo implements NodeProcessor {
private final Log LOG = LogFactory.getLog(HiveLineageInfo.class.getName());
public Map<Integer, String> queryMap;
public Integer counter = 0;
public HiveLineageBean hlb = new HiveLineageBean();;
public HiveLineage hlb = new HiveLineage();;
public ArrayList<SourceTables> sourceTables;
public ArrayList<QueryColumns> queryColumns;
public ArrayList<GroupBy> groupBy;
......@@ -78,7 +78,7 @@ public class HiveLineageInfo implements NodeProcessor {
*/
public HiveLineageBean getHLBean() {
public HiveLineage getHLBean() {
return hlb;
}
......
......@@ -36,7 +36,7 @@ import com.google.gson.Gson;
public class Hook implements ExecuteWithHookContext {
private static final Log LOG = LogFactory.getLog(Hook.class.getName());
private HiveLineageBean hlb;
private HiveLineage hlb;
private static final String METADATA_HOST = "localhost";
private static final int METADATA_PORT = 20810;
......@@ -157,7 +157,7 @@ public class Hook implements ExecuteWithHookContext {
}
}
public void fireAndForget(Configuration conf, HiveLineageBean hookData, String queryId) throws Exception {
public void fireAndForget(Configuration conf, HiveLineage hookData, String queryId) throws Exception {
String postUri = String.format("http://%s:%i%s%s", METADATA_HOST, METADATA_PORT, METADATA_PATH);
if (conf.getTrimmed("hadoop.metadata.hive.hook.uri") != null) {
postUri = conf.getTrimmed("hadoop.metadata.hive.hook.uri");
......
Markdown is supported
0% or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment.