Commit 9e1f3663 by Suma Shivaprasad

ATLAS-619 Canonicalize hive queries (sumasai)

parent b6a0eee7
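At its core, this change adds a Hive AST rewriter that replaces literals in query text with fixed placeholders, so queries differing only in literal values collapse to one canonical form (alongside this, entity names move to the unique qualifiedName attribute across the hooks). For illustration, with made-up table and column names:

select * from sales where dt = '2014-01-01' and amount = 100
    becomes
select * from sales where dt = 'STRING_LITERAL' and amount = NUMBER_LITERAL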
@@ -211,10 +211,10 @@ public class FalconHook extends AtlasHook implements FalconEventPublisher {
if (!inputs.isEmpty() || !outputs.isEmpty()) {
Referenceable processEntity = new Referenceable(FalconDataTypes.FALCON_PROCESS_ENTITY.getName());
processEntity.set(FalconDataModelGenerator.NAME, String.format("%s@%s", process.getName(),
processEntity.set(FalconDataModelGenerator.NAME, String.format("%s", process.getName(),
cluster.getName()));
processEntity.set(FalconDataModelGenerator.PROCESS_NAME, process.getName());
processEntity.set(AtlasClient.REFERENCEABLE_ATTRIBUTE_NAME, String.format("%s@%s", process.getName(),
cluster.getName()));
processEntity.set(FalconDataModelGenerator.TIMESTAMP, timestamp);
if (!inputs.isEmpty()) {
processEntity.set(FalconDataModelGenerator.INPUTS, inputs);
......
@@ -57,7 +57,6 @@ public class FalconDataModelGenerator {
private final Map<String, StructTypeDefinition> structTypeDefinitionMap;
public static final String NAME = "name";
public static final String PROCESS_NAME = "processName";
public static final String TIMESTAMP = "timestamp";
public static final String USER = "owned-by";
public static final String TAGS = "tag-classification";
@@ -107,8 +106,6 @@ public class FalconDataModelGenerator {
private void createProcessEntityClass() throws AtlasException {
AttributeDefinition[] attributeDefinitions = new AttributeDefinition[]{
new AttributeDefinition(PROCESS_NAME, DataTypes.STRING_TYPE.getName(), Multiplicity.REQUIRED, false,
null),
new AttributeDefinition(TIMESTAMP, DataTypes.LONG_TYPE.getName(), Multiplicity.REQUIRED, false,
null),
new AttributeDefinition(USER, DataTypes.STRING_TYPE.getName(), Multiplicity.REQUIRED, false,
......
@@ -150,7 +150,7 @@ public class FalconHookIT {
String pid = assertProcessIsRegistered(cluster.getName(), process.getName());
Referenceable processEntity = atlasClient.getEntity(pid);
assertNotNull(processEntity);
assertEquals(processEntity.get("processName"), process.getName());
assertEquals(processEntity.get(AtlasClient.NAME), process.getName());
Id inId = (Id) ((List)processEntity.get("inputs")).get(0);
Referenceable inEntity = atlasClient.getEntity(inId._getId());
@@ -207,7 +207,7 @@ public class FalconHookIT {
String pid = assertProcessIsRegistered(cluster.getName(), process.getName());
Referenceable processEntity = atlasClient.getEntity(pid);
assertEquals(processEntity.get("processName"), process.getName());
assertEquals(processEntity.get(AtlasClient.NAME), process.getName());
assertNull(processEntity.get("inputs"));
Id outId = (Id) ((List)processEntity.get("outputs")).get(0);
@@ -233,8 +233,8 @@ public class FalconHookIT {
private String assertProcessIsRegistered(String clusterName, String processName) throws Exception {
String name = processName + "@" + clusterName;
LOG.debug("Searching for process {}", name);
String query = String.format("%s as t where name = '%s' select t",
FalconDataTypes.FALCON_PROCESS_ENTITY.getName(), name);
String query = String.format("%s as t where %s = '%s' select t",
FalconDataTypes.FALCON_PROCESS_ENTITY.getName(), AtlasClient.REFERENCEABLE_ATTRIBUTE_NAME, name);
return assertEntityIsRegistered(query);
}
......
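For reference, AtlasClient.REFERENCEABLE_ATTRIBUTE_NAME resolves to "qualifiedName", so the search above now keys on the unique qualified name rather than the plain name. With a hypothetical process "myProcess" on cluster "primary" (and assuming the Falcon process type is named "falcon_process"), the generated DSL would read:

falcon_process as t where qualifiedName = 'myProcess@primary' select t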
@@ -72,6 +72,13 @@
<dependency>
<groupId>org.apache.hive</groupId>
<artifactId>hive-jdbc</artifactId>
<version>${hive.version}</version>
<scope>test</scope>
</dependency>
<dependency>
<groupId>org.apache.hive</groupId>
<artifactId>hive-cli</artifactId>
<version>${hive.version}</version>
<scope>test</scope>
......
@@ -288,6 +288,8 @@ public class HiveDataModelGenerator {
new AttributeDefinition("queryPlan", DataTypes.STRING_TYPE.getName(), Multiplicity.REQUIRED, false,
null),
new AttributeDefinition("queryId", DataTypes.STRING_TYPE.getName(), Multiplicity.REQUIRED, false, null),
new AttributeDefinition("recentQueries", String.format("array<%s>", DataTypes.STRING_TYPE.getName()), Multiplicity.OPTIONAL, false, null),
new AttributeDefinition(AtlasConstants.CLUSTER_NAME_ATTRIBUTE, DataTypes.STRING_TYPE.getName(), Multiplicity.OPTIONAL, false, null),
new AttributeDefinition("queryGraph", DataTypes.STRING_TYPE.getName(), Multiplicity.OPTIONAL, false,
null),};
......
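The new recentQueries attribute is an optional string array on the hive process type. A rough sketch of how a hook might populate it (the attribute name comes from this diff; the type name "hive_process" and the helper itself are assumptions for illustration):

import java.util.Arrays;

import org.apache.atlas.typesystem.Referenceable;

public class RecentQueriesSketch {
    // Hypothetical helper: attach a canonicalized query string to a process entity.
    public static Referenceable withRecentQuery(String normalizedQuery) {
        Referenceable proc = new Referenceable("hive_process"); // assumed type name
        proc.set("recentQueries", Arrays.asList(normalizedQuery));
        return proc;
    }
}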
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.atlas.hive.rewrite;
import org.apache.hadoop.hive.ql.parse.ASTNode;
public interface ASTRewriter {
void rewrite(RewriteContext ctx, ASTNode node) throws RewriteException;
}
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.atlas.hive.rewrite;
import org.apache.hadoop.hive.conf.HiveConf;
import org.apache.hadoop.hive.ql.parse.ASTNode;
import org.apache.hadoop.hive.ql.parse.ParseDriver;
import org.apache.hadoop.hive.ql.Context;
import org.apache.hadoop.hive.ql.parse.ParseException;
import org.apache.hadoop.hive.ql.parse.ParseUtils;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.io.IOException;
import java.util.ArrayList;
import java.util.List;
public class HiveASTRewriter {
private Context queryContext;
private RewriteContext rwCtx;
private List<ASTRewriter> rewriters = new ArrayList<>();
private static final Logger LOG = LoggerFactory.getLogger(HiveASTRewriter.class);
public HiveASTRewriter(HiveConf conf) throws RewriteException {
try {
queryContext = new Context(conf);
setUpRewriters();
} catch (IOException e) {
throw new RewriteException("Exception while rewriting query : " , e);
}
}
private void setUpRewriters() throws RewriteException {
ASTRewriter rewriter = new LiteralRewriter();
rewriters.add(rewriter);
}
public String rewrite(String sourceQry) throws RewriteException {
String result = sourceQry;
ASTNode tree = null;
try {
ParseDriver pd = new ParseDriver();
tree = pd.parse(sourceQry, queryContext, true);
tree = ParseUtils.findRootNonNullToken(tree);
this.rwCtx = new RewriteContext(sourceQry, tree, queryContext.getTokenRewriteStream());
rewrite(tree);
result = toSQL();
} catch (ParseException e) {
LOG.error("Could not parse the query {} ", sourceQry, e);
throw new RewriteException("Could not parse query : " , e);
}
return result;
}
private void rewrite(ASTNode origin) throws RewriteException {
ASTNode node = origin;
if (node != null) {
for(ASTRewriter rewriter : rewriters) {
rewriter.rewrite(rwCtx, node);
}
if (node.getChildren() != null) {
for (int i = 0; i < node.getChildren().size(); i++) {
rewrite((ASTNode) node.getChild(i));
}
}
}
}
public String toSQL() {
return rwCtx.getTokenRewriteStream().toString();
}
public String printAST() {
return rwCtx.getOriginNode().dump();
}
}
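A minimal usage sketch for the rewriter (assuming a HiveConf that can initialize a Hive ql Context, as the test further down does; the query text is illustrative):

package org.apache.atlas.hive.rewrite;

import org.apache.hadoop.hive.conf.HiveConf;

public class RewriteSketch {
    public static void main(String[] args) throws RewriteException {
        HiveConf conf = new HiveConf();
        HiveASTRewriter rewriter = new HiveASTRewriter(conf);
        String normalized = rewriter.rewrite(
                "select * from t1 where intCol = 10 and strCol = 'a'");
        // Expected, given LiteralRewriter's placeholders:
        // select * from t1 where intCol = NUMBER_LITERAL and strCol = 'STRING_LITERAL'
        System.out.println(normalized);
    }
}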
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.atlas.hive.rewrite;
import org.apache.hadoop.hive.ql.parse.ASTNode;
import org.apache.hadoop.hive.ql.parse.HiveParser;
import java.util.HashMap;
import java.util.Map;
public class LiteralRewriter implements ASTRewriter {
public static Map<Integer, String> LITERAL_TOKENS = new HashMap<Integer, String>() {{
put(HiveParser.Number, "NUMBER_LITERAL");
put(HiveParser.Digit, "DIGIT_LITERAL");
put(HiveParser.HexDigit, "HEX_LITERAL");
put(HiveParser.Exponent, "EXPONENT_LITERAL");
put(HiveParser.StringLiteral, "'STRING_LITERAL'");
put(HiveParser.BigintLiteral, "BIGINT_LITERAL");
put(HiveParser.SmallintLiteral, "SMALLINT_LITERAL");
put(HiveParser.TinyintLiteral, "TINYINT_LITERAL");
put(HiveParser.DecimalLiteral, "DECIMAL_LITERAL");
put(HiveParser.ByteLengthLiteral, "BYTE_LENGTH_LITERAL");
put(HiveParser.TOK_STRINGLITERALSEQUENCE, "'STRING_LITERAL_SEQ'");
put(HiveParser.TOK_CHARSETLITERAL, "'CHARSET_LITERAL'");
put(HiveParser.KW_TRUE, "BOOLEAN_LITERAL");
put(HiveParser.KW_FALSE, "BOOLEAN_LITERAL");
}};
@Override
public void rewrite(RewriteContext ctx, final ASTNode node) throws RewriteException {
try {
processLiterals(ctx, node);
} catch(Exception e) {
throw new RewriteException("Could not normalize query", e);
}
}
private void processLiterals(final RewriteContext ctx, final ASTNode node) {
        // Replace any literal token with its canonical placeholder.
if (isLiteral(node)) {
replaceLiteral(ctx, node);
}
}
    private boolean isLiteral(ASTNode node) {
        return LITERAL_TOKENS.containsKey(node.getType());
    }
void replaceLiteral(RewriteContext ctx, ASTNode valueNode) {
        // Replace the literal's token range with its placeholder; the stream applies edits lazily on toString().
String literalVal = LITERAL_TOKENS.get(valueNode.getType());
ctx.getTokenRewriteStream().replace(valueNode.getTokenStartIndex(),
valueNode.getTokenStopIndex(), literalVal);
}
}
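Since HiveASTRewriter applies every registered ASTRewriter at each node of the walk, further normalizations can sit alongside LiteralRewriter. A hypothetical sketch, not part of this commit (it would also need to be added in setUpRewriters):

package org.apache.atlas.hive.rewrite;

import org.apache.hadoop.hive.ql.parse.ASTNode;
import org.apache.hadoop.hive.ql.parse.HiveParser;

public class IdentifierLowerCaser implements ASTRewriter {
    @Override
    public void rewrite(RewriteContext ctx, ASTNode node) throws RewriteException {
        // Lower-case bare identifiers, mirroring replaceLiteral above.
        if (node.getType() == HiveParser.Identifier) {
            ctx.getTokenRewriteStream().replace(node.getTokenStartIndex(),
                    node.getTokenStopIndex(), node.getText().toLowerCase());
        }
    }
}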
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.atlas.hive.rewrite;
import org.antlr.runtime.TokenRewriteStream;
import org.apache.hadoop.hive.ql.parse.ASTNode;
public class RewriteContext {
private String origQuery;
private TokenRewriteStream rewriteStream;
private ASTNode origin;
    RewriteContext(String origQuery, ASTNode origin, TokenRewriteStream rewriteStream) {
        this.origQuery = origQuery;
        this.origin = origin;
        this.rewriteStream = rewriteStream;
    }
public TokenRewriteStream getTokenRewriteStream() {
return rewriteStream;
}
public ASTNode getOriginNode() {
return origin;
}
public String getOriginalQuery() {
return origQuery;
}
}
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.atlas.hive.rewrite;
public class RewriteException extends Exception {
public RewriteException(final String message, final Exception exception) {
super(message, exception);
}
}
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.atlas.hive.bridge;
import org.apache.atlas.hive.hook.HiveHook;
import org.apache.atlas.hive.rewrite.HiveASTRewriter;
import org.apache.atlas.hive.rewrite.RewriteException;
import org.apache.hadoop.hive.conf.HiveConf;
import org.apache.hadoop.hive.ql.session.SessionState;
import org.testng.Assert;
import org.testng.annotations.BeforeClass;
import org.testng.annotations.Test;
public class HiveLiteralRewriterTest {
private HiveConf conf;
@BeforeClass
public void setup() {
conf = new HiveConf();
conf.addResource("/hive-site.xml");
SessionState ss = new SessionState(conf, "testuser");
SessionState.start(ss);
conf.set("hive.lock.manager", "org.apache.hadoop.hive.ql.lockmgr.EmbeddedLockManager");
}
@Test
public void testLiteralRewrite() throws RewriteException {
HiveHook.HiveEventContext ctx = new HiveHook.HiveEventContext();
ctx.setQueryStr("insert into table testTable partition(dt='2014-01-01') select * from test1 where dt = '2014-01-01'" +
" and intColumn = 10" +
" and decimalColumn = 1.10" +
" and charColumn = 'a'" +
" and hexColumn = unhex('\\0xFF')" +
" and expColumn = cast('-1.5e2' as int)" +
" and boolCol = true");
HiveASTRewriter queryRewriter = new HiveASTRewriter(conf);
String result = queryRewriter.rewrite(ctx.getQueryStr());
System.out.println("normlized sql : " + result);
final String normalizedSQL = "insert into table testTable partition(dt='STRING_LITERAL') " +
"select * from test1 where dt = 'STRING_LITERAL' " +
"and intColumn = NUMBER_LITERAL " +
"and decimalColumn = NUMBER_LITERAL and " +
"charColumn = 'STRING_LITERAL' and " +
"hexColumn = unhex('STRING_LITERAL') and " +
"expColumn = cast('STRING_LITERAL' as int) and " +
"boolCol = BOOLEAN_LITERAL";
Assert.assertEquals(result, normalizedSQL);
}
}
@@ -17,6 +17,16 @@
<configuration>
<property>
<name>mapreduce.framework.name</name>
<value>local</value>
</property>
<property>
<name>fs.default.name</name>
<value>file:///</value>
</property>
<property>
<name>hive.exec.post.hooks</name>
<value>org.apache.atlas.hive.hook.HiveHook</value>
</property>
......
@@ -105,7 +105,9 @@ public class SqoopHook extends SqoopJobDataPublisher {
private Referenceable createSqoopProcessInstance(Referenceable dbStoreRef, Referenceable hiveTableRef,
SqoopJobDataPublisher.Data data, String clusterName) {
Referenceable procRef = new Referenceable(SqoopDataTypes.SQOOP_PROCESS.getName());
procRef.set(SqoopDataModelGenerator.NAME, getSqoopProcessName(data, clusterName));
final String sqoopProcessName = getSqoopProcessName(data, clusterName);
procRef.set(SqoopDataModelGenerator.NAME, sqoopProcessName);
procRef.set(AtlasClient.REFERENCEABLE_ATTRIBUTE_NAME, sqoopProcessName);
procRef.set(SqoopDataModelGenerator.OPERATION, data.getOperation());
procRef.set(SqoopDataModelGenerator.INPUTS, dbStoreRef);
procRef.set(SqoopDataModelGenerator.OUTPUTS, hiveTableRef);
......
@@ -107,8 +107,8 @@ public class SqoopHookIT {
private String assertSqoopProcessIsRegistered(String processName) throws Exception {
LOG.debug("Searching for sqoop process {}", processName);
String query = String.format(
"%s as t where name = '%s' select t",
SqoopDataTypes.SQOOP_PROCESS.getName(), processName);
"%s as t where %s = '%s' select t",
SqoopDataTypes.SQOOP_PROCESS.getName(), AtlasClient.REFERENCEABLE_ATTRIBUTE_NAME, processName);
return assertEntityIsRegistered(query);
}
......
@@ -110,7 +110,8 @@ public class StormAtlasHook extends AtlasHook implements ISubmitterHook {
Referenceable topologyReferenceable = new Referenceable(
StormDataTypes.STORM_TOPOLOGY.getName());
topologyReferenceable.set("id", topologyInfo.get_id());
topologyReferenceable.set("name", topologyInfo.get_name());
topologyReferenceable.set(AtlasClient.NAME, topologyInfo.get_name());
topologyReferenceable.set(AtlasClient.REFERENCEABLE_ATTRIBUTE_NAME, topologyInfo.get_name());
String owner = topologyInfo.get_owner();
if (StringUtils.isEmpty(owner)) {
owner = ANONYMOUS_OWNER;
......
@@ -18,6 +18,7 @@
package org.apache.atlas.storm.model
import org.apache.atlas.AtlasClient
import org.apache.atlas.typesystem.TypesDef
import org.apache.atlas.typesystem.builders.TypesBuilder
import org.apache.atlas.typesystem.json.TypesSerialization
@@ -42,7 +43,7 @@ object StormDataModel extends App {
* Also, Topology contains the Graph of Nodes
* Topology => Node(s) -> Spouts/Bolts
*/
_class(StormDataTypes.STORM_TOPOLOGY.getName, List("Process")) {
_class(StormDataTypes.STORM_TOPOLOGY.getName, List(AtlasClient.PROCESS_SUPER_TYPE)) {
"id" ~ (string, required, indexed, unique)
"description" ~ (string, optional, indexed)
"owner" ~ (string, required, indexed)
......
@@ -3,6 +3,7 @@ Apache Atlas Release Notes
--trunk - unreleased
INCOMPATIBLE CHANGES:
ATLAS-619 Canonicalize hive queries (sumasai)
ATLAS-497 Simple Authorization (saqeeb.s via yhemanth)
ATLAS-661 REST API Authentication (nixonrodrigues via yhemanth)
ATLAS-672 UI: Make dashboard v2 the default UI implementation (bergenholtz via yhemanth)
......
@@ -182,20 +182,21 @@ public class DefaultMetadataService implements MetadataService, ActiveStateChang
DESCRIPTION_ATTRIBUTE);
createType(datasetType);
HierarchicalTypeDefinition<ClassType> processType = TypesUtil
.createClassTypeDef(AtlasClient.PROCESS_SUPER_TYPE, ImmutableSet.<String>of(), NAME_ATTRIBUTE,
DESCRIPTION_ATTRIBUTE,
new AttributeDefinition("inputs", DataTypes.arrayTypeName(AtlasClient.DATA_SET_SUPER_TYPE),
Multiplicity.OPTIONAL, false, null),
new AttributeDefinition("outputs", DataTypes.arrayTypeName(AtlasClient.DATA_SET_SUPER_TYPE),
Multiplicity.OPTIONAL, false, null));
createType(processType);
HierarchicalTypeDefinition<ClassType> referenceableType = TypesUtil
.createClassTypeDef(AtlasClient.REFERENCEABLE_SUPER_TYPE, ImmutableSet.<String>of(),
TypesUtil.createUniqueRequiredAttrDef(AtlasClient.REFERENCEABLE_ATTRIBUTE_NAME,
DataTypes.STRING_TYPE));
createType(referenceableType);
HierarchicalTypeDefinition<ClassType> processType = TypesUtil
.createClassTypeDef(AtlasClient.PROCESS_SUPER_TYPE, ImmutableSet.<String>of(AtlasClient.REFERENCEABLE_SUPER_TYPE),
TypesUtil.createRequiredAttrDef(AtlasClient.NAME, DataTypes.STRING_TYPE),
DESCRIPTION_ATTRIBUTE,
new AttributeDefinition("inputs", DataTypes.arrayTypeName(AtlasClient.DATA_SET_SUPER_TYPE),
Multiplicity.OPTIONAL, false, null),
new AttributeDefinition("outputs", DataTypes.arrayTypeName(AtlasClient.DATA_SET_SUPER_TYPE),
Multiplicity.OPTIONAL, false, null));
createType(processType);
}
private void createType(HierarchicalTypeDefinition<ClassType> type) throws AtlasException {
......
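The net effect of the reordering above: Referenceable is created first, and Process now extends it, inheriting the unique qualifiedName attribute alongside its own required name. A minimal sketch of what a process entity must now carry (attribute values are hypothetical):

import org.apache.atlas.AtlasClient;
import org.apache.atlas.typesystem.Referenceable;

public class ProcessEntitySketch {
    public static Referenceable loadProcess() {
        Referenceable proc = new Referenceable(AtlasClient.PROCESS_SUPER_TYPE);
        proc.set(AtlasClient.NAME, "loadSalesDaily");                                 // display name
        proc.set(AtlasClient.REFERENCEABLE_ATTRIBUTE_NAME, "loadSalesDaily@primary"); // unique qualified name
        return proc;
    }
}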
@@ -331,7 +331,8 @@ public class BaseHiveRepositoryTest {
String queryText, String queryPlan, String queryId, String queryGraph, String... traitNames)
throws Exception {
Referenceable referenceable = new Referenceable(HIVE_PROCESS_TYPE, traitNames);
referenceable.set("name", name);
referenceable.set(AtlasClient.NAME, name);
referenceable.set(AtlasClient.REFERENCEABLE_ATTRIBUTE_NAME, name);
referenceable.set("description", description);
referenceable.set("user", user);
referenceable.set("startTime", System.currentTimeMillis());
......
@@ -376,7 +376,8 @@ public class QuickStart {
throws Exception {
Referenceable referenceable = new Referenceable(LOAD_PROCESS_TYPE, traitNames);
// super type attributes
referenceable.set("name", name);
referenceable.set(AtlasClient.NAME, name);
referenceable.set(AtlasClient.REFERENCEABLE_ATTRIBUTE_NAME, name);
referenceable.set("description", description);
referenceable.set(INPUTS_ATTRIBUTE, inputTables);
referenceable.set(OUTPUTS_ATTRIBUTE, outputTables);
......
@@ -18,6 +18,8 @@
package org.apache.atlas.examples;
import org.apache.atlas.Atlas;
import org.apache.atlas.AtlasClient;
import org.apache.atlas.AtlasServiceException;
import org.apache.atlas.typesystem.Referenceable;
import org.apache.atlas.typesystem.persistence.Id;
@@ -94,10 +96,10 @@ public class QuickStartIT extends BaseResourceIT {
@Test
public void testProcessIsAdded() throws AtlasServiceException, JSONException {
Referenceable loadProcess = serviceClient.getEntity(QuickStart.LOAD_PROCESS_TYPE, "name",
Referenceable loadProcess = serviceClient.getEntity(QuickStart.LOAD_PROCESS_TYPE, AtlasClient.REFERENCEABLE_ATTRIBUTE_NAME,
QuickStart.LOAD_SALES_DAILY_PROCESS);
assertEquals(QuickStart.LOAD_SALES_DAILY_PROCESS, loadProcess.get("name"));
assertEquals(QuickStart.LOAD_SALES_DAILY_PROCESS, loadProcess.get(AtlasClient.NAME));
assertEquals(QuickStart.LOAD_SALES_DAILY_PROCESS_DESCRIPTION, loadProcess.get("description"));
List<Id> inputs = (List<Id>)loadProcess.get(QuickStart.INPUTS_ATTRIBUTE);
@@ -141,12 +143,12 @@ public class QuickStartIT extends BaseResourceIT {
@Test
public void testViewIsAdded() throws AtlasServiceException, JSONException {
Referenceable view = serviceClient.getEntity(QuickStart.VIEW_TYPE, "name", QuickStart.PRODUCT_DIM_VIEW);
Referenceable view = serviceClient.getEntity(QuickStart.VIEW_TYPE, AtlasClient.NAME, QuickStart.PRODUCT_DIM_VIEW);
assertEquals(QuickStart.PRODUCT_DIM_VIEW, view.get("name"));
assertEquals(QuickStart.PRODUCT_DIM_VIEW, view.get(AtlasClient.NAME));
Id productDimId = getTable(QuickStart.PRODUCT_DIM_TABLE).getId();
Id inputTableId = ((List<Id>)view.get(QuickStart.INPUT_TABLES_ATTRIBUTE)).get(0);
Id inputTableId = ((List<Id>) view.get(QuickStart.INPUT_TABLES_ATTRIBUTE)).get(0);
assertEquals(productDimId, inputTableId);
}
}
@@ -184,7 +184,8 @@ public class HiveLineageJerseyResourceIT extends BaseResourceIT {
table("sales_fact_daily_mv" + randomString(), "sales fact daily materialized view", reportingDB,
"Joe BI", "MANAGED", salesFactColumns, "Metric");
loadProcess("loadSalesDaily" + randomString(), "John ETL", ImmutableList.of(salesFact, timeDim),
String procName = "loadSalesDaily" + randomString();
loadProcess(procName, "John ETL", ImmutableList.of(salesFact, timeDim),
ImmutableList.of(salesFactDaily), "create table as select ", "plan", "id", "graph", "ETL");
salesMonthlyTable = "sales_fact_monthly_mv" + randomString();
@@ -237,7 +238,8 @@ public class HiveLineageJerseyResourceIT extends BaseResourceIT {
Id loadProcess(String name, String user, List<Id> inputTables, List<Id> outputTables, String queryText,
String queryPlan, String queryId, String queryGraph, String... traitNames) throws Exception {
Referenceable referenceable = new Referenceable(HIVE_PROCESS_TYPE, traitNames);
referenceable.set("name", name);
referenceable.set(AtlasClient.NAME, name);
referenceable.set(AtlasClient.REFERENCEABLE_ATTRIBUTE_NAME, name);
referenceable.set("user", user);
referenceable.set("startTime", System.currentTimeMillis());
referenceable.set("endTime", System.currentTimeMillis() + 10000);
......