Commit ad7604fc by Suma Shivaprasad

ATLAS-523 Support alter view (sumasai via shwethags)

parent 44dbfe57
@@ -86,7 +86,6 @@ public class HiveHook extends AtlasHook implements ExecuteWithHookContext {
     private static final long keepAliveTimeDefault = 10;
     private static final int queueSizeDefault = 10000;
-    private static boolean typesRegistered = false;
     private static Configuration atlasProperties;

     class HiveEvent {
@@ -212,6 +211,7 @@ public class HiveHook extends AtlasHook implements ExecuteWithHookContext {
             case CREATETABLE_AS_SELECT:
             case CREATEVIEW:
+            case ALTERVIEW_AS:
             case LOAD:
             case EXPORT:
             case IMPORT:
@@ -229,15 +229,12 @@ public class HiveHook extends AtlasHook implements ExecuteWithHookContext {
             case ALTERTABLE_CLUSTER_SORT:
             case ALTERTABLE_BUCKETNUM:
             case ALTERTABLE_PROPERTIES:
+            case ALTERVIEW_PROPERTIES:
            case ALTERTABLE_SERDEPROPERTIES:
             case ALTERTABLE_SERIALIZER:
                 alterTable(dgiBridge, event);
                 break;
-            case ALTERVIEW_AS:
-                //update inputs/outputs?
-                break;
             case ALTERTABLE_ADDCOLS:
             case ALTERTABLE_REPLACECOLS:
             case ALTERTABLE_RENAMECOL:
...
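Read together, the two HiveHook hunks route the new view operations through existing handlers: ALTERVIEW_AS falls through with the CREATEVIEW group, and ALTERVIEW_PROPERTIES joins the ALTERTABLE_* group that calls alterTable. The old dedicated ALTERVIEW_AS arm, with its "//update inputs/outputs?" placeholder, is deleted, so an ALTER VIEW ... AS SELECT is no longer silently dropped. A minimal sketch of the resulting switch; only alterTable() appears verbatim in the hunks above, and the handler for the create/load group is not shown there, so registerProcess is an assumed name:

    switch (event.operation) {
        case CREATETABLE_AS_SELECT:
        case CREATEVIEW:
        case ALTERVIEW_AS:            // new in this commit: handled like CREATEVIEW
        case LOAD:
        case EXPORT:
        case IMPORT:
            registerProcess(dgiBridge, event);  // assumed handler name, not shown in the hunks
            break;
        case ALTERTABLE_CLUSTER_SORT:
        case ALTERTABLE_BUCKETNUM:
        case ALTERTABLE_PROPERTIES:
        case ALTERVIEW_PROPERTIES:    // new in this commit: handled like an ALTER TABLE
        case ALTERTABLE_SERDEPROPERTIES:
        case ALTERTABLE_SERIALIZER:
            alterTable(dgiBridge, event);       // verbatim from the diff
            break;
        // ... remaining ALTERTABLE_* cases unchanged
    }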
@@ -18,8 +18,8 @@
 package org.apache.atlas.hive.hook;

+import com.google.common.base.Joiner;
 import com.google.common.collect.ImmutableList;
-import groovy.transform.Immutable;
 import org.apache.atlas.ApplicationProperties;
 import org.apache.atlas.AtlasClient;
 import org.apache.atlas.AtlasServiceException;
@@ -38,13 +38,6 @@ import org.apache.hadoop.hive.metastore.TableType;
 import org.apache.hadoop.hive.ql.Driver;
 import org.apache.hadoop.hive.ql.processors.CommandProcessorResponse;
 import org.apache.hadoop.hive.ql.session.SessionState;
-import org.apache.hadoop.io.NullWritable;
-import org.apache.hadoop.io.Writable;
-import org.apache.hadoop.mapreduce.InputFormat;
-import org.apache.hadoop.mapreduce.InputSplit;
-import org.apache.hadoop.mapreduce.JobContext;
-import org.apache.hadoop.mapreduce.RecordReader;
-import org.apache.hadoop.mapreduce.TaskAttemptContext;
 import org.codehaus.jettison.json.JSONArray;
 import org.codehaus.jettison.json.JSONObject;
 import org.slf4j.Logger;
@@ -52,14 +45,7 @@ import org.testng.Assert;
 import org.testng.annotations.BeforeClass;
 import org.testng.annotations.Test;

-import java.io.DataInput;
-import java.io.DataOutput;
 import java.io.File;
-import java.io.IOException;
-import java.net.URLClassLoader;
-import java.nio.file.Path;
-import java.nio.file.Paths;
-import java.util.ArrayList;
 import java.util.HashMap;
 import java.util.List;
 import java.util.Map;
@@ -221,6 +207,55 @@ public class HiveHookIT {
     }

     @Test
+    public void testAlterViewAsSelect() throws Exception {
+        //Create the view from table1
+        String table1Name = createTable();
+        String viewName = tableName();
+        String query = "create view " + viewName + " as select * from " + table1Name;
+        runCommand(query);
+
+        String table1Id = assertTableIsRegistered(DEFAULT_DB, table1Name);
+        assertProcessIsRegistered(query);
+        String viewId = assertTableIsRegistered(DEFAULT_DB, viewName);
+
+        //Check lineage which includes table1
+        String datasetName = HiveMetaStoreBridge.getTableQualifiedName(CLUSTER_NAME, DEFAULT_DB, viewName);
+        JSONObject response = dgiCLient.getInputGraph(datasetName);
+        JSONObject vertices = response.getJSONObject("values").getJSONObject("vertices");
+        Assert.assertTrue(vertices.has(viewId));
+        Assert.assertTrue(vertices.has(table1Id));
+
+        //Alter the view from table2
+        String table2Name = createTable();
+        query = "alter view " + viewName + " as select * from " + table2Name;
+        runCommand(query);
+
+        //Check if the alter view process is registered
+        assertProcessIsRegistered(query);
+        String table2Id = assertTableIsRegistered(DEFAULT_DB, table2Name);
+        Assert.assertEquals(assertTableIsRegistered(DEFAULT_DB, viewName), viewId);
+
+        //Check lineage which includes both table1 and table2
+        datasetName = HiveMetaStoreBridge.getTableQualifiedName(CLUSTER_NAME, DEFAULT_DB, viewName);
+        response = dgiCLient.getInputGraph(datasetName);
+        vertices = response.getJSONObject("values").getJSONObject("vertices");
+        Assert.assertTrue(vertices.has(viewId));
+        //This is through the alter view process
+        Assert.assertTrue(vertices.has(table2Id));
+        //This is through the create view process
+        Assert.assertTrue(vertices.has(table1Id));
+
+        //Outputs don't exist
+        response = dgiCLient.getOutputGraph(datasetName);
+        vertices = response.getJSONObject("values").getJSONObject("vertices");
+        Assert.assertEquals(vertices.length(), 0);
+    }
+
+    @Test
     public void testLoadData() throws Exception {
         String tableName = createTable(false);
@@ -520,7 +555,7 @@ public class HiveHookIT {
     }

     private String getSerializedProps(Map<String, String> expectedProps) {
-        StringBuffer sb = new StringBuffer();
+        StringBuilder sb = new StringBuilder();
         for(String expectedPropKey : expectedProps.keySet()) {
             if(sb.length() > 0) {
                 sb.append(",");
@@ -535,34 +570,68 @@ public class HiveHookIT {
     @Test
     public void testAlterTableProperties() throws Exception {
         String tableName = createTable();
+        final String fmtQuery = "alter table %s %s TBLPROPERTIES (%s)";
+        testAlterProperties(tableName, fmtQuery);
+    }
+
+    private void testAlterProperties(String tableName, String fmtQuery) throws Exception {
+        final String SET_OP = "set";
+        final String UNSET_OP = "unset";
         final Map<String, String> expectedProps = new HashMap<String, String>() {{
             put("testPropKey1", "testPropValue1");
             put("comment", "test comment");
         }};
-        final String fmtQuery = "alter table %s set TBLPROPERTIES (%s)";
-        String query = String.format(fmtQuery, tableName, getSerializedProps(expectedProps));
+        String query = String.format(fmtQuery, tableName, SET_OP, getSerializedProps(expectedProps));
         runCommand(query);
-        verifyTableProperties(tableName, expectedProps);
+        verifyTableProperties(tableName, expectedProps, false);

         expectedProps.put("testPropKey2", "testPropValue2");
         //Add another property
-        query = String.format(fmtQuery, tableName, getSerializedProps(expectedProps));
+        query = String.format(fmtQuery, tableName, SET_OP, getSerializedProps(expectedProps));
         runCommand(query);
-        verifyTableProperties(tableName, expectedProps);
+        verifyTableProperties(tableName, expectedProps, false);
+
+        //Unset all the props
+        StringBuilder sb = new StringBuilder("'");
+        query = String.format(fmtQuery, tableName, UNSET_OP, Joiner.on("','").skipNulls().appendTo(sb, expectedProps.keySet()).append('\''));
+        runCommand(query);
+        verifyTableProperties(tableName, expectedProps, true);
+    }
+
+    @Test
+    public void testAlterViewProperties() throws Exception {
+        String tableName = createTable();
+        String viewName = tableName();
+        String query = "create view " + viewName + " as select * from " + tableName;
+        runCommand(query);
+
+        final String fmtQuery = "alter view %s %s TBLPROPERTIES (%s)";
+        testAlterProperties(viewName, fmtQuery);
     }

-    private void verifyTableProperties(String tableName, Map<String, String> expectedProps) throws Exception {
+    private void verifyTableProperties(String tableName, Map<String, String> expectedProps, boolean checkIfNotExists) throws Exception {
         String tableId = assertTableIsRegistered(DEFAULT_DB, tableName);
         Referenceable tableRef = dgiCLient.getEntity(tableId);
         Map<String, String> parameters = (Map<String, String>) tableRef.get(HiveDataModelGenerator.PARAMETERS);
-        Assert.assertNotNull(parameters);
-        //Comment should exist since SET TBLPOPERTIES only adds properties. Doe not remove existing ones
-        for (String propKey : expectedProps.keySet()) {
-            Assert.assertEquals(parameters.get(propKey), expectedProps.get(propKey));
+        if (checkIfNotExists == false) {
+            //Check if properties exist
+            Assert.assertNotNull(parameters);
+            for (String propKey : expectedProps.keySet()) {
+                Assert.assertEquals(parameters.get(propKey), expectedProps.get(propKey));
+            }
+        } else {
+            //Check if properties don't exist
+            if (expectedProps != null && parameters != null) {
+                for (String propKey : expectedProps.keySet()) {
+                    Assert.assertFalse(parameters.containsKey(propKey));
+                }
+            }
         }
     }
...
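The least obvious line in testAlterProperties is the Joiner expression that builds the quoted key list for the unset query. A standalone sketch of what it produces; Joiner is Guava's com.google.common.base.Joiner, matching the import added above, and the class name and sample keys here are illustrative:

    import com.google.common.base.Joiner;

    import java.util.Arrays;
    import java.util.List;

    public class UnsetPropsSketch {
        public static void main(String[] args) {
            List<String> keys = Arrays.asList("testPropKey1", "comment");
            // Seed the builder with the opening quote, join the keys with ','
            // (closing quote, comma, opening quote), then append the final quote.
            StringBuilder sb = new StringBuilder("'");
            String propList = Joiner.on("','").skipNulls().appendTo(sb, keys).append('\'').toString();
            // propList -> 'testPropKey1','comment'
            String query = String.format("alter table %s %s TBLPROPERTIES (%s)", "mytable", "unset", propList);
            System.out.println(query);
            // -> alter table mytable unset TBLPROPERTIES ('testPropKey1','comment')
        }
    }

verifyTableProperties(tableName, expectedProps, true) then asserts that none of those keys remain in the table's PARAMETERS map after the unset.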
@@ -13,6 +13,7 @@ ATLAS-409 Atlas will not import avro tables with schema read from a file (dosset
 ATLAS-379 Create sqoop and falcon metadata addons (venkatnrangan,bvellanki,sowmyaramesh via shwethags)

 ALL CHANGES:
+ATLAS-523 Support alter view (sumasai via shwethags)
 ATLAS-555 Tag creation from UI fails due to missing description attribute (guptaneeru via shwethags)
 ATLAS-522 Support Alter table commands (sumasai via shwethags)
 ATLAS-512 Decouple currently integrating components from availability of Atlas service for raising metadata events (yhemanth via sumasai)
...