Commit 755e59c0 by Suma Shivaprasad

ATLAS-528 Support drop table,view (sumasai)

parent b8f4ffb6
@@ -335,8 +335,8 @@ public class HiveMetaStoreBridge {
         tableReference.set(HiveDataModelGenerator.COLUMNS, getColumns(hiveTable.getCols(), tableQualifiedName));
-        // add reference to the StorageDescriptorx
-        Referenceable sdReferenceable = fillStorageDescStruct(hiveTable.getSd(), tableQualifiedName, tableQualifiedName);
+        // add reference to the StorageDescriptor
+        Referenceable sdReferenceable = fillStorageDesc(hiveTable.getSd(), tableQualifiedName, getStorageDescQFName(tableQualifiedName));
         tableReference.set("sd", sdReferenceable);
         // add reference to the Partition Keys
@@ -359,6 +359,10 @@ public class HiveMetaStoreBridge {
         return tableReference;
     }

+    private String getStorageDescQFName(String entityQualifiedName) {
+        return entityQualifiedName + "_storage";
+    }
+
     private Referenceable registerTable(Referenceable dbReference, Table table) throws Exception {
         String dbName = table.getDbName();
         String tableName = table.getTableName();
@@ -410,7 +414,7 @@ public class HiveMetaStoreBridge {
         return new Referenceable(sd.getId().id, sd.getTypeName(), null);
     }

-    public Referenceable fillStorageDescStruct(StorageDescriptor storageDesc, String tableQualifiedName,
+    public Referenceable fillStorageDesc(StorageDescriptor storageDesc, String tableQualifiedName,
             String sdQualifiedName) throws Exception {
         LOG.debug("Filling storage descriptor information for " + storageDesc);
@@ -466,7 +470,7 @@ public class HiveMetaStoreBridge {
         ref.set("path", pathUri);
         // Path path = new Path(pathUri);
         // ref.set("name", path.getName());
-        // TODO - Fix after ATLAS-542 to shorter Name
+        //TODO - Fix after ATLAS-542 to shorter Name
         ref.set("name", pathUri);
         ref.set(AtlasClient.REFERENCEABLE_ATTRIBUTE_NAME, pathUri);
         return ref;
...
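Note: the new getStorageDescQFName helper above gives a storage descriptor its own addressable qualified name by suffixing the owning entity's qualified name with "_storage". A minimal sketch of the convention, using a hypothetical table (the <db>.<table>@<cluster> format follows HiveMetaStoreBridge.getTableQualifiedName; the names below are placeholders, not from this commit):

    // Illustrative only; table and cluster names are hypothetical.
    String tableQualifiedName = "default.sales_fact@primary";   // <db>.<table>@<cluster>
    String sdQualifiedName = tableQualifiedName + "_storage";   // "default.sales_fact@primary_storage"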
@@ -308,6 +308,7 @@ public class HiveHook extends AtlasHook implements ExecuteWithHookContext {
             case EXPORT:
             case IMPORT:
             case QUERY:
+            case TRUNCATETABLE:
                 registerProcess(dgiBridge, event);
                 break;
@@ -326,6 +327,7 @@ public class HiveHook extends AtlasHook implements ExecuteWithHookContext {
             case ALTERTABLE_ADDCOLS:
             case ALTERTABLE_REPLACECOLS:
             case ALTERTABLE_RENAMECOL:
+            case ALTERTABLE_PARTCOLTYPE:
                 handleEventOutputs(dgiBridge, event, Type.TABLE);
                 break;
             case ALTERTABLE_LOCATION:
@@ -334,17 +336,37 @@ public class HiveHook extends AtlasHook implements ExecuteWithHookContext {
                     //Track altered lineage in case of external tables
                     handleExternalTables(dgiBridge, event, tablesUpdated.get(0).getLeft(), tablesUpdated.get(0).getRight());
                 }
+                break;
             case ALTERDATABASE:
             case ALTERDATABASE_OWNER:
                 handleEventOutputs(dgiBridge, event, Type.DATABASE);
                 break;
+            case DROPTABLE:
+            case DROPVIEW:
+                deleteTable(dgiBridge, event);
+                break;
             default:
         }

         notifyEntities(messages);
     }
+    private void deleteTable(HiveMetaStoreBridge dgiBridge, HiveEventContext event) {
+        for (Entity output : event.outputs) {
+            if (Type.TABLE.equals(output.getType())) {
+                final String tblQualifiedName = HiveMetaStoreBridge.getTableQualifiedName(dgiBridge.getClusterName(), output.getTable().getDbName(), output.getTable().getTableName());
+                LOG.info("Deleting table {} ", tblQualifiedName);
+                messages.add(
+                    new HookNotification.EntityDeleteRequest(event.getUser(),
+                        HiveDataTypes.HIVE_TABLE.getName(),
+                        HiveDataModelGenerator.NAME,
+                        tblQualifiedName));
+            }
+        }
+    }
+
     private void renameTable(HiveMetaStoreBridge dgiBridge, HiveEventContext event) throws Exception {
         //crappy, no easy of getting new name
         assert event.getInputs() != null && event.getInputs().size() == 1;
...
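Note: with the DROPTABLE/DROPVIEW cases above, the hook does not resolve the dropped table to a GUID; it emits an EntityDeleteRequest keyed by the table's unique qualified name, and the server resolves and deletes the entity (the tests below also expect the table's columns to be unregistered afterwards). A minimal standalone sketch of building that same message, assuming the Atlas 0.x package layout used in this commit; the user, cluster, db, and table values are placeholders:

    import org.apache.atlas.hive.bridge.HiveMetaStoreBridge;
    import org.apache.atlas.hive.model.HiveDataModelGenerator;
    import org.apache.atlas.hive.model.HiveDataTypes;
    import org.apache.atlas.notification.hook.HookNotification;

    public class DeleteMessageSketch {
        public static void main(String[] args) {
            // Same shape as the message deleteTable() enqueues for DROP TABLE / DROP VIEW.
            // Qualified name format: <db>.<table>@<cluster>, per HiveMetaStoreBridge.
            String qualifiedName = HiveMetaStoreBridge.getTableQualifiedName("primary", "default", "sales_fact");

            HookNotification.EntityDeleteRequest delete =
                    new HookNotification.EntityDeleteRequest(
                            "etl_user",                            // acting user (the hook takes this from the Hive event)
                            HiveDataTypes.HIVE_TABLE.getName(),    // entity type: hive_table
                            HiveDataModelGenerator.NAME,           // unique attribute holding the qualified name
                            qualifiedName);

            System.out.println("delete request built for: " + qualifiedName);
        }
    }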
@@ -497,7 +497,6 @@ public class HiveHookIT {
         runCommand(query);
         Referenceable processReference = validateProcess(query, 1, 1);
         validateHDFSPaths(processReference, filename, OUTPUTS);
         validateInputTables(processReference, tableId);

         //Import
@@ -510,7 +509,6 @@ public class HiveHookIT {
         validateHDFSPaths(processReference, filename, INPUTS);
         validateOutputTables(processReference, tableId);
     }

     @Test
@@ -541,8 +539,6 @@ public class HiveHookIT {
         validateHDFSPaths(processReference, filename, INPUTS);
         validateOutputTables(processReference, tableId);
     }

     @Test
@@ -684,6 +680,41 @@ public class HiveHookIT {
         Assert.assertEquals(columns.get(0).get(HiveDataModelGenerator.NAME), "id");
     }

+    @Test()
+    public void testTruncateTable() throws Exception {
+        String tableName = createTable(false);
+        String query = String.format("truncate table %s", tableName);
+        runCommand(query);
+        String tableId = assertTableIsRegistered(DEFAULT_DB, tableName);
+        validateProcess(query, 0, 1);
+
+        //Check lineage
+        String datasetName = HiveMetaStoreBridge.getTableQualifiedName(CLUSTER_NAME, DEFAULT_DB, tableName);
+        JSONObject response = dgiCLient.getInputGraph(datasetName);
+        JSONObject vertices = response.getJSONObject("values").getJSONObject("vertices");
+        //Below should be assertTrue - Fix https://issues.apache.org/jira/browse/ATLAS-653
+        Assert.assertFalse(vertices.has(tableId));
+    }
+
+    @Test
+    public void testAlterTablePartitionColumnType() throws Exception {
+        String tableName = createTable(true, true, false);
+        final String newType = "int";
+        String query = String.format("ALTER TABLE %s PARTITION COLUMN (dt %s)", tableName, newType);
+        runCommand(query);
+
+        final String tableId = assertTableIsRegistered(DEFAULT_DB, tableName);
+        final String dtColId = assertColumnIsRegistered(HiveMetaStoreBridge.getColumnQualifiedName(HiveMetaStoreBridge.getTableQualifiedName(CLUSTER_NAME, DEFAULT_DB, tableName), "dt"));
+
+        Referenceable table = dgiCLient.getEntity(tableId);
+        Referenceable column = dgiCLient.getEntity(dtColId);
+        Assert.assertEquals(column.get("type"), newType);
+
+        final List<Referenceable> partitionKeys = (List<Referenceable>) table.get("partitionKeys");
+        Assert.assertEquals(partitionKeys.size(), 1);
+        Assert.assertEquals(partitionKeys.get(0).getId()._getId(), dtColId);
+    }
+
     @Test
     public void testAlterViewRename() throws Exception {
         String tableName = createTable();
@@ -834,7 +865,56 @@ public class HiveHookIT {
         //Add another property
         runSerdePropsQuery(tableName, expectedProps);
+    }
+
+    @Test
+    public void testDropTable() throws Exception {
+        //Test deletion of a table and its corresponding columns
+        String tableName = createTable(true, true, false);
+        assertTableIsRegistered(DEFAULT_DB, tableName);
+        assertColumnIsRegistered(HiveMetaStoreBridge.getColumnQualifiedName(HiveMetaStoreBridge.getTableQualifiedName(CLUSTER_NAME, DEFAULT_DB, tableName), "id"));
+        assertColumnIsRegistered(HiveMetaStoreBridge.getColumnQualifiedName(HiveMetaStoreBridge.getTableQualifiedName(CLUSTER_NAME, DEFAULT_DB, tableName), "name"));
+
+        final String query = String.format("drop table %s ", tableName);
+        runCommand(query);
+        assertColumnIsNotRegistered(HiveMetaStoreBridge.getColumnQualifiedName(HiveMetaStoreBridge.getTableQualifiedName(CLUSTER_NAME, DEFAULT_DB, tableName), "id"));
+        assertColumnIsNotRegistered(HiveMetaStoreBridge.getColumnQualifiedName(HiveMetaStoreBridge.getTableQualifiedName(CLUSTER_NAME, DEFAULT_DB, tableName), "name"));
+        assertTableIsNotRegistered(DEFAULT_DB, tableName);
+    }
+
+    @Test
+    public void testDropNonExistingTable() throws Exception {
+        //Test deletion of a non-existing table
+        final String tableName = "nonexistingtable";
+        assertTableIsNotRegistered(DEFAULT_DB, tableName);
+
+        final String query = String.format("drop table if exists %s", tableName);
+        runCommand(query);
+
+        //Should have no effect
+        assertTableIsNotRegistered(DEFAULT_DB, tableName);
+        assertProcessIsNotRegistered(query);
+    }
+
+    @Test
+    public void testDropView() throws Exception {
+        //Test deletion of a view and its corresponding columns
+        String tableName = createTable(true, true, false);
+        String viewName = tableName();
+        String query = "create view " + viewName + " as select * from " + tableName;
+        runCommand(query);
+
+        assertTableIsRegistered(DEFAULT_DB, viewName);
+        assertColumnIsRegistered(HiveMetaStoreBridge.getColumnQualifiedName(HiveMetaStoreBridge.getTableQualifiedName(CLUSTER_NAME, DEFAULT_DB, viewName), "id"));
+        assertColumnIsRegistered(HiveMetaStoreBridge.getColumnQualifiedName(HiveMetaStoreBridge.getTableQualifiedName(CLUSTER_NAME, DEFAULT_DB, viewName), "name"));
+
+        query = String.format("drop view %s ", viewName);
+        runCommand(query);
+        assertColumnIsNotRegistered(HiveMetaStoreBridge.getColumnQualifiedName(HiveMetaStoreBridge.getTableQualifiedName(CLUSTER_NAME, DEFAULT_DB, viewName), "id"));
+        assertColumnIsNotRegistered(HiveMetaStoreBridge.getColumnQualifiedName(HiveMetaStoreBridge.getTableQualifiedName(CLUSTER_NAME, DEFAULT_DB, viewName), "name"));
+        assertTableIsNotRegistered(DEFAULT_DB, viewName);
     }
     private void runSerdePropsQuery(String tableName, Map<String, String> expectedProps) throws Exception {
...
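Note: the new tests key every assertion off qualified names rather than GUIDs, and testTruncateTable registers the truncate as a process with zero inputs and one output, with its lineage assertion intentionally inverted pending ATLAS-653. A minimal sketch of the naming convention the assertions rely on; the values are placeholders and the exact column format is whatever getColumnQualifiedName produces:

    // Table qualified name: <db>.<table>@<cluster> (placeholder values).
    String tableQN = HiveMetaStoreBridge.getTableQualifiedName("primary", "default", "sales_fact");
    // Column qualified name is derived from the table's qualified name plus the
    // column name, via the same helper the tests call.
    String idColQN = HiveMetaStoreBridge.getColumnQualifiedName(tableQN, "id");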
@@ -13,6 +13,7 @@ ATLAS-409 Atlas will not import avro tables with schema read from a file (dosset
 ATLAS-379 Create sqoop and falcon metadata addons (venkatnrangan,bvellanki,sowmyaramesh via shwethags)

 ALL CHANGES:
+ATLAS-528 Support drop table,view (sumasai)
 ATLAS-603 Document High Availability of Atlas (yhemanth via sumasai)
 ATLAS-498 Support Embedded HBase (tbeerbower via sumasai)
 ATLAS-527 Support lineage for load table, import, export (sumasai via shwethags)
...