Commit 1a390f01 by Suma Shivaprasad

ATLAS-529 support drop database (sumasai)

parent 755e59c0
...@@ -20,6 +20,7 @@ package org.apache.atlas.hive.hook; ...@@ -20,6 +20,7 @@ package org.apache.atlas.hive.hook;
import com.google.common.util.concurrent.ThreadFactoryBuilder; import com.google.common.util.concurrent.ThreadFactoryBuilder;
import org.apache.atlas.AtlasClient;
import org.apache.atlas.hive.bridge.HiveMetaStoreBridge; import org.apache.atlas.hive.bridge.HiveMetaStoreBridge;
import org.apache.atlas.hive.model.HiveDataModelGenerator; import org.apache.atlas.hive.model.HiveDataModelGenerator;
import org.apache.atlas.hive.model.HiveDataTypes; import org.apache.atlas.hive.model.HiveDataTypes;
...@@ -347,6 +348,10 @@ public class HiveHook extends AtlasHook implements ExecuteWithHookContext { ...@@ -347,6 +348,10 @@ public class HiveHook extends AtlasHook implements ExecuteWithHookContext {
deleteTable(dgiBridge, event); deleteTable(dgiBridge, event);
break; break;
case DROPDATABASE:
deleteDatabase(dgiBridge, event);
break;
default: default:
} }
...@@ -354,15 +359,38 @@ public class HiveHook extends AtlasHook implements ExecuteWithHookContext { ...@@ -354,15 +359,38 @@ public class HiveHook extends AtlasHook implements ExecuteWithHookContext {
} }
private void deleteTable(HiveMetaStoreBridge dgiBridge, HiveEventContext event) { private void deleteTable(HiveMetaStoreBridge dgiBridge, HiveEventContext event) {
for (Entity output : event.outputs) { for (WriteEntity output : event.outputs) {
if (Type.TABLE.equals(output.getType())) {
deleteTable(dgiBridge, event, output);
}
}
}
/**
 * Queues an Atlas entity-delete notification for a single dropped table.
 *
 * @param dgiBridge bridge supplying the cluster name for qualification
 * @param event     hook event supplying the requesting user
 * @param output    the TABLE write entity being dropped
 */
private void deleteTable(HiveMetaStoreBridge dgiBridge, HiveEventContext event, WriteEntity output) {
    // Cluster-qualified name uniquely identifying the table entity in Atlas.
    final String qualifiedName = HiveMetaStoreBridge.getTableQualifiedName(
            dgiBridge.getClusterName(),
            output.getTable().getDbName(),
            output.getTable().getTableName());
    LOG.info("Deleting table {} ", qualifiedName);

    // Delete request is keyed by the table type's unique name attribute.
    final HookNotification.EntityDeleteRequest deleteRequest =
            new HookNotification.EntityDeleteRequest(event.getUser(),
                    HiveDataTypes.HIVE_TABLE.getName(),
                    HiveDataModelGenerator.NAME,
                    qualifiedName);
    messages.add(deleteRequest);
}
private void deleteDatabase(HiveMetaStoreBridge dgiBridge, HiveEventContext event) {
if (event.outputs.size() > 1) {
LOG.info("Starting deletion of tables and databases with cascade {} " , event.queryStr);
}
for (WriteEntity output : event.outputs) {
if (Type.TABLE.equals(output.getType())) { if (Type.TABLE.equals(output.getType())) {
final String tblQualifiedName = HiveMetaStoreBridge.getTableQualifiedName(dgiBridge.getClusterName(), output.getTable().getDbName(), output.getTable().getTableName()); deleteTable(dgiBridge, event, output);
LOG.info("Deleting table {} ", tblQualifiedName); } else if (Type.DATABASE.equals(output.getType())) {
final String dbQualifiedName = HiveMetaStoreBridge.getDBQualifiedName(dgiBridge.getClusterName(), output.getDatabase().getName());
messages.add( messages.add(
new HookNotification.EntityDeleteRequest(event.getUser(), new HookNotification.EntityDeleteRequest(event.getUser(),
HiveDataTypes.HIVE_TABLE.getName(), HiveDataTypes.HIVE_DB.getName(),
HiveDataModelGenerator.NAME, AtlasClient.REFERENCEABLE_ATTRIBUTE_NAME,
tblQualifiedName)); dbQualifiedName));
} }
} }
} }
......
...@@ -877,7 +877,6 @@ public class HiveHookIT { ...@@ -877,7 +877,6 @@ public class HiveHookIT {
assertColumnIsRegistered(HiveMetaStoreBridge.getColumnQualifiedName(HiveMetaStoreBridge.getTableQualifiedName(CLUSTER_NAME, DEFAULT_DB, tableName), "name")); assertColumnIsRegistered(HiveMetaStoreBridge.getColumnQualifiedName(HiveMetaStoreBridge.getTableQualifiedName(CLUSTER_NAME, DEFAULT_DB, tableName), "name"));
final String query = String.format("drop table %s ", tableName); final String query = String.format("drop table %s ", tableName);
runCommand(query); runCommand(query);
assertColumnIsNotRegistered(HiveMetaStoreBridge.getColumnQualifiedName(HiveMetaStoreBridge.getTableQualifiedName(CLUSTER_NAME, DEFAULT_DB, tableName), "id")); assertColumnIsNotRegistered(HiveMetaStoreBridge.getColumnQualifiedName(HiveMetaStoreBridge.getTableQualifiedName(CLUSTER_NAME, DEFAULT_DB, tableName), "id"));
assertColumnIsNotRegistered(HiveMetaStoreBridge.getColumnQualifiedName(HiveMetaStoreBridge.getTableQualifiedName(CLUSTER_NAME, DEFAULT_DB, tableName), "name")); assertColumnIsNotRegistered(HiveMetaStoreBridge.getColumnQualifiedName(HiveMetaStoreBridge.getTableQualifiedName(CLUSTER_NAME, DEFAULT_DB, tableName), "name"));
...@@ -885,6 +884,65 @@ public class HiveHookIT { ...@@ -885,6 +884,65 @@ public class HiveHookIT {
} }
@Test
public void testDropDatabaseWithCascade() throws Exception {
    // Verify that dropping a database with CASCADE removes the database
    // entity and all of its table/column entities from Atlas.
    // (The duplicated "@Test @Test" annotation in the corrupted diff
    // rendering has been collapsed to a single annotation.)
    String dbName = "db" + random();
    runCommand("create database " + dbName + " WITH DBPROPERTIES ('p1'='v1')");

    final int numTables = 10;
    String[] tableNames = new String[numTables];
    for (int i = 0; i < numTables; i++) {
        // NOTE(review): createTable() takes no database name here — confirm
        // it creates the tables inside dbName (e.g. via a prior "use" of the
        // database) and not in the default database, otherwise the cascade
        // assertions below are vacuous.
        tableNames[i] = createTable(true, true, false);
    }

    final String query = String.format("drop database %s cascade", dbName);
    runCommand(query);

    // Columns of a representative table must no longer be registered.
    assertColumnIsNotRegistered(HiveMetaStoreBridge.getColumnQualifiedName(
            HiveMetaStoreBridge.getTableQualifiedName(CLUSTER_NAME, dbName, tableNames[0]), "id"));
    assertColumnIsNotRegistered(HiveMetaStoreBridge.getColumnQualifiedName(
            HiveMetaStoreBridge.getTableQualifiedName(CLUSTER_NAME, dbName, tableNames[0]), "name"));

    // Every table entity must be gone.
    for (int i = 0; i < numTables; i++) {
        assertTableIsNotRegistered(dbName, tableNames[i]);
    }

    // Finally, the database entity itself must be gone.
    assertDBIsNotRegistered(dbName);
}
@Test
public void testDropDatabaseWithoutCascade() throws Exception {
    // Dropping a database after all its tables were dropped individually
    // must remove the database entity from Atlas.
    String dbName = "db" + random();
    runCommand("create database " + dbName + " WITH DBPROPERTIES ('p1'='v1')");

    final int numTables = 10;
    String[] tableNames = new String[numTables];
    for (int i = 0; i < numTables; i++) {
        tableNames[i] = createTable(true, true, false);
        // Drop each table up front so the database is empty when dropped.
        runCommand(String.format("drop table %s", tableNames[i]));
        assertTableIsNotRegistered(dbName, tableNames[i]);
    }

    runCommand(String.format("drop database %s", dbName));
    assertDBIsNotRegistered(dbName);
}
@Test
public void testDropNonExistingDB() throws Exception {
    // Dropping a database that was never created must be a no-op in Atlas.
    final String dbName = "nonexistingdb";
    assertDBIsNotRegistered(dbName);

    final String dropQuery = "drop database if exists " + dbName + " cascade";
    runCommand(dropQuery);

    // "if exists" makes the Hive command succeed, but nothing may be
    // registered or deleted in Atlas as a result.
    assertDBIsNotRegistered(dbName);
    assertProcessIsNotRegistered(dropQuery);
}
@Test
public void testDropNonExistingTable() throws Exception { public void testDropNonExistingTable() throws Exception {
//Test Deletion of a non existing table //Test Deletion of a non existing table
final String tableName = "nonexistingtable"; final String tableName = "nonexistingtable";
...@@ -1095,6 +1153,14 @@ public class HiveHookIT { ...@@ -1095,6 +1153,14 @@ public class HiveHookIT {
assertEntityIsNotRegistered(QUERY_TYPE.DSL, query); assertEntityIsNotRegistered(QUERY_TYPE.DSL, query);
} }
/**
 * Asserts that no hive_db entity with the given name exists in Atlas for
 * the test cluster.
 *
 * @param dbName database name to look up (lower-cased for the DSL query,
 *               matching Hive's case-insensitive naming)
 * @throws Exception if the search itself fails
 */
private void assertDBIsNotRegistered(String dbName) throws Exception {
    // Fix: the original format string had two placeholders ("{}.{}") but
    // only one argument, leaving a dangling "{}" in the log output.
    LOG.debug("Searching for database {}", dbName);
    String query = String.format(
            "%s as d where name = '%s' and clusterName = '%s'" + " select d",
            HiveDataTypes.HIVE_DB.getName(), dbName.toLowerCase(), CLUSTER_NAME);
    assertEntityIsNotRegistered(QUERY_TYPE.DSL, query);
}
private String assertTableIsRegistered(String dbName, String tableName) throws Exception { private String assertTableIsRegistered(String dbName, String tableName) throws Exception {
LOG.debug("Searching for table {}.{}", dbName, tableName); LOG.debug("Searching for table {}.{}", dbName, tableName);
String query = String.format( String query = String.format(
......
...@@ -13,6 +13,7 @@ ATLAS-409 Atlas will not import avro tables with schema read from a file (dosset ...@@ -13,6 +13,7 @@ ATLAS-409 Atlas will not import avro tables with schema read from a file (dosset
ATLAS-379 Create sqoop and falcon metadata addons (venkatnrangan,bvellanki,sowmyaramesh via shwethags) ATLAS-379 Create sqoop and falcon metadata addons (venkatnrangan,bvellanki,sowmyaramesh via shwethags)
ALL CHANGES: ALL CHANGES:
ATLAS-529 support drop database (sumasai)
ATLAS-528 Support drop table,view (sumasai) ATLAS-528 Support drop table,view (sumasai)
ATLAS-603 Document High Availability of Atlas (yhemanth via sumasai) ATLAS-603 Document High Availability of Atlas (yhemanth via sumasai)
ATLAS-498 Support Embedded HBase (tbeerbower via sumasai) ATLAS-498 Support Embedded HBase (tbeerbower via sumasai)
......
Markdown is supported
0% or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment