Commit 07b8b4d3 by Suma Shivaprasad

ATLAS-758 hdfs location of hive table is pointing to old location even after rename. (sumasai)

parent 54dc670a
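In outline, the commit does two things: it routes every location attribute (the database's old "locationUri" key and the storage descriptor's "location" key) through a single shared HiveDataModelGenerator.LOCATION constant, and it extends the rename integration test to assert that the storage descriptor's location reflects the new table name. A minimal, self-contained sketch of the constant-extraction pattern follows; plain maps stand in for Atlas Referenceable objects, and the class name and sample paths are illustrative assumptions, not committed code.

// Sketch only: maps stand in for Referenceable; values are made up.
import java.util.HashMap;
import java.util.Map;

public class LocationConstantSketch {
    // Mirrors the HiveDataModelGenerator.LOCATION constant added by this commit.
    static final String LOCATION = "location";

    public static void main(String[] args) {
        Map<String, Object> dbRef = new HashMap<>(); // stand-in for the hive_db Referenceable
        Map<String, Object> sdRef = new HashMap<>(); // stand-in for the storage descriptor

        // Before the commit, the DB writer used the literal "locationUri" while
        // the model declared "location"; both writers now share one key.
        dbRef.put(LOCATION, "hdfs://namenode:8020/apps/hive/warehouse/db1.db");
        sdRef.put(LOCATION, "hdfs://namenode:8020/apps/hive/warehouse/db1.db/t1");

        // Readers keyed on the same constant see the value the writers set.
        System.out.println(dbRef.get(LOCATION));
        System.out.println(sdRef.get(LOCATION));
    }
}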
@@ -164,7 +164,7 @@ public class HiveMetaStoreBridge {
         dbRef.set(HiveDataModelGenerator.NAME, dbName);
         dbRef.set(AtlasConstants.CLUSTER_NAME_ATTRIBUTE, clusterName);
         dbRef.set(DESCRIPTION_ATTR, hiveDB.getDescription());
-        dbRef.set("locationUri", hiveDB.getLocationUri());
+        dbRef.set(HiveDataModelGenerator.LOCATION, hiveDB.getLocationUri());
         dbRef.set(HiveDataModelGenerator.PARAMETERS, hiveDB.getParameters());
         dbRef.set(HiveDataModelGenerator.OWNER, hiveDB.getOwnerName());
         if (hiveDB.getOwnerType() != null) {
@@ -466,7 +466,7 @@ public class HiveMetaStoreBridge {
             sdReferenceable.set("sortCols", sortColsStruct);
         }
-        sdReferenceable.set("location", storageDesc.getLocation());
+        sdReferenceable.set(HiveDataModelGenerator.LOCATION, storageDesc.getLocation());
         sdReferenceable.set("inputFormat", storageDesc.getInputFormat());
         sdReferenceable.set("outputFormat", storageDesc.getOutputFormat());
         sdReferenceable.set("compressed", storageDesc.isCompressed());
...
@@ -440,14 +440,11 @@ public class HiveHook extends AtlasHook implements ExecuteWithHookContext {
     private Referenceable replaceTableQFName(HiveMetaStoreBridge dgiBridge, HiveEventContext event, Table oldTable, Table newTable, final Referenceable tableEntity, final String oldTableQFName, final String newTableQFName) throws HiveException {
         tableEntity.set(HiveDataModelGenerator.NAME, oldTableQFName);
         tableEntity.set(HiveDataModelGenerator.TABLE_NAME, oldTable.getTableName().toLowerCase());
-        final Referenceable newDbInstance = (Referenceable) tableEntity.get(HiveDataModelGenerator.DB);
-        tableEntity.set(HiveDataModelGenerator.DB, dgiBridge.createDBInstance(dgiBridge.hiveClient.getDatabase(oldTable.getDbName())));

         //Replace table entity with new name
         final Referenceable newEntity = new Referenceable(HiveDataTypes.HIVE_TABLE.getName());
         newEntity.set(HiveDataModelGenerator.NAME, newTableQFName);
         newEntity.set(HiveDataModelGenerator.TABLE_NAME, newTable.getTableName().toLowerCase());
-        newEntity.set(HiveDataModelGenerator.DB, newDbInstance);

         messages.add(new HookNotification.EntityPartialUpdateRequest(event.getUser(),
             HiveDataTypes.HIVE_TABLE.getName(), HiveDataModelGenerator.NAME,
...
@@ -78,6 +78,7 @@ public class HiveDataModelGenerator {
     public static final String STORAGE_DESC_INPUT_FMT = "inputFormat";
     public static final String STORAGE_DESC_OUTPUT_FMT = "outputFormat";
     public static final String OWNER = "owner";
+    public static final String LOCATION = "location";

     public static final String TABLE_TYPE_ATTR = "tableType";
@@ -171,7 +172,7 @@ public class HiveDataModelGenerator {
         //Optional to keep it backward-compatible
         new AttributeDefinition(TABLE, HiveDataTypes.HIVE_TABLE.getName(), Multiplicity.OPTIONAL, false,
                 STORAGE_DESC),
-        new AttributeDefinition("location", DataTypes.STRING_TYPE.getName(), Multiplicity.OPTIONAL, false,
+        new AttributeDefinition(LOCATION, DataTypes.STRING_TYPE.getName(), Multiplicity.OPTIONAL, false,
                 null),
         new AttributeDefinition("inputFormat", DataTypes.STRING_TYPE.getName(), Multiplicity.OPTIONAL, false,
                 null),
@@ -209,7 +210,7 @@ public class HiveDataModelGenerator {
                 null),
         new AttributeDefinition("description", DataTypes.STRING_TYPE.getName(), Multiplicity.OPTIONAL, false,
                 null),
-        new AttributeDefinition("locationUri", DataTypes.STRING_TYPE.getName(), Multiplicity.OPTIONAL, false,
+        new AttributeDefinition(LOCATION, DataTypes.STRING_TYPE.getName(), Multiplicity.OPTIONAL, false,
                 null),
         new AttributeDefinition(HiveDataModelGenerator.PARAMETERS, STRING_MAP_TYPE.getName(), Multiplicity.OPTIONAL, false, null),
         new AttributeDefinition(OWNER, DataTypes.STRING_TYPE.getName(), Multiplicity.OPTIONAL, false,
...
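For readers new to the 0.7-era typesystem used here, the five-argument AttributeDefinition form above is (name, dataTypeName, multiplicity, isComposite, reverseAttributeName). A hedged sketch of declaring the relocated attribute through the new constant; the wrapper class and import paths are assumptions based on the classes this diff touches, not part of the commit.

// Sketch only: declares the optional string attribute the model hunks define.
import org.apache.atlas.typesystem.types.AttributeDefinition;
import org.apache.atlas.typesystem.types.DataTypes;
import org.apache.atlas.typesystem.types.Multiplicity;

public class LocationAttributeSketch {
    static final String LOCATION = "location"; // mirrors HiveDataModelGenerator.LOCATION

    static AttributeDefinition locationAttribute() {
        // (name, dataTypeName, multiplicity, isComposite, reverseAttributeName)
        return new AttributeDefinition(LOCATION, DataTypes.STRING_TYPE.getName(),
                Multiplicity.OPTIONAL, false, null);
    }
}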
@@ -623,7 +623,7 @@ public class HiveHookIT {
         //Add trait to part col keys
         String partColTraitDetails = createTrait(partColumnGuid);

-        String newTableName = tableName();
+        final String newTableName = tableName();
         String query = String.format("alter table %s rename to %s", DEFAULT_DB + "." + tableName, newDBName + "." + newTableName);
         runCommand(query);
@@ -640,7 +640,15 @@ public class HiveHookIT {
         assertTrait(partColumnGuid, partColTraitDetails);

         assertTableIsNotRegistered(DEFAULT_DB, tableName);
-        assertTableIsRegistered(newDBName, newTableName);
+        assertTableIsRegistered(newDBName, newTableName, new AssertPredicate() {
+            @Override
+            public void assertOnEntity(final Referenceable entity) throws Exception {
+                Referenceable sd = ((Referenceable) entity.get(HiveDataModelGenerator.STORAGE_DESC));
+                String location = (String) sd.get(HiveDataModelGenerator.LOCATION);
+                Assert.assertTrue(location.contains(newTableName));
+            }
+        });
     }

     private List<Referenceable> getColumns(String dbName, String tableName) throws Exception {
@@ -673,8 +681,8 @@ public class HiveHookIT {
         runCommand(query);

         assertColumnIsRegistered(HiveMetaStoreBridge
             .getColumnQualifiedName(HiveMetaStoreBridge.getTableQualifiedName(CLUSTER_NAME, DEFAULT_DB, tableName),
                 column));

         //Verify the number of columns present in the table
         final List<Referenceable> columns = getColumns(DEFAULT_DB, tableName);
@@ -904,7 +912,7 @@ public class HiveHookIT {
             @Override
             public void assertOnEntity(Referenceable tableRef) throws Exception {
                 Referenceable sdRef = (Referenceable) tableRef.get(HiveDataModelGenerator.STORAGE_DESC);
-                Assert.assertEquals(new Path((String)sdRef.get("location")).toString(), new Path(testPath).toString());
+                Assert.assertEquals(new Path((String)sdRef.get(HiveDataModelGenerator.LOCATION)).toString(), new Path(testPath).toString());
             }
         });
@@ -1009,7 +1017,7 @@ public class HiveHookIT {
             ImmutableList<String> sortcolNames) throws Exception {
         Referenceable sdRef = (Referenceable) tableRef.get(HiveDataModelGenerator.STORAGE_DESC);
         Assert.assertEquals(((scala.math.BigInt) sdRef.get(HiveDataModelGenerator.STORAGE_NUM_BUCKETS)).intValue(),
             numBuckets);
         Assert.assertEquals(sdRef.get("bucketCols"), bucketColNames);

         List<Struct> hiveOrderStructList = (List<Struct>) sdRef.get("sortCols");
@@ -1050,11 +1058,11 @@ public class HiveHookIT {
         final String query = String.format("drop table %s ", tableName);
         runCommand(query);

         assertColumnIsNotRegistered(HiveMetaStoreBridge
             .getColumnQualifiedName(HiveMetaStoreBridge.getTableQualifiedName(CLUSTER_NAME, DEFAULT_DB, tableName),
                 "id"));
         assertColumnIsNotRegistered(HiveMetaStoreBridge
             .getColumnQualifiedName(HiveMetaStoreBridge.getTableQualifiedName(CLUSTER_NAME, DEFAULT_DB, tableName),
                 HiveDataModelGenerator.NAME));

         assertTableIsNotRegistered(DEFAULT_DB, tableName);
     }
@@ -1077,8 +1085,8 @@ public class HiveHookIT {
         assertColumnIsNotRegistered(HiveMetaStoreBridge.getColumnQualifiedName(
             HiveMetaStoreBridge.getTableQualifiedName(CLUSTER_NAME, dbName, tableNames[0]), "id"));
         assertColumnIsNotRegistered(HiveMetaStoreBridge
             .getColumnQualifiedName(HiveMetaStoreBridge.getTableQualifiedName(CLUSTER_NAME, dbName, tableNames[0]),
                 HiveDataModelGenerator.NAME));

         for(int i = 0; i < numTables; i++) {
             assertTableIsNotRegistered(dbName, tableNames[i]);
@@ -1152,8 +1160,8 @@ public class HiveHookIT {
             .getColumnQualifiedName(HiveMetaStoreBridge.getTableQualifiedName(CLUSTER_NAME, DEFAULT_DB, viewName),
                 "id"));
         assertColumnIsNotRegistered(HiveMetaStoreBridge
             .getColumnQualifiedName(HiveMetaStoreBridge.getTableQualifiedName(CLUSTER_NAME, DEFAULT_DB, viewName),
                 HiveDataModelGenerator.NAME));

         assertTableIsNotRegistered(DEFAULT_DB, viewName);
     }
@@ -1348,7 +1356,7 @@ public class HiveHookIT {
         LOG.debug("Searching for table {}.{}", dbName, tableName);
         String tableQualifiedName = HiveMetaStoreBridge.getTableQualifiedName(CLUSTER_NAME, dbName, tableName, isTemporary);
         return assertEntityIsRegistered(HiveDataTypes.HIVE_TABLE.getName(), AtlasClient.NAME, tableQualifiedName,
             assertPredicate);
     }

     private String assertTableIsRegistered(String dbName, String tableName, AssertPredicate assertPredicate) throws Exception {
@@ -1373,7 +1381,7 @@ public class HiveHookIT {
             public void evaluate() throws Exception {
                 Referenceable entity = atlasClient.getEntity(typeName, property, value);
                 assertNotNull(entity);
-                if(assertPredicate != null) {
+                if (assertPredicate != null) {
                     assertPredicate.assertOnEntity(entity);
                 }
             }
...
@@ -20,6 +20,7 @@ ATLAS-409 Atlas will not import avro tables with schema read from a file (dosset
 ATLAS-379 Create sqoop and falcon metadata addons (venkatnrangan,bvellanki,sowmyaramesh via shwethags)

 ALL CHANGES:
+ATLAS-758 hdfs location of hive table is pointing to old location even after rename ( sumasai )
 ATLAS-667 Entity delete should check for required reverse references ( dkantor via sumasai )
 ATLAS-738 Add query ability on system properties like guid, state, createdtime etc (shwethags)
 ATLAS-692 Create abstraction layer for graph databases (jnhagelb via yhemanth)
...
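As a companion to the integration test above, a hedged read-back sketch: fetch the renamed table by qualified name and inspect its storage descriptor's location, using the same client calls the test relies on. The endpoint URL, import paths, and qualified-name value are assumptions, not part of the commit.

// Sketch only: assumes a running Atlas server and 0.7-era client packages.
import org.apache.atlas.AtlasClient;
import org.apache.atlas.hive.model.HiveDataModelGenerator;
import org.apache.atlas.hive.model.HiveDataTypes;
import org.apache.atlas.typesystem.Referenceable;

public class LocationReadbackSketch {
    public static void main(String[] args) throws Exception {
        AtlasClient atlasClient = new AtlasClient("http://localhost:21000"); // assumed endpoint

        // Qualified-name format used throughout this diff: db.table@cluster
        String tableQualifiedName = "newdb.newtable@primary"; // hypothetical value

        Referenceable table = atlasClient.getEntity(
                HiveDataTypes.HIVE_TABLE.getName(), AtlasClient.NAME, tableQualifiedName);
        Referenceable sd = (Referenceable) table.get(HiveDataModelGenerator.STORAGE_DESC);

        // With this fix applied, the printed path reflects the renamed table.
        System.out.println(sd.get(HiveDataModelGenerator.LOCATION));
    }
}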