Commit 07b8b4d3 by Suma Shivaprasad

ATLAS-758 hdfs location of hive table is pointing to old location even after rename. (sumasai)

parent 54dc670a
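For context, the fix targets this scenario: after an ALTER TABLE ... RENAME TO ... in Hive, the Atlas hive_table entity's storage descriptor kept pointing at the old HDFS location. Below is a minimal repro sketch over Hive JDBC, assuming the Hive JDBC driver is on the classpath and the Atlas hive hook is registered; the connection URL and the database/table names are illustrative placeholders, not values from this commit:

import java.sql.Connection;
import java.sql.DriverManager;
import java.sql.Statement;

public class Atlas758Repro {
    public static void main(String[] args) throws Exception {
        // Illustrative HiveServer2 endpoint; any JDBC-reachable Hive works.
        try (Connection conn = DriverManager.getConnection("jdbc:hive2://localhost:10000/default");
             Statement stmt = conn.createStatement()) {
            stmt.execute("CREATE DATABASE IF NOT EXISTS newdb");
            stmt.execute("CREATE TABLE IF NOT EXISTS default.t1 (id int)");
            // This rename fires the Atlas hive hook; before this fix the
            // hive_table entity's storage descriptor still held the old
            // .../default/t1 path afterwards.
            stmt.execute("ALTER TABLE default.t1 RENAME TO newdb.t2");
        }
    }
}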
@@ -164,7 +164,7 @@ public class HiveMetaStoreBridge {
         dbRef.set(HiveDataModelGenerator.NAME, dbName);
         dbRef.set(AtlasConstants.CLUSTER_NAME_ATTRIBUTE, clusterName);
         dbRef.set(DESCRIPTION_ATTR, hiveDB.getDescription());
-        dbRef.set("locationUri", hiveDB.getLocationUri());
+        dbRef.set(HiveDataModelGenerator.LOCATION, hiveDB.getLocationUri());
         dbRef.set(HiveDataModelGenerator.PARAMETERS, hiveDB.getParameters());
         dbRef.set(HiveDataModelGenerator.OWNER, hiveDB.getOwnerName());
         if (hiveDB.getOwnerType() != null) {
@@ -466,7 +466,7 @@ public class HiveMetaStoreBridge {
             sdReferenceable.set("sortCols", sortColsStruct);
         }

-        sdReferenceable.set("location", storageDesc.getLocation());
+        sdReferenceable.set(HiveDataModelGenerator.LOCATION, storageDesc.getLocation());
         sdReferenceable.set("inputFormat", storageDesc.getInputFormat());
         sdReferenceable.set("outputFormat", storageDesc.getOutputFormat());
         sdReferenceable.set("compressed", storageDesc.isCompressed());
@@ -440,14 +440,11 @@ public class HiveHook extends AtlasHook implements ExecuteWithHookContext {
     private Referenceable replaceTableQFName(HiveMetaStoreBridge dgiBridge, HiveEventContext event, Table oldTable, Table newTable, final Referenceable tableEntity, final String oldTableQFName, final String newTableQFName) throws HiveException {
         tableEntity.set(HiveDataModelGenerator.NAME, oldTableQFName);
         tableEntity.set(HiveDataModelGenerator.TABLE_NAME, oldTable.getTableName().toLowerCase());
-        final Referenceable newDbInstance = (Referenceable) tableEntity.get(HiveDataModelGenerator.DB);
-        tableEntity.set(HiveDataModelGenerator.DB, dgiBridge.createDBInstance(dgiBridge.hiveClient.getDatabase(oldTable.getDbName())));

         //Replace table entity with new name
         final Referenceable newEntity = new Referenceable(HiveDataTypes.HIVE_TABLE.getName());
         newEntity.set(HiveDataModelGenerator.NAME, newTableQFName);
         newEntity.set(HiveDataModelGenerator.TABLE_NAME, newTable.getTableName().toLowerCase());
-        newEntity.set(HiveDataModelGenerator.DB, newDbInstance);

         messages.add(new HookNotification.EntityPartialUpdateRequest(event.getUser(),
                 HiveDataTypes.HIVE_TABLE.getName(), HiveDataModelGenerator.NAME,
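The three deleted lines above re-resolved the hive_db reference and attached it to both the old-keyed and the new-keyed table entity; after this change the rename's partial update no longer touches the db attribute at all, only the qualified name and the table name. A sketch of the resulting message shape follows, assuming the Atlas 0.7-era package layout; the import paths, user name, and qualified names are assumptions/placeholders, not values from the commit:

import org.apache.atlas.hive.model.HiveDataModelGenerator;
import org.apache.atlas.hive.model.HiveDataTypes;
import org.apache.atlas.notification.hook.HookNotification;
import org.apache.atlas.typesystem.Referenceable;

public class RenameUpdateSketch {
    public static HookNotification.EntityPartialUpdateRequest buildUpdate() {
        // Only the attributes that change on rename are set on the new entity.
        Referenceable newEntity = new Referenceable(HiveDataTypes.HIVE_TABLE.getName());
        newEntity.set(HiveDataModelGenerator.NAME, "newdb.t2@cluster"); // placeholder qualified name
        newEntity.set(HiveDataModelGenerator.TABLE_NAME, "t2");         // placeholder

        // Keyed on the OLD qualified name so Atlas finds the existing
        // hive_table entity and merges in just the attributes set above.
        return new HookNotification.EntityPartialUpdateRequest("admin", // placeholder user
                HiveDataTypes.HIVE_TABLE.getName(), HiveDataModelGenerator.NAME,
                "olddb.t1@cluster", newEntity);
    }
}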
@@ -78,6 +78,7 @@ public class HiveDataModelGenerator {
     public static final String STORAGE_DESC_INPUT_FMT = "inputFormat";
     public static final String STORAGE_DESC_OUTPUT_FMT = "outputFormat";
     public static final String OWNER = "owner";
+    public static final String LOCATION = "location";

     public static final String TABLE_TYPE_ATTR = "tableType";
@@ -171,7 +172,7 @@ public class HiveDataModelGenerator {
                 //Optional to keep it backward-compatible
                 new AttributeDefinition(TABLE, HiveDataTypes.HIVE_TABLE.getName(), Multiplicity.OPTIONAL, false,
                         STORAGE_DESC),
-                new AttributeDefinition("location", DataTypes.STRING_TYPE.getName(), Multiplicity.OPTIONAL, false,
+                new AttributeDefinition(LOCATION, DataTypes.STRING_TYPE.getName(), Multiplicity.OPTIONAL, false,
                         null),
                 new AttributeDefinition("inputFormat", DataTypes.STRING_TYPE.getName(), Multiplicity.OPTIONAL, false,
                         null),
@@ -209,7 +210,7 @@ public class HiveDataModelGenerator {
                         null),
                 new AttributeDefinition("description", DataTypes.STRING_TYPE.getName(), Multiplicity.OPTIONAL, false,
                         null),
-                new AttributeDefinition("locationUri", DataTypes.STRING_TYPE.getName(), Multiplicity.OPTIONAL, false,
+                new AttributeDefinition(LOCATION, DataTypes.STRING_TYPE.getName(), Multiplicity.OPTIONAL, false,
                         null),
                 new AttributeDefinition(HiveDataModelGenerator.PARAMETERS, STRING_MAP_TYPE.getName(), Multiplicity.OPTIONAL, false, null),
                 new AttributeDefinition(OWNER, DataTypes.STRING_TYPE.getName(), Multiplicity.OPTIONAL, false,
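With the new constant, the hive_db attribute (previously the "locationUri" literal) and the storage-descriptor attribute (previously the "location" literal) are both defined and accessed as HiveDataModelGenerator.LOCATION, i.e. "location"; note this renames the hive_db attribute in the model. A short sketch of reading the location back through the constant, mirroring what the updated tests below do (the class and method names are illustrative, and the import paths assume the Atlas 0.7-era source tree):

import org.apache.atlas.hive.model.HiveDataModelGenerator;
import org.apache.atlas.typesystem.Referenceable;

public class LocationReadSketch {
    // "table" is a hive_table Referenceable fetched from Atlas.
    static String storageLocation(Referenceable table) {
        Referenceable sd = (Referenceable) table.get(HiveDataModelGenerator.STORAGE_DESC);
        return (String) sd.get(HiveDataModelGenerator.LOCATION);
    }
}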
@@ -623,7 +623,7 @@ public class HiveHookIT {
         //Add trait to part col keys
         String partColTraitDetails = createTrait(partColumnGuid);

-        String newTableName = tableName();
+        final String newTableName = tableName();
         String query = String.format("alter table %s rename to %s", DEFAULT_DB + "." + tableName, newDBName + "." + newTableName);
         runCommand(query);
@@ -640,7 +640,15 @@ public class HiveHookIT {
         assertTrait(partColumnGuid, partColTraitDetails);

         assertTableIsNotRegistered(DEFAULT_DB, tableName);
-        assertTableIsRegistered(newDBName, newTableName);
+        assertTableIsRegistered(newDBName, newTableName, new AssertPredicate() {
+            @Override
+            public void assertOnEntity(final Referenceable entity) throws Exception {
+                Referenceable sd = ((Referenceable) entity.get(HiveDataModelGenerator.STORAGE_DESC));
+                String location = (String) sd.get(HiveDataModelGenerator.LOCATION);
+                Assert.assertTrue(location.contains(newTableName));
+            }
+        });
     }
    private List<Referenceable> getColumns(String dbName, String tableName) throws Exception {
@@ -904,7 +912,7 @@ public class HiveHookIT {
             @Override
             public void assertOnEntity(Referenceable tableRef) throws Exception {
                 Referenceable sdRef = (Referenceable) tableRef.get(HiveDataModelGenerator.STORAGE_DESC);
-                Assert.assertEquals(new Path((String)sdRef.get("location")).toString(), new Path(testPath).toString());
+                Assert.assertEquals(new Path((String)sdRef.get(HiveDataModelGenerator.LOCATION)).toString(), new Path(testPath).toString());
             }
         });
@@ -1373,7 +1381,7 @@ public class HiveHookIT {
             public void evaluate() throws Exception {
                 Referenceable entity = atlasClient.getEntity(typeName, property, value);
                 assertNotNull(entity);
-                if(assertPredicate != null) {
+                if (assertPredicate != null) {
                     assertPredicate.assertOnEntity(entity);
                 }
             }
@@ -20,6 +20,7 @@ ATLAS-409 Atlas will not import avro tables with schema read from a file (dosset
 ATLAS-379 Create sqoop and falcon metadata addons (venkatnrangan,bvellanki,sowmyaramesh via shwethags)

 ALL CHANGES:
+ATLAS-758 hdfs location of hive table is pointing to old location even after rename ( sumasai )
 ATLAS-667 Entity delete should check for required reverse references ( dkantor via sumasai )
 ATLAS-738 Add query ability on system properties like guid, state, createdtime etc (shwethags)
 ATLAS-692 Create abstraction layer for graph databases (jnhagelb via yhemanth)