Commit 56c13aa3 by Suma Shivaprasad

ATLAS-635 Process showing old entity name where as actual entity is renamed ( svimal2106 via sumasai )
parent 0143486a
@@ -51,6 +51,7 @@ import org.apache.hadoop.hive.ql.metadata.Partition;
import org.apache.hadoop.hive.ql.metadata.Table;
import org.apache.hadoop.hive.ql.plan.HiveOperation;
import org.apache.hadoop.security.UserGroupInformation;
import org.apache.log4j.LogManager;
import org.json.JSONObject;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
@@ -73,9 +74,9 @@ import java.util.concurrent.TimeUnit;
* AtlasHook sends lineage information to the AtlasSever.
*/
public class HiveHook extends AtlasHook implements ExecuteWithHookContext {
private static final Logger LOG = LoggerFactory.getLogger(HiveHook.class);
public static final String CONF_PREFIX = "atlas.hook.hive.";
private static final String MIN_THREADS = CONF_PREFIX + "minThreads";
private static final String MAX_THREADS = CONF_PREFIX + "maxThreads";
@@ -406,7 +407,9 @@ public class HiveHook extends AtlasHook implements ExecuteWithHookContext {
final Referenceable newEntity = new Referenceable(HiveDataTypes.HIVE_TABLE.getName());
newEntity.set(HiveDataModelGenerator.NAME, newTableQFName);
newEntity.set(HiveDataModelGenerator.TABLE_NAME, newTable.getTableName().toLowerCase());
ArrayList<String> alias_list = new ArrayList<>();
alias_list.add(oldTable.getTableName().toLowerCase());
newEntity.set(HiveDataModelGenerator.TABLE_ALIAS_LIST, alias_list);
messages.add(new HookNotification.EntityPartialUpdateRequest(event.getUser(),
HiveDataTypes.HIVE_TABLE.getName(), HiveDataModelGenerator.NAME,
oldTableQFName, newEntity));
......
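The hunk above is where the fix lands: on a table rename, the hook now stores the pre-rename table name in the new `aliases` attribute of the updated entity before sending the partial update keyed by the old qualified name. The sketch below restates that flow with plain Java collections rather than the Atlas Referenceable/HookNotification classes; all names and values in it are illustrative, and only the "aliases" key comes from this commit.

```java
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;

// Conceptual sketch only: models the attributes the hook attaches to the
// partial-update request in the hunk above, using plain collections instead
// of the Atlas Referenceable/HookNotification classes.
public class RenameUpdateSketch {
    public static void main(String[] args) {
        String oldTableName = "customers";                       // hypothetical pre-rename name
        String newTableName = "customers_v2";                     // hypothetical post-rename name
        String oldQualifiedName = "default.customers@cluster1";   // hypothetical qualified names
        String newQualifiedName = "default.customers_v2@cluster1";

        Map<String, Object> updatedAttributes = new HashMap<>();
        // Stand-ins for the values set via HiveDataModelGenerator.NAME / TABLE_NAME above.
        updatedAttributes.put("qualifiedName", newQualifiedName);
        updatedAttributes.put("tableName", newTableName.toLowerCase());

        // New in this commit: the old table name travels along as an alias,
        // so anything still referring to the old name maps to the same entity.
        List<String> aliases = new ArrayList<>();
        aliases.add(oldTableName.toLowerCase());
        updatedAttributes.put("aliases", aliases);                // HiveDataModelGenerator.TABLE_ALIAS_LIST

        // The partial update is keyed by the OLD qualified name, which is how
        // the existing entity gets renamed in place instead of duplicated.
        System.out.println("partial update for " + oldQualifiedName + " -> " + updatedAttributes);
    }
}
```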
@@ -65,6 +65,7 @@ public class HiveDataModelGenerator {
public static final String PARAMETERS = "parameters";
public static final String COLUMNS = "columns";
public static final String PART_COLS = "partitionKeys";
public static final String TABLE_ALIAS_LIST = "aliases";
public static final String STORAGE_NUM_BUCKETS = "numBuckets";
public static final String STORAGE_IS_STORED_AS_SUB_DIRS = "storedAsSubDirectories";
@@ -257,6 +258,8 @@ public class HiveDataModelGenerator {
null),
new AttributeDefinition(PART_COLS, DataTypes.arrayTypeName(HiveDataTypes.HIVE_COLUMN.getName()),
Multiplicity.OPTIONAL, true, null),
new AttributeDefinition(TABLE_ALIAS_LIST, DataTypes.arrayTypeName(DataTypes.STRING_TYPE.getName()),
Multiplicity.OPTIONAL, true, null),
new AttributeDefinition("columns", DataTypes.arrayTypeName(HiveDataTypes.HIVE_COLUMN.getName()), new AttributeDefinition("columns", DataTypes.arrayTypeName(HiveDataTypes.HIVE_COLUMN.getName()),
Multiplicity.OPTIONAL, true, null), Multiplicity.OPTIONAL, true, null),
new AttributeDefinition(HiveDataModelGenerator.PARAMETERS, STRING_MAP_TYPE.getName(), Multiplicity.OPTIONAL, false, null), new AttributeDefinition(HiveDataModelGenerator.PARAMETERS, STRING_MAP_TYPE.getName(), Multiplicity.OPTIONAL, false, null),
......
@@ -42,7 +42,7 @@ public enum HiveDataTypes {
HIVE_INDEX,
HIVE_ROLE,
HIVE_TYPE,
HIVE_PROCESS, HIVE_PROCESS
// HIVE_VIEW,
;
......
@@ -627,6 +627,20 @@ public class HiveHookIT {
}
@Test
public void testAlterTableRenameAliasRegistered() throws Exception{
String tableName = createTable(false);
String tableGuid = assertTableIsRegistered(DEFAULT_DB, tableName);
String newTableName = tableName();
String query = String.format("alter table %s rename to %s", tableName, newTableName);
runCommand(query);
String newTableGuid = assertTableIsRegistered(DEFAULT_DB, newTableName);
Map<String, Object> valueMap = atlasClient.getEntity(newTableGuid).getValuesMap();
Iterable<String> aliasList = (Iterable<String>) valueMap.get("aliases");
String aliasTableName = aliasList.iterator().next();
assert tableName.toLowerCase().equals(aliasTableName);
}
@Test
public void testAlterTableRename() throws Exception {
String tableName = createTable(true);
final String newDBName = createDatabase();
......
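The new testAlterTableRenameAliasRegistered test above checks the behaviour end to end: after `alter table ... rename to ...`, the re-registered table's `aliases` attribute should contain the old, lower-cased table name. A minimal stand-alone sketch of that check over a fetched values map follows; only the "aliases" key is taken from the commit, everything else is illustrative.

```java
import java.util.Map;

// Minimal sketch of the assertion made by testAlterTableRenameAliasRegistered:
// given the values map of the renamed table entity (the test obtains it via
// atlasClient.getEntity(newTableGuid).getValuesMap()), the "aliases" attribute
// should contain the old table name in lower case.
public class AliasAssertionSketch {
    static boolean oldNameRecordedAsAlias(Map<String, Object> valueMap, String oldTableName) {
        Object aliases = valueMap.get("aliases");      // attribute added in this commit
        if (!(aliases instanceof Iterable)) {
            return false;                              // attribute missing or not a list
        }
        for (Object alias : (Iterable<?>) aliases) {
            if (oldTableName.toLowerCase().equals(String.valueOf(alias))) {
                return true;                           // old name retained as an alias
            }
        }
        return false;
    }
}
```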
@@ -22,6 +22,7 @@ ATLAS-409 Atlas will not import avro tables with schema read from a file (dosset
ATLAS-379 Create sqoop and falcon metadata addons (venkatnrangan,bvellanki,sowmyaramesh via shwethags)
ALL CHANGES:
ATLAS-635 Process showing old entity name where as actual entity is renamed ( svimal2106 via sumasai )
ATLAS-823 Atlas should use external HBase and SOLR (tbeerbower via shwethags)
ATLAS-752 Column renames should retain traits/tags (svimal2106 via shwethags)
ATLAS-821 Atlas UI - Add arrow to navigate to child term (kevalbhatt18 via yhemanth)
......