Commit 125dc976 by Shwetha GS

ATLAS-522 Support Alter table commands (sumasai via shwethags)

parent 98fdc6d8
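Background for readers unfamiliar with the hook mechanism: Hive invokes registered post-execution hooks through the ExecuteWithHookContext interface, and the hook reads the operation type from the context to decide how to react; this commit widens the set of ALTER TABLE operations that trigger an Atlas entity update. A minimal sketch of that entry point, assuming the Hive 1.x hook API; the class name SketchHook is hypothetical, and the real HiveHook (changed below) carries much more state:

    import org.apache.hadoop.hive.ql.hooks.ExecuteWithHookContext;
    import org.apache.hadoop.hive.ql.hooks.HookContext;
    import org.apache.hadoop.hive.ql.plan.HiveOperation;

    // Hypothetical, trimmed illustration of a post-execution hook.
    public class SketchHook implements ExecuteWithHookContext {
        @Override
        public void run(HookContext hookContext) throws Exception {
            // The operation name maps onto the HiveOperation enum used
            // in the switch statements changed in this commit.
            HiveOperation operation = HiveOperation.valueOf(hookContext.getOperationName());
            System.out.println("Hive fired operation: " + operation);
        }
    }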
@@ -165,7 +165,7 @@ public class HiveMetaStoreBridge {
         dbRef.set(HiveDataModelGenerator.CLUSTER_NAME, clusterName);
         dbRef.set(DESCRIPTION_ATTR, hiveDB.getDescription());
         dbRef.set("locationUri", hiveDB.getLocationUri());
-        dbRef.set("parameters", hiveDB.getParameters());
+        dbRef.set(HiveDataModelGenerator.PARAMETERS, hiveDB.getParameters());
         dbRef.set("ownerName", hiveDB.getOwnerName());
         if (hiveDB.getOwnerType() != null) {
             dbRef.set("ownerType", hiveDB.getOwnerType().getValue());
@@ -332,7 +332,7 @@ public class HiveMetaStoreBridge {
         List<Referenceable> partKeys = getColumns(hiveTable.getPartitionKeys(), tableQualifiedName);
         tableReference.set("partitionKeys", partKeys);
-        tableReference.set("parameters", hiveTable.getParameters());
+        tableReference.set(HiveDataModelGenerator.PARAMETERS, hiveTable.getParameters());
         if (hiveTable.getViewOriginalText() != null) {
             tableReference.set("viewOriginalText", hiveTable.getViewOriginalText());
@@ -481,7 +481,7 @@ public class HiveMetaStoreBridge {
         // ones will fix to identify partitions with differing schema.
         partRef.set("sd", sdReferenceable);
-        partRef.set("parameters", hivePart.getParameters());
+        partRef.set(HiveDataModelGenerator.PARAMETERS, hivePart.getParameters());
         return partRef;
     }
@@ -518,7 +518,7 @@ public class HiveMetaStoreBridge {
         serdeInfoStruct.set(HiveDataModelGenerator.NAME, serdeInfo.getName());
         serdeInfoStruct.set("serializationLib", serdeInfo.getSerializationLib());
-        serdeInfoStruct.set("parameters", serdeInfo.getParameters());
+        serdeInfoStruct.set(HiveDataModelGenerator.PARAMETERS, serdeInfo.getParameters());
         sdReferenceable.set("serdeInfo", serdeInfoStruct);
         sdReferenceable.set(HiveDataModelGenerator.STORAGE_NUM_BUCKETS, storageDesc.getNumBuckets());
@@ -547,7 +547,7 @@ public class HiveMetaStoreBridge {
             sdReferenceable.set("bucketCols", storageDesc.getBucketCols());
         }
-        sdReferenceable.set("parameters", storageDesc.getParameters());
+        sdReferenceable.set(HiveDataModelGenerator.PARAMETERS, storageDesc.getParameters());
         sdReferenceable.set("storedAsSubDirectories", storageDesc.isStoredAsSubDirectories());
         return sdReferenceable;
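The recurring edit in HiveMetaStoreBridge above swaps the repeated "parameters" string literal for a shared constant (defined later in this commit), so the bridge and the data model definitions can no longer drift apart. Taken in isolation, the change at each call site is just:

    // Before: attribute name duplicated as a string literal at every call site.
    dbRef.set("parameters", hiveDB.getParameters());

    // After: one constant shared with HiveDataModelGenerator's attribute definitions.
    dbRef.set(HiveDataModelGenerator.PARAMETERS, hiveDB.getParameters());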
@@ -224,6 +224,16 @@ public class HiveHook extends AtlasHook implements ExecuteWithHookContext {
             renameTable(dgiBridge, event);
             break;
+        case ALTERTABLE_FILEFORMAT:
+        case ALTERTABLE_LOCATION:
+        case ALTERTABLE_CLUSTER_SORT:
+        case ALTERTABLE_BUCKETNUM:
+        case ALTERTABLE_PROPERTIES:
+        case ALTERTABLE_SERDEPROPERTIES:
+        case ALTERTABLE_SERIALIZER:
+            alterTable(dgiBridge, event);
+            break;
         case ALTERVIEW_AS:
             //update inputs/outputs?
             break;
@@ -231,7 +241,7 @@ public class HiveHook extends AtlasHook implements ExecuteWithHookContext {
         case ALTERTABLE_ADDCOLS:
         case ALTERTABLE_REPLACECOLS:
         case ALTERTABLE_RENAMECOL:
-            alterTableColumns(dgiBridge, event);
+            alterTable(dgiBridge, event);
             break;
         default:
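Condensing the hunk above with this one: every ALTER TABLE variant, whether it touches file format, location, clustering, bucketing, table properties, serde settings, or columns, now funnels into the same alterTable handler. Roughly, as an abridged composite sketch (the method name handleEvent is hypothetical; the real switch lives inline in HiveHook and covers more operations):

    private void handleEvent(HiveMetaStoreBridge dgiBridge, HiveEvent event) throws Exception {
        switch (event.operation) {
            case ALTERTABLE_FILEFORMAT:
            case ALTERTABLE_LOCATION:
            case ALTERTABLE_CLUSTER_SORT:
            case ALTERTABLE_BUCKETNUM:
            case ALTERTABLE_PROPERTIES:
            case ALTERTABLE_SERDEPROPERTIES:
            case ALTERTABLE_SERIALIZER:
            case ALTERTABLE_ADDCOLS:
            case ALTERTABLE_REPLACECOLS:
            case ALTERTABLE_RENAMECOL:
                alterTable(dgiBridge, event);  // single handler for every table-level alter
                break;
            default:
                break;  // other operations are handled elsewhere in the real switch
        }
    }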
@@ -240,21 +250,16 @@ public class HiveHook extends AtlasHook implements ExecuteWithHookContext {
         notifyEntities(messages);
     }
 
-    private void alterTableColumns(HiveMetaStoreBridge dgiBridge, HiveEvent event) throws Exception {
+    private void alterTable(HiveMetaStoreBridge dgiBridge, HiveEvent event) throws Exception {
         assert event.inputs != null && event.inputs.size() == 1;
         assert event.outputs != null && event.outputs.size() > 0;
 
         for (WriteEntity writeEntity : event.outputs) {
-            if (writeEntity.getType() == Entity.Type.TABLE) {
-                Table newTable = writeEntity.getTable();
-                //Reload table since hive is not providing the updated column set here
-                Table updatedTable = dgiBridge.hiveClient.getTable(newTable.getDbName(), newTable.getTableName());
-                writeEntity.setT(updatedTable);
-
-                //Create/update table entity
-                createOrUpdateEntities(dgiBridge, writeEntity);
-            }
+            //Below check should filter out partition related
+            if (writeEntity.getType() == Entity.Type.TABLE) {
+                //Create/update table entity
+                createOrUpdateEntities(dgiBridge, writeEntity);
+            }
         }
     }
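Assembled from the added lines above, the consolidated handler is deliberately simpler than the old alterTableColumns: the explicit metastore reload is gone, and each TABLE output entity is simply re-registered. The final state of the method, with descriptive comments added here:

    private void alterTable(HiveMetaStoreBridge dgiBridge, HiveEvent event) throws Exception {
        assert event.inputs != null && event.inputs.size() == 1;
        assert event.outputs != null && event.outputs.size() > 0;

        for (WriteEntity writeEntity : event.outputs) {
            // Partition outputs carry a different Entity.Type, so this filters them out.
            if (writeEntity.getType() == Entity.Type.TABLE) {
                // Create/update the table entity in Atlas with the post-alter state.
                createOrUpdateEntities(dgiBridge, writeEntity);
            }
        }
    }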
@@ -280,7 +285,6 @@ public class HiveHook extends AtlasHook implements ExecuteWithHookContext {
         tableEntity.set(HiveDataModelGenerator.NAME, oldQualifiedName);
         tableEntity.set(HiveDataModelGenerator.TABLE_NAME, oldTable.getTableName().toLowerCase());
-
         String newQualifiedName = dgiBridge.getTableQualifiedName(dgiBridge.getClusterName(),
             newTable.getDbName(), newTable.getTableName());
@@ -415,4 +419,6 @@ public class HiveHook extends AtlasHook implements ExecuteWithHookContext {
             return new JSONObject();
         }
     }
 }
@@ -64,6 +64,7 @@ public class HiveDataModelGenerator {
     private final Map<String, StructTypeDefinition> structTypeDefinitionMap;
 
     public static final String COMMENT = "comment";
+    public static final String PARAMETERS = "parameters";
     public static final String COLUMNS = "columns";
     public static final String STORAGE_NUM_BUCKETS = "numBuckets";
@@ -74,7 +75,10 @@ public class HiveDataModelGenerator {
     public static final String CLUSTER_NAME = "clusterName";
     public static final String TABLE = "table";
     public static final String DB = "db";
+    public static final String STORAGE_DESC = "sd";
+    public static final String STORAGE_DESC_INPUT_FMT = "inputFormat";
+    public static final String STORAGE_DESC_OUTPUT_FMT = "outputFormat";
 
     public HiveDataModelGenerator() {
         classTypeDefinitions = new HashMap<>();
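STORAGE_DESC, STORAGE_DESC_INPUT_FMT, and STORAGE_DESC_OUTPUT_FMT name attributes ("sd", "inputFormat", "outputFormat") that the bridge sets on storage-descriptor referenceables. A hypothetical call site using them; the variables follow the HiveMetaStoreBridge code earlier in this diff, and this exact snippet is illustrative rather than part of the commit:

    // Illustrative only: how the new constants would be consumed by the bridge.
    partRef.set(HiveDataModelGenerator.STORAGE_DESC, sdReferenceable);
    sdReferenceable.set(HiveDataModelGenerator.STORAGE_DESC_INPUT_FMT, storageDesc.getInputFormat());
    sdReferenceable.set(HiveDataModelGenerator.STORAGE_DESC_OUTPUT_FMT, storageDesc.getOutputFormat());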
@@ -164,7 +168,7 @@ public class HiveDataModelGenerator {
         new AttributeDefinition(NAME, DataTypes.STRING_TYPE.getName(), Multiplicity.OPTIONAL, false, null),
         new AttributeDefinition("serializationLib", DataTypes.STRING_TYPE.getName(), Multiplicity.OPTIONAL,
             false, null),
-        new AttributeDefinition("parameters", STRING_MAP_TYPE.getName(), Multiplicity.OPTIONAL, false, null),};
+        new AttributeDefinition(HiveDataModelGenerator.PARAMETERS, STRING_MAP_TYPE.getName(), Multiplicity.OPTIONAL, false, null),};
     StructTypeDefinition definition =
         new StructTypeDefinition(HiveDataTypes.HIVE_SERDE.getName(), attributeDefinitions);
     structTypeDefinitionMap.put(HiveDataTypes.HIVE_SERDE.getName(), definition);
@@ -235,7 +239,7 @@ public class HiveDataModelGenerator {
             null),
         new AttributeDefinition("locationUri", DataTypes.STRING_TYPE.getName(), Multiplicity.OPTIONAL, false,
             null),
-        new AttributeDefinition("parameters", STRING_MAP_TYPE.getName(), Multiplicity.OPTIONAL, false, null),
+        new AttributeDefinition(HiveDataModelGenerator.PARAMETERS, STRING_MAP_TYPE.getName(), Multiplicity.OPTIONAL, false, null),
         new AttributeDefinition("ownerName", DataTypes.STRING_TYPE.getName(), Multiplicity.OPTIONAL, false,
             null),
         new AttributeDefinition("ownerType", HiveDataTypes.HIVE_PRINCIPAL_TYPE.getName(), Multiplicity.OPTIONAL,
@@ -288,7 +292,7 @@ public class HiveDataModelGenerator {
             null),
         new AttributeDefinition("columns", DataTypes.arrayTypeName(HiveDataTypes.HIVE_COLUMN.getName()),
             Multiplicity.OPTIONAL, true, null),
-        new AttributeDefinition("parameters", STRING_MAP_TYPE.getName(), Multiplicity.OPTIONAL, false, null),};
+        new AttributeDefinition(HiveDataModelGenerator.PARAMETERS, STRING_MAP_TYPE.getName(), Multiplicity.OPTIONAL, false, null),};
     HierarchicalTypeDefinition<ClassType> definition =
         new HierarchicalTypeDefinition<>(ClassType.class, HiveDataTypes.HIVE_PARTITION.getName(), null,
             ImmutableList.of(AtlasClient.REFERENCEABLE_SUPER_TYPE), attributeDefinitions);
@@ -314,7 +318,7 @@ public class HiveDataModelGenerator {
             Multiplicity.OPTIONAL, true, null),
         new AttributeDefinition("columns", DataTypes.arrayTypeName(HiveDataTypes.HIVE_COLUMN.getName()),
             Multiplicity.OPTIONAL, true, null),
-        new AttributeDefinition("parameters", STRING_MAP_TYPE.getName(), Multiplicity.OPTIONAL, false, null),
+        new AttributeDefinition(HiveDataModelGenerator.PARAMETERS, STRING_MAP_TYPE.getName(), Multiplicity.OPTIONAL, false, null),
         new AttributeDefinition("viewOriginalText", DataTypes.STRING_TYPE.getName(), Multiplicity.OPTIONAL,
             false, null),
         new AttributeDefinition("viewExpandedText", DataTypes.STRING_TYPE.getName(), Multiplicity.OPTIONAL,
@@ -11,6 +11,7 @@ ATLAS-409 Atlas will not import avro tables with schema read from a file (dosset
 ATLAS-379 Create sqoop and falcon metadata addons (venkatnrangan,bvellanki,sowmyaramesh via shwethags)
 
 ALL CHANGES:
+ATLAS-522 Support Alter table commands (sumasai via shwethags)
 ATLAS-512 Decouple currently integrating components from availability of Atlas service for raising metadata events ( yhemanth via sumasai)
 ATLAS-537 Falcon hook failing when tried to submit a process which creates a hive table ( shwethags via sumasai)
 ATLAS-476 Update type attribute with Reserved characters updated the original type as unknown (yhemanth via shwethags)