Commit 922a83c9 by Suma Shivaprasad

ATLAS-532 Change Data types of all timestamps in Hive model (currently long) (sumasai via yhemanth)

parent 40ee9492
@@ -50,6 +50,7 @@ import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.util.ArrayList;
import java.util.Date;
import java.util.List;
/**
@@ -63,8 +64,9 @@ public class HiveMetaStoreBridge {
public static final String DESCRIPTION_ATTR = "description";
public static final String TABLE_TYPE_ATTR = "tableType";
public static final String SEARCH_ENTRY_GUID_ATTR = "__guid";
public static final String LAST_ACCESS_TIME_ATTR = "lastAccessTime";
private final String clusterName;
public static final int MILLIS_CONVERT_FACTOR = 1000;
public static final String ATLAS_ENDPOINT = "atlas.rest.address";
@@ -303,8 +305,21 @@ public class HiveMetaStoreBridge {
tableReference.set(HiveDataModelGenerator.TABLE_NAME, hiveTable.getTableName().toLowerCase());
tableReference.set("owner", hiveTable.getOwner());
tableReference.set("createTime", hiveTable.getMetadata().getProperty(hive_metastoreConstants.DDL_TIME));
tableReference.set("lastAccessTime", hiveTable.getLastAccessTime());
Date createDate = new Date();
if (hiveTable.getMetadata().getProperty(hive_metastoreConstants.DDL_TIME) != null) {
    try {
        createDate = new Date(Long.parseLong(hiveTable.getMetadata().getProperty(hive_metastoreConstants.DDL_TIME)) * MILLIS_CONVERT_FACTOR);
        tableReference.set(HiveDataModelGenerator.CREATE_TIME, createDate);
    } catch (NumberFormatException ne) {
        LOG.error("Error while updating createTime for the table {} ", hiveTable.getCompleteName(), ne);
    }
}

Date lastAccessTime = createDate;
if (hiveTable.getLastAccessTime() > 0) {
    lastAccessTime = new Date(hiveTable.getLastAccessTime() * MILLIS_CONVERT_FACTOR);
}
tableReference.set(HiveDataModelGenerator.LAST_ACCESS_TIME, lastAccessTime);
tableReference.set("retention", hiveTable.getRetention());
tableReference.set(HiveDataModelGenerator.COMMENT, hiveTable.getParameters().get(HiveDataModelGenerator.COMMENT));
......
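Note: the hunk above exists because the metastore's DDL_TIME property (transient_lastDdlTime) is an epoch value in seconds, while java.util.Date expects milliseconds, hence MILLIS_CONVERT_FACTOR = 1000 and the NumberFormatException guard. A minimal, self-contained sketch of that conversion; the class and method names here are illustrative only, not part of this commit:

    import java.util.Date;

    public class DdlTimeConversionSketch {
        static final int MILLIS_CONVERT_FACTOR = 1000;

        // The metastore hands back DDL_TIME as a string of epoch seconds;
        // java.util.Date wants epoch milliseconds, so scale before wrapping.
        static Date toCreateDate(String ddlTimeSeconds) {
            return new Date(Long.parseLong(ddlTimeSeconds) * MILLIS_CONVERT_FACTOR);
        }

        public static void main(String[] args) {
            System.out.println(toCreateDate("1459382400")); // prints the corresponding Date
        }
    }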
@@ -53,6 +53,7 @@ import org.slf4j.LoggerFactory;
import java.net.MalformedURLException;
import java.util.ArrayList;
import java.util.Date;
import java.util.HashMap;
import java.util.LinkedHashMap;
import java.util.List;
@@ -619,12 +620,12 @@ public class HiveHook extends AtlasHook implements ExecuteWithHookContext {
}
processReferenceable.set("name", queryStr);
processReferenceable.set("operationType", hiveEvent.getOperation().getOperationName());
processReferenceable.set("startTime", hiveEvent.getQueryStartTime());
processReferenceable.set("startTime", new Date(hiveEvent.getQueryStartTime()));
processReferenceable.set("userName", hiveEvent.getUser());
processReferenceable.set("queryText", queryStr);
processReferenceable.set("queryId", hiveEvent.getQueryId());
processReferenceable.set("queryPlan", hiveEvent.getJsonPlan());
processReferenceable.set("endTime", System.currentTimeMillis());
processReferenceable.set("endTime", new Date(System.currentTimeMillis()));
//TODO set queryGraph
return processReferenceable;
}
......
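Note: in contrast to the bridge change above, the hook's timestamps (hiveEvent.getQueryStartTime() and System.currentTimeMillis()) are already epoch milliseconds, so they are wrapped in java.util.Date directly with no conversion factor. A hedged, stand-alone illustration of that distinction; the variable names are made up for the example:

    import java.util.Date;

    public class ProcessTimesSketch {
        public static void main(String[] args) {
            // e.g. hiveEvent.getQueryStartTime(): already milliseconds, so no * 1000 here.
            long queryStartMillis = System.currentTimeMillis() - 5_000L;
            Date startTime = new Date(queryStartMillis);
            Date endTime = new Date(System.currentTimeMillis());
            System.out.println(startTime + " -> " + endTime);
        }
    }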
@@ -78,6 +78,9 @@ public class HiveDataModelGenerator {
public static final String STORAGE_DESC_OUTPUT_FMT = "outputFormat";
public static final String OWNER = "owner";
public static final String CREATE_TIME = "createTime";
public static final String LAST_ACCESS_TIME = "lastAccessTime";
public HiveDataModelGenerator() {
classTypeDefinitions = new HashMap<>();
enumTypeDefinitionMap = new HashMap<>();
@@ -233,9 +236,9 @@ public class HiveDataModelGenerator {
null),
new AttributeDefinition(DB, HiveDataTypes.HIVE_DB.getName(), Multiplicity.REQUIRED, false, null),
new AttributeDefinition(OWNER, DataTypes.STRING_TYPE.getName(), Multiplicity.OPTIONAL, false, null),
new AttributeDefinition("createTime", DataTypes.LONG_TYPE.getName(), Multiplicity.OPTIONAL, false,
new AttributeDefinition(CREATE_TIME, DataTypes.DATE_TYPE.getName(), Multiplicity.OPTIONAL, false,
null),
new AttributeDefinition("lastAccessTime", DataTypes.LONG_TYPE.getName(), Multiplicity.OPTIONAL, false,
new AttributeDefinition(LAST_ACCESS_TIME, DataTypes.DATE_TYPE.getName(), Multiplicity.OPTIONAL, false,
null),
new AttributeDefinition(COMMENT, DataTypes.STRING_TYPE.getName(), Multiplicity.OPTIONAL, false, null),
new AttributeDefinition("retention", DataTypes.INT_TYPE.getName(), Multiplicity.OPTIONAL, false, null),
@@ -263,8 +266,8 @@ public class HiveDataModelGenerator {
private void createProcessClass() throws AtlasException {
AttributeDefinition[] attributeDefinitions = new AttributeDefinition[]{
new AttributeDefinition("startTime", DataTypes.LONG_TYPE.getName(), Multiplicity.REQUIRED, false, null),
new AttributeDefinition("endTime", DataTypes.LONG_TYPE.getName(), Multiplicity.REQUIRED, false, null),
new AttributeDefinition("startTime", DataTypes.DATE_TYPE.getName(), Multiplicity.REQUIRED, false, null),
new AttributeDefinition("endTime", DataTypes.DATE_TYPE.getName(), Multiplicity.REQUIRED, false, null),
new AttributeDefinition("userName", DataTypes.STRING_TYPE.getName(), Multiplicity.REQUIRED, false,
null),
new AttributeDefinition("operationType", DataTypes.STRING_TYPE.getName(), Multiplicity.REQUIRED, false,
......
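Note: switching these attributes from DataTypes.LONG_TYPE to DataTypes.DATE_TYPE is what makes ATLAS-532 an incompatible change (see the release-note hunk below): clients that used to read createTime, lastAccessTime, startTime and endTime as numbers now receive date values, which come back over REST as formatted strings. A sketch of the consumer-side difference, assuming an ISO-8601-style pattern; the authoritative pattern is whatever TypeSystem.getInstance().getDateFormat() returns:

    import java.text.DateFormat;
    import java.text.ParseException;
    import java.text.SimpleDateFormat;
    import java.util.Date;
    import java.util.TimeZone;

    public class TimestampConsumerSketch {
        public static void main(String[] args) throws ParseException {
            // Before ATLAS-532: the attribute value was epoch milliseconds.
            long before = 1459382400000L;
            Date asDate = new Date(before);

            // After ATLAS-532: the attribute is a date, serialized as a string.
            // The pattern below is an assumption for the sketch; the tests use
            // TypeSystem.getInstance().getDateFormat() instead.
            DateFormat fmt = new SimpleDateFormat("yyyy-MM-dd'T'HH:mm:ss.SSS'Z'");
            fmt.setTimeZone(TimeZone.getTimeZone("UTC"));
            String after = fmt.format(asDate);

            Date parsed = fmt.parse(after);
            System.out.println(parsed.getTime() == before); // true: same instant, new representation
        }
    }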
@@ -31,6 +31,7 @@ import org.apache.atlas.hive.model.HiveDataTypes;
import org.apache.atlas.typesystem.Referenceable;
import org.apache.atlas.typesystem.Struct;
import org.apache.atlas.typesystem.persistence.Id;
import org.apache.atlas.typesystem.types.TypeSystem;
import org.apache.atlas.utils.ParamChecker;
import org.apache.commons.configuration.Configuration;
import org.apache.commons.lang.RandomStringUtils;
@@ -38,8 +39,10 @@ import org.apache.commons.lang.StringUtils;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hive.conf.HiveConf;
import org.apache.hadoop.hive.metastore.TableType;
import org.apache.hadoop.hive.metastore.api.hive_metastoreConstants;
import org.apache.hadoop.hive.ql.Driver;
import org.apache.hadoop.hive.ql.hooks.Entity;
import org.apache.hadoop.hive.ql.metadata.Table;
import org.apache.hadoop.hive.ql.processors.CommandProcessorResponse;
import org.apache.hadoop.hive.ql.session.SessionState;
import org.codehaus.jettison.json.JSONObject;
@@ -49,6 +52,8 @@ import org.testng.annotations.BeforeClass;
import org.testng.annotations.Test;
import java.io.File;
import java.text.ParseException;
import java.util.Date;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
@@ -66,6 +71,7 @@ public class HiveHookIT {
public static final String DEFAULT_DB = "default";
private Driver driver;
private AtlasClient atlasClient;
private HiveMetaStoreBridge hiveMetaStoreBridge;
private SessionState ss;
private static final String INPUTS = AtlasClient.PROCESS_ATTRIBUTE_INPUTS;
@@ -87,7 +93,7 @@ public class HiveHookIT {
Configuration configuration = ApplicationProperties.get();
atlasClient = new AtlasClient(configuration.getString(HiveMetaStoreBridge.ATLAS_ENDPOINT, DGI_URL));
HiveMetaStoreBridge hiveMetaStoreBridge = new HiveMetaStoreBridge(conf, atlasClient);
hiveMetaStoreBridge = new HiveMetaStoreBridge(conf, atlasClient);
hiveMetaStoreBridge.registerHiveDataModel();
}
@@ -189,6 +195,12 @@ public class HiveHookIT {
Assert.assertEquals(tableRef.get(HiveDataModelGenerator.NAME), entityName);
Assert.assertEquals(tableRef.get(HiveDataModelGenerator.NAME), "default." + tableName.toLowerCase() + "@" + CLUSTER_NAME);
Table t = hiveMetaStoreBridge.hiveClient.getTable(DEFAULT_DB, tableName);
long createTime = Long.parseLong(t.getMetadata().getProperty(hive_metastoreConstants.DDL_TIME)) * HiveMetaStoreBridge.MILLIS_CONVERT_FACTOR;
verifyTimestamps(tableRef, HiveDataModelGenerator.CREATE_TIME, createTime);
verifyTimestamps(tableRef, HiveDataModelGenerator.LAST_ACCESS_TIME, createTime);
final Referenceable sdRef = (Referenceable) tableRef.get("sd");
Assert.assertEquals(sdRef.get(HiveDataModelGenerator.STORAGE_IS_STORED_AS_SUB_DIRS), false);
@@ -196,6 +208,21 @@
assertDatabaseIsRegistered(DEFAULT_DB);
}
private void verifyTimestamps(Referenceable ref, String property, long expectedTime) throws ParseException {
    // Verify timestamps.
    String createTimeStr = (String) ref.get(property);
    Date createDate = TypeSystem.getInstance().getDateFormat().parse(createTimeStr);
    Assert.assertNotNull(createTimeStr);
    if (expectedTime > 0) {
        Assert.assertEquals(expectedTime, createDate.getTime());
    }
}

private void verifyTimestamps(Referenceable ref, String property) throws ParseException {
    verifyTimestamps(ref, property, 0);
}
@Test
public void testCreateExternalTable() throws Exception {
String tableName = tableName();
@@ -208,6 +235,10 @@
String tableId = assertTableIsRegistered(dbName, tableName);
Referenceable processReference = validateProcess(query, 1, 1);
verifyTimestamps(processReference, "startTime");
verifyTimestamps(processReference, "endTime");
validateHDFSPaths(processReference, pFile, INPUTS);
validateOutputTables(processReference, tableId);
}
......
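Note: the new verifyTimestamps helper above works because the expected DDL time (epoch seconds from the metastore) is first scaled by HiveMetaStoreBridge.MILLIS_CONVERT_FACTOR and only then compared against Date.getTime(), which is epoch milliseconds. A self-contained sketch of that check, using a plain SimpleDateFormat as a stand-in for the Atlas TypeSystem date format the test relies on:

    import java.text.DateFormat;
    import java.text.ParseException;
    import java.text.SimpleDateFormat;
    import java.util.Date;
    import java.util.TimeZone;

    public class VerifyTimestampsSketch {
        static final int MILLIS_CONVERT_FACTOR = 1000;

        // Mirrors the shape of verifyTimestamps(): parse the serialized attribute value,
        // then compare epoch millis only when an expected value was supplied (> 0).
        static void verify(String serialized, long expectedMillis, DateFormat fmt) throws ParseException {
            Date parsed = fmt.parse(serialized);
            if (expectedMillis > 0 && parsed.getTime() != expectedMillis) {
                throw new AssertionError("expected " + expectedMillis + " but was " + parsed.getTime());
            }
        }

        public static void main(String[] args) throws ParseException {
            DateFormat fmt = new SimpleDateFormat("yyyy-MM-dd'T'HH:mm:ss.SSS'Z'"); // assumed pattern
            fmt.setTimeZone(TimeZone.getTimeZone("UTC"));
            long ddlTimeSeconds = 1459382400L;                      // what the metastore stores
            long expectedMillis = ddlTimeSeconds * MILLIS_CONVERT_FACTOR;
            verify(fmt.format(new Date(expectedMillis)), expectedMillis, fmt);
            System.out.println("timestamps match");
        }
    }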
@@ -3,13 +3,14 @@ Apache Atlas Release Notes
--trunk - unreleased
INCOMPATIBLE CHANGES:
ATLAS-532 Change Data types of all timestamps in Hive model (currently long) (sumasai via yhemanth)
ATLAS-622 Introduce soft delete (shwethags)
ATLAS-494 UI Authentication (nixonrodrigues via shwethags)
ATLAS-621 Introduce entity state in Id object (shwethags)
ATLAS-474 Server does not start if the type is updated with same super type class information (dkantor via shwethags)
ATLAS-479 Add description for different types during create time (guptaneeru via shwethags)
ATLAS-521 Support Alter Table column commands (suma.shivaprasad via shwethags)
ATLAS-500 UI: Search Default (sanjayp via shwethags)
ATLAS-525 Drop support for partitions, select query lineage, roles, principals, resource, hive_type...(sumasai via shwethags)
ATLAS-483 Remove client.properties (tbeerbower via shwethags)
ATLAS-349 SSL - Atlas SSL connection has weak/unsafe Ciphers suites (ndjouhr via shwethags)
ATLAS-409 Atlas will not import avro tables with schema read from a file (dossett@gmail.com via shwethags)
@@ -32,7 +33,6 @@ ATLAS-571 Modify Atlas client for necessary changes in context of HA (yhemanth v
ATLAS-620 Disable hbase based entity audit (shwethags)
ATLAS-618 Fix assembly for hdfs-module (sumasai via yhemanth)
ATLAS-573 Inherited attributes disappear from entities after server restart (dkantor via sumasai)
ATLAS-525 Drop support for partitions, select query lineage, roles, principals, resource, hive_type...(sumasai via shwethags)
ATLAS-599 HDFS Path Model (sumasai via yhemanth)
ATLAS-553 Entity mutation - Fix issue with reordering of elements in array<class> with composite references (sumasai via shwethags)
ATLAS-513 Admin support for HA (yhemanth via sumasai)
@@ -46,6 +46,7 @@ ATLAS-539 Store for entity audit events (shwethags)
ATLAS-523 Support alter view (sumasai via shwethags)
ATLAS-555 Tag creation from UI fails due to missing description attribute (guptaneeru via shwethags)
ATLAS-522 Support Alter table commands (sumasai via shwethags)
ATLAS-521 Support Alter Table column commands (sumasai via shwethags)
ATLAS-512 Decouple currently integrating components from availability of Atlas service for raising metadata events ( yhemanth via sumasai)
ATLAS-537 Falcon hook failing when tried to submit a process which creates a hive table ( shwethags via sumasai)
ATLAS-476 Update type attribute with Reserved characters updated the original type as unknown (yhemanth via shwethags)
......