Commit 44c4a289 by Suma Shivaprasad

Fixed eager initialization of singletons; added fixes for hive table comment and num buckets.
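The module bindings move from Scopes.SINGLETON to asEagerSingleton(), so the repository, type store, and related services are constructed when the injector starts rather than on first injection. A minimal sketch of the difference, using hypothetical Service/ServiceImpl names that are not part of this commit:

    import com.google.inject.AbstractModule;
    import com.google.inject.Guice;
    import com.google.inject.Scopes;

    // Hypothetical Service/ServiceImpl, used only to illustrate lazy vs eager singletons.
    public class EagerSingletonSketch {
        interface Service {}
        static class ServiceImpl implements Service {
            ServiceImpl() { System.out.println("ServiceImpl constructed"); }
        }

        public static void main(String[] args) {
            // Lazy singleton: in the default DEVELOPMENT stage nothing is printed here,
            // because ServiceImpl is only built when Service is first injected.
            Guice.createInjector(new AbstractModule() {
                @Override protected void configure() {
                    bind(Service.class).to(ServiceImpl.class).in(Scopes.SINGLETON);
                }
            });

            // Eager singleton: "ServiceImpl constructed" is printed while the injector
            // is being created, so initialization errors surface at startup.
            Guice.createInjector(new AbstractModule() {
                @Override protected void configure() {
                    bind(Service.class).to(ServiceImpl.class).asEagerSingleton();
                }
            });
        }
    }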

parent 6dd20ef7
@@ -26,6 +26,7 @@ import org.apache.hadoop.hive.metastore.api.Index;
 import org.apache.hadoop.hive.metastore.api.Order;
 import org.apache.hadoop.hive.metastore.api.SerDeInfo;
 import org.apache.hadoop.hive.metastore.api.StorageDescriptor;
+import org.apache.hadoop.hive.metastore.api.hive_metastoreConstants;
 import org.apache.hadoop.hive.ql.metadata.Hive;
 import org.apache.hadoop.hive.ql.metadata.Partition;
 import org.apache.hadoop.hive.ql.metadata.Table;
@@ -263,11 +264,13 @@ public class HiveMetaStoreBridge {
         tableRef = new Referenceable(HiveDataTypes.HIVE_TABLE.getName());
         tableRef.set("name", hiveTable.getTableName());
         tableRef.set("owner", hiveTable.getOwner());
-        tableRef.set("createTime", hiveTable.getLastAccessTime());
+        //todo fix
+        tableRef.set("createTime", hiveTable.getMetadata().getProperty(hive_metastoreConstants.DDL_TIME));
         tableRef.set("lastAccessTime", hiveTable.getLastAccessTime());
         tableRef.set("retention", hiveTable.getRetention());
+        tableRef.set(HiveDataModelGenerator.COMMENT, hiveTable.getParameters().get(HiveDataModelGenerator.COMMENT));

         // add reference to the database
         tableRef.set("dbName", dbReference);
@@ -416,6 +419,8 @@ public class HiveMetaStoreBridge {
         serdeInfoStruct.set("parameters", serdeInfo.getParameters());
         sdReferenceable.set("serdeInfo", serdeInfoStruct);
+        sdReferenceable.set(HiveDataModelGenerator.STORAGE_NUM_BUCKETS, storageDesc.getNumBuckets());
+        sdReferenceable.set(HiveDataModelGenerator.STORAGE_IS_STORED_AS_SUB_DIRS, storageDesc.isStoredAsSubDirectories());

         // Will need to revisit this after we fix typesystem.
         /*
@@ -480,7 +485,7 @@ public class HiveMetaStoreBridge {
             Referenceable colReferenceable = new Referenceable(HiveDataTypes.HIVE_COLUMN.getName());
             colReferenceable.set("name", fs.getName());
             colReferenceable.set("type", fs.getType());
-            colReferenceable.set("comment", fs.getComment());
+            colReferenceable.set(HiveDataModelGenerator.COMMENT, fs.getComment());
             colList.add(createInstance(colReferenceable));
         }
...
@@ -55,6 +55,10 @@ public class HiveDataModelGenerator {
     private final Map<String, EnumTypeDefinition> enumTypeDefinitionMap;
     private final Map<String, StructTypeDefinition> structTypeDefinitionMap;

+    public static final String COMMENT = "comment";
+    public static final String STORAGE_NUM_BUCKETS = "numBuckets";
+    public static final String STORAGE_IS_STORED_AS_SUB_DIRS = "storedAsSubDirectories";
+
     public HiveDataModelGenerator() {
         classTypeDefinitions = new HashMap<>();
         enumTypeDefinitionMap = new HashMap<>();
@@ -237,7 +241,7 @@ public class HiveDataModelGenerator {
                         Multiplicity.OPTIONAL, false, null),
                 new AttributeDefinition("compressed", DataTypes.BOOLEAN_TYPE.getName(),
                         Multiplicity.REQUIRED, false, null),
-                new AttributeDefinition("numBuckets", DataTypes.INT_TYPE.getName(),
+                new AttributeDefinition(STORAGE_NUM_BUCKETS, DataTypes.INT_TYPE.getName(),
                         Multiplicity.OPTIONAL, false, null),
                 new AttributeDefinition("serdeInfo", HiveDataTypes.HIVE_SERDE.getName(),
                         Multiplicity.OPTIONAL, false, null),
@@ -251,7 +255,7 @@ public class HiveDataModelGenerator {
                         Multiplicity.OPTIONAL, false, null),
                 //new AttributeDefinition("skewedInfo", DefinedTypes.HIVE_SKEWEDINFO.getName(),
                 //        Multiplicity.OPTIONAL, false, null),
-                new AttributeDefinition("storedAsSubDirectories", DataTypes.BOOLEAN_TYPE.getName(),
+                new AttributeDefinition(STORAGE_IS_STORED_AS_SUB_DIRS, DataTypes.BOOLEAN_TYPE.getName(),
                         Multiplicity.OPTIONAL, false, null),
         };
@@ -326,7 +330,7 @@ public class HiveDataModelGenerator {
                         Multiplicity.REQUIRED, false, null),
                 new AttributeDefinition("type", DataTypes.STRING_TYPE.getName(),
                         Multiplicity.REQUIRED, false, null),
-                new AttributeDefinition("comment", DataTypes.STRING_TYPE.getName(),
+                new AttributeDefinition(COMMENT, DataTypes.STRING_TYPE.getName(),
                         Multiplicity.OPTIONAL, false, null),
         };
         HierarchicalTypeDefinition<ClassType> definition =
@@ -377,6 +381,8 @@ public class HiveDataModelGenerator {
                         Multiplicity.OPTIONAL, false, null),
                 new AttributeDefinition("lastAccessTime", DataTypes.INT_TYPE.getName(),
                         Multiplicity.OPTIONAL, false, null),
+                new AttributeDefinition(COMMENT, DataTypes.STRING_TYPE.getName(),
+                        Multiplicity.OPTIONAL, false, null),
                 new AttributeDefinition("retention", DataTypes.INT_TYPE.getName(),
                         Multiplicity.OPTIONAL, false, null),
                 new AttributeDefinition("sd", HiveDataTypes.HIVE_STORAGEDESC.getName(),
...
@@ -25,6 +25,7 @@ import org.apache.hadoop.hive.ql.Driver;
 import org.apache.hadoop.hive.ql.session.SessionState;
 import org.apache.hadoop.metadata.MetadataServiceClient;
 import org.apache.hadoop.metadata.hive.bridge.HiveMetaStoreBridge;
+import org.apache.hadoop.metadata.hive.model.HiveDataModelGenerator;
 import org.apache.hadoop.metadata.hive.model.HiveDataTypes;
 import org.apache.hadoop.metadata.typesystem.Referenceable;
 import org.apache.log4j.spi.LoggerFactory;
@@ -118,7 +119,7 @@ public class HiveHookIT {
     private String createTable(boolean partition) throws Exception {
         String tableName = tableName();
-        runCommand("create table " + tableName + "(id int, name string)" + (partition ? " partitioned by(dt string)"
+        runCommand("create table " + tableName + "(id int, name string) comment 'table comment' " + (partition ? " partitioned by(dt string)"
                 : ""));
         return tableName;
     }
@@ -137,6 +138,7 @@ public class HiveHookIT {
         String tableId = assertTableIsRegistered(DEFAULT_DB, tableName);
         Referenceable tableRef = dgiCLient.getEntity(tableId);
         Assert.assertEquals(tableRef.get("tableType"), TableType.MANAGED_TABLE.name());
+        Assert.assertEquals(tableRef.get(HiveDataModelGenerator.COMMENT), "table comment");

         //Create table where database doesn't exist, will create database instance as well
         assertDatabaseIsRegistered(DEFAULT_DB);
...
@@ -48,28 +48,28 @@ public class RepositoryMetadataModule extends com.google.inject.AbstractModule {
         ThrowingProviderBinder.create(binder())
                 .bind(GraphProvider.class, TitanGraph.class)
                 .to(TitanGraphProvider.class)
-                .in(Scopes.SINGLETON);
+                .asEagerSingleton();

         // allow for dynamic binding of the metadata repo & graph service
         // bind the MetadataRepositoryService interface to an implementation
-        bind(MetadataRepository.class).to(GraphBackedMetadataRepository.class);
+        bind(MetadataRepository.class).to(GraphBackedMetadataRepository.class).asEagerSingleton();

         // bind the ITypeStore interface to an implementation
-        bind(ITypeStore.class).to(GraphBackedTypeStore.class);
+        bind(ITypeStore.class).to(GraphBackedTypeStore.class).asEagerSingleton();

         // bind the GraphService interface to an implementation
         // bind(GraphService.class).to(graphServiceClass);

         // bind the MetadataService interface to an implementation
-        bind(MetadataService.class).to(DefaultMetadataService.class);
+        bind(MetadataService.class).to(DefaultMetadataService.class).asEagerSingleton();

         // bind the DiscoveryService interface to an implementation
-        bind(DiscoveryService.class).to(GraphBackedDiscoveryService.class);
+        bind(DiscoveryService.class).to(GraphBackedDiscoveryService.class).asEagerSingleton();

-        bind(SearchIndexer.class).to(GraphBackedSearchIndexer.class);
+        bind(SearchIndexer.class).to(GraphBackedSearchIndexer.class).asEagerSingleton();

-        bind(LineageService.class).to(HiveLineageService.class);
+        bind(LineageService.class).to(HiveLineageService.class).asEagerSingleton();

         MethodInterceptor interceptor = new GraphTransactionInterceptor();
         requestInjection(interceptor);
...
@@ -89,10 +89,11 @@ public class DefaultMetadataService implements MetadataService {
     private void restoreTypeSystem() {
         LOG.info("Restoring type system from the store");
         try {
+            createSuperTypes();
+
             TypesDef typesDef = typeStore.restore();
             typeSystem.defineTypes(typesDef);
-            createSuperTypes();
         } catch (MetadataException e) {
             throw new RuntimeException(e);
         }
...
@@ -57,11 +57,6 @@ import java.util.List;
 @Guice(modules = RepositoryMetadataModule.class)
 public class HiveLineageServiceTest {

-    static {
-        // this would override super types creation if not first thing
-        TypeSystem.getInstance().reset();
-    }
-
     @Inject
     private DefaultMetadataService metadataService;
...