Commit 1089e036 by Venkat Ranganathan

Fix Hive Importer to work with Graph repository and needed refactoring. Also fix enum handling in the repository
parent 0e9af017
@@ -55,6 +55,10 @@
         <artifactId>metadata-typesystem</artifactId>
     </dependency>
     <dependency>
+        <groupId>org.apache.hadoop.metadata</groupId>
+        <artifactId>metadata-repository</artifactId>
+    </dependency>
+    <dependency>
         <groupId>org.apache.hadoop</groupId>
         <artifactId>hadoop-common</artifactId>
     </dependency>
......
@@ -18,12 +18,10 @@
 package org.apache.hadoop.metadata.hivetypes;
 
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.hive.metastore.HiveMetaStoreClient;
 import org.apache.hadoop.hive.metastore.api.*;
 import org.apache.hadoop.metadata.*;
+import org.apache.hadoop.metadata.repository.MetadataRepository;
 import org.apache.hadoop.metadata.storage.IRepository;
 import org.apache.hadoop.metadata.storage.Id;
 import org.apache.hadoop.metadata.storage.RepositoryException;
@@ -32,18 +30,23 @@ import org.apache.hadoop.metadata.types.Multiplicity;
 import org.apache.hadoop.metadata.types.StructType;
 import org.apache.hadoop.metadata.types.TypeSystem;
 import org.apache.thrift.TException;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 import java.util.ArrayList;
 import java.util.List;
 
 public class HiveImporter {
 
     private final HiveMetaStoreClient hiveMetastoreClient;
 
-    public static final Log LOG = LogFactory.getLog(HiveImporter.class);
+    private static final Logger LOG =
+            LoggerFactory.getLogger(HiveImporter.class);
 
     private TypeSystem typeSystem;
     private IRepository repository;
+    private MetadataRepository graphRepository;
     private HiveTypeSystem hiveTypeSystem;
 
     private List<Id> dbInstances;
@@ -51,27 +54,41 @@ public class HiveImporter {
     private List<Id> partitionInstances;
     private List<Id> columnInstances;
 
+    public HiveImporter(MetadataRepository repo, HiveTypeSystem hts, HiveMetaStoreClient hmc) throws RepositoryException {
+        this(hts, hmc);
+
+        if (repo == null) {
+            LOG.error("repository is null");
+            throw new RuntimeException("repository is null");
+        }
+
+        this.graphRepository = repo;
+    }
+
     public HiveImporter(IRepository repo, HiveTypeSystem hts, HiveMetaStoreClient hmc) throws RepositoryException {
-        this.repository = repo;
-        this.hiveMetastoreClient = hmc;
-        this.hiveTypeSystem = hts;
-        typeSystem = TypeSystem.getInstance();
-        dbInstances = new ArrayList<>();
-        tableInstances = new ArrayList<>();
-        partitionInstances = new ArrayList<>();
-        columnInstances = new ArrayList<>();
+        this(hts, hmc);
 
-        if (repository == null) {
+        if (repo == null) {
             LOG.error("repository is null");
             throw new RuntimeException("repository is null");
         }
 
+        repository = repo;
+
         repository.defineTypes(hts.getHierarchicalTypeDefinitions());
     }
 
+    private HiveImporter(HiveTypeSystem hts, HiveMetaStoreClient hmc) {
+        this.hiveMetastoreClient = hmc;
+        this.hiveTypeSystem = hts;
+        typeSystem = TypeSystem.getInstance();
+        dbInstances = new ArrayList<>();
+        tableInstances = new ArrayList<>();
+        partitionInstances = new ArrayList<>();
+        columnInstances = new ArrayList<>();
+    }
+
     public List<Id> getDBInstances() {
         return dbInstances;
@@ -102,6 +119,21 @@ public class HiveImporter {
         }
     }
 
+    private ITypedReferenceableInstance createInstance(Referenceable ref)
+            throws MetadataException {
+        if (repository != null) {
+            return repository.create(ref);
+        } else {
+            String typeName = ref.getTypeName();
+            IDataType dataType = hiveTypeSystem.getDataType(typeName);
+            LOG.debug("creating instance of type " + typeName + " dataType " + dataType);
+            ITypedReferenceableInstance instance =
+                    (ITypedReferenceableInstance) dataType.convert(ref, Multiplicity.OPTIONAL);
+            graphRepository.createEntity(instance, typeName);
+            return instance;
+        }
+    }
+
     private void importDatabase(String db) throws MetadataException {
         try {
             LOG.info("Importing objects from database : " + db);
@@ -113,8 +145,8 @@
             dbRef.set("locationUri", hiveDB.getLocationUri());
             dbRef.set("parameters", hiveDB.getParameters());
             dbRef.set("ownerName", hiveDB.getOwnerName());
-            dbRef.set("ownerType", hiveDB.getOwnerType().toString());
-            ITypedReferenceableInstance dbRefTyped = repository.create(dbRef);
+            dbRef.set("ownerType", hiveDB.getOwnerType().getValue());
+            ITypedReferenceableInstance dbRefTyped = createInstance(dbRef);
             dbInstances.add(dbRefTyped.getId());
             importTables(db, dbRefTyped);
         } catch (NoSuchObjectException nsoe) {
@@ -153,7 +185,7 @@
                 colRef.set("name", fs.getName());
                 colRef.set("type", fs.getType());
                 colRef.set("comment", fs.getComment());
-                ITypedReferenceableInstance colRefTyped = repository.create(colRef);
+                ITypedReferenceableInstance colRefTyped = createInstance(colRef);
                 partKeys.add(colRefTyped);
             }
             tableRef.set("partitionKeys", partKeys);
@@ -168,7 +200,7 @@
             tableRef.set("tableType", hiveTable.getTableType());
             tableRef.set("temporary", hiveTable.isTemporary());
-            ITypedReferenceableInstance tableRefTyped = repository.create(tableRef);
+            ITypedReferenceableInstance tableRefTyped = createInstance(tableRef);
             tableInstances.add(tableRefTyped.getId());
@@ -186,7 +218,7 @@
                 partRef.set("sd", sdStruct);
                 partRef.set("columns", sdStruct.get("cols"));
                 partRef.set("parameters", hivePart.getParameters());
-                ITypedReferenceableInstance partRefTyped = repository.create(partRef);
+                ITypedReferenceableInstance partRefTyped = createInstance(partRef);
                 partitionInstances.add(partRefTyped.getId());
             }
         }
@@ -251,7 +283,7 @@
             colRef.set("name", fs.getName());
             colRef.set("type", fs.getType());
             colRef.set("comment", fs.getComment());
-            ITypedReferenceableInstance colRefTyped = repository.create(colRef);
+            ITypedReferenceableInstance colRefTyped = createInstance(colRef);
             fieldsList.add(colRefTyped);
             columnInstances.add(colRefTyped.getId());
         }
......
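The refactoring above leaves HiveImporter with two public constructors that delegate to a shared private one; createInstance() then routes each Referenceable to whichever repository was supplied. A minimal wiring sketch under that reading (the helper class and method names below are illustrative, not part of the commit):

import org.apache.hadoop.hive.conf.HiveConf;
import org.apache.hadoop.hive.metastore.HiveMetaStoreClient;
import org.apache.hadoop.metadata.repository.MetadataRepository;
import org.apache.hadoop.metadata.storage.IRepository;

class ImporterWiring {
    // Graph-backed path: the IRepository field stays null, so createInstance()
    // converts the Referenceable itself and calls graphRepository.createEntity(...).
    static void importViaGraph(MetadataRepository graphRepo, HiveTypeSystem hts) throws Exception {
        HiveMetaStoreClient hmc = new HiveMetaStoreClient(new HiveConf());
        new HiveImporter(graphRepo, hts, hmc).importHiveMetadata();
    }

    // Memory-backed path: the IRepository field is set, so createInstance()
    // falls back to repository.create(ref) exactly as before this commit.
    static void importViaMemory(IRepository memRepo, HiveTypeSystem hts) throws Exception {
        HiveMetaStoreClient hmc = new HiveMetaStoreClient(new HiveConf());
        new HiveImporter(memRepo, hts, hmc).importHiveMetadata();
    }
}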
@@ -23,6 +23,8 @@ import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.metadata.MetadataException;
 import org.apache.hadoop.metadata.types.*;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 import java.util.ArrayList;
 import java.util.HashMap;
@@ -31,7 +33,9 @@ import java.util.Map;
 public class HiveTypeSystem {
 
-    public static final Log LOG = LogFactory.getLog(HiveTypeSystem.class);
+    private static final Logger LOG =
+            LoggerFactory.getLogger(HiveTypeSystem.class);
 
     public static final class Holder {
         public static final HiveTypeSystem instance = new HiveTypeSystem();
     }
......
@@ -16,13 +16,13 @@
 # limitations under the License.
 #
-org.apache.hadoop.metadata=INFO, console
+org.apache.hadoop.metadata=DEBUG, console
-org.apache.hadoop=INFO, console
+org.apache.hadoop=DEBUG, console
 org.apache.hive=INFO, console
 org.apache.hcatalog=INFO, console
-metadata.root.logger=INFO,console,DRFA
+metadata.root.logger=DEBUG,console,DRFA
 hive.root.logger=INFO,console,DRFA
 hcatalog.root.logger=INFO,console,DRFA
 metadata.log.dir=${user.dir}/metadata/logs
......
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.metadata.hivetypes;
import org.apache.commons.configuration.ConfigurationException;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.hive.conf.HiveConf;
import org.apache.hadoop.hive.metastore.HiveMetaStoreClient;
import org.apache.hadoop.hive.metastore.api.MetaException;
import org.apache.hadoop.metadata.ITypedReferenceableInstance;
import org.apache.hadoop.metadata.MetadataException;
import org.apache.hadoop.metadata.repository.graph.GraphBackedMetadataRepository;
import org.apache.hadoop.metadata.repository.graph.GraphService;
import org.apache.hadoop.metadata.repository.graph.TitanGraphProvider;
import org.apache.hadoop.metadata.repository.graph.TitanGraphService;
import org.apache.hadoop.metadata.types.TypeSystem;
import org.junit.Before;
import org.junit.Test;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.io.BufferedWriter;
import java.io.FileWriter;
import java.io.IOException;
import java.util.List;
public class HiveGraphRepositoryTest {

    protected HiveTypeSystem hts;
    GraphBackedMetadataRepository repository;

    private static final Logger LOG =
            LoggerFactory.getLogger(HiveGraphRepositoryTest.class);

    @Before
    public void setup() throws ConfigurationException, MetadataException {
        TypeSystem ts = TypeSystem.getInstance();
        GraphService gs = new TitanGraphService(new TitanGraphProvider());
        repository = new GraphBackedMetadataRepository(gs);
        hts = HiveTypeSystem.getInstance();
    }

    @Test
    public void testHiveImport() throws MetaException, MetadataException, IOException {
        HiveImporter hImporter = new HiveImporter(repository, hts, new HiveMetaStoreClient(new HiveConf()));
        hImporter.importHiveMetadata();

        LOG.info("Defined DB instances");
        FileWriter fw = new FileWriter("hiveobjs.txt");
        BufferedWriter bw = new BufferedWriter(fw);
        List<String> idList =
                repository.getEntityList(HiveTypeSystem.DefinedTypes.HIVE_DB.name());
        for (String id : idList) {
            ITypedReferenceableInstance instance = repository.getEntityDefinition(id);
            bw.write(instance.toString());
        }

        LOG.info("Defined Table instances");
        idList =
                repository.getEntityList(HiveTypeSystem.DefinedTypes.HIVE_TABLE.name());
        for (String id : idList) {
            ITypedReferenceableInstance instance = repository.getEntityDefinition(id);
            bw.write(instance.toString());
        }

        LOG.info("Defined Partition instances");
        idList =
                repository.getEntityList(HiveTypeSystem.DefinedTypes.HIVE_PARTITION.name());
        for (String id : idList) {
            ITypedReferenceableInstance instance = repository.getEntityDefinition(id);
            bw.write(instance.toString());
        }

        LOG.info("Defined Column instances");
        idList =
                repository.getEntityList(HiveTypeSystem.DefinedTypes.HIVE_COLUMN.name());
        for (String id : idList) {
            ITypedReferenceableInstance instance = repository.getEntityDefinition(id);
            bw.write(instance.toString());
        }
        bw.flush();
        bw.close();
    }
}
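The four read-back loops in this test differ only in the type name; a small helper (hypothetical, not in this commit) would capture the getEntityList/getEntityDefinition pattern they all share:

    // Hypothetical helper using the same repository and writer as the test above.
    private void dumpInstances(BufferedWriter bw, HiveTypeSystem.DefinedTypes type)
            throws MetadataException, IOException {
        for (String id : repository.getEntityList(type.name())) {
            ITypedReferenceableInstance instance = repository.getEntityDefinition(id);
            bw.write(instance.toString());
        }
    }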
@@ -30,13 +30,22 @@ import org.apache.hadoop.metadata.storage.memory.MemRepository;
 import org.apache.hadoop.metadata.types.TypeSystem;
 import org.junit.Before;
 import org.junit.Test;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import java.io.BufferedWriter;
+import java.io.FileOutputStream;
+import java.io.FileWriter;
+import java.io.IOException;
 
 public class HiveTypeSystemTest {
 
     protected MemRepository mr;
     protected HiveTypeSystem hts;
-    public static final Log LOG = LogFactory.getLog(HiveTypeSystemTest.class);
+
+    private static final Logger LOG =
+            LoggerFactory.getLogger(HiveTypeSystemTest.class);
 
     @Before
     public void setup() throws MetadataException {
@@ -48,30 +57,34 @@ public class HiveTypeSystemTest {
     }
 
     @Test
-    public void testHiveImport() throws MetaException, MetadataException {
-        HiveImporter himport = new HiveImporter(mr, hts, new HiveMetaStoreClient(new HiveConf()));
-        himport.importHiveMetadata();
+    public void testHiveImport() throws MetaException, MetadataException, IOException {
+        HiveImporter hImporter = new HiveImporter(mr, hts, new HiveMetaStoreClient(new HiveConf()));
+        hImporter.importHiveMetadata();
 
         LOG.info("Defined DB instances");
+        FileWriter fw = new FileWriter("hiveobjs.txt");
+        BufferedWriter bw = new BufferedWriter(fw);
-        for (Id id : himport.getDBInstances()) {
+        for (Id id : hImporter.getDBInstances()) {
             ITypedReferenceableInstance instance = mr.get(id);
-            LOG.info(instance.toString());
+            bw.write(instance.toString());
         }
         LOG.info("Defined Table instances");
-        for (Id id : himport.getTableInstances()) {
+        for (Id id : hImporter.getTableInstances()) {
             ITypedReferenceableInstance instance = mr.get(id);
-            LOG.info(instance.toString());
+            bw.write(instance.toString());
         }
         LOG.info("Defined Partition instances");
-        for (Id id : himport.getPartitionInstances()) {
+        for (Id id : hImporter.getPartitionInstances()) {
             ITypedReferenceableInstance instance = mr.get(id);
-            LOG.info(instance.toString());
+            bw.write(instance.toString());
        }
         LOG.info("Defined Column instances");
-        for (Id id : himport.getColumnInstances()) {
+        for (Id id : hImporter.getColumnInstances()) {
             ITypedReferenceableInstance instance = mr.get(id);
-            LOG.info(instance.toString());
+            bw.write(instance.toString());
         }
+        bw.flush();
+        bw.close();
     }
 }
\ No newline at end of file
#
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# GraphService implementation
metadata.graph.impl.class=org.apache.hadoop.metadata.repository.graph.TitanGraphService
# Graph implementation
#metadata.graph.blueprints.graph=com.thinkaurelius.titan.core.TitanFactory
# Graph Storage
metadata.graph.storage.backend=inmemory
# Graph Search Index
#metadata.graph.index.search.backend=elasticsearch
#metadata.graph.index.search.directory=target/data/es
#metadata.graph.index.search.elasticsearch.client-only=false
#metadata.graph.index.search.elasticsearch.local-mode=true
metadata.enableTLS=false
storage.backend=inmemory
# Graph Search Index
index.search.backend=elasticsearch
index.search.directory=target/data/es
index.search.elasticsearch.client-only=false
index.search.elasticsearch.local-mode=true
\ No newline at end of file
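The bare storage.backend/index.search.* keys at the bottom are plain Titan options. As a rough illustration of what the inmemory backend implies for these tests (standard Titan API usage, not code from this commit): an in-memory graph keeps nothing on disk, so every test run starts from an empty repository.

import com.thinkaurelius.titan.core.TitanFactory;
import com.thinkaurelius.titan.core.TitanGraph;
import org.apache.commons.configuration.BaseConfiguration;

class InMemoryTitanDemo {
    public static void main(String[] args) {
        BaseConfiguration conf = new BaseConfiguration();
        conf.setProperty("storage.backend", "inmemory"); // same key as the properties above
        TitanGraph graph = TitanFactory.open(conf);      // nothing is persisted to disk
        graph.shutdown();                                // all data is discarded here
    }
}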
@@ -83,7 +83,7 @@ public class GraphBackedMetadataRepository implements MetadataRepository {
     private final TypeSystem typeSystem;
 
     @Inject
-    GraphBackedMetadataRepository(GraphService graphService) throws MetadataException {
+    public GraphBackedMetadataRepository(GraphService graphService) throws MetadataException {
         this.graphService = graphService;
         this.typeSystem = TypeSystem.getInstance();
     }
......
@@ -66,7 +66,7 @@ public class TitanGraphService implements GraphService {
      * @throws ConfigurationException
      */
     @Inject
-    TitanGraphService(GraphProvider<TitanGraph> graph) throws ConfigurationException {
+    public TitanGraphService(GraphProvider<TitanGraph> graph) throws ConfigurationException {
         // TODO reimplement to save the Provider and initialize the graph inside the start() method
         this.titanGraph = graph.get();
         //start();
......
@@ -227,7 +227,7 @@ public class StructInstance implements ITypedStruct {
         if ( i.dataType() != DataTypes.BOOLEAN_TYPE ) {
             throw new MetadataException(String.format("Field %s for Struct %s is not a %s, call generic get method",
-                    attrName, getTypeName()));
+                    attrName, getTypeName(), DataTypes.BOOLEAN_TYPE.getName()));
         }
 
         int pos = fieldMapping.fieldPos.get(attrName);
@@ -248,7 +248,7 @@ public class StructInstance implements ITypedStruct {
         if ( i.dataType() != DataTypes.BYTE_TYPE ) {
             throw new MetadataException(String.format("Field %s for Struct %s is not a %s, call generic get method",
-                    attrName, getTypeName()));
+                    attrName, getTypeName(), DataTypes.BYTE_TYPE.getName()));
         }
 
         int pos = fieldMapping.fieldPos.get(attrName);
@@ -269,7 +269,7 @@ public class StructInstance implements ITypedStruct {
         if ( i.dataType() != DataTypes.SHORT_TYPE ) {
             throw new MetadataException(String.format("Field %s for Struct %s is not a %s, call generic get method",
-                    attrName, getTypeName()));
+                    attrName, getTypeName(), DataTypes.SHORT_TYPE.getName()));
         }
 
         int pos = fieldMapping.fieldPos.get(attrName);
@@ -288,9 +288,10 @@ public class StructInstance implements ITypedStruct {
             throw new MetadataException(String.format("Unknown field %s for Struct %s", attrName, getTypeName()));
         }
 
-        if ( i.dataType() != DataTypes.INT_TYPE ) {
+        if ( i.dataType() != DataTypes.INT_TYPE && !(i.dataType() instanceof EnumType)) {
             throw new MetadataException(String.format("Field %s for Struct %s is not a %s, call generic get method",
-                    attrName, getTypeName()));
+                    attrName, getTypeName(), DataTypes.INT_TYPE.getName()));
         }
 
         int pos = fieldMapping.fieldPos.get(attrName);
@@ -311,7 +312,7 @@ public class StructInstance implements ITypedStruct {
         if ( i.dataType() != DataTypes.LONG_TYPE ) {
             throw new MetadataException(String.format("Field %s for Struct %s is not a %s, call generic get method",
-                    attrName, getTypeName()));
+                    attrName, getTypeName(), DataTypes.LONG_TYPE.getName()));
         }
 
         int pos = fieldMapping.fieldPos.get(attrName);
@@ -332,7 +333,7 @@ public class StructInstance implements ITypedStruct {
         if ( i.dataType() != DataTypes.FLOAT_TYPE ) {
             throw new MetadataException(String.format("Field %s for Struct %s is not a %s, call generic get method",
-                    attrName, getTypeName()));
+                    attrName, getTypeName(), DataTypes.FLOAT_TYPE.getName()));
         }
 
         int pos = fieldMapping.fieldPos.get(attrName);
@@ -353,7 +354,7 @@ public class StructInstance implements ITypedStruct {
         if ( i.dataType() != DataTypes.DOUBLE_TYPE ) {
             throw new MetadataException(String.format("Field %s for Struct %s is not a %s, call generic get method",
-                    attrName, getTypeName()));
+                    attrName, getTypeName(), DataTypes.DOUBLE_TYPE.getName()));
         }
 
         int pos = fieldMapping.fieldPos.get(attrName);
@@ -374,7 +375,7 @@ public class StructInstance implements ITypedStruct {
         if ( i.dataType() != DataTypes.BIGINTEGER_TYPE ) {
             throw new MetadataException(String.format("Field %s for Struct %s is not a %s, call generic get method",
-                    attrName, getTypeName()));
+                    attrName, getTypeName(), DataTypes.BIGINTEGER_TYPE.getName()));
         }
 
         int pos = fieldMapping.fieldPos.get(attrName);
@@ -395,7 +396,7 @@ public class StructInstance implements ITypedStruct {
         if ( i.dataType() != DataTypes.BIGDECIMAL_TYPE ) {
             throw new MetadataException(String.format("Field %s for Struct %s is not a %s, call generic get method",
-                    attrName, getTypeName()));
+                    attrName, getTypeName(), DataTypes.BIGDECIMAL_TYPE.getName()));
         }
 
         int pos = fieldMapping.fieldPos.get(attrName);
@@ -416,7 +417,7 @@ public class StructInstance implements ITypedStruct {
         if ( i.dataType() != DataTypes.DATE_TYPE ) {
             throw new MetadataException(String.format("Field %s for Struct %s is not a %s, call generic get method",
-                    attrName, getTypeName()));
+                    attrName, getTypeName(), DataTypes.DATE_TYPE.getName()));
         }
 
         int pos = fieldMapping.fieldPos.get(attrName);
@@ -437,7 +438,7 @@ public class StructInstance implements ITypedStruct {
         if ( i.dataType() != DataTypes.STRING_TYPE ) {
             throw new MetadataException(String.format("Field %s for Struct %s is not a %s, call generic get method",
-                    attrName, getTypeName()));
+                    attrName, getTypeName(), DataTypes.STRING_TYPE.getName()));
         }
 
         int pos = fieldMapping.fieldPos.get(attrName);
@@ -458,7 +459,7 @@ public class StructInstance implements ITypedStruct {
         if ( i.dataType() != DataTypes.BOOLEAN_TYPE ) {
             throw new MetadataException(String.format("Field %s for Struct %s is not a %s, call generic set method",
-                    attrName, getTypeName()));
+                    attrName, getTypeName(), DataTypes.BOOLEAN_TYPE.getName()));
         }
 
         int pos = fieldMapping.fieldPos.get(attrName);
@@ -476,7 +477,7 @@ public class StructInstance implements ITypedStruct {
         if ( i.dataType() != DataTypes.BYTE_TYPE ) {
             throw new MetadataException(String.format("Field %s for Struct %s is not a %s, call generic set method",
-                    attrName, getTypeName()));
+                    attrName, getTypeName(), DataTypes.BYTE_TYPE.getName()));
         }
 
         int pos = fieldMapping.fieldPos.get(attrName);
@@ -494,7 +495,7 @@ public class StructInstance implements ITypedStruct {
         if ( i.dataType() != DataTypes.SHORT_TYPE ) {
             throw new MetadataException(String.format("Field %s for Struct %s is not a %s, call generic set method",
-                    attrName, getTypeName()));
+                    attrName, getTypeName(), DataTypes.SHORT_TYPE.getName()));
         }
 
         int pos = fieldMapping.fieldPos.get(attrName);
@@ -510,9 +511,9 @@ public class StructInstance implements ITypedStruct {
             throw new MetadataException(String.format("Unknown field %s for Struct %s", attrName, getTypeName()));
         }
 
-        if ( i.dataType() != DataTypes.INT_TYPE ) {
+        if ( i.dataType() != DataTypes.INT_TYPE && !(i.dataType() instanceof EnumType)) {
             throw new MetadataException(String.format("Field %s for Struct %s is not a %s, call generic set method",
-                    attrName, getTypeName()));
+                    attrName, getTypeName(), DataTypes.INT_TYPE.getName()));
         }
 
         int pos = fieldMapping.fieldPos.get(attrName);
@@ -530,7 +531,7 @@ public class StructInstance implements ITypedStruct {
         if ( i.dataType() != DataTypes.LONG_TYPE ) {
             throw new MetadataException(String.format("Field %s for Struct %s is not a %s, call generic set method",
-                    attrName, getTypeName()));
+                    attrName, getTypeName(), DataTypes.LONG_TYPE.getName()));
         }
 
         int pos = fieldMapping.fieldPos.get(attrName);
@@ -548,7 +549,7 @@ public class StructInstance implements ITypedStruct {
         if ( i.dataType() != DataTypes.FLOAT_TYPE ) {
             throw new MetadataException(String.format("Field %s for Struct %s is not a %s, call generic set method",
-                    attrName, getTypeName()));
+                    attrName, getTypeName(), DataTypes.FLOAT_TYPE.getName()));
         }
 
         int pos = fieldMapping.fieldPos.get(attrName);
@@ -566,7 +567,7 @@ public class StructInstance implements ITypedStruct {
         if (i.dataType() != DataTypes.DOUBLE_TYPE) {
             throw new MetadataException(String.format("Field %s for Struct %s is not a %s, call generic set method",
-                    attrName, getTypeName()));
+                    attrName, getTypeName(), DataTypes.DOUBLE_TYPE.getName()));
         }
 
         int pos = fieldMapping.fieldPos.get(attrName);
@@ -584,7 +585,7 @@ public class StructInstance implements ITypedStruct {
         if ( i.dataType() != DataTypes.BIGINTEGER_TYPE ) {
             throw new MetadataException(String.format("Field %s for Struct %s is not a %s, call generic set method",
-                    attrName, getTypeName()));
+                    attrName, getTypeName(), DataTypes.BIGINTEGER_TYPE.getName()));
         }
 
         int pos = fieldMapping.fieldPos.get(attrName);
@@ -602,7 +603,7 @@ public class StructInstance implements ITypedStruct {
         if ( i.dataType() != DataTypes.BIGDECIMAL_TYPE ) {
             throw new MetadataException(String.format("Field %s for Struct %s is not a %s, call generic set method",
-                    attrName, getTypeName()));
+                    attrName, getTypeName(), DataTypes.BIGDECIMAL_TYPE.getName()));
        }
 
         int pos = fieldMapping.fieldPos.get(attrName);
@@ -620,7 +621,7 @@ public class StructInstance implements ITypedStruct {
         if ( i.dataType() != DataTypes.DATE_TYPE ) {
             throw new MetadataException(String.format("Field %s for Struct %s is not a %s, call generic set method",
-                    attrName, getTypeName()));
+                    attrName, getTypeName(), DataTypes.DATE_TYPE.getName()));
         }
 
         int pos = fieldMapping.fieldPos.get(attrName);
@@ -638,7 +639,7 @@ public class StructInstance implements ITypedStruct {
         if ( i.dataType() != DataTypes.STRING_TYPE ) {
             throw new MetadataException(String.format("Field %s for Struct %s is not a %s, call generic set method",
-                    attrName, getTypeName()));
+                    attrName, getTypeName(), DataTypes.STRING_TYPE.getName()));
         }
 
         int pos = fieldMapping.fieldPos.get(attrName);
......
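The relaxed INT_TYPE guards above are the "enum handling" fix from the commit message: these String.format calls were also missing their third argument, and enum attributes occupy the same int slot that INT_TYPE fields use, so the typed int get/set paths must admit EnumType as well. That is also why the importer now stores hiveDB.getOwnerType().getValue(), the Thrift enum's int code, instead of its name. A one-line sketch of the invariant the new checks enforce (the helper is hypothetical; EnumType, DataTypes, and IDataType are the commit's types):

import org.apache.hadoop.metadata.types.DataTypes;
import org.apache.hadoop.metadata.types.EnumType;
import org.apache.hadoop.metadata.types.IDataType;

final class IntSlotCheck {
    // Mirrors the new condition in StructInstance: the typed int accessors
    // accept a plain int field or an enum stored by its int value.
    static boolean fitsIntSlot(IDataType t) {
        return t == DataTypes.INT_TYPE || t instanceof EnumType;
    }

    private IntSlotCheck() {}
}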