Commit 760b3b96 by Venkat

Removed Control-Ms at end

parent 898e7317
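For context, "Control-M" (^M) is the carriage-return character (\r) that Windows-style CRLF line endings leave behind. Purely as an illustration of the kind of cleanup this commit performs by hand (not the tool actually used here), a minimal Java 11+ sketch that strips such characters from a file could look like the following; the command-line file path is a hypothetical example:

import java.io.IOException;
import java.nio.file.Files;
import java.nio.file.Path;

public class StripCarriageReturns {
    // Rewrites the given file in place, dropping every carriage return (\r),
    // whether it trails a line (CRLF) or sits at the end of the file.
    public static void main(String[] args) throws IOException {
        Path file = Path.of(args[0]); // path of the file to clean (illustrative)
        String content = Files.readString(file);
        Files.writeString(file, content.replace("\r", ""));
    }
}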
HiveTypeSystem.java

/**
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements. See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership. The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License. You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.apache.hadoop.metadata.hivetypes;

import com.google.common.collect.ImmutableList;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.metadata.MetadataException;
import org.apache.hadoop.metadata.types.*;

import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;

public class HiveTypeSystem {

    public static final Log LOG = LogFactory.getLog(HiveTypeSystem.class);

    public static final class Holder {
        public static final HiveTypeSystem instance = new HiveTypeSystem();
    }

    private TypeSystem typeSystem;
    private boolean valid = false;

    public enum DefinedTypes {
        // Enums
        HIVE_OBJECTTYPE,
        HIVE_PRINCIPALTYPE,
        HIVE_RESOURCETYPE,
        HIVE_FUNCTIONTYPE,

        // Structs
        HIVE_SERDE,
        HIVE_STORAGEDESC,
        HIVE_SKEWEDINFO,
        HIVE_ORDER,
        HIVE_RESOURCEURI,

        // Classes
        HIVE_DB,
        HIVE_TABLE,
        HIVE_COLUMN,
        HIVE_PARTITION,
        HIVE_INDEX,
        HIVE_FUNCTION,
        HIVE_ROLE,
        HIVE_TYPE,
        //HIVE_VIEW,
    }

    private Map<String, HierarchicalTypeDefinition<ClassType>> classTypeDefinitions;
    private Map<String, EnumTypeDefinition> enumTypeDefinitionMap;
    private Map<String, StructTypeDefinition> structTypeDefinitionMap;
    private DataTypes.MapType mapStrToStrMap;
    private DataTypes.ArrayType strArrayType;
    private Map<String, IDataType> typeMap;
    private List<IDataType> enumTypes;

    private static Multiplicity ZeroOrMore = new Multiplicity(0, Integer.MAX_VALUE, true);

    private HiveTypeSystem() {
        classTypeDefinitions = new HashMap<>();
        enumTypeDefinitionMap = new HashMap<>();
        structTypeDefinitionMap = new HashMap<>();
        typeMap = new HashMap<>();
        enumTypes = new ArrayList<>();
    }

    private void initialize() throws MetadataException {
        LOG.info("Initializing the Hive Typesystem");
        typeSystem = TypeSystem.getInstance();
        mapStrToStrMap =
                typeSystem.defineMapType(DataTypes.STRING_TYPE, DataTypes.STRING_TYPE);
        strArrayType = typeSystem.defineArrayType(DataTypes.STRING_TYPE);
        createHiveObjectTypeEnum();
        createHivePrincipalTypeEnum();
        createFunctionTypeEnum();
        createResourceTypeEnum();
        createSerDeStruct();
        //createSkewedInfoStruct();
        createOrderStruct();
        createResourceUriStruct();
        createStorageDescStruct();
        createDBClass();
        createTypeClass();
        createColumnClass();
        createPartitionClass();
        createTableClass();
        createIndexClass();
        createFunctionClass();
        createRoleClass();
        for (EnumTypeDefinition def : getEnumTypeDefinitions()) {
            enumTypes.add(typeSystem.defineEnumType(def));
        }
        typeMap.putAll(
                typeSystem.defineTypes(getStructTypeDefinitions(), getTraitTypeDefinitions(), getClassTypeDefinitions()));
        valid = true;
    }

    public synchronized static HiveTypeSystem getInstance() throws MetadataException {
        HiveTypeSystem hs = Holder.instance;
        if (hs.valid) {
            LOG.info("Returning pre-initialized HiveTypeSystem singleton");
            return hs;
        }
        hs.initialize();
        return hs;
    }

    public IDataType getDataType(String typeName) {
        return typeMap.get(typeName);
    }

    public ImmutableList<HierarchicalType> getHierarchicalTypeDefinitions() {
        if (valid) {
            return ImmutableList.of(
                    (HierarchicalType) typeMap.get(DefinedTypes.HIVE_DB.name()),
                    (HierarchicalType) typeMap.get(DefinedTypes.HIVE_TABLE.name()),
                    (HierarchicalType) typeMap.get(DefinedTypes.HIVE_COLUMN.name()),
                    (HierarchicalType) typeMap.get(DefinedTypes.HIVE_PARTITION.name()),
                    (HierarchicalType) typeMap.get(DefinedTypes.HIVE_INDEX.name()),
                    (HierarchicalType) typeMap.get(DefinedTypes.HIVE_FUNCTION.name()),
                    (HierarchicalType) typeMap.get(DefinedTypes.HIVE_ROLE.name())
            );
        } else {
            return ImmutableList.of();
        }
    }

    public ImmutableList<EnumTypeDefinition> getEnumTypeDefinitions() {
        return ImmutableList.copyOf(enumTypeDefinitionMap.values());
    }

    public ImmutableList<StructTypeDefinition> getStructTypeDefinitions() {
        return ImmutableList.copyOf(structTypeDefinitionMap.values());
    }

    public ImmutableList<HierarchicalTypeDefinition<ClassType>> getClassTypeDefinitions() {
        return ImmutableList.copyOf(classTypeDefinitions.values());
    }

    public ImmutableList<HierarchicalTypeDefinition<TraitType>> getTraitTypeDefinitions() {
        return ImmutableList.of();
    }

    private void createHiveObjectTypeEnum() throws MetadataException {
        EnumValue values[] = {
                new EnumValue("GLOBAL", 1),
                new EnumValue("DATABASE", 2),
                new EnumValue("TABLE", 3),
                new EnumValue("PARTITION", 4),
                new EnumValue("COLUMN", 5),
        };
        EnumTypeDefinition definition = new EnumTypeDefinition(
                DefinedTypes.HIVE_OBJECTTYPE.name(), values);
        enumTypeDefinitionMap.put(DefinedTypes.HIVE_OBJECTTYPE.name(), definition);
        LOG.debug("Created definition for " + DefinedTypes.HIVE_OBJECTTYPE.name());
    }

    private void createHivePrincipalTypeEnum() throws MetadataException {
        EnumValue values[] = {
                new EnumValue("USER", 1),
                new EnumValue("ROLE", 2),
                new EnumValue("GROUP", 3),
        };
        EnumTypeDefinition definition = new EnumTypeDefinition(
                DefinedTypes.HIVE_PRINCIPALTYPE.name(), values);
        enumTypeDefinitionMap.put(DefinedTypes.HIVE_PRINCIPALTYPE.name(), definition);
        LOG.debug("Created definition for " + DefinedTypes.HIVE_PRINCIPALTYPE.name());
    }

    private void createFunctionTypeEnum() throws MetadataException {
        EnumValue values[] = {
                new EnumValue("JAVA", 1),
        };
        EnumTypeDefinition definition = new EnumTypeDefinition(
                DefinedTypes.HIVE_FUNCTIONTYPE.name(), values);
        enumTypeDefinitionMap.put(DefinedTypes.HIVE_FUNCTIONTYPE.name(), definition);
        LOG.debug("Created definition for " + DefinedTypes.HIVE_FUNCTIONTYPE.name());
    }

    private void createResourceTypeEnum() throws MetadataException {
        EnumValue values[] = {
                new EnumValue("JAR", 1),
                new EnumValue("FILE", 2),
                new EnumValue("ARCHIVE", 3),
        };
        EnumTypeDefinition definition = new EnumTypeDefinition(
                DefinedTypes.HIVE_RESOURCETYPE.name(), values);
        enumTypeDefinitionMap.put(DefinedTypes.HIVE_RESOURCETYPE.name(), definition);
        LOG.debug("Created definition for " + DefinedTypes.HIVE_RESOURCETYPE.name());
    }

    private void createSerDeStruct() throws MetadataException {
        AttributeDefinition[] attributeDefinitions = new AttributeDefinition[]{
                new AttributeDefinition("name", DataTypes.STRING_TYPE.getName(), Multiplicity.OPTIONAL, false, null),
                new AttributeDefinition("serializationLib", DataTypes.STRING_TYPE.getName(), Multiplicity.OPTIONAL, false, null),
                new AttributeDefinition("parameters", mapStrToStrMap.getName(), Multiplicity.OPTIONAL, false, null),
        };
        StructTypeDefinition definition = new StructTypeDefinition(DefinedTypes.HIVE_SERDE.name(), attributeDefinitions);
        structTypeDefinitionMap.put(DefinedTypes.HIVE_SERDE.name(), definition);
        LOG.debug("Created definition for " + DefinedTypes.HIVE_SERDE.name());
    }

    /** Revisit later after nested array types are handled by the typesystem **/
    /**
    private void createSkewedInfoStruct() throws MetadataException {
        AttributeDefinition[] attributeDefinitions = new AttributeDefinition[]{
                new AttributeDefinition("skewedColNames", String.format("array<%s>", DataTypes.STRING_TYPE.getName()),
                        ZeroOrMore, false, null),
                new AttributeDefinition("skewedColValues", String.format("array<%s>", strArrayType.getName()),
                        ZeroOrMore, false, null),
                new AttributeDefinition("skewedColValueLocationMaps", mapStrToStrMap.getName(), Multiplicity.OPTIONAL, false, null),
        };
        StructTypeDefinition definition = new StructTypeDefinition(DefinedTypes.HIVE_SKEWEDINFO.name(), attributeDefinitions);
        structTypeDefinitionMap.put(DefinedTypes.HIVE_SKEWEDINFO.name(), definition);
        LOG.debug("Created definition for " + DefinedTypes.HIVE_SKEWEDINFO.name());
    }
    **/

    private void createOrderStruct() throws MetadataException {
        AttributeDefinition[] attributeDefinitions = new AttributeDefinition[]{
                new AttributeDefinition("col", DataTypes.STRING_TYPE.getName(), Multiplicity.REQUIRED, false, null),
                new AttributeDefinition("order", DataTypes.INT_TYPE.getName(), Multiplicity.REQUIRED, false, null),
        };
        StructTypeDefinition definition = new StructTypeDefinition(DefinedTypes.HIVE_ORDER.name(), attributeDefinitions);
        structTypeDefinitionMap.put(DefinedTypes.HIVE_ORDER.name(), definition);
        LOG.debug("Created definition for " + DefinedTypes.HIVE_ORDER.name());
    }

    private void createStorageDescStruct() throws MetadataException {
        AttributeDefinition[] attributeDefinitions = new AttributeDefinition[]{
                new AttributeDefinition("cols", String.format("array<%s>", DefinedTypes.HIVE_COLUMN.name()), Multiplicity.COLLECTION, false, null),
                new AttributeDefinition("location", DataTypes.STRING_TYPE.getName(), Multiplicity.OPTIONAL, false, null),
                new AttributeDefinition("inputFormat", DataTypes.STRING_TYPE.getName(), Multiplicity.OPTIONAL, false, null),
                new AttributeDefinition("outputFormat", DataTypes.STRING_TYPE.getName(), Multiplicity.OPTIONAL, false, null),
                new AttributeDefinition("compressed", DataTypes.BOOLEAN_TYPE.getName(), Multiplicity.REQUIRED, false, null),
                new AttributeDefinition("numBuckets", DataTypes.INT_TYPE.getName(), Multiplicity.OPTIONAL, false, null),
                new AttributeDefinition("serdeInfo", DefinedTypes.HIVE_SERDE.name(), Multiplicity.OPTIONAL, false, null),
                new AttributeDefinition("bucketCols", String.format("array<%s>", DataTypes.STRING_TYPE.getName()), Multiplicity.OPTIONAL, false, null),
                new AttributeDefinition("sortCols", String.format("array<%s>", DefinedTypes.HIVE_ORDER.name()), Multiplicity.OPTIONAL, false, null),
                new AttributeDefinition("parameters", mapStrToStrMap.getName(), Multiplicity.OPTIONAL, false, null),
                //new AttributeDefinition("skewedInfo", DefinedTypes.HIVE_SKEWEDINFO.name(), Multiplicity.OPTIONAL, false, null),
                new AttributeDefinition("storedAsSubDirectories", DataTypes.BOOLEAN_TYPE.getName(), Multiplicity.OPTIONAL, false, null),
        };
        StructTypeDefinition definition =
                new StructTypeDefinition(DefinedTypes.HIVE_STORAGEDESC.name(), attributeDefinitions);
        structTypeDefinitionMap.put(DefinedTypes.HIVE_STORAGEDESC.name(), definition);
        LOG.debug("Created definition for " + DefinedTypes.HIVE_STORAGEDESC.name());
    }

    private void createResourceUriStruct() throws MetadataException {
        AttributeDefinition[] attributeDefinitions = new AttributeDefinition[]{
                new AttributeDefinition("resourceType", DefinedTypes.HIVE_RESOURCETYPE.name(), Multiplicity.REQUIRED, false, null),
                new AttributeDefinition("uri", DataTypes.STRING_TYPE.getName(), Multiplicity.REQUIRED, false, null),
        };
        StructTypeDefinition definition = new StructTypeDefinition(DefinedTypes.HIVE_RESOURCEURI.name(), attributeDefinitions);
        structTypeDefinitionMap.put(DefinedTypes.HIVE_RESOURCEURI.name(), definition);
        LOG.debug("Created definition for " + DefinedTypes.HIVE_RESOURCEURI.name());
    }

    private void createDBClass() throws MetadataException {
        AttributeDefinition[] attributeDefinitions = new AttributeDefinition[]{
                new AttributeDefinition("name", DataTypes.STRING_TYPE.getName(), Multiplicity.REQUIRED, false, null),
                new AttributeDefinition("description", DataTypes.STRING_TYPE.getName(), Multiplicity.OPTIONAL, false, null),
                new AttributeDefinition("locationUri", DataTypes.STRING_TYPE.getName(), Multiplicity.REQUIRED, false, null),
                new AttributeDefinition("parameters", mapStrToStrMap.getName(), Multiplicity.OPTIONAL, false, null),
                new AttributeDefinition("ownerName", DataTypes.STRING_TYPE.getName(), Multiplicity.OPTIONAL, false, null),
                new AttributeDefinition("ownerType", DefinedTypes.HIVE_PRINCIPALTYPE.name(), Multiplicity.OPTIONAL, false, null),
        };
        HierarchicalTypeDefinition<ClassType> definition =
                new HierarchicalTypeDefinition<>(ClassType.class, DefinedTypes.HIVE_DB.name(),
                        null, attributeDefinitions);
        classTypeDefinitions.put(DefinedTypes.HIVE_DB.name(), definition);
        LOG.debug("Created definition for " + DefinedTypes.HIVE_DB.name());
    }

    private void createTypeClass() throws MetadataException {
        AttributeDefinition[] attributeDefinitions = new AttributeDefinition[]{
                new AttributeDefinition("name", DataTypes.STRING_TYPE.getName(), Multiplicity.REQUIRED, false, null),
                new AttributeDefinition("type1", DataTypes.STRING_TYPE.getName(), Multiplicity.OPTIONAL, false, null),
                new AttributeDefinition("type2", DataTypes.STRING_TYPE.getName(), Multiplicity.OPTIONAL, false, null),
                new AttributeDefinition("fields", String.format("array<%s>",
                        DefinedTypes.HIVE_COLUMN.name()), Multiplicity.OPTIONAL, false, null),
        };
        HierarchicalTypeDefinition<ClassType> definition =
                new HierarchicalTypeDefinition<>(ClassType.class, DefinedTypes.HIVE_TYPE.name(),
                        null, attributeDefinitions);
        classTypeDefinitions.put(DefinedTypes.HIVE_TYPE.name(), definition);
        LOG.debug("Created definition for " + DefinedTypes.HIVE_TYPE.name());
    }

    private void createColumnClass() throws MetadataException {
        AttributeDefinition[] attributeDefinitions = new AttributeDefinition[]{
                new AttributeDefinition("name", DataTypes.STRING_TYPE.getName(), Multiplicity.REQUIRED, false, null),
                //new AttributeDefinition("type", DefinedTypes.HIVE_TYPE.name(), Multiplicity.REQUIRED, false, null),
                new AttributeDefinition("type", DataTypes.STRING_TYPE.getName(), Multiplicity.REQUIRED, false, null),
                new AttributeDefinition("comment", DataTypes.STRING_TYPE.getName(), Multiplicity.OPTIONAL, false, null),
        };
        HierarchicalTypeDefinition<ClassType> definition =
                new HierarchicalTypeDefinition<>(ClassType.class, DefinedTypes.HIVE_COLUMN.name(),
                        null, attributeDefinitions);
        classTypeDefinitions.put(DefinedTypes.HIVE_COLUMN.name(), definition);
        LOG.debug("Created definition for " + DefinedTypes.HIVE_COLUMN.name());
    }

    private void createPartitionClass() throws MetadataException {
        AttributeDefinition[] attributeDefinitions = new AttributeDefinition[]{
                new AttributeDefinition("values", DataTypes.STRING_TYPE.getName(), Multiplicity.COLLECTION, false, null),
                new AttributeDefinition("dbName", DefinedTypes.HIVE_DB.name(), Multiplicity.REQUIRED, false, null),
                new AttributeDefinition("tableName", DefinedTypes.HIVE_TABLE.name(), Multiplicity.REQUIRED, false, null),
                new AttributeDefinition("createTime", DataTypes.INT_TYPE.getName(), Multiplicity.OPTIONAL, false, null),
                new AttributeDefinition("lastAccessTime", DataTypes.INT_TYPE.getName(), Multiplicity.OPTIONAL, false, null),
                new AttributeDefinition("sd", DefinedTypes.HIVE_STORAGEDESC.name(), Multiplicity.REQUIRED, false, null),
                new AttributeDefinition("parameters", mapStrToStrMap.getName(), Multiplicity.OPTIONAL, false, null),
        };
        HierarchicalTypeDefinition<ClassType> definition =
                new HierarchicalTypeDefinition<>(ClassType.class, DefinedTypes.HIVE_PARTITION.name(),
                        null, attributeDefinitions);
        classTypeDefinitions.put(DefinedTypes.HIVE_PARTITION.name(), definition);
        LOG.debug("Created definition for " + DefinedTypes.HIVE_PARTITION.name());
    }

    private void createTableClass() throws MetadataException {
        AttributeDefinition[] attributeDefinitions = new AttributeDefinition[]{
                new AttributeDefinition("tableName", DataTypes.STRING_TYPE.getName(), Multiplicity.REQUIRED, false, null),
                new AttributeDefinition("dbName", DefinedTypes.HIVE_DB.name(), Multiplicity.REQUIRED, false, null),
                new AttributeDefinition("owner", DataTypes.STRING_TYPE.getName(), Multiplicity.OPTIONAL, false, null),
                new AttributeDefinition("createTime", DataTypes.INT_TYPE.getName(), Multiplicity.OPTIONAL, false, null),
                new AttributeDefinition("lastAccessTime", DataTypes.INT_TYPE.getName(), Multiplicity.OPTIONAL, false, null),
                new AttributeDefinition("retention", DataTypes.INT_TYPE.getName(), Multiplicity.OPTIONAL, false, null),
                new AttributeDefinition("sd", DefinedTypes.HIVE_STORAGEDESC.name(), Multiplicity.OPTIONAL, false, null),
                new AttributeDefinition("partitionKeys", String.format("array<%s>", DefinedTypes.HIVE_COLUMN.name()),
                        Multiplicity.OPTIONAL, false, null),
                new AttributeDefinition("parameters", mapStrToStrMap.getName(), Multiplicity.OPTIONAL, false, null),
                new AttributeDefinition("viewOriginalText", DataTypes.STRING_TYPE.getName(), Multiplicity.OPTIONAL, false, null),
                new AttributeDefinition("viewExpandedText", DataTypes.STRING_TYPE.getName(), Multiplicity.OPTIONAL, false, null),
                new AttributeDefinition("tableType", DataTypes.STRING_TYPE.getName(), Multiplicity.OPTIONAL, false, null),
                new AttributeDefinition("temporary", DataTypes.BOOLEAN_TYPE.getName(), Multiplicity.OPTIONAL, false, null),
        };
        HierarchicalTypeDefinition<ClassType> definition =
                new HierarchicalTypeDefinition<>(ClassType.class, DefinedTypes.HIVE_TABLE.name(),
                        null, attributeDefinitions);
        classTypeDefinitions.put(DefinedTypes.HIVE_TABLE.name(), definition);
        LOG.debug("Created definition for " + DefinedTypes.HIVE_TABLE.name());
    }

    private void createIndexClass() throws MetadataException {
        AttributeDefinition[] attributeDefinitions = new AttributeDefinition[]{
                new AttributeDefinition("indexName", DataTypes.STRING_TYPE.getName(), Multiplicity.REQUIRED, false, null),
                new AttributeDefinition("indexHandleClass", DataTypes.STRING_TYPE.getName(), Multiplicity.REQUIRED, false, null),
                new AttributeDefinition("dbName", DefinedTypes.HIVE_DB.name(), Multiplicity.REQUIRED, false, null),
                new AttributeDefinition("createTime", DataTypes.INT_TYPE.getName(), Multiplicity.OPTIONAL, false, null),
                new AttributeDefinition("lastAccessTime", DataTypes.INT_TYPE.getName(), Multiplicity.OPTIONAL, false, null),
                new AttributeDefinition("origTableName", DefinedTypes.HIVE_TABLE.name(), Multiplicity.REQUIRED, false, null),
                new AttributeDefinition("indexTableName", DefinedTypes.HIVE_TABLE.name(), Multiplicity.REQUIRED, false, null),
                new AttributeDefinition("sd", DefinedTypes.HIVE_STORAGEDESC.name(), Multiplicity.REQUIRED, false, null),
                new AttributeDefinition("parameters", mapStrToStrMap.getName(), Multiplicity.OPTIONAL, false, null),
                new AttributeDefinition("deferredRebuild", DataTypes.BOOLEAN_TYPE.getName(), Multiplicity.OPTIONAL, false, null),
        };
        HierarchicalTypeDefinition<ClassType> definition =
                new HierarchicalTypeDefinition<>(ClassType.class, DefinedTypes.HIVE_INDEX.name(),
                        null, attributeDefinitions);
        classTypeDefinitions.put(DefinedTypes.HIVE_INDEX.name(), definition);
        LOG.debug("Created definition for " + DefinedTypes.HIVE_INDEX.name());
    }

    private void createFunctionClass() throws MetadataException {
        AttributeDefinition[] attributeDefinitions = new AttributeDefinition[]{
                new AttributeDefinition("functionName", DataTypes.STRING_TYPE.getName(), Multiplicity.REQUIRED, false, null),
                new AttributeDefinition("dbName", DefinedTypes.HIVE_DB.name(), Multiplicity.REQUIRED, false, null),
                new AttributeDefinition("className", DataTypes.INT_TYPE.getName(), Multiplicity.OPTIONAL, false, null),
                new AttributeDefinition("ownerName", DataTypes.INT_TYPE.getName(), Multiplicity.OPTIONAL, false, null),
                new AttributeDefinition("ownerType", DefinedTypes.HIVE_PRINCIPALTYPE.name(), Multiplicity.REQUIRED, false, null),
                new AttributeDefinition("createTime", DataTypes.INT_TYPE.getName(), Multiplicity.REQUIRED, false, null),
                new AttributeDefinition("functionType", DefinedTypes.HIVE_FUNCTIONTYPE.name(), Multiplicity.REQUIRED, false, null),
                new AttributeDefinition("resourceUris", DefinedTypes.HIVE_RESOURCEURI.name(), Multiplicity.COLLECTION, false, null),
        };
        HierarchicalTypeDefinition<ClassType> definition =
                new HierarchicalTypeDefinition<>(ClassType.class, DefinedTypes.HIVE_FUNCTION.name(),
                        null, attributeDefinitions);
        classTypeDefinitions.put(DefinedTypes.HIVE_FUNCTION.name(), definition);
        LOG.debug("Created definition for " + DefinedTypes.HIVE_FUNCTION.name());
    }

    private void createRoleClass() throws MetadataException {
        AttributeDefinition[] attributeDefinitions = new AttributeDefinition[]{
                new AttributeDefinition("roleName", DataTypes.STRING_TYPE.getName(), Multiplicity.REQUIRED, false, null),
                new AttributeDefinition("createTime", DataTypes.INT_TYPE.getName(), Multiplicity.REQUIRED, false, null),
                new AttributeDefinition("ownerName", DataTypes.STRING_TYPE.getName(), Multiplicity.REQUIRED, false, null),
        };
        HierarchicalTypeDefinition<ClassType> definition =
                new HierarchicalTypeDefinition<>(ClassType.class, DefinedTypes.HIVE_ROLE.name(),
                        null, attributeDefinitions);
        classTypeDefinitions.put(DefinedTypes.HIVE_ROLE.name(), definition);
        LOG.debug("Created definition for " + DefinedTypes.HIVE_ROLE.name());
    }
}
HiveTypeSystemTest.java

/**
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements. See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership. The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License. You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.apache.hadoop.metadata.hivetypes;

import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.hive.conf.HiveConf;
import org.apache.hadoop.hive.metastore.HiveMetaStoreClient;
import org.apache.hadoop.hive.metastore.api.MetaException;
import org.apache.hadoop.metadata.ITypedReferenceableInstance;
import org.apache.hadoop.metadata.MetadataException;
import org.apache.hadoop.metadata.storage.Id;
import org.apache.hadoop.metadata.storage.memory.MemRepository;
import org.apache.hadoop.metadata.types.TypeSystem;
import org.junit.Before;
import org.junit.Test;

public class HiveTypeSystemTest {

    protected MemRepository mr;
    protected HiveTypeSystem hts;
    public static final Log LOG = LogFactory.getLog(HiveTypeSystemTest.class);

    @Before
    public void setup() throws MetadataException {
        TypeSystem ts = TypeSystem.getInstance();
        ts.reset();
        mr = new MemRepository(ts);
        hts = HiveTypeSystem.getInstance();
    }

    @Test
    public void testHiveImport() throws MetaException, MetadataException {
        HiveImporter himport = new HiveImporter(mr, hts, new HiveMetaStoreClient(new HiveConf()));
        himport.importHiveMetadata();
        LOG.info("Defined instances");
        for (Id id : himport.getInstances()) {
            ITypedReferenceableInstance instance = mr.get(id);
            LOG.info(instance.toString());
        }
    }
}