Commit 112dffb0 by Harish Butani

support json serde for types

parent 1143f69d
@@ -15,6 +15,7 @@
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.metadata.types;
public final class AttributeDefinition {
@@ -37,4 +38,32 @@ public final class AttributeDefinition {
this.isComposite = isComposite;
this.reverseAttributeName = reverseAttributeName;
}
@Override
public boolean equals(Object o) {
if (this == o) return true;
if (o == null || getClass() != o.getClass()) return false;
AttributeDefinition that = (AttributeDefinition) o;
if (isComposite != that.isComposite) return false;
if (!dataTypeName.equals(that.dataTypeName)) return false;
if (!multiplicity.equals(that.multiplicity)) return false;
if (!name.equals(that.name)) return false;
if (reverseAttributeName != null ? !reverseAttributeName.equals(that.reverseAttributeName) : that
.reverseAttributeName != null)
return false;
return true;
}
@Override
public int hashCode() {
int result = name.hashCode();
result = 31 * result + dataTypeName.hashCode();
result = 31 * result + multiplicity.hashCode();
result = 31 * result + (isComposite ? 1 : 0);
result = 31 * result + (reverseAttributeName != null ? reverseAttributeName.hashCode() : 0);
return result;
}
}
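Why value equality here at all: the round-trip test at the bottom of this commit compares definitions parsed from JSON against definitions parsed from re-serialized JSON, and that comparison is only meaningful once `equals`/`hashCode` work field-by-field. A minimal sketch (constructor arguments taken from the diff above; this is illustrative, not part of the commit):

```scala
import org.apache.hadoop.metadata.types.{AttributeDefinition, DataTypes, Multiplicity}

// Two definitions built from the same data now compare equal.
val a1 = new AttributeDefinition("name", DataTypes.STRING_TYPE.getName, Multiplicity.REQUIRED, false, null)
val a2 = new AttributeDefinition("name", DataTypes.STRING_TYPE.getName, Multiplicity.REQUIRED, false, null)
assert(a1 == a2)                    // field-by-field, not reference identity
assert(a1.hashCode == a2.hashCode)  // equal values must hash alike
```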
@@ -433,6 +433,15 @@ public class DataTypes {
static String ARRAY_TYPE_PREFIX = "array<";
static String ARRAY_TYPE_SUFFIX = ">";
public static String arrayTypeName(String elemTypeName) {
return String.format("%s%s%s", ARRAY_TYPE_PREFIX, elemTypeName, ARRAY_TYPE_SUFFIX);
}
public static String arrayTypeName(IDataType elemType) {
return arrayTypeName(elemType.getName());
}
public static class ArrayType extends AbstractDataType<ImmutableCollection<?>> {
private IDataType elemType;
@@ -441,7 +450,7 @@ public class DataTypes {
public ArrayType(IDataType elemType) {
assert elemType != null;
this.elemType = elemType;
this.nm = arrayTypeName(elemType);
}
public IDataType getElemType() {
@@ -521,6 +530,16 @@ public class DataTypes {
static String MAP_TYPE_PREFIX = "map<";
static String MAP_TYPE_SUFFIX = ">";
public static String mapTypeName(String keyTypeName, String valueTypeName) {
return String.format("%s%s,%s%s", MAP_TYPE_PREFIX,
keyTypeName, valueTypeName, MAP_TYPE_SUFFIX);
}
public static String mapTypeName(IDataType keyType, IDataType valueType) {
return mapTypeName(keyType.getName(), valueType.getName());
}
public static class MapType extends AbstractDataType<ImmutableMap<?, ?>> {
private IDataType keyType;
@@ -532,8 +551,7 @@ public class DataTypes {
assert valueType != null;
this.keyType = keyType;
this.valueType = valueType;
this.nm = mapTypeName(keyType, valueType);
}
public IDataType getKeyType() {
...
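The extracted `arrayTypeName`/`mapTypeName` helpers let callers build the canonical type-name string without instantiating a type object first — the JSON layer and the test below rely on exactly that. A small sketch of the expected output, assuming the primitive type names are their lowercase forms (e.g. `int`, `string`, `double`):

```scala
import org.apache.hadoop.metadata.types.DataTypes

// The helpers only assemble name strings; no ArrayType/MapType is created.
val arrName = DataTypes.arrayTypeName(DataTypes.INT_TYPE)                          // "array<int>"
val mapName = DataTypes.mapTypeName(DataTypes.STRING_TYPE, DataTypes.DOUBLE_TYPE)  // "map<string,double>"
```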
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.metadata.types;
import java.util.Arrays;
public final class EnumTypeDefinition {
public final String name;
public final EnumValue[] enumValues;
public EnumTypeDefinition(String name, EnumValue...enumValues) {
this.name = name;
this.enumValues = enumValues;
}
@Override
public boolean equals(Object o) {
if (this == o) return true;
if (o == null || getClass() != o.getClass()) return false;
EnumTypeDefinition that = (EnumTypeDefinition) o;
if (!Arrays.equals(enumValues, that.enumValues)) return false;
if (!name.equals(that.name)) return false;
return true;
}
@Override
public int hashCode() {
int result = name.hashCode();
result = 31 * result + Arrays.hashCode(enumValues);
return result;
}
}
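`EnumTypeDefinition` is a plain value carrier for an enum's name and values; note that `equals` goes through `Arrays.equals` on `enumValues`, since Java arrays only get identity equality by default. Construction mirrors what the test further down does:

```scala
import org.apache.hadoop.metadata.types.{EnumTypeDefinition, EnumValue}

// A definition built the same way the test below builds "LockLevel".
val lockLevel = new EnumTypeDefinition("LockLevel",
  new EnumValue("DB", 1),
  new EnumValue("TABLE", 2),
  new EnumValue("PARTITION", 3))
```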
@@ -20,14 +20,56 @@ package org.apache.hadoop.metadata.types;
import com.google.common.collect.ImmutableList;
import java.util.Collection;
public class HierarchicalTypeDefinition<T extends HierarchicalType> extends StructTypeDefinition {
public final ImmutableList<String> superTypes;
public final String hierarchicalMetaTypeName;
/**
* Used for json deserialization only
* @nopublic
* @param hierarchicalMetaTypeName
* @param typeName
* @param superTypes
* @param attributeDefinitions
* @throws ClassNotFoundException
*/
public HierarchicalTypeDefinition(String hierarchicalMetaTypeName,
String typeName, String[] superTypes,
AttributeDefinition[] attributeDefinitions) throws ClassNotFoundException {
this((Class<T>) Class.forName(hierarchicalMetaTypeName),
typeName, ImmutableList.copyOf(superTypes), attributeDefinitions);
}
public HierarchicalTypeDefinition(Class<T> hierarchicalMetaType,
String typeName, ImmutableList<String> superTypes,
AttributeDefinition[] attributeDefinitions) {
super(typeName, attributeDefinitions);
hierarchicalMetaTypeName = hierarchicalMetaType.getName();
this.superTypes = superTypes == null ? ImmutableList.<String>of() : superTypes;
}
@Override
public boolean equals(Object o) {
if (this == o) return true;
if (o == null || getClass() != o.getClass()) return false;
if (!super.equals(o)) return false;
HierarchicalTypeDefinition that = (HierarchicalTypeDefinition) o;
if (!hierarchicalMetaTypeName.equals(that.hierarchicalMetaTypeName)) return false;
if (!superTypes.equals(that.superTypes)) return false;
return true;
}
@Override
public int hashCode() {
int result = super.hashCode();
result = 31 * result + superTypes.hashCode();
result = 31 * result + hierarchicalMetaTypeName.hashCode();
return result;
}
}
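The new `String`-based constructor exists because a JSON deserializer only has the meta-type's class name as text, so it must rebuild the definition reflectively via `Class.forName` (accepting the unchecked cast as the price). A sketch of both construction paths — `attrs` is a hypothetical attribute array, not from the commit:

```scala
import com.google.common.collect.ImmutableList
import org.apache.hadoop.metadata.types.{AttributeDefinition, HierarchicalTypeDefinition, TraitType}

val attrs = Array.empty[AttributeDefinition]  // hypothetical, empty for brevity

// Typed path, for hand-written code:
val byClass = new HierarchicalTypeDefinition[TraitType](
  classOf[TraitType], "SecurityClearance", ImmutableList.of[String], attrs)

// Reflective path, used only by JSON deserialization; throws
// ClassNotFoundException if the meta-type name cannot be loaded:
val byName = new HierarchicalTypeDefinition[TraitType](
  "org.apache.hadoop.metadata.types.TraitType", "SecurityClearance",
  Array.empty[String], attrs)
```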
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.metadata.types;
import java.util.Arrays;
public class StructTypeDefinition {
@@ -12,4 +30,24 @@ public class StructTypeDefinition {
this.typeName = typeName;
this.attributeDefinitions = attributeDefinitions;
}
@Override
public boolean equals(Object o) {
if (this == o) return true;
if (o == null || getClass() != o.getClass()) return false;
StructTypeDefinition that = (StructTypeDefinition) o;
if (!Arrays.equals(attributeDefinitions, that.attributeDefinitions)) return false;
if (!typeName.equals(that.typeName)) return false;
return true;
}
@Override
public int hashCode() {
int result = typeName.hashCode();
result = 31 * result + Arrays.hashCode(attributeDefinitions);
return result;
}
}
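Same pattern as `AttributeDefinition`, with one wrinkle: `attributeDefinitions` is an array, so the comparison and hashing must go through `java.util.Arrays` — arrays inherit identity-based `equals`/`hashCode` from `Object`. In short:

```scala
// Why Arrays.equals/Arrays.hashCode: bare Java arrays compare by reference.
val xs = Array(1, 2, 3)
val ys = Array(1, 2, 3)
assert(!xs.equals(ys))                   // Object.equals: identity, so false
assert(java.util.Arrays.equals(xs, ys))  // element-wise: true
```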
...@@ -111,7 +111,7 @@ public class TypeSystem { ...@@ -111,7 +111,7 @@ public class TypeSystem {
return getDataType(ClassType.class, classDef.typeName); return getDataType(ClassType.class, classDef.typeName);
} }
public Map<String, IDataType> defineTraitTypes(HierarchicalTypeDefinition<TraitType>... traitDefs) public Map<String, IDataType> defineTraitTypes(HierarchicalTypeDefinition<TraitType>...traitDefs)
throws MetadataException { throws MetadataException {
TransientTypeSystem transientTypes = new TransientTypeSystem(ImmutableList.<StructTypeDefinition>of(), TransientTypeSystem transientTypes = new TransientTypeSystem(ImmutableList.<StructTypeDefinition>of(),
ImmutableList.<HierarchicalTypeDefinition<TraitType>>copyOf(traitDefs), ImmutableList.<HierarchicalTypeDefinition<TraitType>>copyOf(traitDefs),
...@@ -145,12 +145,16 @@ public class TypeSystem { ...@@ -145,12 +145,16 @@ public class TypeSystem {
} }
public EnumType defineEnumType(String name, EnumValue...values) throws MetadataException {
return defineEnumType(new EnumTypeDefinition(name, values));
}
public EnumType defineEnumType(EnumTypeDefinition eDef) throws MetadataException {
assert eDef.name != null;
if (types.containsKey(eDef.name)) {
throw new MetadataException(String.format("Redefinition of type %s not supported", eDef.name));
}
EnumType eT = new EnumType(this, eDef.name, eDef.enumValues);
types.put(eDef.name, eT);
return eT;
}
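The old varargs entry point is kept and now simply delegates, so existing callers compile unchanged while the JSON path can hand over an already-parsed `EnumTypeDefinition`. Both forms are equivalent — a sketch against an assumed existing `TypeSystem` named `ts`:

```scala
// Varargs form, unchanged for existing callers:
val txnState = ts.defineEnumType("TxnState",
  new EnumValue("COMMITTED", 1),
  new EnumValue("ABORTED", 2),
  new EnumValue("OPEN", 3))

// Definition form, used when replaying deserialized TypesDef.enumTypes:
val lockLevel = ts.defineEnumType(new EnumTypeDefinition("LockLevel",
  new EnumValue("DB", 1),
  new EnumValue("TABLE", 2),
  new EnumValue("PARTITION", 3)))
```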
...
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.metadata.json
import com.google.common.collect.ImmutableList
import org.apache.hadoop.metadata.MetadataException
import org.apache.hadoop.metadata.types.DataTypes.{ArrayType, MapType, TypeCategory}
import org.apache.hadoop.metadata.types._
import org.json4s.JsonAST.JString
import org.json4s._
import org.json4s.native.Serialization._
case class TypesDef(enumTypes: Seq[EnumTypeDefinition],
structTypes: Seq[StructTypeDefinition],
traitTypes: Seq[HierarchicalTypeDefinition[TraitType]],
classTypes: Seq[HierarchicalTypeDefinition[ClassType]])
/**
* Module for serializing to/from Json.
*
* @example {{{
* val j = TypesSerialization.toJson(typeSystem, "Employee", "Person", "Department", "SecurityClearance")
*
* val typesDef = TypesSerialization.fromJson(jsonStr)
* typesDef.enumTypes.foreach( typeSystem.defineEnumType(_))
* typeSystem.defineTypes(ImmutableList.copyOf(typesDef.structTypes.toArray),
* ImmutableList.copyOf(typesDef.traitTypes.toArray),
* ImmutableList.copyOf(typesDef.classTypes.toArray)
* )
* }}}
*
* @todo Error reporting
*/
object TypesSerialization {
def toJson(ts : TypeSystem, typNames : String*) : String = {
toJson(ts, (typ : IDataType[_]) => typNames.contains(typ.getName))
}
def toJson(ts : TypeSystem, export : IDataType[_] => Boolean) : String = {
implicit val formats = org.json4s.native.Serialization.formats(NoTypeHints) + new MultiplicitySerializer
val typsDef = convertToTypesDef(ts, export)
writePretty(typsDef)
}
def fromJson(jsonStr : String) : TypesDef = {
implicit val formats = org.json4s.native.Serialization.formats(NoTypeHints) + new MultiplicitySerializer
read[TypesDef](jsonStr)
}
private def convertAttributeInfoToAttributeDef(aInfo : AttributeInfo) = {
new AttributeDefinition(aInfo.name, aInfo.dataType().getName, aInfo.multiplicity,
aInfo.isComposite, aInfo.reverseAttributeName)
}
private def convertEnumTypeToEnumTypeDef(et : EnumType) = {
import scala.collection.JavaConversions._
val eVals :Seq[EnumValue] = et.valueMap.values().toSeq
new EnumTypeDefinition(et.name, eVals:_*)
}
private def convertStructTypeToStructDef(st : StructType) : StructTypeDefinition = {
import scala.collection.JavaConversions._
val aDefs : Iterable[AttributeDefinition] =
st.fieldMapping.fields.values().map(convertAttributeInfoToAttributeDef(_))
new StructTypeDefinition(st.name, aDefs.toArray)
}
private def convertTraitTypeToHierarchicalTypeDefintion(tt : TraitType) : HierarchicalTypeDefinition[TraitType] = {
import scala.collection.JavaConversions._
val aDefs : Iterable[AttributeDefinition] =
tt.immediateAttrs.map(convertAttributeInfoToAttributeDef(_))
new HierarchicalTypeDefinition[TraitType](classOf[TraitType], tt.name, tt.superTypes, aDefs.toArray)
}
private def convertClassTypeToHierarchicalTypeDefintion(tt : ClassType) : HierarchicalTypeDefinition[ClassType] = {
import scala.collection.JavaConversions._
val aDefs : Iterable[AttributeDefinition] =
tt.immediateAttrs.map(convertAttributeInfoToAttributeDef(_))
new HierarchicalTypeDefinition[ClassType](classOf[ClassType], tt.name, tt.superTypes, aDefs.toArray)
}
def convertToTypesDef(ts: TypeSystem, export : IDataType[_] => Boolean) : TypesDef = {
import scala.collection.JavaConversions._
var enumTypes : Seq[EnumTypeDefinition] = Nil
var structTypes : Seq[StructTypeDefinition] = Nil
var traitTypes : Seq[HierarchicalTypeDefinition[TraitType]] = Nil
var classTypes : Seq[HierarchicalTypeDefinition[ClassType]] = Nil
def toTyp(nm : String) = ts.getDataType(classOf[IDataType[_]], nm)
val typs : Iterable[IDataType[_]] = ts.getTypeNames.map(toTyp(_)).filter {(typ : IDataType[_]) =>
!(typ.getTypeCategory eq TypeCategory.PRIMITIVE) && export(typ)
}
typs.foreach {
case typ : ArrayType => ()
case typ : MapType => ()
case typ : EnumType => enumTypes = enumTypes :+ convertEnumTypeToEnumTypeDef(typ)
case typ : StructType => structTypes = structTypes :+ convertStructTypeToStructDef(typ)
case typ : TraitType => traitTypes = traitTypes :+ convertTraitTypeToHierarchicalTypeDefintion(typ)
case typ : ClassType => classTypes = classTypes :+ convertClassTypeToHierarchicalTypeDefintion(typ)
}
TypesDef(enumTypes, structTypes, traitTypes, classTypes)
}
}
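The two `toJson` overloads differ only in how types are selected: one takes explicit type names, the other an arbitrary predicate over `IDataType`. A usage sketch, assuming a populated `TypeSystem` named `ts` and the imports at the top of this file:

```scala
// Select types by name, as in the scaladoc example above:
val someJson = TypesSerialization.toJson(ts, "Person", "Department")

// Or select with a predicate; primitives are filtered out inside
// convertToTypesDef regardless of what the predicate says:
val allJson = TypesSerialization.toJson(ts, (_: IDataType[_]) => true)

// Parse back into a TypesDef, ready to replay into a fresh TypeSystem:
val typesDef: TypesDef = TypesSerialization.fromJson(allJson)
```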
class MultiplicitySerializer extends CustomSerializer[Multiplicity](format => ( {
case JString(m) => m match {
case "optional" => Multiplicity.OPTIONAL
case "required" => Multiplicity.REQUIRED
case "collection" => Multiplicity.COLLECTION
case "set" => Multiplicity.SET
}
}, {
case m : Multiplicity => JString( m match {
case Multiplicity.OPTIONAL => "optional"
case Multiplicity.REQUIRED => "required"
case Multiplicity.COLLECTION => "collection"
case Multiplicity.SET => "set"
}
)
}
))
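`Multiplicity` needs a custom serializer because json4s cannot map the Java singletons to text on its own; the serializer pins each one to a stable lowercase keyword in both directions. A round-trip sketch — `Holder` is a hypothetical wrapper, introduced only so there is a field to serialize:

```scala
import org.apache.hadoop.metadata.types.Multiplicity
import org.json4s._
import org.json4s.native.Serialization
import org.json4s.native.Serialization.{read, write}

implicit val formats = Serialization.formats(NoTypeHints) + new MultiplicitySerializer

case class Holder(m: Multiplicity)  // hypothetical, not part of the commit

val json = write(Holder(Multiplicity.REQUIRED))  // """{"m":"required"}"""
val back = read[Holder](json)
assert(back.m eq Multiplicity.REQUIRED)          // the same singleton comes back
```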
trait TypeHelpers {
def requiredAttr(name: String, dataType: IDataType[_]) =
new AttributeDefinition(name, dataType.getName, Multiplicity.REQUIRED, false, null)
def optionalAttr(name: String, dataTypeName: String) =
new AttributeDefinition(name, dataTypeName, Multiplicity.OPTIONAL, false, null)
def optionalAttr(name: String, dataType: IDataType[_]) =
new AttributeDefinition(name, dataType.getName, Multiplicity.OPTIONAL, false, null)
def structDef(name : String, attrs : AttributeDefinition*) = {
new StructTypeDefinition(name, attrs.toArray)
}
def defineTraits(ts: TypeSystem, tDefs: HierarchicalTypeDefinition[TraitType]*) = {
ts.defineTraitTypes(tDefs:_*)
}
def createTraitTypeDef(name: String, superTypes: Seq[String], attrDefs: AttributeDefinition*):
HierarchicalTypeDefinition[TraitType] = {
val sts = ImmutableList.copyOf(superTypes.toArray)
return new HierarchicalTypeDefinition[TraitType](classOf[TraitType], name,
sts, attrDefs.toArray)
}
def createClassTypeDef(name: String, superTypes: Seq[String], attrDefs: AttributeDefinition*):
HierarchicalTypeDefinition[ClassType] = {
val sts = ImmutableList.copyOf(superTypes.toArray)
return new HierarchicalTypeDefinition[ClassType](classOf[ClassType], name,
sts, attrDefs.toArray)
}
@throws(classOf[MetadataException])
def defineClassType(ts : TypeSystem, classDef: HierarchicalTypeDefinition[ClassType]): ClassType = {
ts.defineTypes(ImmutableList.of[StructTypeDefinition],
ImmutableList.of[HierarchicalTypeDefinition[TraitType]],
ImmutableList.of[HierarchicalTypeDefinition[ClassType]](classDef))
return ts.getDataType(classOf[ClassType], classDef.typeName)
}
}
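`TypeHelpers` just shrinks the boilerplate around `AttributeDefinition` and the `define*` calls; mixed into a class (as the test below does with `BaseTest with TypeHelpers`), a struct definition collapses to a one-liner. For instance — the `"Address"` type and its fields are illustrative only:

```scala
// A struct with one required and one optional string attribute.
val addressDef = structDef("Address",
  requiredAttr("street", DataTypes.STRING_TYPE),
  optionalAttr("zip", DataTypes.STRING_TYPE))
```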
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.metadata.json
import com.google.common.collect.ImmutableList
import org.apache.hadoop.metadata.{MetadataException, BaseTest}
import org.apache.hadoop.metadata.types._
import org.junit.{Assert, Test}
class TypesSerializationTest extends BaseTest with TypeHelpers {
@Test def test1: Unit = {
val ts = ms.getTypeSystem
val sDef = structDef("ts1", requiredAttr("a", DataTypes.INT_TYPE),
optionalAttr("b", DataTypes.BOOLEAN_TYPE),
optionalAttr("c", DataTypes.BYTE_TYPE),
optionalAttr("d", DataTypes.SHORT_TYPE),
optionalAttr("e", DataTypes.INT_TYPE),
optionalAttr("f", DataTypes.INT_TYPE),
optionalAttr("g", DataTypes.LONG_TYPE),
optionalAttr("h", DataTypes.FLOAT_TYPE),
optionalAttr("i", DataTypes.DOUBLE_TYPE),
optionalAttr("j", DataTypes.BIGINTEGER_TYPE),
optionalAttr("k", DataTypes.BIGDECIMAL_TYPE),
optionalAttr("l", DataTypes.DATE_TYPE),
optionalAttr("m", DataTypes.arrayTypeName(DataTypes.INT_TYPE)),
optionalAttr("n", DataTypes.arrayTypeName(DataTypes.BIGDECIMAL_TYPE)),
optionalAttr("o", DataTypes.mapTypeName(DataTypes.STRING_TYPE, DataTypes.DOUBLE_TYPE)))
ts.defineTypes(ImmutableList.of[StructTypeDefinition](sDef),
ImmutableList.of[HierarchicalTypeDefinition[TraitType]],
ImmutableList.of[HierarchicalTypeDefinition[ClassType]]
)
val A: HierarchicalTypeDefinition[TraitType] = createTraitTypeDef("A", List(),
requiredAttr("a", DataTypes.INT_TYPE),
optionalAttr("b", DataTypes.BOOLEAN_TYPE),
optionalAttr("c", DataTypes.BYTE_TYPE),
optionalAttr("d", DataTypes.SHORT_TYPE))
val B: HierarchicalTypeDefinition[TraitType] =
createTraitTypeDef("B", Seq("A"), optionalAttr("b", DataTypes.BOOLEAN_TYPE))
val C: HierarchicalTypeDefinition[TraitType] =
createTraitTypeDef("C", Seq("A"), optionalAttr("c", DataTypes.BYTE_TYPE))
val D: HierarchicalTypeDefinition[TraitType] =
createTraitTypeDef("D", Seq("B", "C"), optionalAttr("d", DataTypes.SHORT_TYPE))
defineTraits(ts, A, B, C, D)
ts.defineEnumType("HiveObjectType",
new EnumValue("GLOBAL", 1),
new EnumValue("DATABASE", 2),
new EnumValue("TABLE", 3),
new EnumValue("PARTITION", 4),
new EnumValue("COLUMN", 5))
ts.defineEnumType("PrincipalType",
new EnumValue("USER", 1),
new EnumValue("ROLE", 2),
new EnumValue("GROUP", 3))
ts.defineEnumType("TxnState",
new EnumValue("COMMITTED", 1),
new EnumValue("ABORTED", 2),
new EnumValue("OPEN", 3))
ts.defineEnumType("LockLevel",
new EnumValue("DB", 1),
new EnumValue("TABLE", 2),
new EnumValue("PARTITION", 3))
defineClassType(ts, createClassTypeDef("t4", List(),
requiredAttr("a", DataTypes.INT_TYPE),
optionalAttr("b", DataTypes.BOOLEAN_TYPE),
optionalAttr("c", DataTypes.BYTE_TYPE),
optionalAttr("d", DataTypes.SHORT_TYPE),
optionalAttr("enum1", ts.getDataType(classOf[EnumType], "HiveObjectType")),
optionalAttr("e", DataTypes.INT_TYPE),
optionalAttr("f", DataTypes.INT_TYPE),
optionalAttr("g", DataTypes.LONG_TYPE),
optionalAttr("enum2", ts.getDataType(classOf[EnumType], "PrincipalType")),
optionalAttr("h", DataTypes.FLOAT_TYPE),
optionalAttr("i", DataTypes.DOUBLE_TYPE),
optionalAttr("j", DataTypes.BIGINTEGER_TYPE),
optionalAttr("k", DataTypes.BIGDECIMAL_TYPE),
optionalAttr("enum3", ts.getDataType(classOf[EnumType], "TxnState")),
optionalAttr("l", DataTypes.DATE_TYPE),
optionalAttr("m", ts.defineArrayType(DataTypes.INT_TYPE)),
optionalAttr("n", ts.defineArrayType(DataTypes.BIGDECIMAL_TYPE)),
optionalAttr("o", ts.defineMapType(DataTypes.STRING_TYPE, DataTypes.DOUBLE_TYPE)),
optionalAttr("enum4", ts.getDataType(classOf[EnumType], "LockLevel"))))
val deptTypeDef: HierarchicalTypeDefinition[ClassType] = createClassTypeDef("Department", List(),
requiredAttr("name", DataTypes.STRING_TYPE),
new AttributeDefinition("employees", String.format("array<%s>", "Person"),
Multiplicity.COLLECTION, true, "department"))
val personTypeDef: HierarchicalTypeDefinition[ClassType] = createClassTypeDef("Person", List(),
requiredAttr("name", DataTypes.STRING_TYPE),
new AttributeDefinition("department", "Department", Multiplicity.REQUIRED, false, "employees"),
new AttributeDefinition("manager", "Manager", Multiplicity.OPTIONAL, false, "subordinates")
)
val managerTypeDef: HierarchicalTypeDefinition[ClassType] = createClassTypeDef("Manager", List("Person"),
new AttributeDefinition("subordinates", String.format("array<%s>", "Person"),
Multiplicity.COLLECTION, false, "manager")
)
val securityClearanceTypeDef: HierarchicalTypeDefinition[TraitType] = createTraitTypeDef("SecurityClearance", List(),
requiredAttr("level", DataTypes.INT_TYPE)
)
ts.defineTypes(ImmutableList.of[StructTypeDefinition],
ImmutableList.of[HierarchicalTypeDefinition[TraitType]](securityClearanceTypeDef),
ImmutableList.of[HierarchicalTypeDefinition[ClassType]](deptTypeDef, personTypeDef, managerTypeDef))
val ser = TypesSerialization.toJson(ts, _ => true)
val typesDef1 = TypesSerialization.fromJson(ser)
val ts1 = new TypeSystem()
typesDef1.enumTypes.foreach( ts1.defineEnumType(_))
ts1.defineTypes(ImmutableList.copyOf(typesDef1.structTypes.toArray),
ImmutableList.copyOf(typesDef1.traitTypes.toArray),
ImmutableList.copyOf(typesDef1.classTypes.toArray)
)
val ser2 = TypesSerialization.toJson(ts1, _ => true)
val typesDef2 = TypesSerialization.fromJson(ser2)
Assert.assertEquals(typesDef1, typesDef2)
}
}
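The closing assertion checks stability rather than byte-for-byte idempotence: it compares the `TypesDef` parsed from the first system with the one parsed after a full replay, which is exactly the comparison the new `equals`/`hashCode` implementations make meaningful. The invariant the test exercises, restated — `replay` is a hypothetical name for what the test does inline:

```scala
def replay(defs: TypesDef): TypeSystem = {
  val ts1 = new TypeSystem()
  defs.enumTypes.foreach(ts1.defineEnumType(_))
  ts1.defineTypes(ImmutableList.copyOf(defs.structTypes.toArray),
    ImmutableList.copyOf(defs.traitTypes.toArray),
    ImmutableList.copyOf(defs.classTypes.toArray))
  ts1
}

// Stability: parsing what a replayed system prints yields an equal TypesDef.
val defs1 = TypesSerialization.fromJson(TypesSerialization.toJson(ts, _ => true))
val defs2 = TypesSerialization.fromJson(TypesSerialization.toJson(replay(defs1), _ => true))
assert(defs1 == defs2)
```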