Commit ed7d2bf0 by Harish Butani

InstanceSerialization supports serialization of Typed Instances

parent 215d7400
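The commit adds getValuesMap() to IInstance and its implementations, and widens InstanceSerialization to accept IStruct/IReferenceableInstance, so both untyped Referenceables and converted typed instances can be serialized. A minimal usage sketch of the resulting API follows (not part of the diff; the roundTrip wrapper is illustrative only, and it assumes the "Department" class type is registered and the instance fully populated, as the tests below do via defineHRTypes/defineHRDept):

    import org.apache.hadoop.metadata.typesystem.{ITypedReferenceableInstance, Referenceable}
    import org.apache.hadoop.metadata.typesystem.json.InstanceSerialization
    import org.apache.hadoop.metadata.typesystem.types.{ClassType, Multiplicity, TypeSystem}

    // Sketch only: `roundTrip` is a hypothetical helper; the calls it makes
    // (toJson, fromJsonReferenceable, getDataType, convert) come from the
    // files changed in this commit.
    def roundTrip(ts: TypeSystem, hrDept: Referenceable): Unit = {
      // Untyped instance -> JSON -> untyped instance
      val json: String = InstanceSerialization.toJson(hrDept)
      val copy = InstanceSerialization.fromJsonReferenceable(json)

      // Typed path: convert against the registered class type, then serialize
      // the ITypedReferenceableInstance through the same IStruct-based toJson.
      val deptType: ClassType = ts.getDataType(classOf[ClassType], "Department")
      val typed: ITypedReferenceableInstance = deptType.convert(hrDept, Multiplicity.REQUIRED)
      val typedJson: String = InstanceSerialization.toJson(typed)
      println(typedJson)
    }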
@@ -20,6 +20,8 @@ package org.apache.hadoop.metadata.typesystem;
 import org.apache.hadoop.metadata.MetadataException;
+import java.util.Map;
 /**
  * Represents a Struct or Trait or Object.
  */
@@ -31,4 +33,6 @@ public interface IInstance {
     void set(String attrName, Object val) throws MetadataException;
+    Map<String, Object> getValuesMap() throws MetadataException;
 }
@@ -56,7 +56,7 @@ public class Struct implements IStruct {
         values.put(attrName, value);
     }
-    @InterfaceAudience.Private
+    @Override
     public Map<String, Object> getValuesMap() {
         return values;
     }
...
@@ -22,6 +22,9 @@ import org.apache.hadoop.metadata.MetadataException;
 import org.apache.hadoop.metadata.typesystem.IStruct;
 import org.apache.hadoop.metadata.typesystem.types.DownCastFieldMapping;
+import java.util.HashMap;
+import java.util.Map;
 public class DownCastStructInstance implements IStruct {
     public final String typeName;
@@ -49,6 +52,20 @@ public class DownCastStructInstance implements IStruct {
     public void set(String attrName, Object val) throws MetadataException {
         fieldMapping.set(this, attrName, val);
     }
+    /*
+     * Use only for json serialization
+     * @nonpublic
+     */
+    @Override
+    public Map<String, Object> getValuesMap() throws MetadataException {
+        Map<String,Object> m = new HashMap<>();
+        for (String attr : fieldMapping.fieldNameMap.keySet()) {
+            m.put(attr, get(attr));
+        }
+        return m;
+    }
 }
@@ -27,6 +27,7 @@ import org.apache.hadoop.metadata.typesystem.types.FieldMapping;
 import java.math.BigDecimal;
 import java.math.BigInteger;
 import java.util.Date;
+import java.util.Map;
 import java.util.UUID;
 public class Id implements ITypedReferenceableInstance {
@@ -142,6 +143,11 @@ public class Id implements ITypedReferenceableInstance {
         return null;
     }
+    @Override
+    public Map<String, Object> getValuesMap() throws MetadataException {
+        throw new MetadataException("Get/Set not supported on an Id object");
+    }
     public void setNull(String attrName) throws MetadataException {
         set(attrName, null);
     }
...
@@ -36,6 +36,7 @@ import org.apache.hadoop.metadata.typesystem.types.ValueConversionException;
 import java.math.BigDecimal;
 import java.math.BigInteger;
 import java.util.Date;
+import java.util.HashMap;
 import java.util.Map;
 public class StructInstance implements ITypedStruct {
@@ -233,6 +234,20 @@ public class StructInstance implements ITypedStruct {
         nullFlags[nullPos] = true;
     }
+    /*
+     * Use only for json serialization
+     * @nonpublic
+     */
+    @Override
+    public Map<String, Object> getValuesMap() throws MetadataException {
+        Map<String,Object> m = new HashMap<>();
+        for (String attr : fieldMapping.fields.keySet()) {
+            m.put(attr, get(attr));
+        }
+        return m;
+    }
     public boolean getBoolean(String attrName) throws MetadataException {
         AttributeInfo i = fieldMapping.fields.get(attrName);
         if (i == null) {
...
@@ -266,17 +266,17 @@ object InstanceSerialization {
   def asScala(v : Any) : Any = v match {
     case i : Id => _Id(i._getId(), i.getVersion, i.getClassName)
-    case r : Referenceable => {
+    case r : IReferenceableInstance => {
       val traits = r.getTraits.map { tName =>
-        val t = r.getTrait(tName).asInstanceOf[Struct]
+        val t = r.getTrait(tName).asInstanceOf[IStruct]
         (tName -> _Struct(t.getTypeName, asScala(t.getValuesMap).asInstanceOf[Map[String, AnyRef]]))
       }.toMap
       _Reference(asScala(r.getId).asInstanceOf[_Id],
-        r.typeName, asScala(r.getValuesMap).asInstanceOf[Map[String, AnyRef]],
+        r.getTypeName, asScala(r.getValuesMap).asInstanceOf[Map[String, AnyRef]],
         asScala(r.getTraits).asInstanceOf[List[String]],
         traits.asInstanceOf[Map[String, _Struct]])
     }
-    case s : Struct => _Struct(s.typeName, asScala(s.getValuesMap).asInstanceOf[Map[String, AnyRef]])
+    case s : IStruct => _Struct(s.getTypeName, asScala(s.getValuesMap).asInstanceOf[Map[String, AnyRef]])
     case l : java.util.List[_] => l.asScala.map(e => asScala(e)).toList
     case m : java.util.Map[_, _] => m.asScala.mapValues(v => asScala(v)).toMap
     case _ => v
@@ -301,7 +301,7 @@ object InstanceSerialization {
     writePretty(_s)
   }
-  def toJson(value: Struct, withBigDecimals : Boolean = false): String = {
+  def toJson(value: IStruct, withBigDecimals : Boolean = false): String = {
     _toJson(value, withBigDecimals)
   }
...
@@ -19,8 +19,7 @@
 package org.apache.hadoop.metadata.typesystem.json
 import com.google.common.collect.ImmutableList
-import org.apache.hadoop.metadata._
-import org.apache.hadoop.metadata.typesystem.persistence.{ReferenceableInstance, StructInstance}
+import org.apache.hadoop.metadata.typesystem.persistence.{ReferenceableInstance, StructInstance, Id}
 import org.apache.hadoop.metadata.typesystem.types._
 import org.apache.hadoop.metadata.typesystem.types.utils.TypesUtil
 import org.apache.hadoop.metadata.typesystem.{ITypedReferenceableInstance, ITypedStruct, Referenceable, Struct}
@@ -127,65 +126,7 @@ class SerializationTest extends BaseTest {
     Assert.assertEquals(ts1.toString, "{\n\td : \t1\n\tb : \ttrue\n\tc : \t1\n\ta : \t1\n\tA.B.D.b : \ttrue\n\tA.B.D.c : \t2\n\tA.B.D.d : \t2\n\tA.C.D.a : \t3\n\tA.C.D.b : \tfalse\n\tA.C.D.c : \t3\n\tA.C.D.d : \t3\n}")
   }
-  @Test def testClass {
-    val ts: TypeSystem = getTypeSystem
-    val deptTypeDef: HierarchicalTypeDefinition[ClassType] = TypesUtil.createClassTypeDef(
-      "Department",
-      ImmutableList.of[String],
-      TypesUtil.createRequiredAttrDef("name", DataTypes.STRING_TYPE),
-      new AttributeDefinition("employees", String.format("array<%s>", "Person"),
-        Multiplicity.COLLECTION, true, "department"))
-    val personTypeDef: HierarchicalTypeDefinition[ClassType] = TypesUtil.createClassTypeDef(
-      "Person", ImmutableList.of[String],
-      TypesUtil.createRequiredAttrDef("name", DataTypes.STRING_TYPE),
-      new AttributeDefinition("department", "Department", Multiplicity.REQUIRED, false, "employees"),
-      new AttributeDefinition("manager", "Manager", Multiplicity.OPTIONAL, false, "subordinates"))
-    val managerTypeDef: HierarchicalTypeDefinition[ClassType] = TypesUtil.createClassTypeDef(
-      "Manager", ImmutableList.of[String]("Person"),
-      new AttributeDefinition("subordinates", String.format("array<%s>", "Person"),
-        Multiplicity.COLLECTION, false, "manager"))
-    val securityClearanceTypeDef: HierarchicalTypeDefinition[TraitType] =
-      TypesUtil.createTraitTypeDef("SecurityClearance", ImmutableList.of[String],
-        TypesUtil.createRequiredAttrDef("level", DataTypes.INT_TYPE))
-    ts.defineTypes(ImmutableList.of[StructTypeDefinition],
-      ImmutableList.of[HierarchicalTypeDefinition[TraitType]](securityClearanceTypeDef),
-      ImmutableList.of[HierarchicalTypeDefinition[ClassType]](deptTypeDef, personTypeDef, managerTypeDef)
-    )
-    val hrDept: Referenceable = new Referenceable("Department")
-    val john: Referenceable = new Referenceable("Person")
-    val jane: Referenceable = new Referenceable("Manager", "SecurityClearance")
-    hrDept.set("name", "hr")
-    john.set("name", "John")
-    john.set("department", hrDept)
-    jane.set("name", "Jane")
-    jane.set("department", hrDept)
-    john.set("manager", jane)
-    hrDept.set("employees", ImmutableList.of[Referenceable](john, jane))
-    jane.set("subordinates", ImmutableList.of[Referenceable](john))
-    jane.getTrait("SecurityClearance").set("level", 1)
-    val deptType: ClassType = ts.getDataType(classOf[ClassType], "Department")
-    val hrDept2: ITypedReferenceableInstance = deptType.convert(hrDept, Multiplicity.REQUIRED)
-    println(s"HR Dept Object Graph:\n${hrDept2}\n")
-    implicit val formats = org.json4s.native.Serialization.formats(NoTypeHints) + new TypedStructSerializer +
-      new TypedReferenceableInstanceSerializer + new BigDecimalSerializer + new BigIntegerSerializer
-    val ser = swrite(hrDept2)
-    println(s"HR Dept JSON:\n${pretty(render(parse(ser)))}\n")
-    println(s"HR Dept Object Graph read from JSON:${read[ReferenceableInstance](ser)}\n")
-  }
-  @Test def testReference {
-    val ts: TypeSystem = getTypeSystem
+  def defineHRTypes(ts: TypeSystem) : Unit = {
     val deptTypeDef: HierarchicalTypeDefinition[ClassType] = TypesUtil.createClassTypeDef(
       "Department",
       ImmutableList.of[String],
@@ -196,8 +137,7 @@ class SerializationTest extends BaseTest {
       "Person", ImmutableList.of[String],
       TypesUtil.createRequiredAttrDef("name", DataTypes.STRING_TYPE),
       new AttributeDefinition("department", "Department", Multiplicity.REQUIRED, false, "employees"),
-      new AttributeDefinition("manager", "Manager", Multiplicity.OPTIONAL, false, "subordinates")
-    )
+      new AttributeDefinition("manager", "Manager", Multiplicity.OPTIONAL, false, "subordinates"))
     val managerTypeDef: HierarchicalTypeDefinition[ClassType] = TypesUtil.createClassTypeDef(
       "Manager", ImmutableList.of[String]("Person"),
       new AttributeDefinition("subordinates", String.format("array<%s>", "Person"),
@@ -211,6 +151,9 @@ class SerializationTest extends BaseTest {
       ImmutableList.of[HierarchicalTypeDefinition[ClassType]](deptTypeDef, personTypeDef, managerTypeDef)
     )
+  }
+
+  def defineHRDept() : Referenceable = {
     val hrDept: Referenceable = new Referenceable("Department")
     val john: Referenceable = new Referenceable("Person")
     val jane: Referenceable = new Referenceable("Manager", "SecurityClearance")
@@ -221,8 +164,36 @@ class SerializationTest extends BaseTest {
     jane.set("department", hrDept.getId)
     john.set("manager", jane.getId)
     hrDept.set("employees", ImmutableList.of[Referenceable](john, jane))
-    jane.set("subordinates", ImmutableList.of[Referenceable](john))
+    jane.set("subordinates", ImmutableList.of[Id](john.getId))
     jane.getTrait("SecurityClearance").set("level", 1)
+    hrDept
+  }
+
+  @Test def testClass {
+    val ts: TypeSystem = getTypeSystem
+    defineHRTypes(ts)
+    val hrDept: Referenceable = defineHRDept()
+
+    val deptType: ClassType = ts.getDataType(classOf[ClassType], "Department")
+    val hrDept2: ITypedReferenceableInstance = deptType.convert(hrDept, Multiplicity.REQUIRED)
+    println(s"HR Dept Object Graph:\n${hrDept2}\n")
+
+    implicit val formats = org.json4s.native.Serialization.formats(NoTypeHints) + new TypedStructSerializer +
+      new TypedReferenceableInstanceSerializer + new BigDecimalSerializer + new BigIntegerSerializer
+
+    val ser = swrite(hrDept2)
+    println(s"HR Dept JSON:\n${pretty(render(parse(ser)))}\n")
+    println(s"HR Dept Object Graph read from JSON:${read[ReferenceableInstance](ser)}\n")
+  }
+
+  @Test def testReference {
+    val ts: TypeSystem = getTypeSystem
+    defineHRTypes(ts)
+    val hrDept: Referenceable = defineHRDept()
+
     val jsonStr = InstanceSerialization.toJson(hrDept)
@@ -242,4 +213,30 @@ class SerializationTest extends BaseTest {
     println(s"HR Dept Object Graph read from JSON:${read[ReferenceableInstance](ser)}\n")
   }
+
+  @Test def testReference2 {
+    val ts: TypeSystem = getTypeSystem
+    defineHRTypes(ts)
+    val hrDept: Referenceable = defineHRDept()
+
+    val deptType: ClassType = ts.getDataType(classOf[ClassType], "Department")
+    val hrDept2: ITypedReferenceableInstance = deptType.convert(hrDept, Multiplicity.REQUIRED)
+
+    val jsonStr = InstanceSerialization.toJson(hrDept2)
+    val hrDept3 = InstanceSerialization.fromJsonReferenceable(jsonStr)
+
+    val hrDept4: ITypedReferenceableInstance = deptType.convert(hrDept2, Multiplicity.REQUIRED)
+    println(s"HR Dept Object Graph:\n${hrDept4}\n")
+
+    implicit val formats = org.json4s.native.Serialization.formats(NoTypeHints) + new TypedStructSerializer +
+      new TypedReferenceableInstanceSerializer + new BigDecimalSerializer + new BigIntegerSerializer
+
+    val ser = swrite(hrDept4)
+    println(s"HR Dept JSON:\n${pretty(render(parse(ser)))}\n")
+    println(s"HR Dept Object Graph read from JSON:${read[ReferenceableInstance](ser)}\n")
+  }
 }
\ No newline at end of file