Commit 643b6489 by Harish Butani

introduce serialization for structs and referenceables

parent 858348b0
@@ -20,13 +20,11 @@ package org.apache.hadoop.metadata.repository.memory;
 import com.google.common.collect.ImmutableList;
 import org.apache.hadoop.metadata.MetadataException;
-import org.apache.hadoop.metadata.typesystem.TypesDef;
+import org.apache.hadoop.metadata.typesystem.*;
+import org.apache.hadoop.metadata.typesystem.json.InstanceSerialization$;
 import org.apache.hadoop.metadata.typesystem.json.Serialization$;
 import org.apache.hadoop.metadata.typesystem.json.TypesSerialization$;
 import org.apache.hadoop.metadata.repository.BaseTest;
-import org.apache.hadoop.metadata.typesystem.ITypedReferenceableInstance;
-import org.apache.hadoop.metadata.typesystem.Referenceable;
-import org.apache.hadoop.metadata.typesystem.Struct;
 import org.apache.hadoop.metadata.typesystem.types.AttributeDefinition;
 import org.apache.hadoop.metadata.typesystem.types.ClassType;
 import org.apache.hadoop.metadata.typesystem.types.DataTypes;
@@ -78,8 +76,8 @@ public class InstanceE2ETest extends BaseTest {
         return typeDefinitions;
     }
 
-    protected ITypedReferenceableInstance createHiveTableInstance(TypeSystem typeSystem)
+    protected Referenceable createHiveTableReferenceable()
             throws MetadataException {
         Referenceable databaseInstance = new Referenceable("hive_database");
         databaseInstance.set("name", "hive_database");
         databaseInstance.set("description", "foo database");
@@ -95,8 +93,13 @@ public class InstanceE2ETest extends BaseTest {
         tableInstance.set("hive_fetl", traitInstance);
 
+        return tableInstance;
+    }
+
+    protected ITypedReferenceableInstance createHiveTableInstance(TypeSystem typeSystem)
+            throws MetadataException {
         ClassType tableType = typeSystem.getDataType(ClassType.class, "hive_table");
-        return tableType.convert(tableInstance, Multiplicity.REQUIRED);
+        return tableType.convert(createHiveTableReferenceable(), Multiplicity.REQUIRED);
     }
 
     @Test
@@ -136,4 +139,27 @@ public class InstanceE2ETest extends BaseTest {
         i = Serialization$.MODULE$.fromJson(jsonStr);
         System.out.println(i);
     }
+
+    @Test
+    public void testInstanceSerialization() throws MetadataException {
+        TypeSystem ts = getTypeSystem();
+        createHiveTypes(ts);
+
+        Referenceable r = createHiveTableReferenceable();
+        String jsonStr = InstanceSerialization$.MODULE$.toJson(r, true);
+        Referenceable r1 = InstanceSerialization$.MODULE$.fromJsonReferenceable(jsonStr, true);
+
+        ClassType tableType = ts.getDataType(ClassType.class, "hive_table");
+
+        /* todo: fix deserialization, so the following convert works
+        ITypedReferenceableInstance i = tableType.convert(r1, Multiplicity.REQUIRED);
+        jsonStr = Serialization$.MODULE$.toJson(i);
+        System.out.println(jsonStr);
+        i = Serialization$.MODULE$.fromJson(jsonStr);
+        System.out.println(i);
+        */
+    }
 }
@@ -22,6 +22,7 @@ import org.apache.hadoop.metadata.MetadataException;
 import org.apache.hadoop.metadata.repository.BaseTest;
 import org.apache.hadoop.metadata.typesystem.ITypedStruct;
 import org.apache.hadoop.metadata.typesystem.Struct;
+import org.apache.hadoop.metadata.typesystem.json.InstanceSerialization$;
 import org.apache.hadoop.metadata.typesystem.types.Multiplicity;
 import org.apache.hadoop.metadata.typesystem.types.StructType;
 import org.junit.Assert;
@@ -82,4 +83,30 @@ public class StructTest extends BaseTest {
                 "}");
     }
 
+    @Test
+    public void testSerialization() throws MetadataException {
+        Struct s = createStruct();
+        String jsonStr = InstanceSerialization$.MODULE$.toJson(s, true);
+        Struct s1 = InstanceSerialization$.MODULE$.fromJsonStruct(jsonStr, true);
+        ITypedStruct ts = structType.convert(s1, Multiplicity.REQUIRED);
+        Assert.assertEquals(ts.toString(), "{\n" +
+                "\ta : \t1\n" +
+                "\tb : \ttrue\n" +
+                "\tc : \t1\n" +
+                "\td : \t2\n" +
+                "\te : \t1\n" +
+                "\tf : \t1\n" +
+                "\tg : \t1\n" +
+                "\th : \t1.0\n" +
+                "\ti : \t1.0\n" +
+                "\tj : \t1\n" +
+                "\tk : \t1\n" +
+                "\tl : \t2014-12-10\n" +
+                "\tm : \t[1, 1]\n" +
+                "\tn : \t[1.100000000000000088817841970012523233890533447265625, 1" +
+                ".100000000000000088817841970012523233890533447265625]\n" +
+                "\to : \t{b=2.0, a=1.0}\n" +
+                "}");
+    }
 }
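A note on the expected output for field n above: constructing a java.math.BigDecimal from the double literal 1.1 captures the exact binary value of that double, which is presumably where the long literal in the assertion comes from. A minimal standalone check:

    import java.math.BigDecimal;

    public class BigDecimalDemo {
        public static void main(String[] args) {
            // 1.1 has no exact binary representation; new BigDecimal(double)
            // exposes the exact value the double actually stores.
            System.out.println(new BigDecimal(1.1));
            // prints 1.100000000000000088817841970012523233890533447265625
        }
    }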
@@ -22,6 +22,7 @@ import com.google.common.collect.ImmutableList;
 import com.google.common.collect.ImmutableMap;
 import org.apache.hadoop.metadata.typesystem.persistence.Id;
 
+import java.util.List;
 import java.util.Map;
 
 /**
@@ -58,6 +59,21 @@ public class Referenceable extends Struct implements IReferenceableInstance {
         traits = ImmutableMap.of();
     }
 
+    /**
+     * @nopublic only use during deserialization
+     * @param guid
+     * @param typeName
+     * @param values
+     */
+    public Referenceable(String guid, String typeName, Map<String, Object> values,
+                         List<String> _traitNames,
+                         Map<String, IStruct> _traits) {
+        super(typeName, values);
+        id = new Id(guid, 0, typeName);
+        traitNames = ImmutableList.copyOf(_traitNames);
+        traits = ImmutableMap.copyOf(_traits);
+    }
+
     @Override
     public ImmutableList<String> getTraits() {
         return traitNames;
...
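The new constructor lets a deserializer rebuild a fully populated Referenceable (guid, attribute values, trait names, and trait instances) in one step; per the @nopublic note, InstanceSerialization.asJava further down in this commit appears to be its intended caller. A hypothetical use, with made-up values:

    import java.util.*;
    import org.apache.hadoop.metadata.typesystem.IStruct;
    import org.apache.hadoop.metadata.typesystem.Referenceable;

    public class ReferenceableRebuildDemo {
        public static Referenceable rebuild() {
            // All values here are illustrative, not from the commit.
            Map<String, Object> values = new HashMap<>();
            values.put("name", "t1");
            Map<String, IStruct> traits = new HashMap<>();  // left empty for brevity
            return new Referenceable("some-guid", "hive_table", values,
                    Arrays.asList("hive_fetl"), traits);
        }
    }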
@@ -73,6 +73,18 @@ public class Id implements ITypedReferenceableInstance {
                 .format("(type: %s, id: %s)", className, isUnassigned() ? "<unassigned>" : "" + id);
     }
 
+    public String getClassName() {
+        return className;
+    }
+
+    public int getVersion() {
+        return version;
+    }
+
+    public String _getId() {
+        return id;
+    }
+
     @Override
     public boolean equals(Object o) {
         if (this == o) return true;
...
@@ -106,7 +106,7 @@ public class ClassType extends HierarchicalType<ClassType, IReferenceableInstanc
         Referenceable r = null;
         Id id = null;
-        if (s.typeName != getName()) {
+        if (!s.typeName.equals(getName())) {
             /*
              * If val is a subType instance; invoke convert on it.
              */
...
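This one-line change (and the identical fix in TypedStructHandler below) addresses a classic Java pitfall: != on Strings compares object identity, not contents, so a typeName built at runtime, e.g. parsed from JSON by the new deserializer, would never match an interned literal even when the text is equal. A minimal demonstration:

    public class StringEqualityDemo {
        public static void main(String[] args) {
            String literal = "hive_table";
            String parsed = new String("hive_table"); // same text, distinct object
            System.out.println(literal != parsed);        // true: identities differ
            System.out.println(!literal.equals(parsed));  // false: contents match
        }
    }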
@@ -53,7 +53,7 @@ public class TypedStructHandler {
             return ts;
         } else if (val instanceof Struct) {
             Struct s = (Struct) val;
-            if (s.typeName != structType.getName()) {
+            if (!s.typeName.equals(structType.getName())) {
                 throw new ValueConversionException(structType, val);
             }
             ITypedStruct ts = createInstance();
...
@@ -18,6 +18,8 @@
 package org.apache.hadoop.metadata.typesystem.json
 
+import java.text.SimpleDateFormat
+
 import org.apache.hadoop.metadata.typesystem._
 import org.apache.hadoop.metadata.typesystem.persistence.{StructInstance, Id, ReferenceableInstance}
 import org.apache.hadoop.metadata.typesystem.types.DataTypes.{ArrayType, MapType, TypeCategory}
@@ -299,3 +301,83 @@ object Serialization {
     read[StructInstance](jsonStr)
   }
 }
+
+object InstanceSerialization {
+
+  case class _Id(id : String, version : Int, typeName : String)
+
+  case class _Struct(typeName : String, values : Map[String, AnyRef])
+
+  case class _Reference(id : _Id,
+                        typeName : String,
+                        values : Map[String, AnyRef],
+                        traitNames : List[String],
+                        traits : Map[String, _Struct])
+
+  def asJava(v : Any) : Any = v match {
+    case i : _Id => new Id(i.id, i.version, i.typeName)
+    case s : _Struct => new Struct(s.typeName, asJava(s.values).asInstanceOf[java.util.Map[String, Object]])
+    case r : _Reference => {
+      new Referenceable(r.id.asInstanceOf[_Id].id,
+        r.typeName,
+        asJava(r.values).asInstanceOf[java.util.Map[String, Object]],
+        asJava(r.traitNames).asInstanceOf[java.util.List[String]],
+        asJava(r.traits).asInstanceOf[java.util.Map[String, IStruct]])
+    }
+    case l : List[_] => l.map(e => asJava(e)).asJava
+    case m : Map[_, _] => m.mapValues(v => asJava(v)).asJava
+    case _ => v
+  }
+
+  def asScala(v : Any) : Any = v match {
+    case i : Id => _Id(i._getId(), i.getVersion, i.getClassName)
+    case r : Referenceable => {
+      val traits = r.getTraits.map { tName =>
+        val t = r.getTrait(tName).asInstanceOf[Struct]
+        (tName -> _Struct(t.getTypeName, asScala(t.getValuesMap).asInstanceOf[Map[String, AnyRef]]))
+      }.toMap
+      _Reference(asScala(r.getId).asInstanceOf[_Id],
+        r.typeName, asScala(r.getValuesMap).asInstanceOf[Map[String, AnyRef]],
+        asScala(r.getTraits).asInstanceOf[List[String]],
+        traits.asInstanceOf[Map[String, _Struct]])
+    }
+    case s : Struct => _Struct(s.typeName, asScala(s.getValuesMap).asInstanceOf[Map[String, AnyRef]])
+    case l : java.util.List[_] => l.asScala.map(e => asScala(e)).toList
+    case m : java.util.Map[_, _] => m.asScala.mapValues(v => asScala(v)).toMap
+    case _ => v
+  }
+
+  val _formats = new DefaultFormats {
+    override val dateFormatter = TypeSystem.getInstance().getDateFormat.asInstanceOf[SimpleDateFormat]
+    override val typeHints = NoTypeHints
+  }
+
+  def buildFormat(withBigDecimals : Boolean) = {
+    if (withBigDecimals)
+      _formats + new BigDecimalSerializer + new BigIntegerSerializer
+    else
+      _formats
+  }
+
+  def _toJson(value: AnyRef, withBigDecimals : Boolean = false): String = {
+    implicit val formats = buildFormat(withBigDecimals)
+    val _s : AnyRef = asScala(value).asInstanceOf[AnyRef]
+    writePretty(_s)
+  }
+
+  def toJson(value: Struct, withBigDecimals : Boolean = false): String = {
+    _toJson(value, withBigDecimals)
+  }
+
+  def fromJsonStruct(jsonStr: String, withBigDecimals : Boolean = false): Struct = {
+    implicit val formats = buildFormat(withBigDecimals)
+    val _s = read[_Struct](jsonStr)
+    asJava(_s).asInstanceOf[Struct]
+  }
+
+  //def toJsonReferenceable(value: Referenceable, withBigDecimals : Boolean = false): String = _toJson(value, withBigDecimals)
+
+  def fromJsonReferenceable(jsonStr: String, withBigDecimals : Boolean = false): Referenceable = {
+    implicit val formats = buildFormat(withBigDecimals)
+    val _s = read[_Reference](jsonStr)
+    asJava(_s).asInstanceOf[Referenceable]
+  }
+}
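Taken together with the tests above, the round trip this commit enables looks like the following from Java; a sketch that assumes it runs inside InstanceE2ETest, where the hive_table types are registered and createHiveTableReferenceable() is defined:

    // Mirrors testInstanceSerialization above.
    Referenceable r = createHiveTableReferenceable();
    String json = InstanceSerialization$.MODULE$.toJson(r, true);  // true: keep BigDecimals
    Referenceable r1 = InstanceSerialization$.MODULE$.fromJsonReferenceable(json, true);
    // Converting r1 back to an ITypedReferenceableInstance via ClassType.convert
    // is still open; the test above keeps that step commented out as a todo.

The withBigDecimals flag adds the BigDecimalSerializer and BigIntegerSerializer to the json4s formats, presumably so that exact decimal values survive the round trip instead of being read back as doubles.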