Commit 1a3dc0e2 by Harish Butani

Initial Commit

parent a4f51455
# Eclipse
.classpath
.project
.settings/
# Intellij
.idea/
*.iml
*.iws
# Mac
.DS_Store
# Maven
log/
target/
# Emacs
*~
This diff is collapsed. Click to expand it.
package org.apache.metadata;
/**
 * Minimal contract for a struct-like value: a named type plus attribute
 * access by name.
 */
public interface IStruct {
/** @return the name of this instance's type */
String getTypeName();
/**
 * Returns the value of the named attribute.
 * @throws MetadataException if the attribute cannot be read for this type
 */
Object get(String attrName) throws MetadataException;
/**
 * Sets the named attribute to the given value.
 * @throws MetadataException if the attribute cannot be written for this type
 */
void set(String attrName, Object val) throws MetadataException;
}
package org.apache.metadata;
/**
 * Base checked exception for the metadata layer. Mirrors the standard
 * {@link Exception} constructors so callers can supply a message and/or cause.
 */
public class MetadataException extends Exception {

    // Exception is Serializable; pin the serial form explicitly.
    private static final long serialVersionUID = 1L;

    /** Creates an exception with no detail message. */
    public MetadataException() {
    }

    /** @param message human-readable description of the failure */
    public MetadataException(String message) {
        super(message);
    }

    /**
     * @param message human-readable description of the failure
     * @param cause   underlying exception that triggered this one
     */
    public MetadataException(String message, Throwable cause) {
        super(message, cause);
    }

    /** @param cause underlying exception that triggered this one */
    public MetadataException(Throwable cause) {
        super(cause);
    }

    /**
     * @param enableSuppression  whether suppression is enabled
     * @param writableStackTrace whether the stack trace should be writable
     */
    public MetadataException(String message, Throwable cause, boolean enableSuppression, boolean writableStackTrace) {
        super(message, cause, enableSuppression, writableStackTrace);
    }
}
package org.apache.metadata;
import org.apache.metadata.storage.IRepository;
import org.apache.metadata.types.TypeSystem;
/**
 * Ties together the active repository and type system. The active service is
 * tracked per-thread via a ThreadLocal and read through the static accessors.
 */
public class MetadataService {

    public static final ThreadLocal<MetadataService> currentSvc = new ThreadLocal<MetadataService>();

    final IRepository repo;
    final TypeSystem typeSystem;

    /** Binds the given service to the calling thread. */
    public static void setCurrentService(MetadataService svc) {
        currentSvc.set(svc);
    }

    /** @throws MetadataException when no service is bound to this thread */
    public static MetadataService getCurrentService() throws MetadataException {
        final MetadataService svc = currentSvc.get();
        if (svc != null) {
            return svc;
        }
        throw new MetadataException("No MetadataService associated with current thread");
    }

    /** @throws MetadataException when no repository is bound to this thread */
    public static IRepository getCurrentRepository() throws MetadataException {
        final MetadataService svc = currentSvc.get();
        final IRepository repository = (svc == null) ? null : svc.getRepository();
        if (repository != null) {
            return repository;
        }
        throw new MetadataException("No Repository associated with current thread");
    }

    /** @throws MetadataException when no type system is bound to this thread */
    public static TypeSystem getCurrentTypeSystem() throws MetadataException {
        final MetadataService svc = currentSvc.get();
        final TypeSystem system = (svc == null) ? null : svc.getTypeSystem();
        if (system != null) {
            return system;
        }
        throw new MetadataException("No TypeSystem associated with current thread");
    }

    public MetadataService(IRepository repo, TypeSystem typeSystem) {
        this.typeSystem = typeSystem;
        this.repo = repo;
    }

    public IRepository getRepository() {
        return repo;
    }

    public TypeSystem getTypeSystem() {
        return typeSystem;
    }
}
package org.apache.metadata;
import java.util.HashMap;
import java.util.Map;
/**
 * Mutable, untyped struct: a type name plus a name-to-value map of attributes.
 * No validation is performed here; typed conversion happens elsewhere.
 */
public class Struct implements IStruct {

    public final String typeName;
    private final Map<String, Object> values;

    /** Creates an empty struct of the given type. */
    public Struct(String typeName) {
        this.typeName = typeName;
        this.values = new HashMap<String, Object>();
    }

    /**
     * Creates a struct pre-populated with a copy of the given attribute values.
     * @nopublic
     */
    public Struct(String typeName, Map<String, Object> values) {
        this(typeName);
        this.values.putAll(values);
    }

    @Override
    public String getTypeName() {
        return typeName;
    }

    @Override
    public Object get(String attrName) {
        return values.get(attrName);
    }

    @Override
    public void set(String attrName, Object value) {
        values.put(attrName, value);
    }

    /**
     * Direct access to the backing map; mutations affect this struct.
     * @nopublic
     * @return the live attribute map
     */
    public Map<String, Object> getValuesMap() {
        return values;
    }
}
package org.apache.metadata.storage;
import java.text.DateFormat;
/**
 * Storage backend abstraction. Currently exposes only formatting policy for
 * date/timestamp values and a null-handling policy for collections.
 */
public interface IRepository {
/** Format for date values. NOTE(review): DateFormat instances are not thread-safe — callers/implementations must account for that. */
DateFormat getDateFormat();
/** Format for timestamp values (same thread-safety caveat as getDateFormat). */
DateFormat getTimestampFormat();
/** @return true if null elements are permitted inside collection values */
boolean allowNullsInCollections();
}
package org.apache.metadata.storage;
/**
 * Identity of a stored instance: a numeric id plus the name of its class.
 * Immutable. Value equality (both fields) is defined so Ids can be used
 * safely as map keys and compared with equals().
 */
public class Id {

    /** Sentinel id for instances that have not been assigned a real id yet. */
    public static final int UNASSIGNED = -1;

    public final int id;
    public final String className;

    public Id(int id, String className) {
        this.id = id;
        this.className = className;
    }

    /** Creates an id in the unassigned state for the given class. */
    public Id(String className) {
        this(UNASSIGNED, className);
    }

    /** @return true while this instance has not been given a real id */
    public boolean isUnassigned() {
        return id == UNASSIGNED;
    }

    // Value semantics were missing; without equals/hashCode two Ids with the
    // same fields compared unequal and misbehaved as map keys.
    @Override
    public boolean equals(Object o) {
        if (this == o) {
            return true;
        }
        if (!(o instanceof Id)) {
            return false;
        }
        Id other = (Id) o;
        return id == other.id
                && (className == null ? other.className == null : className.equals(other.className));
    }

    @Override
    public int hashCode() {
        return 31 * id + (className == null ? 0 : className.hashCode());
    }

    @Override
    public String toString() {
        return "Id{id=" + id + ", className=" + className + '}';
    }
}
package org.apache.metadata.storage;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableMap;
import org.apache.metadata.IStruct;
import org.apache.metadata.MetadataException;
import org.apache.metadata.types.StructType;
import java.math.BigDecimal;
import java.math.BigInteger;
import java.util.Date;
/**
 * Type-aware struct instance. Attribute values live in per-kind backing
 * arrays owned by this object; get/set delegate to the owning StructType,
 * which knows each attribute's slot and storage array.
 */
public class TypedStruct implements IStruct {
public final StructType dataType;
// nullFlags[i] == true means attribute slot i currently holds no value.
public final boolean nullFlags[];
// One backing array per physical storage kind; only the slots the
// StructType assigns to attributes of that kind are meaningful.
public final boolean[] bools;
public final byte[] bytes;
public final short[] shorts;
public final int[] ints;
public final long[] longs;
public final float[] floats;
public final double[] doubles;
public final BigDecimal[] bigDecimals;
public final BigInteger[] bigIntegers;
public final Date[] dates;
public final String[] strings;
public final ImmutableList<Object>[] arrays;
public final ImmutableMap<Object,Object>[] maps;
public final TypedStruct[] structs;
public TypedStruct(StructType dataType, boolean[] nullFlags, boolean[] bools, byte[] bytes, short[] shorts, int[] ints,
long[] longs, float[] floats, double[] doubles,
BigDecimal[] bigDecimals, BigInteger[] bigIntegers, Date[] dates, String[] strings,
ImmutableList<Object>[] arrays, ImmutableMap<Object, Object>[] maps, TypedStruct[] structs) {
assert dataType != null;
this.dataType = dataType;
this.nullFlags = nullFlags;
this.bools = bools;
this.bytes = bytes;
this.shorts = shorts;
this.ints = ints;
this.longs = longs;
this.floats = floats;
this.doubles = doubles;
this.bigDecimals = bigDecimals;
this.bigIntegers = bigIntegers;
this.dates = dates;
this.strings = strings;
this.arrays = arrays;
this.maps = maps;
this.structs = structs;
// NOTE(review): this unconditionally marks every attribute as null,
// overwriting whatever flags the caller passed in. Presumably intended
// as "start empty" initialization (slots become non-null as values are
// set), but confirm callers never expect their nullFlags to be honored.
for(int i=0; i<nullFlags.length; i++) {
nullFlags[i] = true;
}
}
@Override
public String getTypeName() {
return dataType.getName();
}
// Attribute reads/writes are delegated to the StructType.
@Override
public Object get(String attrName) throws MetadataException {
return dataType.get(this, attrName);
}
@Override
public void set(String attrName, Object val) throws MetadataException {
dataType.set(this, attrName, val);
}
// Renders via StructType.output; the checked MetadataException is tunneled
// as a RuntimeException because toString() cannot throw checked exceptions.
@Override
public String toString() {
try {
StringBuilder b = new StringBuilder();
dataType.output(this, b, "");
return b.toString();
} catch(MetadataException me) {
throw new RuntimeException(me);
}
}
}
package org.apache.metadata.storage.memory;
import org.apache.metadata.storage.IRepository;
import java.text.DateFormat;
import java.text.SimpleDateFormat;
/**
 * In-memory repository implementation.
 *
 * SimpleDateFormat is NOT thread-safe, and the previous implementation handed
 * out one shared static instance to all callers; concurrent use of a shared
 * SimpleDateFormat corrupts parsing/formatting state. A fresh formatter is
 * now created per call (cheap, and safe for any number of threads).
 */
public class MemRepository implements IRepository {

    private static final String DATE_PATTERN = "yyyy-MM-dd";
    private static final String TIMESTAMP_PATTERN = "yyyy-MM-dd HH:mm:ss";

    /** @return a new date formatter ("yyyy-MM-dd"); safe for the caller to use freely */
    @Override
    public DateFormat getDateFormat() {
        return new SimpleDateFormat(DATE_PATTERN);
    }

    /** @return a new timestamp formatter ("yyyy-MM-dd HH:mm:ss") */
    @Override
    public DateFormat getTimestampFormat() {
        return new SimpleDateFormat(TIMESTAMP_PATTERN);
    }

    /** Nulls are not permitted inside collection values in this repository. */
    @Override
    public boolean allowNullsInCollections() {
        return false;
    }
}
package org.apache.metadata.types;
import org.apache.metadata.MetadataException;
import java.io.IOException;
/**
 * Common base for datatype implementations: shared null-conversion handling
 * and text-output helpers.
 */
abstract class AbstractDataType<T> implements IDataType<T> {
/**
 * Standard handling for a null input: returns null when the multiplicity
 * permits absence, otherwise throws.
 * @throws MetadataException (NullConversionException) when null is not allowed
 */
protected T convertNull( Multiplicity m) throws MetadataException {
if (!m.nullAllowed() ) {
throw new ValueConversionException.NullConversionException(m);
}
return null;
}
// Appends prefix + val to buf, wrapping any IO failure in MetadataException.
protected void outputVal(String val, Appendable buf, String prefix) throws MetadataException {
try {
buf.append(prefix).append(val);
} catch(IOException ie) {
throw new MetadataException(ie);
}
}
// Default rendering: the value's toString(), or "<null>" for null values.
@Override
public void output(T val, Appendable buf, String prefix) throws MetadataException {
outputVal(val == null ? "<null>" : val.toString(), buf, prefix);
}
}
package org.apache.metadata.types;
/**
 * Immutable, by-name description of a struct attribute, used as input when
 * defining types; the TypeSystem resolves it into an AttributeInfo.
 */
public final class AttributeDefinition {
public final String name;
// Name of the attribute's datatype; resolved against the TypeSystem later.
public final String dataTypeName;
public final Multiplicity multiplicity;
// NOTE(review): the semantics of isComposite are not visible in this file —
// presumably ownership/containment of the referenced instance; confirm.
public final boolean isComposite;
/**
 * If this is a reference attribute, then the name of the attribute on the Class
 * that this refers to.
 */
public final String reverseAttributeName;
public AttributeDefinition(String name, String dataTypeName, Multiplicity multiplicity,
boolean isComposite, String reverseAttributeName) {
this.name = name;
this.dataTypeName = dataTypeName;
this.multiplicity = multiplicity;
this.isComposite = isComposite;
this.reverseAttributeName = reverseAttributeName;
}
}
package org.apache.metadata.types;
import org.apache.metadata.MetadataException;
/**
 * Resolved form of an AttributeDefinition: the datatype name has been looked
 * up in the TypeSystem and replaced with the actual IDataType.
 */
public class AttributeInfo {
public final String name;
// Non-final on purpose: TypeSystem.defineStructType patches this after
// construction for attributes that recursively reference the type being
// defined (the placeholder is swapped for the final StructType).
private IDataType dataType;
public final Multiplicity multiplicity;
public final boolean isComposite;
/**
 * If this is a reference attribute, then the name of the attribute on the Class
 * that this refers to.
 */
public final String reverseAttributeName;
AttributeInfo(TypeSystem t, AttributeDefinition def) throws MetadataException {
this.name = def.name;
this.dataType = t.getDataType(def.dataTypeName);
this.multiplicity = def.multiplicity;
this.isComposite = def.isComposite;
this.reverseAttributeName = def.reverseAttributeName;
}
public IDataType dataType() {
return dataType;
}
// Package-private: only the TypeSystem should rebind the datatype.
void setDataType(IDataType dT) {
dataType = dT;
}
}
package org.apache.metadata.types;
import org.apache.metadata.types.DataTypes.TypeCategory;
import org.apache.metadata.MetadataException;
/**
 * Contract every datatype implements: identity, conversion of raw values into
 * this type's representation, categorisation, and text output.
 */
public interface IDataType<T> {
/** @return the unique name this type is registered under */
String getName();
/**
 * Converts a raw value to this datatype, enforcing the given multiplicity.
 * @throws MetadataException if the value cannot be converted
 */
T convert(Object val, Multiplicity m) throws MetadataException;
TypeCategory getTypeCategory();
/** Appends a textual rendering of val (prefixed) to buf. */
void output(T val, Appendable buf, String prefix) throws MetadataException;
}
package org.apache.metadata.types;
/**
 * Cardinality constraint for an attribute: an inclusive [lower, upper] range
 * plus a uniqueness flag. The constructor is private, so only the four
 * predefined constants below can ever exist.
 */
public final class Multiplicity {

    public final int lower;
    public final int upper;
    public final boolean isUnique;

    private Multiplicity(int lo, int hi, boolean unique) {
        assert lo >= 0;
        assert hi >= 1;
        assert hi >= lo;
        this.lower = lo;
        this.upper = hi;
        this.isUnique = unique;
    }

    /** An attribute may be absent (null) only when its lower bound is zero. */
    public boolean nullAllowed() {
        return lower == 0;
    }

    @Override
    public String toString() {
        return String.format("Multiplicity{lower=%d, upper=%d, isUnique=%b}", lower, upper, isUnique);
    }

    /** Zero or one occurrence. */
    public static final Multiplicity OPTIONAL = new Multiplicity(0, 1, false);
    /** Exactly one occurrence. */
    public static final Multiplicity REQUIRED = new Multiplicity(1, 1, false);
    /** One or more occurrences, duplicates allowed. */
    public static final Multiplicity COLLECTION = new Multiplicity(1, Integer.MAX_VALUE, false);
    /** One or more occurrences, duplicates disallowed. */
    public static final Multiplicity SET = new Multiplicity(1, Integer.MAX_VALUE, true);
}
package org.apache.metadata.types;
import com.google.common.collect.ImmutableList;
import org.apache.metadata.MetadataException;
import java.util.HashMap;
import java.util.Map;
/**
 * Registry of all known datatypes, keyed by type name. Primitive types are
 * registered at construction; struct/array/map types are added through the
 * define* methods.
 */
public class TypeSystem {

    private final Map<String, IDataType> types;

    public TypeSystem() throws MetadataException {
        types = new HashMap<String, IDataType>();
        registerPrimitiveTypes();
    }

    /** @return a snapshot of all registered type names */
    public ImmutableList<String> getTypeNames() {
        return ImmutableList.copyOf(types.keySet());
    }

    private void registerPrimitiveTypes() {
        types.put(DataTypes.BOOLEAN_TYPE.getName(), DataTypes.BOOLEAN_TYPE);
        types.put(DataTypes.BYTE_TYPE.getName(), DataTypes.BYTE_TYPE);
        types.put(DataTypes.SHORT_TYPE.getName(), DataTypes.SHORT_TYPE);
        types.put(DataTypes.INT_TYPE.getName(), DataTypes.INT_TYPE);
        types.put(DataTypes.LONG_TYPE.getName(), DataTypes.LONG_TYPE);
        types.put(DataTypes.FLOAT_TYPE.getName(), DataTypes.FLOAT_TYPE);
        types.put(DataTypes.DOUBLE_TYPE.getName(), DataTypes.DOUBLE_TYPE);
        types.put(DataTypes.BIGINTEGER_TYPE.getName(), DataTypes.BIGINTEGER_TYPE);
        types.put(DataTypes.BIGDECIMAL_TYPE.getName(), DataTypes.BIGDECIMAL_TYPE);
        types.put(DataTypes.DATE_TYPE.getName(), DataTypes.DATE_TYPE);
        types.put(DataTypes.STRING_TYPE.getName(), DataTypes.STRING_TYPE);
    }

    /**
     * Looks up a datatype by name.
     * @throws MetadataException if the name is unknown
     */
    public IDataType getDataType(String name) throws MetadataException {
        IDataType dT = types.get(name);
        if (dT != null) {
            return dT;
        }
        throw new MetadataException(String.format("Unknown datatype: %s", name));
    }

    /**
     * Defines and registers a struct type, supporting attributes that refer
     * recursively to the type being defined.
     *
     * NOTE(review): errorIfExists is currently ignored — redefinition always
     * throws regardless of its value; confirm the intended semantics.
     *
     * @throws MetadataException if the name is already registered or an
     *         attribute's datatype cannot be resolved
     */
    public StructType defineStructType(String name,
                                       boolean errorIfExists,
                                       AttributeDefinition... attrDefs) throws MetadataException {
        assert name != null;
        if (types.containsKey(name)) {
            throw new MetadataException(String.format("Cannot redefine type %s", name));
        }
        AttributeInfo[] infos = new AttributeInfo[attrDefs.length];
        Map<Integer, AttributeDefinition> recursiveRefs = new HashMap<Integer, AttributeDefinition>();
        try {
            // Temporarily register a placeholder so resolving an attribute
            // that references this very type does not fail as "unknown".
            types.put(name, new StructType(name));
            for (int i = 0; i < attrDefs.length; i++) {
                infos[i] = new AttributeInfo(this, attrDefs[i]);
                // BUG FIX: was 'attrDefs[i].dataTypeName == name' — a String
                // reference comparison. Use equals() so recursive references
                // are detected even when the strings are distinct objects.
                if (name.equals(attrDefs[i].dataTypeName)) {
                    recursiveRefs.put(i, attrDefs[i]);
                }
            }
        } catch (MetadataException me) {
            // Roll back the placeholder on failure so the registry stays clean.
            types.remove(name);
            throw me;
        } catch (RuntimeException re) {
            types.remove(name);
            throw re;
        }
        StructType sT = new StructType(name, infos);
        types.put(name, sT);
        // Re-point recursive attributes at the final StructType instance
        // (they currently reference the throwaway placeholder).
        for (Map.Entry<Integer, AttributeDefinition> e : recursiveRefs.entrySet()) {
            infos[e.getKey()].setDataType(sT);
        }
        return sT;
    }

    /** Defines and registers an array type over the given element type. */
    public DataTypes.ArrayType defineArrayType(IDataType elemType) throws MetadataException {
        assert elemType != null;
        DataTypes.ArrayType dT = new DataTypes.ArrayType(elemType);
        types.put(dT.getName(), dT);
        return dT;
    }

    /** Defines and registers a map type over the given key/value types. */
    public DataTypes.MapType defineMapType(IDataType keyType, IDataType valueType) throws MetadataException {
        assert keyType != null;
        assert valueType != null;
        DataTypes.MapType dT = new DataTypes.MapType(keyType, valueType);
        types.put(dT.getName(), dT);
        return dT;
    }
}
package org.apache.metadata.types;
import org.apache.metadata.MetadataException;
/**
 * Thrown when a raw value cannot be converted to a target datatype.
 */
public class ValueConversionException extends MetadataException {

    public ValueConversionException(IDataType typ, Object val) {
        this(typ, val, (Throwable) null);
    }

    // String.valueOf guards against val == null: the previous val.toString()
    // threw NullPointerException while building the message, masking the
    // actual conversion failure.
    public ValueConversionException(IDataType typ, Object val, Throwable t) {
        super(String.format("Cannot convert value '%s' to datatype %s", String.valueOf(val), typ.getName()), t);
    }

    public ValueConversionException(IDataType typ, Object val, String msg) {
        super(String.format("Cannot convert value '%s' to datatype %s because: %s",
                String.valueOf(val), typ.getName(), msg));
    }

    protected ValueConversionException(String msg) {
        super(msg);
    }

    /** Raised when null is supplied for an attribute whose multiplicity forbids it. */
    public static class NullConversionException extends ValueConversionException {
        public NullConversionException(Multiplicity m) {
            // typo fix in message: "multiplicty" -> "multiplicity"
            super(String.format("Null value not allowed for multiplicity %s", m));
        }
    }
}
akka {
//loggers = [akka.event.slf4j.Slf4jLogger]
loglevel = debug
actor {
debug {
receive = on
lifecycle = on
}
}
}
app {
interface="localhost"
port= 8080
}
\ No newline at end of file
package org.apache.metadata.cli
import org.apache.metadata.MetadataService
import org.apache.metadata.MetadataService
import org.apache.metadata.storage.memory.MemRepository
import org.apache.metadata.types.TypeSystem
import scala.reflect.io.File
import scala.tools.nsc.Settings
import scala.tools.nsc.interpreter.{IMain, ILoop}
// Interactive Scala REPL entry point for experimenting with the metadata DSL.
object Console extends App {
val settings = new Settings
settings.usejavacp.value = true
settings.deprecation.value = true
// NOTE(review): hard-coded absolute paths to one developer's local ~/.m2
// repository — this only works on that machine. The classpath should be
// derived from the build/environment instead.
settings.bootclasspath.value += """/Users/hbutani/.m2/repository/org/apache/metadata/1.0-SNAPSHOT/metadata-1.0-SNAPSHOT.jar:/Users/hbutani/.m2/repository/org/scala-lang/scala-compiler/2.10.4/scala-compiler-2.10.4.jar:/Users/hbutani/.m2/repository/org/scala-lang/scala-reflect/2.10.4/scala-reflect-2.10.4.jar:/Users/hbutani/.m2/repository/org/scala-lang/jline/2.10.4/jline-2.10.4.jar:/Users/hbutani/.m2/repository/org/fusesource/jansi/jansi/1.4/jansi-1.4.jar:/Users/hbutani/.m2/repository/org/scala-lang/scala-library/2.10.4/scala-library-2.10.4.jar:/Users/hbutani/.m2/repository/org/scala-lang/scala-actors/2.10.4/scala-actors-2.10.4.jar:/Users/hbutani/.m2/repository/org/scala-lang/scalap/2.10.4/scalap-2.10.4.jar:/Users/hbutani/.m2/repository/org/scalatest/scalatest_2.10/2.2.0/scalatest_2.10-2.2.0.jar:/Users/hbutani/.m2/repository/org/scalamacros/quasiquotes_2.10/2.0.1/quasiquotes_2.10-2.0.1.jar:/Users/hbutani/.m2/repository/org/json4s/json4s-native_2.10/3.2.11/json4s-native_2.10-3.2.11.jar:/Users/hbutani/.m2/repository/org/json4s/json4s-core_2.10/3.2.11/json4s-core_2.10-3.2.11.jar:/Users/hbutani/.m2/repository/org/json4s/json4s-ast_2.10/3.2.11/json4s-ast_2.10-3.2.11.jar:/Users/hbutani/.m2/repository/com/thoughtworks/paranamer/paranamer/2.6/paranamer-2.6.jar:/Users/hbutani/.m2/repository/com/github/nscala-time/nscala-time_2.10/1.6.0/nscala-time_2.10-1.6.0.jar:/Users/hbutani/.m2/repository/joda-time/joda-time/2.5/joda-time-2.5.jar:/Users/hbutani/.m2/repository/org/joda/joda-convert/1.2/joda-convert-1.2.jar:/Users/hbutani/.m2/repository/com/typesafe/config/1.2.1/config-1.2.1.jar:/Users/hbutani/.m2/repository/com/typesafe/akka/akka-actor_2.10/2.3.7/akka-actor_2.10-2.3.7.jar:/Users/hbutani/.m2/repository/com/typesafe/akka/akka-testkit_2.10/2.3.7/akka-testkit_2.10-2.3.7.jar:/Users/hbutani/.m2/repository/com/typesafe/akka/akka-slf4j_2.10/2.3.7/akka-slf4j_2.10-2.3.7.jar:/Users/hbutani/.m2/repository/org/slf4j/slf4j-api/1.7.5/slf4j-api-1.7.5.jar:/Users/hbutani/.m2/repository/io/spr
ay/spray-routing/1.3.1/spray-routing-1.3.1.jar:/Users/hbutani/.m2/repository/io/spray/spray-http/1.3.1/spray-http-1.3.1.jar:/Users/hbutani/.m2/repository/org/parboiled/parboiled-scala_2.10/1.1.6/parboiled-scala_2.10-1.1.6.jar:/Users/hbutani/.m2/repository/org/parboiled/parboiled-core/1.1.6/parboiled-core-1.1.6.jar:/Users/hbutani/.m2/repository/io/spray/spray-util/1.3.1/spray-util-1.3.1.jar:/Users/hbutani/.m2/repository/com/chuusai/shapeless_2.10/1.2.4/shapeless_2.10-1.2.4.jar:/Users/hbutani/.m2/repository/io/spray/spray-can/1.3.1/spray-can-1.3.1.jar:/Users/hbutani/.m2/repository/io/spray/spray-io/1.3.1/spray-io-1.3.1.jar:/Users/hbutani/.m2/repository/io/spray/spray-httpx/1.3.1/spray-httpx-1.3.1.jar:/Users/hbutani/.m2/repository/org/jvnet/mimepull/mimepull/1.9.4/mimepull-1.9.4.jar:/Users/hbutani/.m2/repository/io/spray/spray-testkit/1.3.1/spray-testkit-1.3.1.jar:/Users/hbutani/.m2/repository/com/google/guava/guava/11.0.2/guava-11.0.2.jar:/Users/hbutani/.m2/repository/com/google/code/findbugs/jsr305/1.3.9/jsr305-1.3.9.jar:/Users/hbutani/.m2/repository/junit/junit/4.10/junit-4.10.jar:/Users/hbutani/.m2/repository/org/hamcrest/hamcrest-core/1.1/hamcrest-core-1.1.jar"""
// Interpreter whose parent classloader is the one that loaded Settings,
// so the REPL sees the application's classes.
val in = new IMain(settings){
override protected def parentClassLoader = settings.getClass.getClassLoader()
}
new SampleILoop().process(settings)
}
/**
 * Customized REPL loop: binds a fresh MetadataService (backed by an in-memory
 * repository) for the session and pre-imports the metadata DSL plus json4s
 * helpers so they are available at the prompt.
 */
class SampleILoop extends ILoop {
override def prompt = "==> "
//intp = Console.in
// Session-scoped service: in-memory repository + new type system, bound to
// the current thread so the dsl package object's accessors resolve.
val ts: TypeSystem = new TypeSystem
val mr: MemRepository = new MemRepository
val ms : MetadataService = new MetadataService(mr, ts)
MetadataService.setCurrentService(ms)
// Deferred REPL setup: imports and bindings run once the interpreter is up.
addThunk {
intp.beQuietDuring {
intp.addImports("java.lang.Math._")
intp.addImports("org.json4s.native.Serialization.{read, write => swrite}")
intp.addImports("org.json4s._")
intp.addImports("org.json4s.native.JsonMethods._")
intp.addImports("org.apache.metadata.dsl._")
intp.bindValue("service", ms)
//intp.bindValue("cp", intp.compilerClasspath)
}
}
override def printWelcome() {
echo("\n" +
" \\,,,/\n" +
" (o o)\n" +
"-----oOOo-(_)-oOOo-----")
}
}
package org.apache.metadata.dsl
import org.apache.metadata.storage.TypedStruct
import scala.language.dynamics
/**
 * Wraps a TypedStruct so its attributes can be read and written with plain
 * field syntax (e.g. `s.name` / `s.name = v`) via Scala's Dynamic trait.
 */
class DynamicTypedStruct(val ts : TypedStruct) extends Dynamic {
  /** `s.attr` reads the attribute from the underlying TypedStruct. */
  def selectDynamic(name: String) = ts.get(name)

  /**
   * `s.attr = v` writes the attribute; a DynamicTypedStruct value is
   * unwrapped to its underlying TypedStruct first. (A null value does not
   * match the type pattern, so it is stored as-is — same as before.)
   */
  def updateDynamic(name: String)(value: Any) {
    val unwrapped = value match {
      case d: DynamicTypedStruct => d.ts
      case other => other
    }
    ts.set(name, unwrapped)
  }

  /** The StructType of the wrapped instance. */
  def dataType = ts.dataType
}
package org.apache.metadata
import org.apache.metadata.json.{BigIntegerSerializer, BigDecimalSerializer, TypedStructSerializer, Serialization}
import org.apache.metadata.storage.TypedStruct
import org.apache.metadata.types._
import scala.collection.JavaConverters._
import org.json4s._
import org.json4s.native.Serialization.{read, write => swrite}
import org.json4s.native.JsonMethods._
import scala.language.implicitConversions
// Convenience DSL: short names for the thread-bound service/type-system,
// the primitive types, and helpers to define types and build instances
// from JSON. Intended for interactive (REPL) use.
package object dsl {
// json4s formats with the custom serializers this project needs.
implicit val formats = org.json4s.native.Serialization.formats(NoTypeHints) + new TypedStructSerializer +
new BigDecimalSerializer + new BigIntegerSerializer
// Thread-bound accessors (throw MetadataException when nothing is bound).
def service = MetadataService.getCurrentService
def ts = MetadataService.getCurrentTypeSystem
def repo = MetadataService.getCurrentRepository
// Short aliases for the primitive datatypes.
val BOOLEAN_TYPE = DataTypes.BOOLEAN_TYPE
val BYTE_TYPE = DataTypes.BYTE_TYPE
val SHORT_TYPE = DataTypes.SHORT_TYPE
val INT_TYPE = DataTypes.INT_TYPE
val LONG_TYPE = DataTypes.LONG_TYPE
val FLOAT_TYPE = DataTypes.FLOAT_TYPE
val DOUBLE_TYPE = DataTypes.DOUBLE_TYPE
val BIGINT_TYPE = DataTypes.BIGINTEGER_TYPE
val BIGDECIMAL_TYPE = DataTypes.BIGDECIMAL_TYPE
val DATE_TYPE = DataTypes.DATE_TYPE
val STRING_TYPE = DataTypes.STRING_TYPE
val ATTR_OPTIONAL = Multiplicity.OPTIONAL
val ATTR_REQUIRED = Multiplicity.REQUIRED
// Registers a new array/map type in the current TypeSystem on every call.
def arrayType(dT : IDataType[_]) = ts.defineArrayType(dT)
def mapType(kT : IDataType[_], vT : IDataType[_]) = ts.defineMapType(kT, vT)
// Builds an AttributeDefinition; multiplicity defaults to OPTIONAL.
def attrDef(name : String, dT : IDataType[_],
m : Multiplicity = Multiplicity.OPTIONAL,
isComposite: Boolean = false,
reverseAttributeName: String = null) = {
require(name != null)
require(dT != null)
new AttributeDefinition(name, dT.getName, m, isComposite, reverseAttributeName)
}
def listTypes = ts.getTypeNames
def defineStructType(name : String, attrDef : AttributeDefinition*) = {
require(name != null)
ts.defineStructType(name, false, attrDef:_*)
}
// Parses jsonStr, injects the type-name marker field, and extracts a
// TypedStruct via the TypedStructSerializer.
def createInstance(typeName : String, jsonStr : String)(implicit formats: Formats) = {
val j = parse(jsonStr)
assert(j.isInstanceOf[JObject])
var j1 = j.asInstanceOf[JObject]
j1 = JObject(JField(Serialization.STRUCT_TYPE_FIELD_NAME, JString(typeName)) :: j1.obj)
new DynamicTypedStruct(Extraction.extract[TypedStruct](j1))
}
// Creates an empty instance of a previously defined struct type.
def createInstance(typeName : String) = {
new DynamicTypedStruct(
ts.getDataType(typeName).asInstanceOf[StructType].createInstance())
}
implicit def dynTypedStructToTypedStruct(s : DynamicTypedStruct) = s.ts
implicit def dynTypedStructToJson(s : DynamicTypedStruct)(implicit formats: Formats) = {
Extraction.decompose(s.ts)(formats)
}
}
package org.apache.metadata.json
import org.apache.metadata.types.DataTypes.{MapType, TypeCategory, ArrayType}
import org.apache.metadata.{MetadataException, MetadataService}
import org.apache.metadata.types._
import org.json4s.JsonAST.JInt
import org.json4s._
import org.json4s.native.Serialization.{read, write => swrite}
import org.json4s.reflect.{ScalaType, Reflector}
import java.util.regex.Pattern
import java.util.Date
import org.apache.metadata.storage.TypedStruct
import collection.JavaConversions._
import scala.collection.JavaConverters._
// json4s serializer mapping JSON decimal literals <-> java.math.BigDecimal.
// NOTE(review): deserialization only matches JDecimal; a whole-number JSON
// value arrives as JInt and would not match — confirm whether BigDecimal
// fields can legitimately receive integers.
class BigDecimalSerializer extends CustomSerializer[java.math.BigDecimal](format => ( {
case JDecimal(e) => e.bigDecimal
}, {
case e: java.math.BigDecimal => JDecimal(new BigDecimal(e))
}
))
// json4s serializer mapping JSON integer literals <-> java.math.BigInteger
// (wrapping/unwrapping through scala.BigInt, which json4s uses natively).
class BigIntegerSerializer extends CustomSerializer[java.math.BigInteger](format => ( {
case JInt(e) => e.bigInteger
}, {
case e: java.math.BigInteger => JInt(new BigInt(e))
}
))
/**
 * json4s Serializer for TypedStruct. On read, the struct's type is taken
 * from the Serialization.STRUCT_TYPE_FIELD_NAME marker field and looked up
 * in the thread-bound TypeSystem; remaining fields are extracted per the
 * attribute's datatype. On write, the marker field is re-emitted alongside
 * the attributes.
 */
class TypedStructSerializer extends Serializer[TypedStruct] {
// JSON array -> java List of elements converted per the array's element type.
def extractList(lT : ArrayType, value : JArray)(implicit format: Formats) : Any = {
val dT = lT.getElemType
value.arr.map(extract(dT, _)).asJava
}
// JSON object -> java Map; keys stay strings, values converted per valueType.
def extractMap(mT : MapType, value : JObject)(implicit format: Formats) : Any = {
val kT = mT.getKeyType
val vT = mT.getValueType
value.obj.map{f : JField => f._1 -> extract(vT, f._2) }.toMap.asJava
}
// Converts one JSON value according to the target datatype.
// NOTE(review): JInt is extracted as Int — a JSON integer wider than 32 bits
// would overflow here; confirm long/bigint attributes round-trip correctly.
def extract(dT : IDataType[_], value : JValue)(implicit format: Formats) : Any = value match {
case value : JBool => Extraction.extract[Boolean](value)
case value : JInt => Extraction.extract[Int](value)
case value : JDouble => Extraction.extract[Double](value)
case value : JDecimal => Extraction.extract[BigDecimal](value)
case value : JString => Extraction.extract[String](value)
case JNull => null
case value : JArray => extractList(dT.asInstanceOf[ArrayType], value.asInstanceOf[JArray])
case value : JObject if dT.getTypeCategory eq TypeCategory.MAP =>
extractMap(dT.asInstanceOf[MapType], value.asInstanceOf[JObject])
case value : JObject =>
Extraction.extract[TypedStruct](value)
}
def deserialize(implicit format: Formats) = {
case (TypeInfo(clazz, ptype), json) if classOf[TypedStruct].isAssignableFrom(clazz) => json match {
case JObject(fs) =>
// Split the type-name marker field from the real attribute fields.
val(typ, fields) = fs.partition(f => f._1 == Serialization.STRUCT_TYPE_FIELD_NAME)
val typName = typ(0)._2.asInstanceOf[JString].s
val sT = MetadataService.getCurrentTypeSystem().getDataType(typName).asInstanceOf[StructType]
val s = sT.createInstance()
fields.foreach { f =>
val fName = f._1
val fInfo = sT.fields(fName)
// NOTE(review): if fields is a Map, apply() throws on a missing key
// rather than returning null — this null check may be dead; confirm
// how unknown JSON fields should be handled.
if ( fInfo != null ) {
//println(fName)
var v = f._2
// Nested structs need the type-name marker injected before extraction.
if ( fInfo.dataType().isInstanceOf[StructType] ) {
v = v match {
case JObject(sFields) =>
JObject(JField(Serialization.STRUCT_TYPE_FIELD_NAME, JString(fInfo.dataType.getName)) :: sFields)
case x => x
}
}
s.set(fName, extract(fInfo.dataType(), v))
}
}
s
case x => throw new MappingException("Can't convert " + x + " to TypedStruct")
}
}
/**
 * Implicit conversion from `java.math.BigInteger` to `scala.BigInt`.
 * match the builtin conversion for BigDecimal.
 * See https://groups.google.com/forum/#!topic/scala-language/AFUamvxu68Q
 */
//implicit def javaBigInteger2bigInt(x: java.math.BigInteger): BigInt = new BigInt(x)
def serialize(implicit format: Formats) = {
case e: TypedStruct =>
// Emit each attribute; java Maps are converted to scala Maps so json4s
// can decompose them.
val fields = e.dataType.fields.map {
case (fName, info) => {
var v = e.get(fName)
if ( v != null && (info.dataType().getTypeCategory eq TypeCategory.MAP) ) {
v = v.asInstanceOf[java.util.Map[_,_]].toMap
}
JField(fName, Extraction.decompose(v))
}
}.toList.map(_.asInstanceOf[JField])
JObject(JField(Serialization.STRUCT_TYPE_FIELD_NAME, JString(e.dataType.getName)) :: fields)
}
}
// Shared serialization constants.
object Serialization {
// JSON field carrying the struct's type name alongside its attribute fields.
val STRUCT_TYPE_FIELD_NAME = "$typeName$"
}
package org.apache.metadata;
import com.google.common.collect.Lists;
import com.google.common.collect.Maps;
import junit.framework.TestCase;
import org.apache.metadata.storage.IRepository;
import org.apache.metadata.storage.memory.MemRepository;
import org.apache.metadata.types.*;
import org.junit.Before;
import org.junit.BeforeClass;
import org.junit.Test;
import java.math.BigDecimal;
import java.math.BigInteger;
import java.util.Date;
import java.util.Map;
/**
 * Shared test fixture. Registers two struct types — t1 with one attribute of
 * each supported datatype, and t2 which references itself recursively — and
 * binds a MetadataService (in-memory repository) to the current thread.
 */
public abstract class BaseTest {

    protected MetadataService ms;

    public static final String STRUCT_TYPE_1 = "t1";
    public static final String STRUCT_TYPE_2 = "t2";

    @BeforeClass
    public static void setupClass() throws MetadataException {
        TypeSystem ts = new TypeSystem();
        MemRepository mr = new MemRepository();
        MetadataService.setCurrentService(new MetadataService(mr, ts));
        // t1: one attribute per primitive type plus array and map attributes.
        // (The StructType return values were previously captured in unused
        // locals; registration happens inside the TypeSystem, so they are
        // simply dropped here.)
        ts.defineStructType(STRUCT_TYPE_1,
                true,
                createRequiredAttrDef("a", DataTypes.INT_TYPE),
                createOptionalAttrDef("b", DataTypes.BOOLEAN_TYPE),
                createOptionalAttrDef("c", DataTypes.BYTE_TYPE),
                createOptionalAttrDef("d", DataTypes.SHORT_TYPE),
                createOptionalAttrDef("e", DataTypes.INT_TYPE),
                createOptionalAttrDef("f", DataTypes.INT_TYPE),
                createOptionalAttrDef("g", DataTypes.LONG_TYPE),
                createOptionalAttrDef("h", DataTypes.FLOAT_TYPE),
                createOptionalAttrDef("i", DataTypes.DOUBLE_TYPE),
                createOptionalAttrDef("j", DataTypes.BIGINTEGER_TYPE),
                createOptionalAttrDef("k", DataTypes.BIGDECIMAL_TYPE),
                createOptionalAttrDef("l", DataTypes.DATE_TYPE),
                createOptionalAttrDef("m", ts.defineArrayType(DataTypes.INT_TYPE)),
                createOptionalAttrDef("n", ts.defineArrayType(DataTypes.BIGDECIMAL_TYPE)),
                createOptionalAttrDef("o", ts.defineMapType(DataTypes.STRING_TYPE, DataTypes.DOUBLE_TYPE)));
        // t2: self-referencing struct ("s" is of type t2 itself).
        ts.defineStructType(STRUCT_TYPE_2,
                true,
                createRequiredAttrDef("a", DataTypes.INT_TYPE),
                createOptionalAttrDef("s", STRUCT_TYPE_2));
    }

    /** Builds a fully populated untyped t1 Struct for conversion tests. */
    public static Struct createStruct(MetadataService ms) throws MetadataException {
        StructType structType = (StructType) ms.getTypeSystem().getDataType(STRUCT_TYPE_1);
        Struct s = new Struct(structType.getName());
        s.set("a", 1);
        s.set("b", true);
        s.set("c", (byte)1);
        s.set("d", (short)2);
        s.set("e", 1);
        s.set("f", 1);
        s.set("g", 1L);
        s.set("h", 1.0f);
        s.set("i", 1.0);
        s.set("j", BigInteger.valueOf(1L));
        s.set("k", new BigDecimal(1));
        s.set("l", new Date(System.currentTimeMillis()));
        s.set("m", Lists.<Integer>asList(Integer.valueOf(1), new Integer[]{Integer.valueOf(1)}));
        s.set("n", Lists.<BigDecimal>asList(BigDecimal.valueOf(1.1), new BigDecimal[] {BigDecimal.valueOf(1.1)}));
        Map<String, Double> hm = Maps.<String, Double>newHashMap();
        hm.put("a", 1.0);
        hm.put("b", 2.0);
        s.set("o", hm);
        return s;
    }

    @Before
    public void setup() throws MetadataException {
        ms = MetadataService.getCurrentService();
    }

    /** Attribute that may be absent (multiplicity 0..1). */
    public static AttributeDefinition createOptionalAttrDef(String name,
                                                            IDataType dataType
    ) {
        return new AttributeDefinition(name, dataType.getName(), Multiplicity.OPTIONAL, false, null);
    }

    /** Optional attribute whose datatype is given by name (e.g. for recursion). */
    public static AttributeDefinition createOptionalAttrDef(String name,
                                                            String dataType
    ) {
        return new AttributeDefinition(name, dataType, Multiplicity.OPTIONAL, false, null);
    }

    /** Attribute that must be present (multiplicity 1..1). */
    public static AttributeDefinition createRequiredAttrDef(String name,
                                                            IDataType dataType
    ) {
        return new AttributeDefinition(name, dataType.getName(), Multiplicity.REQUIRED, false, null);
    }

    /** Required attribute whose datatype is given by name. */
    public static AttributeDefinition createRequiredAttrDef(String name,
                                                            String dataType
    ) {
        return new AttributeDefinition(name, dataType, Multiplicity.REQUIRED, false, null);
    }
}
package org.apache.metadata;
import com.google.common.collect.Lists;
import com.google.common.collect.Maps;
import junit.framework.TestCase;
import org.apache.metadata.storage.TypedStruct;
import org.apache.metadata.types.*;
import org.junit.Before;
import org.junit.Test;
import java.math.BigDecimal;
import java.math.BigInteger;
import java.util.Date;
import java.util.Map;
/**
 * Tests conversion of untyped Structs into TypedStructs, including a
 * self-referencing struct type.
 */
public class StructTest extends BaseTest {
StructType structType;
StructType recursiveStructType;
@Before
public void setup() throws MetadataException {
super.setup();
structType = (StructType) ms.getTypeSystem().getDataType(STRUCT_TYPE_1);
recursiveStructType = (StructType) ms.getTypeSystem().getDataType(STRUCT_TYPE_2);
}
// Converts a fully-populated t1 struct; success criterion is "no exception".
// NOTE(review): no assertions — consider asserting on converted values
// instead of printing.
@Test
public void test1() throws MetadataException {
Struct s = createStruct(ms);
TypedStruct ts = structType.convert(s, Multiplicity.REQUIRED);
System.out.println(ts);
}
// t2 holds an attribute of its own type; verifies recursive conversion works.
@Test
public void testRecursive() throws MetadataException {
Struct s1 = new Struct(recursiveStructType.getName());
s1.set("a", 1);
Struct s2 = new Struct(recursiveStructType.getName());
s2.set("a", 1);
s2.set("s", s1);
TypedStruct ts = recursiveStructType.convert(s2, Multiplicity.REQUIRED);
System.out.println(ts);
}
}
package org.apache.metadata.dsl
import org.apache.metadata.hive.HiveMockMetadataService
import org.apache.metadata.json.{BigIntegerSerializer, BigDecimalSerializer, TypedStructSerializer}
import org.apache.metadata.storage.TypedStruct
import org.apache.metadata.{Struct, BaseTest}
import org.apache.metadata.types.{IDataType, Multiplicity, StructType}
import org.json4s.NoTypeHints
import org.json4s.native.Serialization._
import org.junit.{Test, Before}
import org.apache.metadata.dsl._
import org.json4s.native.JsonMethods._
/**
 * Exercises the metadata DSL (the `org.apache.metadata.dsl` package object):
 * defining struct types, creating instances from code and from JSON,
 * navigating instances dynamically, and serializing back to JSON.
 */
class DSLTest extends BaseTest {

  @Before
  override def setup {
    super.setup
  }

  /** Define a struct type covering every primitive/array/map attribute kind,
   *  build an instance from JSON, navigate it, and render it back to JSON. */
  @Test def test1 {
    // 1. Existing Types in System
    println(s"Existing Types:\n\t ${listTypes}\n")

    defineStructType("mytype",
      attrDef("a", INT_TYPE, ATTR_REQUIRED),
      attrDef("b", BOOLEAN_TYPE),
      attrDef("c", BYTE_TYPE),
      attrDef("d", SHORT_TYPE),
      attrDef("e", INT_TYPE),
      attrDef("f", INT_TYPE),
      attrDef("g", LONG_TYPE),
      attrDef("h", FLOAT_TYPE),
      attrDef("i", DOUBLE_TYPE),
      attrDef("j", BIGINT_TYPE),
      attrDef("k", BIGDECIMAL_TYPE),
      attrDef("l", DATE_TYPE),
      attrDef("m", arrayType(INT_TYPE)),
      attrDef("n", arrayType(BIGDECIMAL_TYPE)),
      attrDef("o", mapType(STRING_TYPE, DOUBLE_TYPE)))

    // 2. 'mytype' available as a Type
    println(s"Added Type:\n\t ${listTypes}\n")

    // 3. Create a 'mytype' instance from Json
    val i = createInstance("mytype", """
      {
        "$typeName$":"mytype",
        "e":1,
        "n":[1,1.1],
        "h":1.0,
        "b":true,
        "k":1,
        "j":1,
        "d":2,
        "m":[1,1],
        "g":1,
        "a":1,
        "i":1.0,
        "c":1,
        "l":"2014-12-03T08:00:00.000Z",
        "f":1,
        "o":{
          "b":2.0,
          "a":1.0
        }
      }
    """)

    // 4. Navigate mytype instance in code (dynamic member access via the DSL)
    println("Examples of Navigate mytype instance in code:\n")
    println(s"i.a -> ${i.a}")
    println(s"i.o -> ${i.o}")
    println(s"i.o.keys -> ${i.o.asInstanceOf[java.util.Map[_,_]].keySet}")

    // 5. Serialize mytype instance to Json
    println(s"\nJSON:\n ${pretty(render(i))}")
  }

  /** Define nested struct types (person containing address), populate an
   *  instance via dynamic assignment, and also parse one from JSON. */
  @Test def test2 {
    // 1. Existing Types in System
    println(s"Existing Types:\n\t ${listTypes}\n")

    val addrType = defineStructType("addressType",
      attrDef("houseNum", INT_TYPE, ATTR_REQUIRED),
      attrDef("street", STRING_TYPE, ATTR_REQUIRED),
      attrDef("city", STRING_TYPE, ATTR_REQUIRED),
      attrDef("state", STRING_TYPE, ATTR_REQUIRED),
      attrDef("zip", INT_TYPE, ATTR_REQUIRED),
      attrDef("country", STRING_TYPE, ATTR_REQUIRED)
    )
    val personType = defineStructType("personType",
      attrDef("first_name", STRING_TYPE, ATTR_REQUIRED),
      attrDef("last_name", STRING_TYPE, ATTR_REQUIRED),
      attrDef("address", addrType)
    )

    // 2. updated Types in System
    println(s"Updated Types:\n\t ${listTypes}")

    // 3. Construct a Person in Code
    val person = createInstance("personType")
    val address = createInstance("addressType")

    person.first_name = "Meta"
    person.last_name = "Hadoop"

    address.houseNum = 3460
    address.street = "W Bayshore Rd"
    address.city = "Palo Alto"
    address.state = "CA"
    address.zip = 94303
    address.country = "USA"

    person.address = address

    // 4. Convert to Json
    println(s"\nJSON:\n ${pretty(render(person))}")

    val p2 = createInstance("personType", """{
        "first_name":"Meta",
        "address":{
          "houseNum":3460,
          "city":"Palo Alto",
          "country":"USA",
          "state":"CA",
          "zip":94303,
          "street":"W Bayshore Rd"
        },
        "last_name":"Hadoop"
      }""")
  }

  /** Mirror the HiveMockMetadataService model (FieldSchema/SerDe/
   *  StorageDescriptor/Table) as metadata struct types. */
  @Test def testHive(): Unit = {
    val hiveTable = HiveMockMetadataService.getTable("tpcds", "date_dim")
    println(hiveTable)

    //name : String, typeName : String, comment : String
    val fieldType = defineStructType("FieldSchema",
      attrDef("name", STRING_TYPE, ATTR_REQUIRED),
      attrDef("typeName", STRING_TYPE, ATTR_REQUIRED),
      attrDef("comment", STRING_TYPE)
    )
    /*
    SerDe(name : String, serializationLib : String, parameters : Map[String, String])
     */
    defineStructType("SerDe",
      attrDef("name", STRING_TYPE, ATTR_REQUIRED),
      attrDef("serializationLib", STRING_TYPE, ATTR_REQUIRED),
      attrDef("parameters", mapType(STRING_TYPE, STRING_TYPE))
    )

    /*
    StorageDescriptor(fields : List[FieldSchema],
                       location : String, inputFormat : String,
                        outputFormat : String, compressed : Boolean,
                        numBuckets : Int, bucketColumns : List[String],
                        sortColumns : List[String],
                        parameters : Map[String, String],
                        storedAsSubDirs : Boolean
                        )
    */
    val sdType = defineStructType("StorageDescriptor",
      // NOTE(review): wired in the documented `fields` attribute so the struct
      // matches the case class above and `fieldType` is actually used —
      // confirm arrayType accepts struct types in this DSL.
      attrDef("fields", arrayType(fieldType)),
      attrDef("location", STRING_TYPE, ATTR_REQUIRED),
      attrDef("inputFormat", STRING_TYPE, ATTR_REQUIRED),
      attrDef("outputFormat", STRING_TYPE, ATTR_REQUIRED),
      attrDef("compressed", BOOLEAN_TYPE),
      attrDef("numBuckets", INT_TYPE),
      attrDef("bucketColumns", arrayType(STRING_TYPE)),
      attrDef("sortColumns", arrayType(STRING_TYPE)),
      attrDef("parameters", mapType(STRING_TYPE, STRING_TYPE)),
      attrDef("storedAsSubDirs", BOOLEAN_TYPE)
    )

    /*
    case class Table(dbName : String, tableName : String, storageDesc : StorageDescriptor,
                   parameters : Map[String, String],
                   tableType : String)
     */
    // Fixed: the original repeated StorageDescriptor's attributes here
    // (copy-paste) and omitted tableType; this now matches the case class.
    defineStructType("Table",
      attrDef("dbName", STRING_TYPE, ATTR_REQUIRED),
      attrDef("tableName", STRING_TYPE, ATTR_REQUIRED),
      attrDef("storageDesc", sdType, ATTR_REQUIRED),
      attrDef("parameters", mapType(STRING_TYPE, STRING_TYPE)),
      attrDef("tableType", STRING_TYPE)
    )
  }
}
package org.apache.metadata.hive
/**
 * Mock of a Hive metastore lookup: returns a hard-coded description of the
 * TPC-DS `date_dim` table, for driving metadata-type tests without a real
 * Hive installation.
 */
object HiveMockMetadataService {

  case class FieldSchema(name : String, typeName : String, comment : String)

  case class SerDe(name : String, serializationLib : String, parameters : Map[String, String])

  case class StorageDescriptor(fields : List[FieldSchema],
                               location : String, inputFormat : String,
                               outputFormat : String, compressed : Boolean,
                               numBuckets : Int, bucketColumns : List[String],
                               sortColumns : List[String],
                               parameters : Map[String, String],
                               storedAsSubDirs : Boolean)

  case class Table(dbName : String, tableName : String, storageDesc : StorageDescriptor,
                   parameters : Map[String, String],
                   tableType : String)

  // (column name, hive type) pairs of the canned date_dim schema; every
  // column's comment is null, so the FieldSchemas are built from these.
  private val dateDimColumns: List[(String, String)] = List(
    "d_date_sk" -> "int",
    "d_date_id" -> "string",
    "d_date" -> "string",
    "d_month_seq" -> "int",
    "d_week_seq" -> "int",
    "d_quarter_seq" -> "int",
    "d_year" -> "int",
    "d_dow" -> "int",
    "d_moy" -> "int",
    "d_dom" -> "int",
    "d_qoy" -> "int",
    "d_fy_year" -> "int",
    "d_fy_quarter_seq" -> "int",
    "d_fy_week_seq" -> "int",
    "d_day_name" -> "string",
    "d_quarter_name" -> "string",
    "d_holiday" -> "string",
    "d_weekend" -> "string",
    "d_following_holiday" -> "string",
    "d_first_dom" -> "int",
    "d_last_dom" -> "int",
    "d_same_day_ly" -> "int",
    "d_same_day_lq" -> "int",
    "d_current_day" -> "string",
    "d_current_week" -> "string",
    "d_current_month" -> "string",
    "d_current_quarter" -> "string",
    "d_current_year" -> "string"
  )

  /** Returns the canned Table metadata; dbName/table are echoed back as-is. */
  def getTable(dbName : String, table : String) : Table = {
    val fields = dateDimColumns.map { case (colName, colType) => FieldSchema(colName, colType, null) }
    val storage = StorageDescriptor(
      fields,
      location = "file:/tmp/warehouse/tpcds.db/date_dim",
      inputFormat = "org.apache.hadoop.hive.ql.io.orc.OrcInputFormat",
      outputFormat = "org.apache.hadoop.hive.ql.io.orc.OrcOutputFormat",
      compressed = false,
      numBuckets = 0,
      bucketColumns = Nil,
      sortColumns = Nil,
      parameters = Map.empty,
      storedAsSubDirs = false
    )
    Table(dbName, table, storage, Map.empty, "Table")
  }
}
package org.apache.metadata.json
import org.apache.metadata.Struct
import org.apache.metadata.storage.TypedStruct
import org.apache.metadata.storage.TypedStruct
import org.apache.metadata.types.Multiplicity
import org.apache.metadata.types.StructType
import org.apache.metadata.{Struct, BaseTest}
import org.apache.metadata.types.{Multiplicity, StructType}
import org.json4s.NoTypeHints
import org.junit.Before
import org.junit.Test
import org.json4s._
import org.json4s.native.Serialization.{read, write => swrite}
import org.json4s.native.JsonMethods._
/**
 * Round-trip tests for the json4s serialization of TypedStruct: Struct ->
 * TypedStruct -> JSON string -> TypedStruct, plus parsing a hand-written
 * JSON literal directly.
 */
class SerializationTest extends BaseTest {

  // Struct types registered by BaseTest; resolved from the type system in setup.
  private[metadata] var structType: StructType = null
  private[metadata] var recursiveStructType: StructType = null

  @Before
  override def setup {
    super.setup
    val typeSystem = ms.getTypeSystem
    structType = typeSystem.getDataType(BaseTest.STRUCT_TYPE_1).asInstanceOf[StructType]
    recursiveStructType = typeSystem.getDataType(BaseTest.STRUCT_TYPE_2).asInstanceOf[StructType]
  }

  // json4s Formats carrying the custom serializers TypedStruct needs.
  private def structFormats =
    org.json4s.native.Serialization.formats(NoTypeHints) + new TypedStructSerializer +
      new BigDecimalSerializer + new BigIntegerSerializer

  /** Serialize a TypedStruct to JSON and read it back. */
  @Test def test1 {
    val struct: Struct = BaseTest.createStruct(ms)
    val typed: TypedStruct = structType.convert(struct, Multiplicity.REQUIRED)
    println("Typed Struct :")
    println(typed)

    implicit val formats = structFormats
    val json = swrite(typed)
    println("Json representation :")
    println(json)

    val roundTripped = read[TypedStruct](json)
    println("Typed Struct read back:")
    println(roundTripped)
  }

  /** Deserialize a TypedStruct from a hand-written JSON literal. */
  @Test def test2 {
    val struct: Struct = BaseTest.createStruct(ms)
    val typed: TypedStruct = structType.convert(struct, Multiplicity.REQUIRED)

    implicit val formats = structFormats
    val fromLiteral = read[TypedStruct](
      """
      {"$typeName$":"t1","e":1,"n":[1.1,1.1],"h":1.0,"b":true,"k":1,"j":1,"d":2,"m":[1,1],"g":1,"a":1,"i":1.0,
      "c":1,"l":"2014-12-03T19:38:55.053Z","f":1,"o":{"b":2.0,"a":1.0}}""")
    println("Typed Struct read from string:")
    println(fromLiteral)
  }
}
Markdown is supported
0% or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment