Commit 1a3dc0e2 by Harish Butani

Initial Commit

parent a4f51455
# Eclipse
.classpath
.project
.settings/
# Intellij
.idea/
*.iml
*.iws
# Mac
.DS_Store
# Maven
log/
target/
# Emacs
*~
<?xml version="1.0" encoding="UTF-8"?>
<project xmlns="http://maven.apache.org/POM/4.0.0"
xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
<modelVersion>4.0.0</modelVersion>
<groupId>org.apache</groupId>
<artifactId>metadata</artifactId>
<packaging>jar</packaging>
<version>1.0-SNAPSHOT</version>
<prerequisites>
<maven>3.0.4</maven>
</prerequisites>
<properties>
<project.build.sourceEncoding>UTF-8</project.build.sourceEncoding>
<project.reporting.outputEncoding>UTF-8</project.reporting.outputEncoding>
<java.version>1.6</java.version>
<scala.version>2.10.4</scala.version>
<scala.binary.version>2.10</scala.binary.version>
<scala.macros.version>2.0.1</scala.macros.version>
<slf4j.version>1.7.5</slf4j.version>
<log4j.version>1.2.17</log4j.version>
<hadoop.version>2.4.0</hadoop.version>
<protobuf.version>2.4.1</protobuf.version>
<yarn.version>${hadoop.version}</yarn.version>
<hbase.version>0.94.6</hbase.version>
<hive.version>0.13.1</hive.version>
<akka.version>2.3.7</akka.version>
<spray.version>1.3.1</spray.version>
<guava.version>11.0.2</guava.version>
<PermGen>64m</PermGen>
<MaxPermGen>512m</MaxPermGen>
</properties>
<repositories>
<repository>
<id>central</id>
<!-- Keep central first: Maven then tries the central repo before the others, which speeds up dependency resolution -->
<name>Maven Repository</name>
<url>https://repo1.maven.org/maven2</url>
<releases>
<enabled>true</enabled>
</releases>
<!--
<snapshots>
<enabled>false</enabled>
</snapshots>
-->
</repository>
<repository>
<id>apache-repo</id>
<name>Apache Repository</name>
<url>https://repository.apache.org/content/repositories/releases</url>
<releases>
<enabled>true</enabled>
</releases>
<snapshots>
<enabled>false</enabled>
</snapshots>
</repository>
<repository>
<id>jboss-repo</id>
<name>JBoss Repository</name>
<url>https://repository.jboss.org/nexus/content/repositories/releases</url>
<releases>
<enabled>true</enabled>
</releases>
<snapshots>
<enabled>false</enabled>
</snapshots>
</repository>
<repository>
<id>mqtt-repo</id>
<name>MQTT Repository</name>
<url>https://repo.eclipse.org/content/repositories/paho-releases</url>
<releases>
<enabled>true</enabled>
</releases>
<snapshots>
<enabled>false</enabled>
</snapshots>
</repository>
<repository>
<id>spring-releases</id>
<name>Spring Release Repository</name>
<url>https://repo.spring.io/libs-release</url>
<releases>
<enabled>true</enabled>
</releases>
<snapshots>
<enabled>false</enabled>
</snapshots>
</repository>
<repository>
<id>typesafe</id>
<name>Typesafe Repository</name>
<url>https://repo.typesafe.com/typesafe/releases/</url>
</repository>
</repositories>
<pluginRepositories>
<pluginRepository>
<id>central</id>
<url>https://repo1.maven.org/maven2</url>
<releases>
<enabled>true</enabled>
</releases>
<snapshots>
<enabled>false</enabled>
</snapshots>
</pluginRepository>
</pluginRepositories>
<dependencies>
<dependency>
<groupId>org.scala-lang</groupId>
<artifactId>scala-compiler</artifactId>
<version>${scala.version}</version>
</dependency>
<dependency>
<groupId>org.scala-lang</groupId>
<artifactId>scala-reflect</artifactId>
<version>${scala.version}</version>
</dependency>
<dependency>
<groupId>org.scala-lang</groupId>
<artifactId>jline</artifactId>
<version>${scala.version}</version>
</dependency>
<dependency>
<groupId>org.scala-lang</groupId>
<artifactId>scala-library</artifactId>
<version>${scala.version}</version>
</dependency>
<dependency>
<groupId>org.scala-lang</groupId>
<artifactId>scala-actors</artifactId>
<version>${scala.version}</version>
</dependency>
<dependency>
<groupId>org.scala-lang</groupId>
<artifactId>scalap</artifactId>
<version>${scala.version}</version>
</dependency>
<dependency>
<groupId>org.scalatest</groupId>
<artifactId>scalatest_${scala.binary.version}</artifactId>
<version>2.2.0</version>
<scope>test</scope>
</dependency>
<dependency>
<groupId>org.scalamacros</groupId>
<artifactId>quasiquotes_${scala.binary.version}</artifactId>
<version>${scala.macros.version}</version>
</dependency>
<dependency>
<groupId>org.json4s</groupId>
<artifactId>json4s-native_2.10</artifactId>
<version>3.2.11</version>
</dependency>
<dependency>
<groupId>com.github.nscala-time</groupId>
<artifactId>nscala-time_2.10</artifactId>
<version>1.6.0</version>
</dependency>
<dependency>
<groupId>com.typesafe</groupId>
<artifactId>config</artifactId>
<version>1.2.1</version>
</dependency>
<dependency>
<groupId>com.typesafe.akka</groupId>
<artifactId>akka-actor_2.10</artifactId>
<version>${akka.version}</version>
</dependency>
<dependency>
<groupId>com.typesafe.akka</groupId>
<artifactId>akka-testkit_2.10</artifactId>
<version>${akka.version}</version>
</dependency>
<dependency>
<groupId>com.typesafe.akka</groupId>
<artifactId>akka-slf4j_2.10</artifactId>
<version>${akka.version}</version>
</dependency>
<dependency>
<groupId>io.spray</groupId>
<artifactId>spray-routing</artifactId>
<version>${spray.version}</version>
</dependency>
<dependency>
<groupId>io.spray</groupId>
<artifactId>spray-can</artifactId>
<version>${spray.version}</version>
</dependency>
<dependency>
<groupId>io.spray</groupId>
<artifactId>spray-httpx</artifactId>
<version>${spray.version}</version>
</dependency>
<dependency>
<groupId>io.spray</groupId>
<artifactId>spray-testkit</artifactId>
<version>${spray.version}</version>
</dependency>
<dependency>
<groupId>com.google.guava</groupId>
<artifactId>guava</artifactId>
<version>${guava.version}</version>
<scope>provided</scope>
</dependency>
<dependency>
<groupId>junit</groupId>
<artifactId>junit</artifactId>
<version>4.10</version>
<scope>test</scope>
</dependency>
</dependencies>
<build>
<pluginManagement>
<plugins>
<plugin>
<groupId>net.alchim31.maven</groupId>
<artifactId>scala-maven-plugin</artifactId>
<version>3.2.0</version>
<executions>
<execution>
<id>scala-compile-first</id>
<phase>process-resources</phase>
<goals>
<goal>compile</goal>
</goals>
</execution>
<execution>
<id>scala-test-compile-first</id>
<phase>process-test-resources</phase>
<goals>
<goal>testCompile</goal>
</goals>
</execution>
<execution>
<id>attach-scaladocs</id>
<phase>verify</phase>
<goals>
<goal>doc-jar</goal>
</goals>
</execution>
</executions>
<configuration>
<scalaVersion>${scala.version}</scalaVersion>
<recompileMode>incremental</recompileMode>
<useZincServer>true</useZincServer>
<args>
<arg>-unchecked</arg>
<arg>-deprecation</arg>
<arg>-feature</arg>
</args>
<jvmArgs>
<jvmArg>-Xms1024m</jvmArg>
<jvmArg>-Xmx1024m</jvmArg>
<jvmArg>-XX:PermSize=${PermGen}</jvmArg>
<jvmArg>-XX:MaxPermSize=${MaxPermGen}</jvmArg>
</jvmArgs>
<javacArgs>
<javacArg>-source</javacArg>
<javacArg>${java.version}</javacArg>
<javacArg>-target</javacArg>
<javacArg>${java.version}</javacArg>
</javacArgs>
<!-- The following plugin is required to use quasiquotes in Scala 2.10 and is used
by Spark SQL for code generation. -->
<compilerPlugins>
<compilerPlugin>
<groupId>org.scalamacros</groupId>
<artifactId>paradise_${scala.version}</artifactId>
<version>${scala.macros.version}</version>
</compilerPlugin>
</compilerPlugins>
</configuration>
</plugin>
</plugins>
</pluginManagement>
<plugins>
<plugin>
<groupId>net.alchim31.maven</groupId>
<artifactId>scala-maven-plugin</artifactId>
</plugin>
</plugins>
</build>
</project>
package org.apache.metadata;
/**
 * A typed bag of named attribute values; implemented by both the untyped
 * {@code Struct} and the storage-backed {@code TypedStruct}.
 */
public interface IStruct {
    /** Name of this instance's type. */
    String getTypeName();
    /** Returns the value of the named attribute. */
    Object get(String attrName) throws MetadataException;
    /** Sets the named attribute to the given value. */
    void set(String attrName, Object val) throws MetadataException;
}
package org.apache.metadata;
/**
 * Base checked exception for metadata/type-system failures.
 * Mirrors the standard {@link Exception} constructor set.
 */
public class MetadataException extends Exception {

    // Exception is Serializable; pin the serial form so recompilation does
    // not silently break deserialization of previously serialized instances.
    private static final long serialVersionUID = 1L;

    /** Creates an exception with no message and no cause. */
    public MetadataException() {
    }

    /** Creates an exception with the given detail message. */
    public MetadataException(String message) {
        super(message);
    }

    /** Creates an exception with the given message and underlying cause. */
    public MetadataException(String message, Throwable cause) {
        super(message, cause);
    }

    /** Wraps an underlying cause; the message is derived from the cause. */
    public MetadataException(Throwable cause) {
        super(cause);
    }

    /** Full-control constructor; see {@link Exception#Exception(String, Throwable, boolean, boolean)}. */
    public MetadataException(String message, Throwable cause, boolean enableSuppression, boolean writableStackTrace) {
        super(message, cause, enableSuppression, writableStackTrace);
    }
}
package org.apache.metadata;
import org.apache.metadata.storage.IRepository;
import org.apache.metadata.types.TypeSystem;
/**
 * Pairs a repository with a type system and exposes the pair through a
 * per-thread binding, so deep call stacks (e.g. type converters) can reach
 * the active repository without threading it through every signature.
 */
public class MetadataService {

    /** The service bound to the current thread, if any. */
    public static final ThreadLocal<MetadataService> currentSvc = new ThreadLocal<MetadataService>();

    final IRepository repo;
    final TypeSystem typeSystem;

    public MetadataService(IRepository repo, TypeSystem typeSystem) {
        this.typeSystem = typeSystem;
        this.repo = repo;
    }

    /** Binds the given service to the calling thread. */
    public static void setCurrentService(MetadataService svc) {
        currentSvc.set(svc);
    }

    /**
     * Returns the service bound to the calling thread.
     *
     * @throws MetadataException when no service has been bound.
     */
    public static MetadataService getCurrentService() throws MetadataException {
        MetadataService svc = currentSvc.get();
        if (svc != null) {
            return svc;
        }
        throw new MetadataException("No MetadataService associated with current thread");
    }

    /**
     * Returns the repository of the thread-bound service.
     *
     * @throws MetadataException when no service or repository is available.
     */
    public static IRepository getCurrentRepository() throws MetadataException {
        MetadataService svc = currentSvc.get();
        if (svc != null) {
            IRepository repository = svc.getRepository();
            if (repository != null) {
                return repository;
            }
        }
        throw new MetadataException("No Repository associated with current thread");
    }

    /**
     * Returns the type system of the thread-bound service.
     *
     * @throws MetadataException when no service or type system is available.
     */
    public static TypeSystem getCurrentTypeSystem() throws MetadataException {
        MetadataService svc = currentSvc.get();
        if (svc != null) {
            TypeSystem ts = svc.getTypeSystem();
            if (ts != null) {
                return ts;
            }
        }
        throw new MetadataException("No TypeSystem associated with current thread");
    }

    public IRepository getRepository() {
        return repo;
    }

    public TypeSystem getTypeSystem() {
        return typeSystem;
    }
}
package org.apache.metadata;
import java.util.HashMap;
import java.util.Map;
/**
 * Untyped, mutable implementation of {@link IStruct}: a type name plus a
 * free-form attribute map. No validation is performed on get/set; values are
 * checked only when converted by a StructType.
 */
public class Struct implements IStruct {

    public final String typeName;
    private final Map<String, Object> values;

    public Struct(String typeName) {
        this.typeName = typeName;
        this.values = new HashMap<String, Object>();
    }

    /**
     * @nopublic
     */
    public Struct(String typeName, Map<String, Object> initialValues) {
        this(typeName);
        this.values.putAll(initialValues);
    }

    @Override
    public String getTypeName() {
        return typeName;
    }

    /** Returns the stored value, or null when the attribute was never set. */
    @Override
    public Object get(String attrName) {
        return values.get(attrName);
    }

    /** Stores (or overwrites) the attribute value; no type checking here. */
    @Override
    public void set(String attrName, Object value) {
        values.put(attrName, value);
    }

    /**
     * @nopublic
     * @return the live backing map (not a copy).
     */
    public Map<String, Object> getValuesMap() {
        return values;
    }
}
package org.apache.metadata.storage;
import java.text.DateFormat;
/**
 * Storage backend contract: supplies the formats used to parse and print
 * temporal attribute values, plus the null policy for collection elements.
 */
public interface IRepository {
    /** Format used for date-only attribute values. */
    DateFormat getDateFormat();
    /** Format used for timestamp attribute values. */
    DateFormat getTimestampFormat();
    /** Whether null elements are permitted inside array/map attribute values. */
    boolean allowNullsInCollections();
}
package org.apache.metadata.storage;
/**
 * Immutable identifier of a class instance: a numeric id plus the class name.
 * An instance that has not been persisted yet carries the UNASSIGNED sentinel.
 */
public class Id {

    /** Sentinel id for an instance that has not been assigned an id yet. */
    public static final int UNASSIGNED = -1;

    public final int id;
    public final String className;

    public Id(int id, String className) {
        this.className = className;
        this.id = id;
    }

    /** Creates a not-yet-assigned id for the given class. */
    public Id(String className) {
        this(UNASSIGNED, className);
    }

    /** True while no concrete id has been assigned. */
    public boolean isUnassigned() {
        return UNASSIGNED == id;
    }
}
package org.apache.metadata.storage;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableMap;
import org.apache.metadata.IStruct;
import org.apache.metadata.MetadataException;
import org.apache.metadata.types.StructType;
import java.math.BigDecimal;
import java.math.BigInteger;
import java.util.Date;
/**
 * Column-oriented, typed struct instance. Rather than a name->value map, the
 * values live in per-kind arrays (one slot bank per primitive kind, plus
 * arrays/maps/structs); the owning StructType maps an attribute name to
 * (bank, position). All get/set/output logic is delegated to that StructType.
 */
public class TypedStruct implements IStruct {
    public final StructType dataType;
    // nullFlags[i] == true means attribute i is currently null; indexed by the
    // StructType's per-attribute null position, not by bank position.
    public final boolean nullFlags[];
    // One slot bank per value kind; a bank is null when the type has no
    // attribute of that kind (see StructType.createInstance).
    public final boolean[] bools;
    public final byte[] bytes;
    public final short[] shorts;
    public final int[] ints;
    public final long[] longs;
    public final float[] floats;
    public final double[] doubles;
    public final BigDecimal[] bigDecimals;
    public final BigInteger[] bigIntegers;
    public final Date[] dates;
    public final String[] strings;
    public final ImmutableList<Object>[] arrays;
    public final ImmutableMap<Object,Object>[] maps;
    public final TypedStruct[] structs;
    /**
     * Wires up the slot banks. Bank arrays are retained by reference (not
     * copied), and nullFlags is overwritten so every attribute starts out
     * null until explicitly set.
     */
    public TypedStruct(StructType dataType, boolean[] nullFlags, boolean[] bools, byte[] bytes, short[] shorts, int[] ints,
                       long[] longs, float[] floats, double[] doubles,
                       BigDecimal[] bigDecimals, BigInteger[] bigIntegers, Date[] dates, String[] strings,
                       ImmutableList<Object>[] arrays, ImmutableMap<Object, Object>[] maps, TypedStruct[] structs) {
        assert dataType != null;
        this.dataType = dataType;
        this.nullFlags = nullFlags;
        this.bools = bools;
        this.bytes = bytes;
        this.shorts = shorts;
        this.ints = ints;
        this.longs = longs;
        this.floats = floats;
        this.doubles = doubles;
        this.bigDecimals = bigDecimals;
        this.bigIntegers = bigIntegers;
        this.dates = dates;
        this.strings = strings;
        this.arrays = arrays;
        this.maps = maps;
        this.structs = structs;
        // Mark every attribute null initially, regardless of what the caller
        // passed in the nullFlags array.
        for(int i=0; i<nullFlags.length; i++) {
            nullFlags[i] = true;
        }
    }
    @Override
    public String getTypeName() {
        return dataType.getName();
    }
    /** Delegates attribute lookup to the owning StructType. */
    @Override
    public Object get(String attrName) throws MetadataException {
        return dataType.get(this, attrName);
    }
    /** Delegates attribute assignment (and conversion) to the owning StructType. */
    @Override
    public void set(String attrName, Object val) throws MetadataException {
        dataType.set(this, attrName, val);
    }
    @Override
    public String toString() {
        try {
            StringBuilder b = new StringBuilder();
            dataType.output(this, b, "");
            return b.toString();
        } catch(MetadataException me) {
            // toString cannot throw a checked exception; surface it unchecked.
            throw new RuntimeException(me);
        }
    }
}
package org.apache.metadata.storage.memory;
import org.apache.metadata.storage.IRepository;
import java.text.DateFormat;
import java.text.SimpleDateFormat;
/**
 * In-memory repository: ISO-style date/timestamp formats, no nulls inside
 * collections.
 */
public class MemRepository implements IRepository {

    /** Pattern for date-only values, e.g. "2014-11-01". */
    private static final String DATE_PATTERN = "yyyy-MM-dd";
    /** Pattern for timestamp values, e.g. "2014-11-01 12:30:00". */
    private static final String TIMESTAMP_PATTERN = "yyyy-MM-dd HH:mm:ss";

    /**
     * Returns a date formatter.
     * <p>
     * A fresh instance is created per call because {@link SimpleDateFormat}
     * is not thread-safe: the previous shared static instance could corrupt
     * parse/format results when used from multiple threads concurrently.
     */
    @Override
    public DateFormat getDateFormat() {
        return new SimpleDateFormat(DATE_PATTERN);
    }

    /** Returns a timestamp formatter; fresh per call for thread safety. */
    @Override
    public DateFormat getTimestampFormat() {
        return new SimpleDateFormat(TIMESTAMP_PATTERN);
    }

    /** Null elements are rejected inside collection-valued attributes. */
    @Override
    public boolean allowNullsInCollections() {
        return false;
    }
}
package org.apache.metadata.types;
import org.apache.metadata.MetadataException;
import java.io.IOException;
/**
 * Skeleton implementation shared by the built-in types: common null handling
 * and a default plain-text rendering.
 */
abstract class AbstractDataType<T> implements IDataType<T> {
    // A null value is only legal when the multiplicity's lower bound is zero;
    // otherwise conversion fails with a NullConversionException.
    protected T convertNull( Multiplicity m) throws MetadataException {
        if (!m.nullAllowed() ) {
            throw new ValueConversionException.NullConversionException(m);
        }
        return null;
    }
    // Appends prefix + val to buf, rewrapping Appendable's checked IOException
    // as a MetadataException so callers see a single exception type.
    protected void outputVal(String val, Appendable buf, String prefix) throws MetadataException {
        try {
            buf.append(prefix).append(val);
        } catch(IOException ie) {
            throw new MetadataException(ie);
        }
    }
    // Default rendering: the value's own toString(), or "<null>".
    @Override
    public void output(T val, Appendable buf, String prefix) throws MetadataException {
        outputVal(val == null ? "<null>" : val.toString(), buf, prefix);
    }
}
package org.apache.metadata.types;
/**
 * Immutable, declarative description of one struct/class attribute: its name,
 * the (unresolved) name of its data type, its multiplicity, and reference
 * metadata. Resolved into an AttributeInfo by the type system.
 */
public final class AttributeDefinition {

    public final String name;
    public final String dataTypeName;
    public final Multiplicity multiplicity;
    public final boolean isComposite;
    /**
     * If this is a reference attribute, then the name of the attribute on the Class
     * that this refers to.
     */
    public final String reverseAttributeName;

    public AttributeDefinition(String name, String dataTypeName, Multiplicity multiplicity,
                               boolean isComposite, String reverseAttributeName) {
        this.reverseAttributeName = reverseAttributeName;
        this.isComposite = isComposite;
        this.multiplicity = multiplicity;
        this.dataTypeName = dataTypeName;
        this.name = name;
    }
}
package org.apache.metadata.types;
import org.apache.metadata.MetadataException;
/**
 * Resolved (runtime) form of an AttributeDefinition: the dataTypeName has
 * been looked up in the TypeSystem and replaced by the actual IDataType.
 */
public class AttributeInfo {
    public final String name;
    // Deliberately non-final: setDataType lets the type system rebind it —
    // presumably to support recursive type definitions (cf. StructType's
    // recursive-struct constructor); confirm against TypeSystem.
    private IDataType dataType;
    public final Multiplicity multiplicity;
    public final boolean isComposite;
    /**
     * If this is a reference attribute, then the name of the attribute on the Class
     * that this refers to.
     */
    public final String reverseAttributeName;
    AttributeInfo(TypeSystem t, AttributeDefinition def) throws MetadataException {
        this.name = def.name;
        // May throw if the named type is not registered with the type system.
        this.dataType = t.getDataType(def.dataTypeName);
        this.multiplicity = def.multiplicity;
        this.isComposite = def.isComposite;
        this.reverseAttributeName = def.reverseAttributeName;
    }
    /** The resolved data type of this attribute. */
    public IDataType dataType() {
        return dataType;
    }
    // Package-private on purpose: only the type system should rebind the type.
    void setDataType(IDataType dT) {
        dataType = dT;
    }
}
package org.apache.metadata.types;
import com.google.common.collect.ImmutableCollection;
import com.google.common.collect.ImmutableCollection.Builder;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableMap;
import com.google.common.collect.ImmutableSet;
import org.apache.metadata.storage.IRepository;
import org.apache.metadata.MetadataException;
import org.apache.metadata.MetadataService;
import java.math.BigDecimal;
import java.math.BigInteger;
import java.text.ParseException;
import java.util.*;
/**
 * Registry of the built-in data types: one singleton per primitive type plus
 * the parameterized array and map types. Each type knows how to convert an
 * arbitrary input value into its canonical representation.
 */
public class DataTypes {

    /** Broad classification of a type; drives storage layout and dispatch. */
    public static enum TypeCategory {
        PRIMITIVE,
        ARRAY,
        MAP,
        STRUCT,
        TRAIT,
        CLASS;
    };

    /** Base for all single-valued built-in types. */
    static abstract class PrimitiveType<T> extends AbstractDataType<T> {
        @Override
        public TypeCategory getTypeCategory() {
            return TypeCategory.PRIMITIVE;
        }
    }

    // The singletons below are final: they are shared registry instances and
    // must never be reassigned (they were previously mutable public statics).

    public static final BooleanType BOOLEAN_TYPE = new BooleanType();

    public static class BooleanType extends PrimitiveType<Boolean> {
        private static final String name = "boolean".intern();
        private BooleanType() {}
        @Override
        public String getName() {
            return name;
        }
        /** Accepts Boolean, String (parsed) or Number (non-zero is true). */
        @Override
        public Boolean convert(Object val, Multiplicity m) throws MetadataException {
            if ( val != null ) {
                if ( val instanceof Boolean) {
                    return (Boolean)val;
                } else if ( val instanceof String) {
                    return Boolean.parseBoolean((String)val);
                } else if ( val instanceof Number ) {
                    return ((Number)val).intValue() != 0;
                } else {
                    throw new ValueConversionException(this, val);
                }
            }
            return convertNull(m);
        }
    }

    public static final ByteType BYTE_TYPE = new ByteType();

    public static class ByteType extends PrimitiveType<Byte> {
        private static final String name = "byte".intern();
        private ByteType() {}
        @Override
        public String getName() {
            return name;
        }
        /** Accepts Byte, String (parsed) or Number (narrowed). */
        @Override
        public Byte convert(Object val, Multiplicity m) throws MetadataException {
            if ( val != null ) {
                if ( val instanceof Byte) {
                    return (Byte) val;
                } else if ( val instanceof String) {
                    return Byte.parseByte((String) val);
                } else if ( val instanceof Number ) {
                    return ((Number)val).byteValue();
                } else {
                    throw new ValueConversionException(this, val);
                }
            }
            return convertNull(m);
        }
    }

    public static final ShortType SHORT_TYPE = new ShortType();

    public static class ShortType extends PrimitiveType<Short> {
        private static final String name = "short".intern();
        private ShortType() {}
        @Override
        public String getName() {
            return name;
        }
        /** Accepts Short, String (parsed) or Number (narrowed). */
        @Override
        public Short convert(Object val, Multiplicity m) throws MetadataException {
            if ( val != null ) {
                if ( val instanceof Short) {
                    return (Short) val;
                } else if ( val instanceof String) {
                    return Short.parseShort((String) val);
                } else if ( val instanceof Number ) {
                    return ((Number)val).shortValue();
                } else {
                    throw new ValueConversionException(this, val);
                }
            }
            return convertNull(m);
        }
    }

    public static final IntType INT_TYPE = new IntType();

    public static class IntType extends PrimitiveType<Integer> {
        private static final String name = "int".intern();
        private IntType() {}
        @Override
        public String getName() {
            return name;
        }
        /** Accepts Integer, String (parsed) or Number (narrowed). */
        @Override
        public Integer convert(Object val, Multiplicity m) throws MetadataException {
            if ( val != null ) {
                if ( val instanceof Integer) {
                    return (Integer) val;
                } else if ( val instanceof String) {
                    return Integer.parseInt((String) val);
                } else if ( val instanceof Number ) {
                    return ((Number)val).intValue();
                } else {
                    throw new ValueConversionException(this, val);
                }
            }
            return convertNull(m);
        }
    }

    public static final LongType LONG_TYPE = new LongType();

    public static class LongType extends PrimitiveType<Long> {
        private static final String name = "long".intern();
        private LongType() {}
        @Override
        public String getName() {
            return name;
        }
        /** Accepts Long, String (parsed) or Number (widened/narrowed). */
        @Override
        public Long convert(Object val, Multiplicity m) throws MetadataException {
            if ( val != null ) {
                if ( val instanceof Long) {
                    return (Long) val;
                } else if ( val instanceof String) {
                    return Long.parseLong((String) val);
                } else if ( val instanceof Number ) {
                    return ((Number)val).longValue();
                } else {
                    throw new ValueConversionException(this, val);
                }
            }
            return convertNull(m);
        }
    }

    public static final FloatType FLOAT_TYPE = new FloatType();

    public static class FloatType extends PrimitiveType<Float> {
        private static final String name = "float".intern();
        private FloatType() {}
        @Override
        public String getName() {
            return name;
        }
        /** Accepts Float, String (parsed) or Number (converted). */
        @Override
        public Float convert(Object val, Multiplicity m) throws MetadataException {
            if ( val != null ) {
                if ( val instanceof Float) {
                    return (Float) val;
                } else if ( val instanceof String) {
                    return Float.parseFloat((String) val);
                } else if ( val instanceof Number ) {
                    return ((Number)val).floatValue();
                } else {
                    throw new ValueConversionException(this, val);
                }
            }
            return convertNull(m);
        }
    }

    public static final DoubleType DOUBLE_TYPE = new DoubleType();

    public static class DoubleType extends PrimitiveType<Double> {
        private static final String name = "double".intern();
        private DoubleType() {}
        @Override
        public String getName() {
            return name;
        }
        /** Accepts Double, String (parsed) or Number (converted). */
        @Override
        public Double convert(Object val, Multiplicity m) throws MetadataException {
            if ( val != null ) {
                if ( val instanceof Double) {
                    return (Double) val;
                } else if ( val instanceof String) {
                    return Double.parseDouble((String) val);
                } else if ( val instanceof Number ) {
                    return ((Number)val).doubleValue();
                } else {
                    throw new ValueConversionException(this, val);
                }
            }
            return convertNull(m);
        }
    }

    public static final BigIntegerType BIGINTEGER_TYPE = new BigIntegerType();

    public static class BigIntegerType extends PrimitiveType<BigInteger> {
        private static final String name = "biginteger".intern();
        private BigIntegerType() {}
        @Override
        public String getName() {
            return name;
        }
        /** Accepts BigInteger, String (parsed), BigDecimal (truncated) or Number. */
        @Override
        public BigInteger convert(Object val, Multiplicity m) throws MetadataException {
            if ( val != null ) {
                if ( val instanceof BigInteger) {
                    return (BigInteger) val;
                } else if ( val instanceof String) {
                    try {
                        return new BigInteger((String) val);
                    } catch(NumberFormatException ne) {
                        throw new ValueConversionException(this, val, ne);
                    }
                } else if ( val instanceof Number ) {
                    return BigInteger.valueOf(((Number) val).longValue());
                } else if ( val instanceof BigDecimal) {
                    // Note: BigDecimal also matches the Number branch above,
                    // so this branch is only a safety net.
                    return ((BigDecimal)val).toBigInteger();
                } else {
                    throw new ValueConversionException(this, val);
                }
            }
            return convertNull(m);
        }
    }

    public static final BigDecimalType BIGDECIMAL_TYPE = new BigDecimalType();

    public static class BigDecimalType extends PrimitiveType<BigDecimal> {
        private static final String name = "bigdecimal".intern();
        private BigDecimalType() {}
        @Override
        public String getName() {
            return name;
        }
        /** Accepts BigDecimal, String (parsed), BigInteger or Number. */
        @Override
        public BigDecimal convert(Object val, Multiplicity m) throws MetadataException {
            if ( val != null ) {
                if ( val instanceof BigDecimal) {
                    return (BigDecimal) val;
                } else if ( val instanceof String) {
                    try {
                        return new BigDecimal((String) val);
                    } catch(NumberFormatException ne) {
                        throw new ValueConversionException(this, val, ne);
                    }
                } else if ( val instanceof Number ) {
                    return new BigDecimal(((Number)val).doubleValue());
                } else if ( val instanceof BigInteger) {
                    // Safety net: BigInteger is also a Number, handled above.
                    return new BigDecimal((BigInteger)val);
                } else {
                    throw new ValueConversionException(this, val);
                }
            }
            return convertNull(m);
        }
    }

    public static final DateType DATE_TYPE = new DateType();

    public static class DateType extends PrimitiveType<Date> {
        private static final String name = "date".intern();
        private DateType() {}
        @Override
        public String getName() {
            return name;
        }
        /**
         * Accepts Date, String (parsed with the current repository's date
         * format — requires a thread-bound MetadataService) or Number (epoch
         * millis).
         */
        @Override
        public Date convert(Object val, Multiplicity m) throws MetadataException {
            if ( val != null ) {
                if ( val instanceof Date) {
                    return (Date) val;
                } else if ( val instanceof String) {
                    try {
                        return MetadataService.getCurrentRepository().getDateFormat().parse((String)val);
                    } catch(ParseException ne) {
                        throw new ValueConversionException(this, val, ne);
                    }
                } else if ( val instanceof Number ) {
                    return new Date(((Number)val).longValue());
                } else {
                    throw new ValueConversionException(this, val);
                }
            }
            return convertNull(m);
        }
    }

    public static final StringType STRING_TYPE = new StringType();

    public static class StringType extends PrimitiveType<String> {
        private static final String name = "string".intern();
        private StringType() {}
        @Override
        public String getName() {
            return name;
        }
        /** Any non-null value converts via toString(). */
        @Override
        public String convert(Object val, Multiplicity m) throws MetadataException {
            if ( val != null ) {
                return val.toString();
            }
            return convertNull(m);
        }
    }

    // Name syntax for parameterized types, e.g. "array<int>", "map<string,int>".
    static final String ARRAY_TYPE_PREFIX = "array<";
    static final String ARRAY_TYPE_SUFFIX = ">";

    /** Homogeneous collection type; unique multiplicities build a Set, others a List. */
    public static class ArrayType extends AbstractDataType<ImmutableCollection<?>> {
        private final IDataType elemType;
        private final String nm;
        public ArrayType(IDataType elemType) {
            assert elemType != null;
            this.elemType = elemType;
            this.nm = String.format("%s%s%s", ARRAY_TYPE_PREFIX, elemType.getName(), ARRAY_TYPE_SUFFIX);
        }
        public IDataType getElemType() {
            return elemType;
        }
        @Override
        public String getName() {
            return nm;
        }
        /**
         * Converts a Collection/Iterable/Iterator element-wise; any other
         * non-null value is wrapped as a single-element list. Requires a
         * thread-bound MetadataService (its repository decides whether null
         * elements are allowed).
         */
        @Override
        public ImmutableCollection<?> convert(Object val, Multiplicity m) throws MetadataException {
            IRepository r = MetadataService.getCurrentRepository();
            if ( val != null ) {
                Iterator it = null;
                if ( val instanceof Collection ) {
                    it = ((Collection)val).iterator();
                } else if ( val instanceof Iterable ) {
                    it = ((Iterable)val).iterator();
                } else if ( val instanceof Iterator) {
                    it = (Iterator)val;
                }
                if ( it != null ) {
                    // Builder<Object> (not Builder<?>): a wildcard-typed builder
                    // cannot accept the Object produced by the raw elemType.
                    ImmutableCollection.Builder<Object> b =
                            m.isUnique ? ImmutableSet.<Object>builder() : ImmutableList.<Object>builder();
                    while (it.hasNext() ) {
                        b.add(elemType.convert(it.next(),
                                r.allowNullsInCollections() ? Multiplicity.OPTIONAL : Multiplicity.REQUIRED));
                    }
                    return b.build();
                }
                else {
                    try {
                        return ImmutableList.of(elemType.convert(val,
                                r.allowNullsInCollections() ? Multiplicity.OPTIONAL : Multiplicity.REQUIRED));
                    } catch(Exception e) {
                        throw new ValueConversionException(this, val, e);
                    }
                }
            }
            // Same null policy as every other type (inherited helper).
            return convertNull(m);
        }
        @Override
        public TypeCategory getTypeCategory() {
            return TypeCategory.ARRAY;
        }
    }

    static final String MAP_TYPE_PREFIX = "map<";
    static final String MAP_TYPE_SUFFIX = ">";

    /** Key/value collection type; both sides are converted entry-wise. */
    public static class MapType extends AbstractDataType<ImmutableMap<?, ?>> {
        private final IDataType keyType;
        private final IDataType valueType;
        private final String nm;
        public MapType(IDataType keyType, IDataType valueType) {
            assert keyType != null;
            assert valueType != null;
            this.keyType = keyType;
            this.valueType = valueType;
            this.nm = String.format("%s%s,%s%s", MAP_TYPE_PREFIX,
                    keyType.getName(), valueType.getName(), MAP_TYPE_SUFFIX);
        }
        public IDataType getKeyType() {
            return keyType;
        }
        public IDataType getValueType() {
            return valueType;
        }
        @Override
        public String getName() {
            return nm;
        }
        /**
         * Converts a java.util.Map entry-wise; any other non-null value fails.
         * Requires a thread-bound MetadataService (null-element policy).
         */
        @Override
        public ImmutableMap<?, ?> convert(Object val, Multiplicity m) throws MetadataException {
            IRepository r = MetadataService.getCurrentRepository();
            if ( val != null ) {
                Iterator<Map.Entry> it = null;
                if ( Map.class.isAssignableFrom(val.getClass())) {
                    it = ((Map)val).entrySet().iterator();
                    ImmutableMap.Builder b = ImmutableMap.builder();
                    while (it.hasNext() ) {
                        Map.Entry e = it.next();
                        b.put(keyType.convert(e.getKey(),
                                        r.allowNullsInCollections() ? Multiplicity.OPTIONAL : Multiplicity.REQUIRED),
                                valueType.convert(e.getValue(),
                                        r.allowNullsInCollections() ? Multiplicity.OPTIONAL : Multiplicity.REQUIRED));
                    }
                    return b.build();
                }
                else {
                    throw new ValueConversionException(this, val);
                }
            }
            // Same null policy as every other type (inherited helper).
            return convertNull(m);
        }
        @Override
        public TypeCategory getTypeCategory() {
            return TypeCategory.MAP;
        }
    }
}
package org.apache.metadata.types;
import org.apache.metadata.types.DataTypes.TypeCategory;
import org.apache.metadata.MetadataException;
/**
 * Contract every data type implements: a unique name, conversion of arbitrary
 * input into the type's canonical representation, a category, and text output.
 */
public interface IDataType<T> {
    /** Unique name of this type (used for registry lookup). */
    String getName();
    /** Converts val to this type, enforcing the given multiplicity's null rule. */
    T convert(Object val, Multiplicity m) throws MetadataException;
    /** Broad classification (primitive, array, map, struct, ...). */
    TypeCategory getTypeCategory();
    /** Appends a text rendering of val (preceded by prefix) to buf. */
    void output(T val, Appendable buf, String prefix) throws MetadataException;
}
package org.apache.metadata.types;
/**
 * Immutable occurrence bounds for an attribute value: [lower, upper] plus a
 * uniqueness flag for collection values. Only the four canonical instances
 * below can exist (the constructor is private).
 */
public final class Multiplicity {

    /** Zero or one occurrence: null is permitted. */
    public static final Multiplicity OPTIONAL = new Multiplicity(0, 1, false);
    /** Exactly one occurrence: null is rejected. */
    public static final Multiplicity REQUIRED = new Multiplicity(1, 1, false);
    /** One or more occurrences, duplicates allowed. */
    public static final Multiplicity COLLECTION = new Multiplicity(1, Integer.MAX_VALUE, false);
    /** One or more occurrences, duplicates collapsed. */
    public static final Multiplicity SET = new Multiplicity(1, Integer.MAX_VALUE, true);

    public final int lower;
    public final int upper;
    public final boolean isUnique;

    private Multiplicity(int lower, int upper, boolean isUnique) {
        assert lower >= 0;
        assert upper >= 1;
        assert upper >= lower;
        this.lower = lower;
        this.upper = upper;
        this.isUnique = isUnique;
    }

    /** A null value is acceptable exactly when the lower bound is zero. */
    public boolean nullAllowed() {
        return lower == 0;
    }

    @Override
    public String toString() {
        StringBuilder sb = new StringBuilder("Multiplicity{");
        sb.append("lower=").append(lower);
        sb.append(", upper=").append(upper);
        sb.append(", isUnique=").append(isUnique);
        sb.append('}');
        return sb.toString();
    }
}
package org.apache.metadata.types;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableMap;
import org.apache.metadata.IStruct;
import org.apache.metadata.storage.IRepository;
import org.apache.metadata.MetadataException;
import org.apache.metadata.MetadataService;
import org.apache.metadata.Struct;
import org.apache.metadata.storage.TypedStruct;
import java.math.BigDecimal;
import java.math.BigInteger;
import java.util.*;
public class StructType extends AbstractDataType<IStruct> {
public final String name;
public final Map<String,AttributeInfo> fields;
private final Map<String, Integer> fieldPos;
private final Map<String, Integer> fieldNullPos;
public final int numBools;
public final int numBytes;
public final int numShorts;
public final int numInts;
public final int numLongs;
public final int numFloats;
public final int numDoubles;
public final int numBigInts;
public final int numBigDecimals;
public final int numDates;
public final int numStrings;
public final int numArrays;
public final int numMaps;
public final int numStructs;
/**
* Used when creating a StructType, to support recursive Structs.
*/
StructType(String name) {
this.name = name;
this.fields = new LinkedHashMap<String, AttributeInfo>();
this.fieldPos = new HashMap<String, Integer>();
fieldNullPos = new HashMap<String, Integer>();
numBools = 0;
numBytes = 0;
numShorts = 0;
numInts = 0;
numLongs = 0;
numFloats = 0;
numDoubles = 0;
numBigInts = 0;
numBigDecimals = 0;
numDates = 0;
numStrings = 0;
numArrays = 0;
numMaps = 0;
numStructs = 0;
}
StructType(String name, AttributeInfo... fields) throws MetadataException {
this.name = name;
this.fields = new LinkedHashMap<String, AttributeInfo>();
this.fieldPos = new HashMap<String, Integer>();
fieldNullPos = new HashMap<String, Integer>();
int numBools = 0;
int numBytes = 0;
int numShorts = 0;
int numInts = 0;
int numLongs = 0;
int numFloats = 0;
int numDoubles = 0;
int numBigInts = 0;
int numBigDecimals = 0;
int numDates = 0;
int numStrings = 0;
int numArrays = 0;
int numMaps = 0;
int numStructs = 0;
for(AttributeInfo i : fields) {
if ( this.fields.containsKey(i.name) ) {
throw new MetadataException(
String.format("Struct defintion cannot contain multiple fields with the same name %s", i.name));
}
this.fields.put(i.name, i);
this.fieldNullPos.put(i.name, fieldNullPos.size());
if ( i.dataType() == DataTypes.BOOLEAN_TYPE ) {
fieldPos.put(i.name, numBools);
numBools++;
} else if ( i.dataType() == DataTypes.BYTE_TYPE ) {
fieldPos.put(i.name, numBytes);
numBytes++;
} else if ( i.dataType() == DataTypes.SHORT_TYPE ) {
fieldPos.put(i.name, numShorts);
numShorts++;
} else if ( i.dataType() == DataTypes.INT_TYPE ) {
fieldPos.put(i.name, numInts);
numInts++;
} else if ( i.dataType() == DataTypes.LONG_TYPE ) {
fieldPos.put(i.name, numLongs);
numLongs++;
} else if ( i.dataType() == DataTypes.FLOAT_TYPE ) {
fieldPos.put(i.name, numFloats);
numFloats++;
} else if ( i.dataType() == DataTypes.DOUBLE_TYPE ) {
fieldPos.put(i.name, numDoubles);
numDoubles++;
} else if ( i.dataType() == DataTypes.BIGINTEGER_TYPE ) {
fieldPos.put(i.name, numBigInts);
numBigInts++;
} else if ( i.dataType() == DataTypes.BIGDECIMAL_TYPE ) {
fieldPos.put(i.name, numBigDecimals);
numBigDecimals++;
} else if ( i.dataType() == DataTypes.DATE_TYPE ) {
fieldPos.put(i.name, numDates);
numDates++;
} else if ( i.dataType() == DataTypes.STRING_TYPE ) {
fieldPos.put(i.name, numStrings);
numStrings++;
} else if ( i.dataType().getTypeCategory() == DataTypes.TypeCategory.ARRAY ) {
fieldPos.put(i.name, numArrays);
numArrays++;
} else if ( i.dataType().getTypeCategory() == DataTypes.TypeCategory.MAP ) {
fieldPos.put(i.name, numMaps);
numMaps++;
} else if ( i.dataType().getTypeCategory() == DataTypes.TypeCategory.STRUCT ) {
fieldPos.put(i.name, numStructs);
numStructs++;
} else {
throw new MetadataException(String.format("Unknown datatype %s", i.dataType()));
}
}
this.numBools = numBools;
this.numBytes = numBytes;
this.numShorts = numShorts;
this.numInts = numInts;
this.numLongs = numLongs;
this.numFloats = numFloats;
this.numDoubles = numDoubles;
this.numBigInts = numBigInts;
this.numBigDecimals = numBigDecimals;
this.numDates = numDates;
this.numStrings = numStrings;
this.numArrays = numArrays;
this.numMaps = numMaps;
this.numStructs = numStructs;
}
/**
 * Returns the name of this struct type.
 */
@Override
public String getName() {
    return this.name;
}
/**
 * Converts {@code val} into a {@link TypedStruct} of this struct type.
 *
 * Accepted inputs: an untyped {@link Struct} whose typeName matches this
 * type's name (each attribute is converted via {@link #set}); or a
 * {@link TypedStruct} already bound to this exact type instance (returned
 * as-is). A null input is legal only when the multiplicity allows it.
 *
 * @param val the raw value to convert; may be null.
 * @param m   multiplicity constraint governing whether null is acceptable.
 * @return the typed struct, or null when val is null and m allows null.
 * @throws ValueConversionException if the value's type doesn't match or an
 *         attribute fails to convert.
 */
@Override
public TypedStruct convert(Object val, Multiplicity m) throws MetadataException {
    if (val != null) {
        if (val instanceof Struct) {
            Struct s = (Struct) val;
            // BUG FIX: compare type names by value; '!=' on String is reference
            // equality and fails for any non-interned (e.g. runtime-built) name.
            if (!name.equals(s.typeName)) {
                throw new ValueConversionException(this, val);
            }
            TypedStruct ts = createInstance();
            for (AttributeInfo i : fields.values()) {
                Object aVal = s.get(i.name);
                try {
                    set(ts, i.name, aVal);
                } catch (ValueConversionException ve) {
                    // Wrap so the error reports the enclosing struct value too.
                    throw new ValueConversionException(this, val, ve);
                }
            }
            return ts;
        } else if (val instanceof TypedStruct && ((TypedStruct) val).dataType == this) {
            // Already typed against this very type instance: pass through.
            return (TypedStruct) val;
        } else {
            throw new ValueConversionException(this, val);
        }
    }
    if (!m.nullAllowed()) {
        throw new ValueConversionException.NullConversionException(m);
    }
    return null;
}
/** Struct types always report the STRUCT category. */
@Override
public DataTypes.TypeCategory getTypeCategory() {
return DataTypes.TypeCategory.STRUCT;
}
/**
 * Allocates an empty {@link TypedStruct} for this type. Each datatype kind
 * gets its own backing array sized by the counts computed when fields were
 * positioned; a kind with zero fields gets null rather than an empty array.
 * The first boolean[] holds the per-field null flags (one slot per field).
 *
 * NOTE(review): bigDecimals is passed before bigIntegers here, while every
 * other per-kind listing in this class orders BigInteger before BigDecimal —
 * confirm this matches the TypedStruct constructor's parameter order.
 */
public TypedStruct createInstance() {
return new TypedStruct(this,
new boolean[fields.size()],
numBools == 0 ? null : new boolean[numBools],
numBytes == 0 ? null : new byte[numBytes],
numShorts == 0 ? null : new short[numShorts],
numInts == 0 ? null : new int[numInts],
numLongs == 0 ? null : new long[numLongs],
numFloats == 0 ? null : new float[numFloats],
numDoubles == 0 ? null : new double[numDoubles],
numBigDecimals == 0 ? null : new BigDecimal[numBigDecimals],
numBigInts == 0 ? null : new BigInteger[numBigInts],
numDates == 0 ? null : new Date[numDates],
numStrings == 0 ? null : new String[numStrings],
numArrays == 0 ? null : new ImmutableList[numArrays],
numMaps == 0 ? null : new ImmutableMap[numMaps],
numStructs == 0 ? null : new TypedStruct[numStructs]);
}
/**
 * Sets attribute {@code attrName} on {@code s} to {@code val}, first
 * converting the value through the attribute's datatype. A converted value
 * of null sets the field's null flag; otherwise the value is stored in the
 * backing array for its datatype kind, at the field's precomputed position.
 *
 * @throws MetadataException for an unknown attribute, a failed conversion
 *         (multiplicity violations included), or an unrecognized datatype.
 */
public void set(TypedStruct s, String attrName, Object val) throws MetadataException {
AttributeInfo i = fields.get(attrName);
if ( i == null ) {
throw new ValueConversionException(this, val, "Unknown field " + attrName);
}
int pos = fieldPos.get(attrName);
int nullPos = fieldNullPos.get(attrName);
// Conversion enforces the attribute's multiplicity (e.g. REQUIRED rejects null).
Object cVal = i.dataType().convert(val, i.multiplicity);
if ( cVal == null ) {
s.nullFlags[nullPos] = true;
return;
}
s.nullFlags[nullPos] = false;
// Dispatch on the attribute's datatype to the matching typed backing array.
if ( i.dataType() == DataTypes.BOOLEAN_TYPE ) {
s.bools[pos] = ((Boolean)cVal).booleanValue();
} else if ( i.dataType() == DataTypes.BYTE_TYPE ) {
s.bytes[pos] = ((Byte)cVal).byteValue();
} else if ( i.dataType() == DataTypes.SHORT_TYPE ) {
s.shorts[pos] = ((Short)cVal).shortValue();
} else if ( i.dataType() == DataTypes.INT_TYPE ) {
s.ints[pos] = ((Integer)cVal).intValue();
} else if ( i.dataType() == DataTypes.LONG_TYPE ) {
s.longs[pos] = ((Long)cVal).longValue();
} else if ( i.dataType() == DataTypes.FLOAT_TYPE ) {
s.floats[pos] = ((Float)cVal).floatValue();
} else if ( i.dataType() == DataTypes.DOUBLE_TYPE ) {
s.doubles[pos] = ((Double)cVal).doubleValue();
} else if ( i.dataType() == DataTypes.BIGINTEGER_TYPE ) {
s.bigIntegers[pos] = (BigInteger) cVal;
} else if ( i.dataType() == DataTypes.BIGDECIMAL_TYPE ) {
s.bigDecimals[pos] = (BigDecimal) cVal;
} else if ( i.dataType() == DataTypes.DATE_TYPE ) {
s.dates[pos] = (Date) cVal;
} else if ( i.dataType() == DataTypes.STRING_TYPE ) {
s.strings[pos] = (String) cVal;
} else if ( i.dataType().getTypeCategory() == DataTypes.TypeCategory.ARRAY ) {
s.arrays[pos] = (ImmutableList) cVal;
} else if ( i.dataType().getTypeCategory() == DataTypes.TypeCategory.MAP ) {
s.maps[pos] = (ImmutableMap) cVal;
} else if ( i.dataType().getTypeCategory() == DataTypes.TypeCategory.STRUCT ) {
s.structs[pos] = (TypedStruct) cVal;
} else {
throw new MetadataException(String.format("Unknown datatype %s", i.dataType()));
}
}
/**
 * Reads attribute {@code attrName} from {@code s}. Returns null when the
 * field's null flag is set; otherwise reads from the backing array that
 * matches the attribute's datatype kind (primitives auto-box on return).
 *
 * @throws MetadataException for an unknown attribute or datatype.
 */
public Object get(TypedStruct s, String attrName) throws MetadataException {
AttributeInfo i = fields.get(attrName);
if ( i == null ) {
throw new MetadataException(String.format("Unknown field %s for Struct %s", attrName, this.getName()));
}
int pos = fieldPos.get(attrName);
int nullPos = fieldNullPos.get(attrName);
if ( s.nullFlags[nullPos]) {
return null;
}
// Mirror of set(): dispatch on datatype to the matching backing array.
if ( i.dataType() == DataTypes.BOOLEAN_TYPE ) {
return s.bools[pos];
} else if ( i.dataType() == DataTypes.BYTE_TYPE ) {
return s.bytes[pos];
} else if ( i.dataType() == DataTypes.SHORT_TYPE ) {
return s.shorts[pos];
} else if ( i.dataType() == DataTypes.INT_TYPE ) {
return s.ints[pos];
} else if ( i.dataType() == DataTypes.LONG_TYPE ) {
return s.longs[pos];
} else if ( i.dataType() == DataTypes.FLOAT_TYPE ) {
return s.floats[pos];
} else if ( i.dataType() == DataTypes.DOUBLE_TYPE ) {
return s.doubles[pos];
} else if ( i.dataType() == DataTypes.BIGINTEGER_TYPE ) {
return s.bigIntegers[pos];
} else if ( i.dataType() == DataTypes.BIGDECIMAL_TYPE ) {
return s.bigDecimals[pos];
} else if ( i.dataType() == DataTypes.DATE_TYPE ) {
return s.dates[pos];
} else if ( i.dataType() == DataTypes.STRING_TYPE ) {
return s.strings[pos];
} else if ( i.dataType().getTypeCategory() == DataTypes.TypeCategory.ARRAY ) {
return s.arrays[pos];
} else if ( i.dataType().getTypeCategory() == DataTypes.TypeCategory.MAP ) {
return s.maps[pos];
} else if ( i.dataType().getTypeCategory() == DataTypes.TypeCategory.STRUCT ) {
return s.structs[pos];
} else {
throw new MetadataException(String.format("Unknown datatype %s", i.dataType()));
}
}
/**
 * Pretty-prints a struct value to {@code buf} as a brace-delimited block,
 * one "name : value" line per attribute, indented one tab past prefix.
 */
@Override
public void output(IStruct s, Appendable buf, String prefix) throws MetadataException {
    outputVal("{", buf, prefix);
    if (s == null) {
        // Null struct renders as "{<null>" with no closing brace (original behavior).
        outputVal("<null>\n", buf, "");
        return;
    }
    outputVal("\n", buf, "");
    final String attrPrefix = prefix + "\t";
    for (AttributeInfo attr : fields.values()) {
        final Object attrVal = s.get(attr.name);
        outputVal(attr.name + " : ", buf, attrPrefix);
        attr.dataType().output(attrVal, buf, "");
        outputVal("\n", buf, "");
    }
    outputVal("\n}\n", buf, "");
}
}
package org.apache.metadata.types;
import com.google.common.collect.ImmutableList;
import org.apache.metadata.MetadataException;
import java.util.HashMap;
import java.util.Map;
/**
 * Registry of all known datatypes, keyed by type name. The primitive types
 * are registered on construction; struct, array and map types are added via
 * the define* methods.
 */
public class TypeSystem {

    private Map<String, IDataType> types;

    public TypeSystem() throws MetadataException {
        types = new HashMap<String, IDataType>();
        registerPrimitiveTypes();
    }

    /** Names of every registered type, as an immutable snapshot. */
    public ImmutableList<String> getTypeNames() {
        return ImmutableList.copyOf(types.keySet());
    }

    private void registerPrimitiveTypes() {
        types.put(DataTypes.BOOLEAN_TYPE.getName(), DataTypes.BOOLEAN_TYPE);
        types.put(DataTypes.BYTE_TYPE.getName(), DataTypes.BYTE_TYPE);
        types.put(DataTypes.SHORT_TYPE.getName(), DataTypes.SHORT_TYPE);
        types.put(DataTypes.INT_TYPE.getName(), DataTypes.INT_TYPE);
        types.put(DataTypes.LONG_TYPE.getName(), DataTypes.LONG_TYPE);
        types.put(DataTypes.FLOAT_TYPE.getName(), DataTypes.FLOAT_TYPE);
        types.put(DataTypes.DOUBLE_TYPE.getName(), DataTypes.DOUBLE_TYPE);
        types.put(DataTypes.BIGINTEGER_TYPE.getName(), DataTypes.BIGINTEGER_TYPE);
        types.put(DataTypes.BIGDECIMAL_TYPE.getName(), DataTypes.BIGDECIMAL_TYPE);
        types.put(DataTypes.DATE_TYPE.getName(), DataTypes.DATE_TYPE);
        types.put(DataTypes.STRING_TYPE.getName(), DataTypes.STRING_TYPE);
    }

    /**
     * Looks up a type by name.
     *
     * @throws MetadataException if no type with that name is registered.
     */
    public IDataType getDataType(String name) throws MetadataException {
        IDataType dT = types.get(name);
        if (dT != null) {
            return dT;
        }
        throw new MetadataException(String.format("Unknown datatype: %s", name));
    }

    /**
     * Defines and registers a new struct type. Self-referencing attributes
     * (dataTypeName equal to the struct's own name) are supported: a
     * placeholder type is registered first so they resolve during attribute
     * construction, and each self-reference is patched to the fully-built
     * type afterwards. On failure the placeholder registration is rolled back.
     *
     * NOTE(review): errorIfExists is currently ignored — redefinition always
     * throws. Confirm the intended semantics before honoring the flag.
     *
     * @throws MetadataException if the name is already registered or an
     *         attribute fails to resolve.
     */
    public StructType defineStructType(String name,
                                       boolean errorIfExists,
                                       AttributeDefinition... attrDefs) throws MetadataException {
        // Check the precondition before using the name (original asserted after use).
        assert name != null;
        if (types.containsKey(name)) {
            throw new MetadataException(String.format("Cannot redefine type %s", name));
        }
        AttributeInfo[] infos = new AttributeInfo[attrDefs.length];
        Map<Integer, AttributeDefinition> recursiveRefs = new HashMap<Integer, AttributeDefinition>();
        try {
            // Placeholder so attributes referencing this type by name resolve.
            types.put(name, new StructType(name));
            for (int i = 0; i < attrDefs.length; i++) {
                infos[i] = new AttributeInfo(this, attrDefs[i]);
                // BUG FIX: compare names with equals(); '==' is reference
                // equality on String and misses non-interned names, which
                // would silently skip patching recursive references.
                if (name.equals(attrDefs[i].dataTypeName)) {
                    recursiveRefs.put(i, attrDefs[i]);
                }
            }
        } catch (MetadataException me) {
            types.remove(name);
            throw me;
        } catch (RuntimeException re) {
            types.remove(name);
            throw re;
        }
        StructType sT = new StructType(name, infos);
        types.put(name, sT);
        // Patch self-references to point at the fully-built type.
        for (Map.Entry<Integer, AttributeDefinition> e : recursiveRefs.entrySet()) {
            infos[e.getKey()].setDataType(sT);
        }
        return sT;
    }

    /** Defines (and registers) an array type over the given element type. */
    public DataTypes.ArrayType defineArrayType(IDataType elemType) throws MetadataException {
        assert elemType != null;
        DataTypes.ArrayType dT = new DataTypes.ArrayType(elemType);
        types.put(dT.getName(), dT);
        return dT;
    }

    /** Defines (and registers) a map type over the given key/value types. */
    public DataTypes.MapType defineMapType(IDataType keyType, IDataType valueType) throws MetadataException {
        assert keyType != null;
        assert valueType != null;
        DataTypes.MapType dT = new DataTypes.MapType(keyType, valueType);
        types.put(dT.getName(), dT);
        return dT;
    }
}
package org.apache.metadata.types;
import org.apache.metadata.MetadataException;
/**
 * Thrown when a raw value cannot be converted to a target datatype, e.g. a
 * type-name mismatch or a null where the multiplicity forbids it.
 */
public class ValueConversionException extends MetadataException {

    public ValueConversionException(IDataType typ, Object val) {
        this(typ, val, (Throwable) null);
    }

    public ValueConversionException(IDataType typ, Object val, Throwable t) {
        // BUG FIX: pass val directly to %s instead of calling val.toString(),
        // which threw NullPointerException when the offending value was null.
        super(String.format("Cannot convert value '%s' to datatype %s", val, typ.getName()), t);
    }

    public ValueConversionException(IDataType typ, Object val, String msg) {
        // Same null-safety fix as above.
        super(String.format("Cannot convert value '%s' to datatype %s because: %s",
                val, typ.getName(), msg));
    }

    protected ValueConversionException(String msg) {
        super(msg);
    }

    /** Raised specifically for a null value under a null-rejecting multiplicity. */
    public static class NullConversionException extends ValueConversionException {
        public NullConversionException(Multiplicity m) {
            // Fixed typo in message: "multiplicty" -> "multiplicity".
            super(String.format("Null value not allowed for multiplicity %s", m));
        }
    }
}
akka {
//loggers = [akka.event.slf4j.Slf4jLogger]
loglevel = debug
actor {
debug {
receive = on
lifecycle = on
}
}
}
app {
interface="localhost"
port= 8080
}
\ No newline at end of file
package org.apache.metadata.cli
import org.apache.metadata.MetadataService
import org.apache.metadata.MetadataService
import org.apache.metadata.storage.memory.MemRepository
import org.apache.metadata.types.TypeSystem
import scala.reflect.io.File
import scala.tools.nsc.Settings
import scala.tools.nsc.interpreter.{IMain, ILoop}
// Entry point for the interactive metadata console (a customized Scala REPL).
// NOTE(review): extends App — fine for a dev tool, but initialization-order
// pitfalls make a main() preferable for anything bigger.
object Console extends App {
val settings = new Settings
settings.usejavacp.value = true
settings.deprecation.value = true
// NOTE(review): hard-coded, machine-specific absolute classpath (a developer's
// local ~/.m2 repository). This only works on that one machine; it should be
// derived from the runtime classpath instead — TODO fix before wider use.
settings.bootclasspath.value += """/Users/hbutani/.m2/repository/org/apache/metadata/1.0-SNAPSHOT/metadata-1.0-SNAPSHOT.jar:/Users/hbutani/.m2/repository/org/scala-lang/scala-compiler/2.10.4/scala-compiler-2.10.4.jar:/Users/hbutani/.m2/repository/org/scala-lang/scala-reflect/2.10.4/scala-reflect-2.10.4.jar:/Users/hbutani/.m2/repository/org/scala-lang/jline/2.10.4/jline-2.10.4.jar:/Users/hbutani/.m2/repository/org/fusesource/jansi/jansi/1.4/jansi-1.4.jar:/Users/hbutani/.m2/repository/org/scala-lang/scala-library/2.10.4/scala-library-2.10.4.jar:/Users/hbutani/.m2/repository/org/scala-lang/scala-actors/2.10.4/scala-actors-2.10.4.jar:/Users/hbutani/.m2/repository/org/scala-lang/scalap/2.10.4/scalap-2.10.4.jar:/Users/hbutani/.m2/repository/org/scalatest/scalatest_2.10/2.2.0/scalatest_2.10-2.2.0.jar:/Users/hbutani/.m2/repository/org/scalamacros/quasiquotes_2.10/2.0.1/quasiquotes_2.10-2.0.1.jar:/Users/hbutani/.m2/repository/org/json4s/json4s-native_2.10/3.2.11/json4s-native_2.10-3.2.11.jar:/Users/hbutani/.m2/repository/org/json4s/json4s-core_2.10/3.2.11/json4s-core_2.10-3.2.11.jar:/Users/hbutani/.m2/repository/org/json4s/json4s-ast_2.10/3.2.11/json4s-ast_2.10-3.2.11.jar:/Users/hbutani/.m2/repository/com/thoughtworks/paranamer/paranamer/2.6/paranamer-2.6.jar:/Users/hbutani/.m2/repository/com/github/nscala-time/nscala-time_2.10/1.6.0/nscala-time_2.10-1.6.0.jar:/Users/hbutani/.m2/repository/joda-time/joda-time/2.5/joda-time-2.5.jar:/Users/hbutani/.m2/repository/org/joda/joda-convert/1.2/joda-convert-1.2.jar:/Users/hbutani/.m2/repository/com/typesafe/config/1.2.1/config-1.2.1.jar:/Users/hbutani/.m2/repository/com/typesafe/akka/akka-actor_2.10/2.3.7/akka-actor_2.10-2.3.7.jar:/Users/hbutani/.m2/repository/com/typesafe/akka/akka-testkit_2.10/2.3.7/akka-testkit_2.10-2.3.7.jar:/Users/hbutani/.m2/repository/com/typesafe/akka/akka-slf4j_2.10/2.3.7/akka-slf4j_2.10-2.3.7.jar:/Users/hbutani/.m2/repository/org/slf4j/slf4j-api/1.7.5/slf4j-api-1.7.5.jar:/Users/hbutani/.m2/repository/io/spr
ay/spray-routing/1.3.1/spray-routing-1.3.1.jar:/Users/hbutani/.m2/repository/io/spray/spray-http/1.3.1/spray-http-1.3.1.jar:/Users/hbutani/.m2/repository/org/parboiled/parboiled-scala_2.10/1.1.6/parboiled-scala_2.10-1.1.6.jar:/Users/hbutani/.m2/repository/org/parboiled/parboiled-core/1.1.6/parboiled-core-1.1.6.jar:/Users/hbutani/.m2/repository/io/spray/spray-util/1.3.1/spray-util-1.3.1.jar:/Users/hbutani/.m2/repository/com/chuusai/shapeless_2.10/1.2.4/shapeless_2.10-1.2.4.jar:/Users/hbutani/.m2/repository/io/spray/spray-can/1.3.1/spray-can-1.3.1.jar:/Users/hbutani/.m2/repository/io/spray/spray-io/1.3.1/spray-io-1.3.1.jar:/Users/hbutani/.m2/repository/io/spray/spray-httpx/1.3.1/spray-httpx-1.3.1.jar:/Users/hbutani/.m2/repository/org/jvnet/mimepull/mimepull/1.9.4/mimepull-1.9.4.jar:/Users/hbutani/.m2/repository/io/spray/spray-testkit/1.3.1/spray-testkit-1.3.1.jar:/Users/hbutani/.m2/repository/com/google/guava/guava/11.0.2/guava-11.0.2.jar:/Users/hbutani/.m2/repository/com/google/code/findbugs/jsr305/1.3.9/jsr305-1.3.9.jar:/Users/hbutani/.m2/repository/junit/junit/4.10/junit-4.10.jar:/Users/hbutani/.m2/repository/org/hamcrest/hamcrest-core/1.1/hamcrest-core-1.1.jar"""
// NOTE(review): 'in' appears unused — the ILoop below creates its own
// interpreter. Candidate for removal; confirm no side effect is relied upon.
val in = new IMain(settings){
override protected def parentClassLoader = settings.getClass.getClassLoader()
}
new SampleILoop().process(settings)
}
// Custom REPL loop: wires up an in-memory MetadataService, installs it as the
// current service, and pre-imports the metadata DSL plus json4s helpers so
// they are available at the prompt.
class SampleILoop extends ILoop {
override def prompt = "==> "
//intp = Console.in
// Fresh, self-contained service wiring for this REPL session.
val ts: TypeSystem = new TypeSystem
val mr: MemRepository = new MemRepository
val ms : MetadataService = new MetadataService(mr, ts)
MetadataService.setCurrentService(ms)
// Deferred until the interpreter is initialized; beQuietDuring suppresses the
// usual per-import REPL echo.
addThunk {
intp.beQuietDuring {
intp.addImports("java.lang.Math._")
intp.addImports("org.json4s.native.Serialization.{read, write => swrite}")
intp.addImports("org.json4s._")
intp.addImports("org.json4s.native.JsonMethods._")
intp.addImports("org.apache.metadata.dsl._")
// Expose the service instance to the user as 'service'.
intp.bindValue("service", ms)
//intp.bindValue("cp", intp.compilerClasspath)
}
}
override def printWelcome() {
echo("\n" +
" \\,,,/\n" +
" (o o)\n" +
"-----oOOo-(_)-oOOo-----")
}
}
package org.apache.metadata.dsl
import org.apache.metadata.storage.TypedStruct
import scala.language.dynamics
/**
 * Wraps a TypedStruct so attributes can be read and written with plain field
 * syntax (via scala.Dynamic): `x.attr` and `x.attr = value`. Assigning
 * another DynamicTypedStruct stores its underlying TypedStruct.
 */
class DynamicTypedStruct(val ts : TypedStruct) extends Dynamic {
  /** Field read: `x.name` delegates to the underlying struct. */
  def selectDynamic(name: String) = ts.get(name)

  /** Field write: `x.name = value`; unwraps a DynamicTypedStruct argument. */
  def updateDynamic(name: String)(value: Any) {
    // A null value does not match the type pattern, so it falls through unchanged.
    val unwrapped = value match {
      case wrapper: DynamicTypedStruct => wrapper.ts
      case other => other
    }
    ts.set(name, unwrapped)
  }

  def dataType = ts.dataType
}
package org.apache.metadata
import org.apache.metadata.json.{BigIntegerSerializer, BigDecimalSerializer, TypedStructSerializer, Serialization}
import org.apache.metadata.storage.TypedStruct
import org.apache.metadata.types._
import scala.collection.JavaConverters._
import org.json4s._
import org.json4s.native.Serialization.{read, write => swrite}
import org.json4s.native.JsonMethods._
import scala.language.implicitConversions
/**
 * Convenience DSL over the metadata type system: short aliases for the
 * primitive types, helpers for defining struct types and attributes, and
 * JSON <-> instance conversion. Relies on MetadataService's current service
 * having been installed (see MetadataService.setCurrentService).
 */
package object dsl {
// json4s configuration shared by all (de)serialization in the DSL.
implicit val formats = org.json4s.native.Serialization.formats(NoTypeHints) + new TypedStructSerializer +
new BigDecimalSerializer + new BigIntegerSerializer
// Accessors for the currently-installed service and its collaborators.
def service = MetadataService.getCurrentService
def ts = MetadataService.getCurrentTypeSystem
def repo = MetadataService.getCurrentRepository
// Short aliases for the primitive datatypes.
val BOOLEAN_TYPE = DataTypes.BOOLEAN_TYPE
val BYTE_TYPE = DataTypes.BYTE_TYPE
val SHORT_TYPE = DataTypes.SHORT_TYPE
val INT_TYPE = DataTypes.INT_TYPE
val LONG_TYPE = DataTypes.LONG_TYPE
val FLOAT_TYPE = DataTypes.FLOAT_TYPE
val DOUBLE_TYPE = DataTypes.DOUBLE_TYPE
val BIGINT_TYPE = DataTypes.BIGINTEGER_TYPE
val BIGDECIMAL_TYPE = DataTypes.BIGDECIMAL_TYPE
val DATE_TYPE = DataTypes.DATE_TYPE
val STRING_TYPE = DataTypes.STRING_TYPE
// Multiplicity aliases for attrDef.
val ATTR_OPTIONAL = Multiplicity.OPTIONAL
val ATTR_REQUIRED = Multiplicity.REQUIRED
// Registers (and returns) an array/map type in the current TypeSystem.
def arrayType(dT : IDataType[_]) = ts.defineArrayType(dT)
def mapType(kT : IDataType[_], vT : IDataType[_]) = ts.defineMapType(kT, vT)
/** Builds an AttributeDefinition; multiplicity defaults to OPTIONAL. */
def attrDef(name : String, dT : IDataType[_],
m : Multiplicity = Multiplicity.OPTIONAL,
isComposite: Boolean = false,
reverseAttributeName: String = null) = {
require(name != null)
require(dT != null)
new AttributeDefinition(name, dT.getName, m, isComposite, reverseAttributeName)
}
def listTypes = ts.getTypeNames
// Defines a struct type in the current TypeSystem (errorIfExists = false).
def defineStructType(name : String, attrDef : AttributeDefinition*) = {
require(name != null)
ts.defineStructType(name, false, attrDef:_*)
}
/**
* Parses jsonStr and extracts it as an instance of the named struct type,
* wrapped for dynamic field access. The $typeName$ discriminator is
* prepended so the TypedStructSerializer can resolve the type.
*/
def createInstance(typeName : String, jsonStr : String)(implicit formats: Formats) = {
val j = parse(jsonStr)
assert(j.isInstanceOf[JObject])
var j1 = j.asInstanceOf[JObject]
j1 = JObject(JField(Serialization.STRUCT_TYPE_FIELD_NAME, JString(typeName)) :: j1.obj)
new DynamicTypedStruct(Extraction.extract[TypedStruct](j1))
}
/** Creates an empty, dynamically-accessible instance of the named struct type. */
def createInstance(typeName : String) = {
new DynamicTypedStruct(
ts.getDataType(typeName).asInstanceOf[StructType].createInstance())
}
// Implicit unwrapping / JSON rendering of dynamic structs.
implicit def dynTypedStructToTypedStruct(s : DynamicTypedStruct) = s.ts
implicit def dynTypedStructToJson(s : DynamicTypedStruct)(implicit formats: Formats) = {
Extraction.decompose(s.ts)(formats)
}
}
package org.apache.metadata.json
import org.apache.metadata.types.DataTypes.{MapType, TypeCategory, ArrayType}
import org.apache.metadata.{MetadataException, MetadataService}
import org.apache.metadata.types._
import org.json4s.JsonAST.JInt
import org.json4s._
import org.json4s.native.Serialization.{read, write => swrite}
import org.json4s.reflect.{ScalaType, Reflector}
import java.util.regex.Pattern
import java.util.Date
import org.apache.metadata.storage.TypedStruct
import collection.JavaConversions._
import scala.collection.JavaConverters._
// json4s serializer for java.math.BigDecimal <-> JSON decimal (JDecimal).
// NOTE(review): only JDecimal is handled on the read side; a plain JSON
// integer (JInt) aimed at a BigDecimal field will not match this partial
// function — confirm whether that case is intended to fall through.
class BigDecimalSerializer extends CustomSerializer[java.math.BigDecimal](format => ( {
case JDecimal(e) => e.bigDecimal
}, {
case e: java.math.BigDecimal => JDecimal(new BigDecimal(e))
}
))
// json4s serializer for java.math.BigInteger <-> JSON integer (JInt).
class BigIntegerSerializer extends CustomSerializer[java.math.BigInteger](format => ( {
case JInt(e) => e.bigInteger
}, {
case e: java.math.BigInteger => JInt(new BigInt(e))
}
))
/**
 * json4s Serializer for TypedStruct. Deserialization resolves the struct
 * type from the $typeName$ discriminator field via the current
 * MetadataService's TypeSystem; serialization emits $typeName$ followed by
 * every attribute of the struct.
 */
class TypedStructSerializer extends Serializer[TypedStruct] {
// Converts a JSON array into a java List of element values.
def extractList(lT : ArrayType, value : JArray)(implicit format: Formats) : Any = {
val dT = lT.getElemType
value.arr.map(extract(dT, _)).asJava
}
// Converts a JSON object into a java Map, converting each value by the
// map's value type. NOTE(review): kT is unused — keys stay raw strings.
def extractMap(mT : MapType, value : JObject)(implicit format: Formats) : Any = {
val kT = mT.getKeyType
val vT = mT.getValueType
value.obj.map{f : JField => f._1 -> extract(vT, f._2) }.toMap.asJava
}
// Converts a single JValue into the runtime value for datatype dT.
// Primitive JSON nodes are extracted directly; arrays/maps/structs recurse.
def extract(dT : IDataType[_], value : JValue)(implicit format: Formats) : Any = value match {
case value : JBool => Extraction.extract[Boolean](value)
case value : JInt => Extraction.extract[Int](value)
case value : JDouble => Extraction.extract[Double](value)
case value : JDecimal => Extraction.extract[BigDecimal](value)
case value : JString => Extraction.extract[String](value)
case JNull => null
case value : JArray => extractList(dT.asInstanceOf[ArrayType], value.asInstanceOf[JArray])
case value : JObject if dT.getTypeCategory eq TypeCategory.MAP =>
extractMap(dT.asInstanceOf[MapType], value.asInstanceOf[JObject])
case value : JObject =>
Extraction.extract[TypedStruct](value)
}
def deserialize(implicit format: Formats) = {
case (TypeInfo(clazz, ptype), json) if classOf[TypedStruct].isAssignableFrom(clazz) => json match {
case JObject(fs) =>
// Split the $typeName$ discriminator from the data fields.
val(typ, fields) = fs.partition(f => f._1 == Serialization.STRUCT_TYPE_FIELD_NAME)
val typName = typ(0)._2.asInstanceOf[JString].s
val sT = MetadataService.getCurrentTypeSystem().getDataType(typName).asInstanceOf[StructType]
val s = sT.createInstance()
fields.foreach { f =>
val fName = f._1
// NOTE(review): if 'fields' is a Java Map accessed via the implicit
// conversions imported in this file, apply() on a missing key does not
// return null — confirm this null guard actually protects against
// unknown JSON fields rather than throwing.
val fInfo = sT.fields(fName)
if ( fInfo != null ) {
//println(fName)
var v = f._2
// Nested struct: re-attach the $typeName$ discriminator so extract()
// can recurse through this same serializer.
if ( fInfo.dataType().isInstanceOf[StructType] ) {
v = v match {
case JObject(sFields) =>
JObject(JField(Serialization.STRUCT_TYPE_FIELD_NAME, JString(fInfo.dataType.getName)) :: sFields)
case x => x
}
}
s.set(fName, extract(fInfo.dataType(), v))
}
}
s
case x => throw new MappingException("Can't convert " + x + " to TypedStruct")
}
}
/**
 * Implicit conversion from `java.math.BigInteger` to `scala.BigInt`.
 * match the builtin conversion for BigDecimal.
 * See https://groups.google.com/forum/#!topic/scala-language/AFUamvxu68Q
 */
//implicit def javaBigInteger2bigInt(x: java.math.BigInteger): BigInt = new BigInt(x)
def serialize(implicit format: Formats) = {
case e: TypedStruct =>
val fields = e.dataType.fields.map {
case (fName, info) => {
var v = e.get(fName)
// java.util.Map is converted to a scala Map so json4s can decompose it.
if ( v != null && (info.dataType().getTypeCategory eq TypeCategory.MAP) ) {
v = v.asInstanceOf[java.util.Map[_,_]].toMap
}
JField(fName, Extraction.decompose(v))
}
}.toList.map(_.asInstanceOf[JField])
// $typeName$ goes first so deserialization can resolve the type.
JObject(JField(Serialization.STRUCT_TYPE_FIELD_NAME, JString(e.dataType.getName)) :: fields)
}
}
/** JSON field name used as the struct-type discriminator in serialized TypedStructs. */
object Serialization {
val STRUCT_TYPE_FIELD_NAME = "$typeName$"
}
package org.apache.metadata;
import com.google.common.collect.Lists;
import com.google.common.collect.Maps;
import junit.framework.TestCase;
import org.apache.metadata.storage.IRepository;
import org.apache.metadata.storage.memory.MemRepository;
import org.apache.metadata.types.*;
import org.junit.Before;
import org.junit.BeforeClass;
import org.junit.Test;
import java.math.BigDecimal;
import java.math.BigInteger;
import java.util.Date;
import java.util.Map;
/**
 * Shared fixture for metadata tests: installs a MetadataService backed by an
 * in-memory repository, registers two struct types (one covering every
 * supported datatype, one recursive), and provides helpers for building
 * attribute definitions and a fully-populated sample Struct.
 */
public abstract class BaseTest {
// Service under test; bound from the current service in setup().
protected MetadataService ms;
public static final String STRUCT_TYPE_1 = "t1";
public static final String STRUCT_TYPE_2 = "t2";
@BeforeClass
public static void setupClass() throws MetadataException {
TypeSystem ts = new TypeSystem();
MemRepository mr = new MemRepository();
MetadataService.setCurrentService(new MetadataService(mr, ts));
// STRUCT_TYPE_1: one attribute per primitive datatype plus array and map
// attributes. The local variable is unused by design: tests later fetch
// the type from the TypeSystem by name.
StructType structType = ts.defineStructType(STRUCT_TYPE_1,
true,
createRequiredAttrDef("a", DataTypes.INT_TYPE),
createOptionalAttrDef("b", DataTypes.BOOLEAN_TYPE),
createOptionalAttrDef("c", DataTypes.BYTE_TYPE),
createOptionalAttrDef("d", DataTypes.SHORT_TYPE),
createOptionalAttrDef("e", DataTypes.INT_TYPE),
createOptionalAttrDef("f", DataTypes.INT_TYPE),
createOptionalAttrDef("g", DataTypes.LONG_TYPE),
createOptionalAttrDef("h", DataTypes.FLOAT_TYPE),
createOptionalAttrDef("i", DataTypes.DOUBLE_TYPE),
createOptionalAttrDef("j", DataTypes.BIGINTEGER_TYPE),
createOptionalAttrDef("k", DataTypes.BIGDECIMAL_TYPE),
createOptionalAttrDef("l", DataTypes.DATE_TYPE),
createOptionalAttrDef("m", ts.defineArrayType(DataTypes.INT_TYPE)),
createOptionalAttrDef("n", ts.defineArrayType(DataTypes.BIGDECIMAL_TYPE)),
createOptionalAttrDef("o", ts.defineMapType(DataTypes.STRING_TYPE, DataTypes.DOUBLE_TYPE)));
// STRUCT_TYPE_2: recursive — attribute "s" refers to the type by its own name.
StructType recursiveStructType = ts.defineStructType(STRUCT_TYPE_2,
true,
createRequiredAttrDef("a", DataTypes.INT_TYPE),
createOptionalAttrDef("s", STRUCT_TYPE_2));
}
/** Builds a Struct of STRUCT_TYPE_1 with every attribute populated. */
public static Struct createStruct(MetadataService ms) throws MetadataException {
StructType structType = (StructType) ms.getTypeSystem().getDataType(STRUCT_TYPE_1);
Struct s = new Struct(structType.getName());
s.set("a", 1);
s.set("b", true);
s.set("c", (byte)1);
s.set("d", (short)2);
s.set("e", 1);
s.set("f", 1);
s.set("g", 1L);
s.set("h", 1.0f);
s.set("i", 1.0);
s.set("j", BigInteger.valueOf(1L));
s.set("k", new BigDecimal(1));
s.set("l", new Date(System.currentTimeMillis()));
s.set("m", Lists.<Integer>asList(Integer.valueOf(1), new Integer[]{Integer.valueOf(1)}));
s.set("n", Lists.<BigDecimal>asList(BigDecimal.valueOf(1.1), new BigDecimal[] {BigDecimal.valueOf(1.1)}));
Map<String, Double> hm = Maps.<String, Double>newHashMap();
hm.put("a", 1.0);
hm.put("b",2.0);
s.set("o", hm);
return s;
}
@Before
public void setup() throws MetadataException {
// Bind the service installed by setupClass() for use by subclasses.
ms = MetadataService.getCurrentService();
}
/** Shorthand for an OPTIONAL attribute definition from an IDataType. */
public static AttributeDefinition createOptionalAttrDef(String name,
IDataType dataType
) {
return new AttributeDefinition(name, dataType.getName(), Multiplicity.OPTIONAL, false, null);
}
/** Shorthand for an OPTIONAL attribute definition from a type name. */
public static AttributeDefinition createOptionalAttrDef(String name,
String dataType
) {
return new AttributeDefinition(name, dataType, Multiplicity.OPTIONAL, false, null);
}
/** Shorthand for a REQUIRED attribute definition from an IDataType. */
public static AttributeDefinition createRequiredAttrDef(String name,
IDataType dataType
) {
return new AttributeDefinition(name, dataType.getName(), Multiplicity.REQUIRED, false, null);
}
/** Shorthand for a REQUIRED attribute definition from a type name. */
public static AttributeDefinition createRequiredAttrDef(String name,
String dataType
) {
return new AttributeDefinition(name, dataType, Multiplicity.REQUIRED, false, null);
}
}
package org.apache.metadata;
import com.google.common.collect.Lists;
import com.google.common.collect.Maps;
import junit.framework.TestCase;
import org.apache.metadata.storage.TypedStruct;
import org.apache.metadata.types.*;
import org.junit.Before;
import org.junit.Test;
import java.math.BigDecimal;
import java.math.BigInteger;
import java.util.Date;
import java.util.Map;
/**
 * Exercises conversion of untyped Struct instances to TypedStruct, including
 * the recursive struct type. Output is printed rather than asserted.
 */
public class StructTest extends BaseTest {

    StructType structType;
    StructType recursiveStructType;

    @Before
    public void setup() throws MetadataException {
        super.setup();
        structType = (StructType) ms.getTypeSystem().getDataType(STRUCT_TYPE_1);
        recursiveStructType = (StructType) ms.getTypeSystem().getDataType(STRUCT_TYPE_2);
    }

    /** Converts a fully-populated struct covering every datatype. */
    @Test
    public void test1() throws MetadataException {
        final Struct raw = createStruct(ms);
        final TypedStruct typed = structType.convert(raw, Multiplicity.REQUIRED);
        System.out.println(typed);
    }

    /** Converts a struct that nests another instance of its own type. */
    @Test
    public void testRecursive() throws MetadataException {
        final Struct inner = new Struct(recursiveStructType.getName());
        inner.set("a", 1);
        final Struct outer = new Struct(recursiveStructType.getName());
        outer.set("a", 1);
        outer.set("s", inner);
        final TypedStruct typed = recursiveStructType.convert(outer, Multiplicity.REQUIRED);
        System.out.println(typed);
    }
}
package org.apache.metadata.dsl
import org.apache.metadata.hive.HiveMockMetadataService
import org.apache.metadata.json.{BigIntegerSerializer, BigDecimalSerializer, TypedStructSerializer}
import org.apache.metadata.storage.TypedStruct
import org.apache.metadata.{Struct, BaseTest}
import org.apache.metadata.types.{IDataType, Multiplicity, StructType}
import org.json4s.NoTypeHints
import org.json4s.native.Serialization._
import org.junit.{Test, Before}
import org.apache.metadata.dsl._
import org.json4s.native.JsonMethods._
/**
 * End-to-end exercises of the metadata DSL: type definition, instance
 * creation from JSON, dynamic field navigation, and serialization back to
 * JSON. These tests largely demonstrate/print behavior rather than assert it.
 */
class DSLTest extends BaseTest {
@Before
override def setup {
super.setup
}
// Defines a struct type covering every datatype, builds an instance from
// JSON, navigates it dynamically, and round-trips it back to JSON.
@Test def test1 {
// 1. Existing Types in System
println(s"Existing Types:\n\t ${listTypes}\n")
defineStructType("mytype",
attrDef("a", INT_TYPE, ATTR_REQUIRED),
attrDef("b", BOOLEAN_TYPE),
attrDef("c", BYTE_TYPE),
attrDef("d", SHORT_TYPE),
attrDef("e", INT_TYPE),
attrDef("f", INT_TYPE),
attrDef("g", LONG_TYPE),
attrDef("h", FLOAT_TYPE),
attrDef("i", DOUBLE_TYPE),
attrDef("j", BIGINT_TYPE),
attrDef("k", BIGDECIMAL_TYPE),
attrDef("l", DATE_TYPE),
attrDef("m", arrayType(INT_TYPE)),
attrDef("n", arrayType(BIGDECIMAL_TYPE)),
attrDef("o", mapType(STRING_TYPE, DOUBLE_TYPE)))
// 2. 'mytype' available as a a Type
println(s"Added Type:\n\t ${listTypes}\n")
// 3. Create a 'mytype' instance from Json
val i = createInstance("mytype", """
{
"$typeName$":"mytype",
"e":1,
"n":[1,1.1],
"h":1.0,
"b":true,
"k":1,
"j":1,
"d":2,
"m":[1,1],
"g":1,
"a":1,
"i":1.0,
"c":1,
"l":"2014-12-03T08:00:00.000Z",
"f":1,
"o":{
"b":2.0,
"a":1.0
}
}
""")
// 4. Navigate mytype instance in code
println("Examples of Navigate mytype instance in code:\n")
println(s"i.a -> ${i.a}")
println(s"i.o -> ${i.o}")
println(s"i.o.keys -> ${i.o.asInstanceOf[java.util.Map[_,_]].keySet}")
// 5. Serialize mytype instance to Json
println(s"\nJSON:\n ${pretty(render(i))}")
}
// Demonstrates nested struct types: a personType embedding an addressType,
// built both programmatically and from JSON.
@Test def test2 {
// 1. Existing Types in System
println(s"Existing Types:\n\t ${listTypes}\n")
val addrType = defineStructType("addressType",
attrDef("houseNum", INT_TYPE, ATTR_REQUIRED),
attrDef("street", STRING_TYPE, ATTR_REQUIRED),
attrDef("city", STRING_TYPE, ATTR_REQUIRED),
attrDef("state", STRING_TYPE, ATTR_REQUIRED),
attrDef("zip", INT_TYPE, ATTR_REQUIRED),
attrDef("country", STRING_TYPE, ATTR_REQUIRED)
)
val personType = defineStructType("personType",
attrDef("first_name", STRING_TYPE, ATTR_REQUIRED),
attrDef("last_name", STRING_TYPE, ATTR_REQUIRED),
attrDef("address", addrType)
)
// 2. updated Types in System
println(s"Updated Types:\n\t ${listTypes}")
// 3. Construct a Person in Code
val person = createInstance("personType")
val address = createInstance("addressType")
person.first_name = "Meta"
person.last_name = "Hadoop"
address.houseNum = 3460
address.street = "W Bayshore Rd"
address.city = "Palo Alto"
address.state = "CA"
address.zip = 94303
address.country = "USA"
person.address = address
// 4. Convert to Json
println(s"\nJSON:\n ${pretty(render(person))}")
// Build the same person from JSON (note: p2 is unused — demo only).
val p2 = createInstance("personType", """{
"first_name":"Meta",
"address":{
"houseNum":3460,
"city":"Palo Alto",
"country":"USA",
"state":"CA",
"zip":94303,
"street":"W Bayshore Rd"
},
"last_name":"Hadoop"
}""")
}
// Mirrors the Hive metastore model (see HiveMockMetadataService's case
// classes) as metadata struct types.
@Test def testHive(): Unit = {
val hiveTable = HiveMockMetadataService.getTable("tpcds", "date_dim")
println(hiveTable)
//name : String, typeName : String, comment : String
val fieldType = defineStructType("FieldSchema",
attrDef("name", STRING_TYPE, ATTR_REQUIRED),
attrDef("typeName", STRING_TYPE, ATTR_REQUIRED),
attrDef("comment", STRING_TYPE)
)
/*
SerDe(name : String, serializationLib : String, parameters : Map[String, String])
*/
defineStructType("SerDe",
attrDef("name", STRING_TYPE, ATTR_REQUIRED),
attrDef("serializationLib", STRING_TYPE, ATTR_REQUIRED),
attrDef("parameters", mapType(STRING_TYPE, STRING_TYPE))
)
/*
StorageDescriptor(fields : List[FieldSchema],
location : String, inputFormat : String,
outputFormat : String, compressed : Boolean,
numBuckets : Int, bucketColumns : List[String],
sortColumns : List[String],
parameters : Map[String, String],
storedAsSubDirs : Boolean
)
*/
// NOTE(review): unlike the case class above, this struct type has no
// "fields" attribute — confirm whether that omission is intentional.
val sdType = defineStructType("StorageDescriptor",
attrDef("location", STRING_TYPE, ATTR_REQUIRED),
attrDef("inputFormat", STRING_TYPE, ATTR_REQUIRED),
attrDef("outputFormat", STRING_TYPE, ATTR_REQUIRED),
attrDef("compressed", BOOLEAN_TYPE),
attrDef("numBuckets", INT_TYPE),
attrDef("bucketColumns", arrayType(STRING_TYPE)),
attrDef("sortColumns", arrayType(STRING_TYPE)),
attrDef("parameters", mapType(STRING_TYPE, STRING_TYPE)),
attrDef("storedAsSubDirs", BOOLEAN_TYPE)
)
/*
case class Table(dbName : String, tableName : String, storageDesc : StorageDescriptor,
parameters : Map[String, String],
tableType : String)
*/
// NOTE(review): "compressed" through "storedAsSubDirs" duplicate the
// StorageDescriptor attributes and do not appear in the Table case class
// above — looks like a copy/paste from sdType; confirm intended.
defineStructType("Table",
attrDef("dbName", STRING_TYPE, ATTR_REQUIRED),
attrDef("tableName", STRING_TYPE, ATTR_REQUIRED),
attrDef("storageDesc", sdType, ATTR_REQUIRED),
attrDef("compressed", BOOLEAN_TYPE),
attrDef("numBuckets", INT_TYPE),
attrDef("bucketColumns", arrayType(STRING_TYPE)),
attrDef("sortColumns", arrayType(STRING_TYPE)),
attrDef("parameters", mapType(STRING_TYPE, STRING_TYPE)),
attrDef("storedAsSubDirs", BOOLEAN_TYPE)
)
}
}
package org.apache.metadata.hive
/**
 * Mock of the Hive metastore: getTable returns a hard-coded description of
 * the tpcds `date_dim` table regardless of the (dbName, table) arguments —
 * only the db/table names echo what was passed in.
 */
object HiveMockMetadataService {

  case class FieldSchema(name : String, typeName : String, comment : String)

  case class SerDe(name : String, serializationLib : String, parameters : Map[String, String])

  case class StorageDescriptor(fields : List[FieldSchema],
                               location : String, inputFormat : String,
                               outputFormat : String, compressed : Boolean,
                               numBuckets : Int, bucketColumns : List[String],
                               sortColumns : List[String],
                               parameters : Map[String, String],
                               storedAsSubDirs : Boolean
                              )

  case class Table(dbName : String, tableName : String, storageDesc : StorageDescriptor,
                   parameters : Map[String, String],
                   tableType : String)

  // Column layout of the mocked date_dim table: (column name, hive type).
  // Factored into data so the schema reads as a table rather than 28
  // constructor calls; comments are null for every column.
  private val dateDimColumns: List[(String, String)] = List(
    "d_date_sk" -> "int",
    "d_date_id" -> "string",
    "d_date" -> "string",
    "d_month_seq" -> "int",
    "d_week_seq" -> "int",
    "d_quarter_seq" -> "int",
    "d_year" -> "int",
    "d_dow" -> "int",
    "d_moy" -> "int",
    "d_dom" -> "int",
    "d_qoy" -> "int",
    "d_fy_year" -> "int",
    "d_fy_quarter_seq" -> "int",
    "d_fy_week_seq" -> "int",
    "d_day_name" -> "string",
    "d_quarter_name" -> "string",
    "d_holiday" -> "string",
    "d_weekend" -> "string",
    "d_following_holiday" -> "string",
    "d_first_dom" -> "int",
    "d_last_dom" -> "int",
    "d_same_day_ly" -> "int",
    "d_same_day_lq" -> "int",
    "d_current_day" -> "string",
    "d_current_week" -> "string",
    "d_current_month" -> "string",
    "d_current_quarter" -> "string",
    "d_current_year" -> "string"
  )

  /**
   * Returns the mock Table. dbName/table are echoed into the result; the
   * storage descriptor and column list are fixed.
   */
  def getTable(dbName : String, table : String) : Table = {
    // Idiomatic Scala: the last expression is the result; no 'return'.
    val columns = dateDimColumns.map { case (colName, colType) => FieldSchema(colName, colType, null) }
    Table(dbName, table,
      StorageDescriptor(
        columns,
        "file:/tmp/warehouse/tpcds.db/date_dim",
        "org.apache.hadoop.hive.ql.io.orc.OrcInputFormat",
        "org.apache.hadoop.hive.ql.io.orc.OrcOutputFormat",
        compressed = false,
        numBuckets = 0,
        bucketColumns = List.empty[String],
        sortColumns = List.empty[String],
        parameters = Map.empty[String, String],
        storedAsSubDirs = false
      ),
      Map.empty[String, String],
      "Table")
  }
}
package org.apache.metadata.json
import org.apache.metadata.Struct
import org.apache.metadata.storage.TypedStruct
import org.apache.metadata.storage.TypedStruct
import org.apache.metadata.types.Multiplicity
import org.apache.metadata.types.StructType
import org.apache.metadata.{Struct, BaseTest}
import org.apache.metadata.types.{Multiplicity, StructType}
import org.json4s.NoTypeHints
import org.junit.Before
import org.junit.Test
import org.json4s._
import org.json4s.native.Serialization.{read, write => swrite}
import org.json4s.native.JsonMethods._
/**
 * Round-trip JSON (de)serialization tests for TypedStruct via json4s.
 */
class SerializationTest extends BaseTest {

  // Struct types registered by BaseTest; resolved once in setup.
  private[metadata] var structType: StructType = null
  private[metadata] var recursiveStructType: StructType = null

  @Before
  override def setup {
    super.setup
    val typeSystem = ms.getTypeSystem
    structType = typeSystem.getDataType(BaseTest.STRUCT_TYPE_1).asInstanceOf[StructType]
    recursiveStructType = typeSystem.getDataType(BaseTest.STRUCT_TYPE_2).asInstanceOf[StructType]
  }

  // Serialize a TypedStruct to JSON and read it back.
  @Test def test1 {
    val rawStruct: Struct = BaseTest.createStruct(ms)
    val typedStruct: TypedStruct = structType.convert(rawStruct, Multiplicity.REQUIRED)
    println("Typed Struct :")
    println(typedStruct)
    implicit val formats = org.json4s.native.Serialization.formats(NoTypeHints) + new TypedStructSerializer +
      new BigDecimalSerializer + new BigIntegerSerializer
    val json = swrite(typedStruct)
    println("Json representation :")
    println(json)
    val roundTripped = read[TypedStruct](json)
    println("Typed Struct read back:")
    println(roundTripped)
  }

  // Deserialize a TypedStruct from a fixed JSON literal.
  @Test def test2 {
    val rawStruct: Struct = BaseTest.createStruct(ms)
    val typedStruct: TypedStruct = structType.convert(rawStruct, Multiplicity.REQUIRED)
    implicit val formats = org.json4s.native.Serialization.formats(NoTypeHints) + new TypedStructSerializer +
      new BigDecimalSerializer + new BigIntegerSerializer
    val parsed = read[TypedStruct](
      """
{"$typeName$":"t1","e":1,"n":[1.1,1.1],"h":1.0,"b":true,"k":1,"j":1,"d":2,"m":[1,1],"g":1,"a":1,"i":1.0,
"c":1,"l":"2014-12-03T19:38:55.053Z","f":1,"o":{"b":2.0,"a":1.0}}""")
    println("Typed Struct read from string:")
    println(parsed)
  }
}
Markdown is supported
0% or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or to comment