Commit 8285e3b9 by Harish Butani

cleanup: remove MetadataService

parent 76fcba1a
......@@ -10,10 +10,11 @@ import org.apache.hadoop.hive.metastore.api.Table;
import org.apache.hadoop.hive.metastore.api.UnknownDBException;
import org.apache.hadoop.hive.metastore.api.UnknownTableException;
import org.apache.hadoop.metadata.MetadataException;
import org.apache.hadoop.metadata.MetadataService;
import org.apache.hadoop.metadata.Referenceable;
import org.apache.hadoop.metadata.storage.IRepository;
import org.apache.hadoop.metadata.storage.RepositoryException;
import org.apache.hadoop.metadata.types.ClassType;
import org.apache.hadoop.metadata.types.TypeSystem;
import org.apache.thrift.TException;
/*
* Initial pass at one time importer TODO - needs re-write
......@@ -23,12 +24,12 @@ import org.apache.thrift.TException;
public class HiveMetaImporter {
private static HiveMetaStoreClient msc;
private static MetadataService ms;
private static IRepository repo;
public HiveMetaImporter(MetadataService ms){
public HiveMetaImporter(IRepository repo){
try {
this.ms = ms;
this.repo = repo;
msc = new HiveMetaStoreClient(new HiveConf());
} catch (MetaException e) {
// TODO Auto-generated catch block
......@@ -59,7 +60,7 @@ public class HiveMetaImporter {
public static boolean databasesImport() throws MetaException, RepositoryException{
ClassType classType = null;
try {
classType = ms.getTypeSystem().getDataType(ClassType.class, HiveStructureBridge.DB_CLASS_TYPE);
classType = TypeSystem.getInstance().getDataType(ClassType.class, HiveStructureBridge.DB_CLASS_TYPE);
} catch (MetadataException e1) {
e1.printStackTrace();
}
......@@ -79,7 +80,7 @@ public class HiveMetaImporter {
if(db.isSetOwnerType()){dbRef.set("OWNER_TYPE", db.getOwnerType());}
if(db.isSetOwnerName()){dbRef.set("OWNER_NAME", db.getOwnerName());}
ms.getRepository().create(dbRef);
repo.create(dbRef);
} catch (NoSuchObjectException e) {
// TODO Auto-generated catch block
e.printStackTrace();
......@@ -93,7 +94,7 @@ public class HiveMetaImporter {
public static boolean tablesImport(String dbName) throws MetaException, RepositoryException{
ClassType classType = null;
try {
classType = ms.getTypeSystem().getDataType(ClassType.class, HiveStructureBridge.TB_CLASS_TYPE);
classType = TypeSystem.getInstance().getDataType(ClassType.class, HiveStructureBridge.TB_CLASS_TYPE);
} catch (MetadataException e1) {
e1.printStackTrace();
}
......@@ -115,7 +116,7 @@ public class HiveMetaImporter {
if(tb.isSetViewExpandedText()){tbRef.set("VIEW_EXPANDED_TEXT", tb.getViewExpandedText());}
if(tb.isSetViewOriginalText()){tbRef.set("VIEW_ORIGINAL_TEXT", tb.getViewOriginalText());}
ms.getRepository().create(tbRef);
repo.create(tbRef);
} catch (NoSuchObjectException e) {
// TODO Auto-generated catch block
e.printStackTrace();
......@@ -129,7 +130,7 @@ public class HiveMetaImporter {
public static boolean fieldsImport (String dbName, String tbName) throws MetaException, RepositoryException{
ClassType classType = null;
try {
classType = ms.getTypeSystem().getDataType(ClassType.class, HiveStructureBridge.FD_CLASS_TYPE);
classType = TypeSystem.getInstance().getDataType(ClassType.class, HiveStructureBridge.FD_CLASS_TYPE);
} catch (MetadataException e1) {
e1.printStackTrace();
}
......@@ -140,7 +141,7 @@ public class HiveMetaImporter {
fdRef.set("COLUMN_NAME", fs.getName());
fdRef.set("TYPE_NAME", fs.getType());
ms.getRepository().create(fdRef);
repo.create(fdRef);
}
} catch (UnknownTableException e) {
// TODO Auto-generated catch block
......
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.metadata;
import org.apache.hadoop.metadata.storage.IRepository;
import org.apache.hadoop.metadata.types.TypeSystem;
// TODO get rid of this class in favor of Dependency injection
/**
 * Thread-local pairing of an {@link IRepository} with a {@link TypeSystem},
 * acting as a per-thread service locator. Callers install an instance via
 * {@link #setCurrentService(MetadataService)} and look it up later from the
 * same thread; lookups on a thread with no installed service throw
 * {@link MetadataException}.
 */
public class MetadataService {

    /** Per-thread slot holding the currently installed service, if any. */
    public static final ThreadLocal<MetadataService> currentSvc = new ThreadLocal<MetadataService>();

    final IRepository repo;
    final TypeSystem typeSystem;

    /**
     * Builds a service wrapping the given repository and type system.
     *
     * @param repo       repository used for instance storage
     * @param typeSystem type system used for type lookups
     */
    public MetadataService(IRepository repo, TypeSystem typeSystem) {
        this.repo = repo;
        this.typeSystem = typeSystem;
    }

    /** Installs {@code svc} as the current thread's service. */
    public static void setCurrentService(MetadataService svc) {
        currentSvc.set(svc);
    }

    /**
     * Returns the service installed on the calling thread.
     *
     * @throws MetadataException if no service has been installed on this thread
     */
    public static MetadataService getCurrentService() throws MetadataException {
        final MetadataService svc = currentSvc.get();
        if (svc == null) {
            throw new MetadataException("No MetadataService associated with current thread");
        }
        return svc;
    }

    /**
     * Returns the repository of the calling thread's service.
     *
     * @throws MetadataException if no service (or no repository) is installed
     *                           on this thread
     */
    public static IRepository getCurrentRepository() throws MetadataException {
        final MetadataService svc = currentSvc.get();
        final IRepository repository = (svc == null) ? null : svc.getRepository();
        if (repository == null) {
            throw new MetadataException("No Repository associated with current thread");
        }
        return repository;
    }

    /** @return the repository this service wraps */
    public IRepository getRepository() {
        return repo;
    }

    /** @return the type system this service wraps */
    public TypeSystem getTypeSystem() {
        return typeSystem;
    }
}
......@@ -20,11 +20,9 @@ package org.apache.hadoop.metadata.storage;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableMap;
import org.apache.hadoop.metadata.IStruct;
import org.apache.hadoop.metadata.types.*;
import org.apache.hadoop.metadata.IStruct;
import org.apache.hadoop.metadata.ITypedStruct;
import org.apache.hadoop.metadata.MetadataException;
import org.apache.hadoop.metadata.MetadataService;
import org.apache.hadoop.metadata.types.*;
import java.math.BigDecimal;
import java.math.BigInteger;
......
......@@ -24,8 +24,6 @@ import com.google.common.collect.ImmutableMap;
import com.google.common.collect.ImmutableSet;
import org.apache.hadoop.metadata.IReferenceableInstance;
import org.apache.hadoop.metadata.MetadataException;
import org.apache.hadoop.metadata.MetadataService;
import org.apache.hadoop.metadata.storage.IRepository;
import org.apache.hadoop.metadata.storage.Id;
import java.math.BigDecimal;
......@@ -472,7 +470,6 @@ public class DataTypes {
@Override
public ImmutableCollection<?> convert(Object val, Multiplicity m) throws MetadataException {
IRepository r = MetadataService.getCurrentRepository();
if ( val != null ) {
Iterator it = null;
if ( val instanceof Collection ) {
......@@ -583,7 +580,6 @@ public class DataTypes {
@Override
public ImmutableMap<?, ?> convert(Object val, Multiplicity m) throws MetadataException {
IRepository r = MetadataService.getCurrentRepository();
if ( val != null ) {
Iterator<Map.Entry> it = null;
if ( Map.class.isAssignableFrom(val.getClass())) {
......
......@@ -20,8 +20,8 @@ package org.apache.hadoop.metadata.types;
import org.apache.hadoop.metadata.IReferenceableInstance;
import org.apache.hadoop.metadata.IStruct;
import org.apache.hadoop.metadata.MetadataException;
import org.apache.hadoop.metadata.MetadataService;
import org.apache.hadoop.metadata.storage.Id;
import java.util.Map;
public class FieldMapping {
......
......@@ -18,7 +18,6 @@
package org.apache.hadoop.metadata.cli
import org.apache.hadoop.metadata.MetadataService
import org.apache.hadoop.metadata.storage.memory.MemRepository
import org.apache.hadoop.metadata.types.TypeSystem
......@@ -46,8 +45,6 @@ class SampleILoop extends ILoop {
val ts: TypeSystem = TypeSystem.getInstance()
val mr: MemRepository = new MemRepository(ts)
val ms : MetadataService = new MetadataService(mr, ts)
MetadataService.setCurrentService(ms)
addThunk {
intp.beQuietDuring {
......@@ -56,7 +53,7 @@ class SampleILoop extends ILoop {
intp.addImports("org.json4s._")
intp.addImports("org.json4s.native.JsonMethods._")
intp.addImports("org.apache.hadoop.metadata.dsl._")
intp.bindValue("service", ms)
//intp.bindValue("service", ms)
//intp.bindValue("cp", intp.compilerClasspath)
}
}
......
......@@ -18,7 +18,7 @@
package org.apache.hadoop.metadata.dsl
import org.apache.hadoop.metadata.{ITypedStruct, MetadataService}
import org.apache.hadoop.metadata.{ITypedStruct}
import org.apache.hadoop.metadata.storage.StructInstance
import org.apache.hadoop.metadata.types.{StructType, TypeSystem}
import scala.language.dynamics
......
......@@ -22,6 +22,7 @@ import java.text.SimpleDateFormat
import org.apache.hadoop.metadata.json.{BigIntegerSerializer, BigDecimalSerializer, TypedStructSerializer, Serialization}
import org.apache.hadoop.metadata.storage.StructInstance
import org.apache.hadoop.metadata.storage.memory.MemRepository
import org.apache.hadoop.metadata.types._
import scala.collection.JavaConverters._
......@@ -41,9 +42,8 @@ package object dsl {
implicit val formats = defFormat + new TypedStructSerializer +
new BigDecimalSerializer + new BigIntegerSerializer
def service = MetadataService.getCurrentService
def ts = TypeSystem.getInstance
def repo = MetadataService.getCurrentRepository
def repo = new MemRepository(ts)
val BOOLEAN_TYPE = DataTypes.BOOLEAN_TYPE
val BYTE_TYPE = DataTypes.BYTE_TYPE
......
......@@ -21,7 +21,7 @@ package org.apache.hadoop.metadata.tools.simpleserver
import akka.actor._
import akka.util.Timeout
import com.google.common.collect.ImmutableList
import org.apache.hadoop.metadata.{TypesDef, MetadataService, ITypedReferenceableInstance}
import org.apache.hadoop.metadata.{TypesDef, ITypedReferenceableInstance}
import org.apache.hadoop.metadata.json._
import org.apache.hadoop.metadata.storage.memory.MemRepository
import org.apache.hadoop.metadata.types._
......@@ -72,12 +72,10 @@ import scala.collection.JavaConversions._
sender ! TypesCreated
case CreateInstance(i) =>
MetadataService.setCurrentService(new MetadataService(memRepository, typeSystem))
val r = memRepository.create(i)
sender ! InstanceCreated(r.getId)
case GetInstance(id) =>
MetadataService.setCurrentService(new MetadataService(memRepository, typeSystem))
val r = memRepository.get(id)
sender ! InstanceDetails(r)
}
......
......@@ -20,7 +20,7 @@ package org.apache.hadoop.metadata.tools.simpleserver
import akka.actor._
import akka.util.Timeout
import org.apache.hadoop.metadata.{TypesDef, MetadataService, ITypedReferenceableInstance}
import org.apache.hadoop.metadata.{TypesDef, ITypedReferenceableInstance}
import org.apache.hadoop.metadata.storage.Id
import org.apache.hadoop.metadata.storage.memory.MemRepository
import org.apache.hadoop.metadata.types.TypeSystem
......
......@@ -21,6 +21,7 @@ package org.apache.hadoop.metadata;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.Lists;
import com.google.common.collect.Maps;
import org.apache.hadoop.metadata.storage.IRepository;
import org.apache.hadoop.metadata.types.AttributeDefinition;
import org.apache.hadoop.metadata.types.ClassType;
import org.apache.hadoop.metadata.types.DataTypes;
......@@ -36,23 +37,25 @@ import java.util.Map;
public abstract class BaseTest {
protected MetadataService ms;
protected IRepository repo;
public static final String STRUCT_TYPE_1 = "t1";
public static final String STRUCT_TYPE_2 = "t2";
protected TypeSystem getTypeSystem() {
protected final TypeSystem getTypeSystem() {
return TypeSystem.getInstance();
}
protected final IRepository getRepository() {
return repo;
}
@Before
public void setup() throws MetadataException {
TypeSystem ts = TypeSystem.getInstance();
ts.reset();
MemRepository mr = new MemRepository(ts);
ms = new MetadataService(mr, ts);
MetadataService.setCurrentService(ms);
repo = new MemRepository(ts);
StructType structType = ts.defineStructType(STRUCT_TYPE_1,
true,
......@@ -79,8 +82,8 @@ public abstract class BaseTest {
}
public static Struct createStruct(MetadataService ms) throws MetadataException {
StructType structType = (StructType) ms.getTypeSystem().getDataType(StructType.class, STRUCT_TYPE_1);
public static Struct createStruct() throws MetadataException {
StructType structType = (StructType) TypeSystem.getInstance().getDataType(StructType.class, STRUCT_TYPE_1);
Struct s = new Struct(structType.getName());
s.set("a", 1);
s.set("b", true);
......@@ -132,7 +135,7 @@ public abstract class BaseTest {
}
protected Map<String, IDataType> defineTraits(HierarchicalTypeDefinition... tDefs) throws MetadataException {
return ms.getTypeSystem().defineTraitTypes(tDefs);
return getTypeSystem().defineTraitTypes(tDefs);
}
protected HierarchicalTypeDefinition<TraitType> createTraitTypeDef(String name, ImmutableList<String> superTypes,
......@@ -189,7 +192,7 @@ public abstract class BaseTest {
ts.getDataType(ClassType.class, "Manager")
);
ms.getRepository().defineTypes(types);
repo.defineTypes(types);
}
......
......@@ -109,7 +109,7 @@ public class EnumTest extends BaseTest {
@Test
public void testStruct() throws MetadataException {
TypeSystem ts = ms.getTypeSystem();
TypeSystem ts = getTypeSystem();
defineEnums(ts);
StructType structType = ts.defineStructType("t3",
true,
......@@ -160,7 +160,7 @@ public class EnumTest extends BaseTest {
@Test
public void testClass() throws MetadataException {
TypeSystem ts = ms.getTypeSystem();
TypeSystem ts = getTypeSystem();
defineEnums(ts);
ClassType clsType = defineClassTypeWithEnum(ts);
......@@ -193,16 +193,16 @@ public class EnumTest extends BaseTest {
@Test
public void testStorage() throws MetadataException {
TypeSystem ts = ms.getTypeSystem();
TypeSystem ts = getTypeSystem();
defineEnums(ts);
ClassType clsType = defineClassTypeWithEnum(ts);
ms.getRepository().defineTypes(ImmutableList.of((HierarchicalType)clsType));
getRepository().defineTypes(ImmutableList.of((HierarchicalType)clsType));
IReferenceableInstance r = createInstanceWithEnum("t4");
IReferenceableInstance r1 = ms.getRepository().create(r);
IReferenceableInstance r1 = getRepository().create(r);
ITypedReferenceableInstance r2 = ms.getRepository().get(r1.getId());
ITypedReferenceableInstance r2 = getRepository().get(r1.getId());
Assert.assertEquals(r2.toString(), "{\n" +
"\tid : (type: t4, id: 1)\n" +
"\ta : \t1\n" +
......@@ -230,16 +230,16 @@ public class EnumTest extends BaseTest {
@Test
public void testJson() throws MetadataException {
TypeSystem ts = ms.getTypeSystem();
TypeSystem ts = getTypeSystem();
defineEnums(ts);
ClassType clsType = defineClassTypeWithEnum(ts);
ms.getRepository().defineTypes(ImmutableList.of((HierarchicalType)clsType));
getRepository().defineTypes(ImmutableList.of((HierarchicalType)clsType));
IReferenceableInstance r = createInstanceWithEnum("t4");
IReferenceableInstance r1 = ms.getRepository().create(r);
IReferenceableInstance r1 = getRepository().create(r);
ITypedReferenceableInstance r2 = ms.getRepository().get(r1.getId());
ITypedReferenceableInstance r2 = getRepository().get(r1.getId());
String jsonStr = Serialization$.MODULE$.toJson(r2);
IReferenceableInstance r3 = Serialization$.MODULE$.fromJson(jsonStr);
......
......@@ -19,13 +19,13 @@ public class StorageTest extends BaseTest {
@Test
public void test1() throws MetadataException {
TypeSystem ts = ms.getTypeSystem();
TypeSystem ts = getTypeSystem();
defineDeptEmployeeTypes(ts);
Referenceable hrDept = createDeptEg1(ts);
ITypedReferenceableInstance hrDept2 = ms.getRepository().create(hrDept);
ITypedReferenceableInstance hrDept3 = ms.getRepository().get(hrDept2.getId());
ITypedReferenceableInstance hrDept2 = getRepository().create(hrDept);
ITypedReferenceableInstance hrDept3 = getRepository().get(hrDept2.getId());
Assert.assertEquals(hrDept3.toString(), "{\n" +
"\tid : (type: Department, id: 1)\n" +
"\tname : \thr\n" +
......@@ -49,14 +49,14 @@ public class StorageTest extends BaseTest {
@Test
public void testGetPerson() throws MetadataException {
TypeSystem ts = ms.getTypeSystem();
TypeSystem ts = getTypeSystem();
defineDeptEmployeeTypes(ts);
Referenceable hrDept = createDeptEg1(ts);
ITypedReferenceableInstance hrDept2 = ms.getRepository().create(hrDept);
ITypedReferenceableInstance hrDept2 = getRepository().create(hrDept);
Id e1Id = new Id(2, 0, "Person");
ITypedReferenceableInstance e1 = ms.getRepository().get(e1Id);
ITypedReferenceableInstance e1 = getRepository().get(e1Id);
Assert.assertEquals(e1.toString(), "{\n" +
"\tid : (type: Person, id: 2)\n" +
"\tname : \tJohn\n" +
......@@ -67,15 +67,15 @@ public class StorageTest extends BaseTest {
@Test
public void testInvalidTypeName() throws MetadataException {
TypeSystem ts = ms.getTypeSystem();
TypeSystem ts = getTypeSystem();
defineDeptEmployeeTypes(ts);
Referenceable hrDept = createDeptEg1(ts);
ITypedReferenceableInstance hrDept2 = ms.getRepository().create(hrDept);
ITypedReferenceableInstance hrDept2 = getRepository().create(hrDept);
Id e1Id = new Id(3, 0, "Person");
try {
ITypedReferenceableInstance e1 = ms.getRepository().get(e1Id);
ITypedReferenceableInstance e1 = getRepository().get(e1Id);
} catch(RepositoryException re) {
RepositoryException me = (RepositoryException) re.getCause();
Assert.assertEquals(me.getMessage(), "Invalid Id (unknown) : (type: Person, id: 3)");
......@@ -85,14 +85,14 @@ public class StorageTest extends BaseTest {
@Test
public void testGetManager() throws MetadataException {
TypeSystem ts = ms.getTypeSystem();
TypeSystem ts = getTypeSystem();
defineDeptEmployeeTypes(ts);
Referenceable hrDept = createDeptEg1(ts);
ITypedReferenceableInstance hrDept2 = ms.getRepository().create(hrDept);
ITypedReferenceableInstance hrDept2 = getRepository().create(hrDept);
Id m1Id = new Id(3, 0, "Manager");
ITypedReferenceableInstance m1 = ms.getRepository().get(m1Id);
ITypedReferenceableInstance m1 = getRepository().get(m1Id);
Assert.assertEquals(m1.toString(), "{\n" +
"\tid : (type: Manager, id: 3)\n" +
"\tsubordinates : \t[(type: Person, id: 2)]\n" +
......
......@@ -32,13 +32,13 @@ public class StructTest extends BaseTest {
@Before
public void setup() throws MetadataException {
super.setup();
structType = (StructType) ms.getTypeSystem().getDataType(StructType.class, STRUCT_TYPE_1);
recursiveStructType = (StructType) ms.getTypeSystem().getDataType(StructType.class, STRUCT_TYPE_2);
structType = (StructType) getTypeSystem().getDataType(StructType.class, STRUCT_TYPE_1);
recursiveStructType = (StructType) getTypeSystem().getDataType(StructType.class, STRUCT_TYPE_2);
}
@Test
public void test1() throws MetadataException {
Struct s = createStruct(ms);
Struct s = createStruct();
ITypedStruct ts = structType.convert(s, Multiplicity.REQUIRED);
Assert.assertEquals(ts.toString(), "{\n" +
"\ta : \t1\n" +
......
......@@ -45,7 +45,7 @@ public class TraitTest extends BaseTest {
defineTraits(A, B, C, D);
TraitType DType = (TraitType) ms.getTypeSystem().getDataType(TraitType.class, "D");
TraitType DType = (TraitType) getTypeSystem().getDataType(TraitType.class, "D");
Struct s1 = new Struct("D");
s1.set("d", 1);
......@@ -80,7 +80,7 @@ public class TraitTest extends BaseTest {
/*
* cast to B and set the 'b' attribute on A.
*/
TraitType BType = (TraitType) ms.getTypeSystem().getDataType(TraitType.class, "B");
TraitType BType = (TraitType) getTypeSystem().getDataType(TraitType.class, "B");
IStruct s2 = DType.castAs(ts, "B");
s2.set("A.B.b", false);
......@@ -101,7 +101,7 @@ public class TraitTest extends BaseTest {
/*
* cast again to A and set the 'b' attribute on A.
*/
TraitType AType = (TraitType) ms.getTypeSystem().getDataType(TraitType.class, "A");
TraitType AType = (TraitType) getTypeSystem().getDataType(TraitType.class, "A");
IStruct s3 = BType.castAs(s2, "A");
s3.set("b", true);
Assert.assertEquals(ts.toString(), "{\n" +
......@@ -135,7 +135,7 @@ public class TraitTest extends BaseTest {
defineTraits(B, D, A, C);
TraitType DType = (TraitType) ms.getTypeSystem().getDataType(TraitType.class, "D");
TraitType DType = (TraitType) getTypeSystem().getDataType(TraitType.class, "D");
Struct s1 = new Struct("D");
s1.set("d", 1);
......
package org.apache.hadoop.metadata;
import com.google.common.collect.ImmutableList;
import org.apache.hadoop.metadata.json.Serialization$;
import org.apache.hadoop.metadata.json.TypesSerialization;
import org.apache.hadoop.metadata.json.TypesSerialization$;
import org.apache.hadoop.metadata.types.*;
import org.junit.Assert;
import org.junit.Test;
import java.util.ArrayList;
import java.util.List;
/**
 * Round-trip tests for Hive-flavoured type and instance serialization:
 * defines a small hive_database / hive_table / hive_fetl model, then
 * verifies that types and a typed table instance survive a JSON
 * serialize / deserialize cycle.
 */
public class VenkateshTest extends BaseTest {

    /**
     * Registers the hive_database and hive_table class types plus the
     * hive_fetl trait in {@code typeSystem} and returns the definitions
     * in registration order.
     */
    protected List<HierarchicalTypeDefinition> createHiveTypes(TypeSystem typeSystem) throws MetadataException {
        ArrayList<HierarchicalTypeDefinition> defs = new ArrayList<>();

        // Database: just a name and description.
        HierarchicalTypeDefinition<ClassType> dbDef =
                createClassTypeDef("hive_database",
                        ImmutableList.<String>of(),
                        createRequiredAttrDef("name", DataTypes.STRING_TYPE),
                        createRequiredAttrDef("description", DataTypes.STRING_TYPE));
        defs.add(dbDef);

        // Table: scalar attributes plus a required reference to its database.
        HierarchicalTypeDefinition<ClassType> tableDef = createClassTypeDef(
                "hive_table",
                ImmutableList.<String>of(),
                createRequiredAttrDef("name", DataTypes.STRING_TYPE),
                createRequiredAttrDef("description", DataTypes.STRING_TYPE),
                createRequiredAttrDef("type", DataTypes.STRING_TYPE),
                new AttributeDefinition("hive_database",
                        "hive_database", Multiplicity.REQUIRED, false, "hive_database"));
        defs.add(tableDef);

        // Trait carrying a single int attribute.
        HierarchicalTypeDefinition<TraitType> fetlDef = createTraitTypeDef(
                "hive_fetl",
                ImmutableList.<String>of(),
                createRequiredAttrDef("level", DataTypes.INT_TYPE));
        defs.add(fetlDef);

        typeSystem.defineTypes(
                ImmutableList.<StructTypeDefinition>of(),
                ImmutableList.of(fetlDef),
                ImmutableList.of(dbDef, tableDef));

        return defs;
    }

    /**
     * Builds a typed hive_table instance (with the hive_fetl trait attached)
     * referencing a fresh hive_database instance.
     */
    protected ITypedReferenceableInstance createHiveTableInstance(TypeSystem typeSystem) throws MetadataException {
        Referenceable db = new Referenceable("hive_database");
        db.set("name", "hive_database");
        db.set("description", "foo database");

        Referenceable table = new Referenceable("hive_table", "hive_fetl");
        table.set("name", "t1");
        table.set("description", "bar table");
        table.set("type", "managed");
        table.set("hive_database", db);

        Struct fetl = (Struct) table.getTrait("hive_fetl");
        fetl.set("level", 1);
        table.set("hive_fetl", fetl);

        ClassType tableType = typeSystem.getDataType(ClassType.class, "hive_table");
        return tableType.convert(table, Multiplicity.REQUIRED);
    }

    /** Types survive JSON round-trip: serialize, reset, redefine, reserialize. */
    @Test
    public void testType() throws MetadataException {
        TypeSystem ts = getTypeSystem();
        createHiveTypes(ts);

        String json = TypesSerialization$.MODULE$.toJson(ts, ImmutableList.of("hive_database", "hive_table"));
        System.out.println(json);

        TypesDef roundTripped = TypesSerialization$.MODULE$.fromJson(json);
        System.out.println(roundTripped);

        ts.reset();
        ts.defineTypes(roundTripped);

        json = TypesSerialization$.MODULE$.toJson(ts, ImmutableList.of("hive_database", "hive_table"));
        System.out.println(json);
    }

    /** A typed table instance survives a JSON serialize/deserialize cycle. */
    @Test
    public void testInstance() throws MetadataException {
        TypeSystem ts = getTypeSystem();
        createHiveTypes(ts);

        ITypedReferenceableInstance instance = createHiveTableInstance(getTypeSystem());

        String json = Serialization$.MODULE$.toJson(instance);
        System.out.println(json);

        instance = Serialization$.MODULE$.fromJson(json);
        System.out.println(instance);
    }
}
......@@ -47,7 +47,7 @@ public class SerializationJavaTest extends BaseTest {
@Test
public void test1() throws MetadataException {
TypeSystem ts = ms.getTypeSystem();
TypeSystem ts = getTypeSystem();
HierarchicalTypeDefinition<ClassType> deptTypeDef = createClassTypeDef("Department", ImmutableList.<String>of(),
createRequiredAttrDef("name", DataTypes.STRING_TYPE),
......
......@@ -40,12 +40,12 @@ class SerializationTest extends BaseTest {
@Before
override def setup {
super.setup
structType = ms.getTypeSystem.getDataType(classOf[StructType], BaseTest.STRUCT_TYPE_1).asInstanceOf[StructType]
recursiveStructType = ms.getTypeSystem.getDataType(classOf[StructType], BaseTest.STRUCT_TYPE_2).asInstanceOf[StructType]
structType = getTypeSystem.getDataType(classOf[StructType], BaseTest.STRUCT_TYPE_1).asInstanceOf[StructType]
recursiveStructType = getTypeSystem.getDataType(classOf[StructType], BaseTest.STRUCT_TYPE_2).asInstanceOf[StructType]
}
@Test def test1 {
val s: Struct = BaseTest.createStruct(ms)
val s: Struct = BaseTest.createStruct()
val ts: ITypedStruct = structType.convert(s, Multiplicity.REQUIRED)
Assert.assertEquals(ts.toString, "{\n\ta : \t1\n\tb : \ttrue\n\tc : \t1\n\td : \t2\n\te : \t1\n\tf : \t1\n\tg : \t1\n\th : \t1.0\n\ti : \t1.0\n\tj : \t1\n\tk : \t1\n\tl : \t2014-12-10\n\tm : \t[1, 1]\n\tn : \t[1.1, 1.1]\n\to : \t{b=2.0, a=1.0}\n}")
......@@ -63,7 +63,7 @@ class SerializationTest extends BaseTest {
}
@Test def test2 {
val s: Struct = BaseTest.createStruct(ms)
val s: Struct = BaseTest.createStruct()
val ts: ITypedStruct = structType.convert(s, Multiplicity.REQUIRED)
implicit val formats = org.json4s.native.Serialization.formats(NoTypeHints) + new TypedStructSerializer +
......@@ -92,7 +92,7 @@ class SerializationTest extends BaseTest {
defineTraits(A, B, C, D)
val DType: TraitType = ms.getTypeSystem.getDataType(classOf[TraitType], "D").asInstanceOf[TraitType]
val DType: TraitType = getTypeSystem.getDataType(classOf[TraitType], "D").asInstanceOf[TraitType]
val s1: Struct = new Struct("D")
s1.set("d", 1)
s1.set("c", 1)
......@@ -106,7 +106,7 @@ class SerializationTest extends BaseTest {
s1.set("A.C.D.c", 3)
s1.set("A.C.D.d", 3)
val s: Struct = BaseTest.createStruct(ms)
val s: Struct = BaseTest.createStruct()
val ts: ITypedStruct = DType.convert(s1, Multiplicity.REQUIRED)
implicit val formats = org.json4s.native.Serialization.formats(NoTypeHints) + new TypedStructSerializer +
......@@ -129,7 +129,7 @@ class SerializationTest extends BaseTest {
@Test def testClass {
val ts: TypeSystem = ms.getTypeSystem
val ts: TypeSystem = getTypeSystem
val deptTypeDef: HierarchicalTypeDefinition[ClassType] = createClassTypeDef("Department",
ImmutableList.of[String],
......
......@@ -27,7 +27,7 @@ class TypesSerializationTest extends BaseTest with TypeHelpers {
@Test def test1: Unit = {
val ts = ms.getTypeSystem
val ts = getTypeSystem
val sDef = structDef("ts1", requiredAttr("a", DataTypes.INT_TYPE),
optionalAttr("b", DataTypes.BOOLEAN_TYPE),
......
Markdown is supported
0% or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment