dataplatform / atlas

Commit 643b6489
authored 9 years ago by Harish Butani
introduce serialization for structs and referenceables
Parent: 858348b0
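For orientation, a minimal Scala sketch (not part of this commit) of the round trip the new InstanceSerialization object is meant to support, using only constructors and methods that appear in the diffs below; the type name "t1" and the attribute values are placeholders:

    import org.apache.hadoop.metadata.typesystem.Struct
    import org.apache.hadoop.metadata.typesystem.json.InstanceSerialization

    object InstanceSerializationSketch extends App {
      // Attribute values for a placeholder struct type named "t1" (illustrative only).
      val values = new java.util.HashMap[String, Object]()
      values.put("a", Integer.valueOf(1))
      values.put("b", "hello")

      // Write the untyped Struct to JSON and read it back via the helpers added in this commit.
      val s: Struct = new Struct("t1", values)
      val jsonStr = InstanceSerialization.toJson(s, withBigDecimals = true)
      val s1 = InstanceSerialization.fromJsonStruct(jsonStr, withBigDecimals = true)

      println(jsonStr)
      println(s1.typeName) // "t1"
    }

The Java tests below (InstanceE2ETest and StructTest) exercise the same helpers through InstanceSerialization$.MODULE$ and additionally convert the deserialized instance back through the type system.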
Showing 7 changed files with 171 additions and 8 deletions
+32 -6   repository/src/test/java/org/apache/hadoop/metadata/repository/memory/InstanceE2ETest.java
+27 -0   repository/src/test/java/org/apache/hadoop/metadata/repository/memory/StructTest.java
+16 -0   typesystem/src/main/java/org/apache/hadoop/metadata/typesystem/Referenceable.java
+12 -0   typesystem/src/main/java/org/apache/hadoop/metadata/typesystem/persistence/Id.java
 +1 -1   typesystem/src/main/java/org/apache/hadoop/metadata/typesystem/types/ClassType.java
 +1 -1   typesystem/src/main/java/org/apache/hadoop/metadata/typesystem/types/TypedStructHandler.java
+82 -0   typesystem/src/main/scala/org/apache/hadoop/metadata/typesystem/json/Serialization.scala
repository/src/test/java/org/apache/hadoop/metadata/repository/memory/InstanceE2ETest.java
@@ -20,13 +20,11 @@ package org.apache.hadoop.metadata.repository.memory;

 import com.google.common.collect.ImmutableList;
 import org.apache.hadoop.metadata.MetadataException;
-import org.apache.hadoop.metadata.typesystem.TypesDef;
+import org.apache.hadoop.metadata.typesystem.*;
+import org.apache.hadoop.metadata.typesystem.json.InstanceSerialization$;
 import org.apache.hadoop.metadata.typesystem.json.Serialization$;
 import org.apache.hadoop.metadata.typesystem.json.TypesSerialization$;
 import org.apache.hadoop.metadata.repository.BaseTest;
-import org.apache.hadoop.metadata.typesystem.ITypedReferenceableInstance;
-import org.apache.hadoop.metadata.typesystem.Referenceable;
-import org.apache.hadoop.metadata.typesystem.Struct;
 import org.apache.hadoop.metadata.typesystem.types.AttributeDefinition;
 import org.apache.hadoop.metadata.typesystem.types.ClassType;
 import org.apache.hadoop.metadata.typesystem.types.DataTypes;
@@ -78,7 +76,7 @@ public class InstanceE2ETest extends BaseTest {
         return typeDefinitions;
     }

-    protected ITypedReferenceableInstance createHiveTableInstance(TypeSystem typeSystem)
+    protected Referenceable createHiveTableReferenceable()
             throws MetadataException {
         Referenceable databaseInstance = new Referenceable("hive_database");
         databaseInstance.set("name", "hive_database");

@@ -95,8 +93,13 @@ public class InstanceE2ETest extends BaseTest {
         tableInstance.set("hive_fetl", traitInstance);
+        return tableInstance;
+    }

+    protected ITypedReferenceableInstance createHiveTableInstance(TypeSystem typeSystem)
+            throws MetadataException {
         ClassType tableType = typeSystem.getDataType(ClassType.class, "hive_table");
-        return tableType.convert(tableInstance, Multiplicity.REQUIRED);
+        return tableType.convert(createHiveTableReferenceable(), Multiplicity.REQUIRED);
     }

     @Test

@@ -136,4 +139,27 @@ public class InstanceE2ETest extends BaseTest {
         i = Serialization$.MODULE$.fromJson(jsonStr);
         System.out.println(i);
     }

+    @Test
+    public void testInstanceSerialization() throws MetadataException {
+        TypeSystem ts = getTypeSystem();
+        createHiveTypes(ts);
+
+        Referenceable r = createHiveTableReferenceable();
+        String jsonStr = InstanceSerialization$.MODULE$.toJson(r, true);
+        Referenceable r1 = InstanceSerialization$.MODULE$.fromJsonReferenceable(jsonStr, true);
+        ClassType tableType = ts.getDataType(ClassType.class, "hive_table");
+
+        /* todo: fix deserialization, so following conver works
+        ITypedReferenceableInstance i = tableType.convert(r1, Multiplicity.REQUIRED);
+
+        jsonStr = Serialization$.MODULE$.toJson(i);
+        System.out.println(jsonStr);
+
+        i = Serialization$.MODULE$.fromJson(jsonStr);
+        System.out.println(i);
+        */
+    }
 }
repository/src/test/java/org/apache/hadoop/metadata/repository/memory/StructTest.java
@@ -22,6 +22,7 @@ import org.apache.hadoop.metadata.MetadataException;
 import org.apache.hadoop.metadata.repository.BaseTest;
 import org.apache.hadoop.metadata.typesystem.ITypedStruct;
 import org.apache.hadoop.metadata.typesystem.Struct;
+import org.apache.hadoop.metadata.typesystem.json.InstanceSerialization$;
 import org.apache.hadoop.metadata.typesystem.types.Multiplicity;
 import org.apache.hadoop.metadata.typesystem.types.StructType;
 import org.junit.Assert;

@@ -82,4 +83,30 @@ public class StructTest extends BaseTest {
                 "}");
     }

+    @Test
+    public void testSerialization() throws MetadataException {
+        Struct s = createStruct();
+        String jsonStr = InstanceSerialization$.MODULE$.toJson(s, true);
+        Struct s1 = InstanceSerialization$.MODULE$.fromJsonStruct(jsonStr, true);
+        ITypedStruct ts = structType.convert(s1, Multiplicity.REQUIRED);
+        Assert.assertEquals(ts.toString(), "{\n" +
+                "\ta : \t1\n" +
+                "\tb : \ttrue\n" +
+                "\tc : \t1\n" +
+                "\td : \t2\n" +
+                "\te : \t1\n" +
+                "\tf : \t1\n" +
+                "\tg : \t1\n" +
+                "\th : \t1.0\n" +
+                "\ti : \t1.0\n" +
+                "\tj : \t1\n" +
+                "\tk : \t1\n" +
+                "\tl : \t2014-12-10\n" +
+                "\tm : \t[1, 1]\n" +
+                "\tn : \t[1.100000000000000088817841970012523233890533447265625, 1" +
+                ".100000000000000088817841970012523233890533447265625]\n" +
+                "\to : \t{b=2.0, a=1.0}\n" +
+                "}");
+    }
 }
typesystem/src/main/java/org/apache/hadoop/metadata/typesystem/Referenceable.java
@@ -22,6 +22,7 @@ import com.google.common.collect.ImmutableList;
 import com.google.common.collect.ImmutableMap;
 import org.apache.hadoop.metadata.typesystem.persistence.Id;

+import java.util.List;
 import java.util.Map;

 /**
@@ -58,6 +59,21 @@ public class Referenceable extends Struct implements IReferenceableInstance {
         traits = ImmutableMap.of();
     }

+    /**
+     * @nopublic only use during deserialization
+     * @param guid
+     * @param typeName
+     * @param values
+     */
+    public Referenceable(String guid, String typeName, Map<String, Object> values,
+                         List<String> _traitNames, Map<String, IStruct> _traits) {
+        super(typeName, values);
+        id = new Id(guid, 0, typeName);
+        traitNames = ImmutableList.copyOf(_traitNames);
+        traits = ImmutableMap.copyOf(_traits);
+    }
+
     @Override
     public ImmutableList<String> getTraits() {
         return traitNames;
typesystem/src/main/java/org/apache/hadoop/metadata/typesystem/persistence/Id.java
@@ -73,6 +73,18 @@ public class Id implements ITypedReferenceableInstance {
                 .format("(type: %s, id: %s)", className, isUnassigned() ? "<unassigned>" : "" + id);
     }

+    public String getClassName() {
+        return className;
+    }
+
+    public int getVersion() {
+        return version;
+    }
+
+    public String _getId() {
+        return id;
+    }
+
     @Override
     public boolean equals(Object o) {
         if (this == o) return true;
typesystem/src/main/java/org/apache/hadoop/metadata/typesystem/types/ClassType.java
@@ -106,7 +106,7 @@ public class ClassType extends HierarchicalType<ClassType, IReferenceableInstanc
             Referenceable r = null;
             Id id = null;
-            if (s.typeName != getName()) {
+            if (!s.typeName.equals(getName())) {
                 /*
                  * If val is a subType instance; invoke convert on it.
                  */
typesystem/src/main/java/org/apache/hadoop/metadata/typesystem/types/TypedStructHandler.java
@@ -53,7 +53,7 @@ public class TypedStructHandler {
             return ts;
         } else if (val instanceof Struct) {
             Struct s = (Struct) val;
-            if (s.typeName != structType.getName()) {
+            if (!s.typeName.equals(structType.getName())) {
                 throw new ValueConversionException(structType, val);
             }
             ITypedStruct ts = createInstance();
typesystem/src/main/scala/org/apache/hadoop/metadata/typesystem/json/Serialization.scala
@@ -18,6 +18,8 @@
 package org.apache.hadoop.metadata.typesystem.json

+import java.text.SimpleDateFormat
+import org.apache.hadoop.metadata.typesystem._
 import org.apache.hadoop.metadata.typesystem.persistence.{StructInstance, Id, ReferenceableInstance}
 import org.apache.hadoop.metadata.typesystem.types.DataTypes.{ArrayType, MapType, TypeCategory}
@@ -299,3 +301,83 @@ object Serialization {
     read[StructInstance](jsonStr)
   }
 }
+
+object InstanceSerialization {
+
+  case class _Id(id: String, version: Int, typeName: String)
+
+  case class _Struct(typeName: String, values: Map[String, AnyRef])
+
+  case class _Reference(id: _Id,
+                        typeName: String,
+                        values: Map[String, AnyRef],
+                        traitNames: List[String],
+                        traits: Map[String, _Struct])
+
+  def asJava(v: Any): Any = v match {
+    case i: _Id => new Id(i.id, i.version, i.typeName)
+    case s: _Struct => new Struct(s.typeName, asJava(s.values).asInstanceOf[java.util.Map[String, Object]])
+    case r: _Reference => {
+      new Referenceable(r.id.asInstanceOf[_Id].id, r.typeName,
+        asJava(r.values).asInstanceOf[java.util.Map[String, Object]],
+        asJava(r.traitNames).asInstanceOf[java.util.List[String]],
+        asJava(r.traits).asInstanceOf[java.util.Map[String, IStruct]])
+    }
+    case l: List[_] => l.map(e => asJava(e)).asJava
+    case m: Map[_, _] => m.mapValues(v => asJava(v)).asJava
+    case _ => v
+  }
+
+  def asScala(v: Any): Any = v match {
+    case i: Id => _Id(i._getId(), i.getVersion, i.getClassName)
+    case r: Referenceable => {
+      val traits = r.getTraits.map { tName =>
+        val t = r.getTrait(tName).asInstanceOf[Struct]
+        (tName -> _Struct(t.getTypeName, asScala(t.getValuesMap).asInstanceOf[Map[String, AnyRef]]))
+      }.toMap
+      _Reference(asScala(r.getId).asInstanceOf[_Id],
+        r.typeName,
+        asScala(r.getValuesMap).asInstanceOf[Map[String, AnyRef]],
+        asScala(r.getTraits).asInstanceOf[List[String]],
+        traits.asInstanceOf[Map[String, _Struct]])
+    }
+    case s: Struct => _Struct(s.typeName, asScala(s.getValuesMap).asInstanceOf[Map[String, AnyRef]])
+    case l: java.util.List[_] => l.asScala.map(e => asScala(e)).toList
+    case m: java.util.Map[_, _] => m.asScala.mapValues(v => asScala(v)).toMap
+    case _ => v
+  }
+
+  val _formats = new DefaultFormats {
+    override val dateFormatter = TypeSystem.getInstance().getDateFormat.asInstanceOf[SimpleDateFormat]
+    override val typeHints = NoTypeHints
+  }
+
+  def buildFormat(withBigDecimals: Boolean) = {
+    if (withBigDecimals)
+      _formats + new BigDecimalSerializer + new BigIntegerSerializer
+    else
+      _formats
+  }
+
+  def _toJson(value: AnyRef, withBigDecimals: Boolean = false): String = {
+    implicit val formats = buildFormat(withBigDecimals)
+    val _s: AnyRef = asScala(value).asInstanceOf[AnyRef]
+    writePretty(_s)
+  }
+
+  def toJson(value: Struct, withBigDecimals: Boolean = false): String = {
+    _toJson(value, withBigDecimals)
+  }
+
+  def fromJsonStruct(jsonStr: String, withBigDecimals: Boolean = false): Struct = {
+    implicit val formats = buildFormat(withBigDecimals)
+    val _s = read[_Struct](jsonStr)
+    asJava(_s).asInstanceOf[Struct]
+  }
+
+  //def toJsonReferenceable(value: Referenceable, withBigDecimals : Boolean = false): String = _toJson(value, withBigDecimals)
+
+  def fromJsonReferenceable(jsonStr: String, withBigDecimals: Boolean = false): Referenceable = {
+    implicit val formats = buildFormat(withBigDecimals)
+    val _s = read[_Reference](jsonStr)
+    asJava(_s).asInstanceOf[Referenceable]
+  }
+}
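A note on the design above: the commit does not serialize the typesystem classes directly. asScala first maps them to plain intermediate case classes (_Id, _Struct, _Reference), the default case-class JSON binding does the actual reading and writing, and asJava rebuilds Id/Struct/Referenceable on the way back in. A small, self-contained sketch of that underlying mechanism, assuming a json4s-native backend (the library this file's read/writePretty/DefaultFormats usage appears to come from) and a simplified stand-in case class with values narrowed to String:

    import org.json4s.NoTypeHints
    import org.json4s.native.Serialization
    import org.json4s.native.Serialization.{read, writePretty}

    // Hypothetical stand-in for the intermediate case classes above; not from the commit.
    case class MyStruct(typeName: String, values: Map[String, String])

    object CaseClassJsonSketch extends App {
      implicit val formats = Serialization.formats(NoTypeHints)

      val s = MyStruct("t1", Map("a" -> "1", "b" -> "hello"))
      val json = writePretty(s)       // pretty-printed JSON with "typeName" and "values" fields
      val back = read[MyStruct](json)

      assert(back == s)               // round-trips without any hand-written serializer
      println(json)
    }

Keeping the intermediate representation as plain case classes avoids hand-written serializers for Referenceable and Struct themselves.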