dataplatform / atlas

Commit 0cdacb1d
authored Apr 28, 2015 by Harish Butani
add instanceBuilder
Parent: 9ce130b0
Showing 4 changed files with 409 additions and 50 deletions:

- InstanceBuilder.scala (...hadoop/metadata/typesystem/builders/InstanceBuilder.scala): +218 -0
- BuilderTest.scala (...che/hadoop/metadata/typesystem/builders/BuilderTest.scala): +80 -0
- InstanceBuilderTest.scala (...op/metadata/typesystem/builders/InstanceBuilderTest.scala): +105 -0
- TypesBuilderTest.scala (...adoop/metadata/typesystem/builders/TypesBuilderTest.scala): +6 -50
typesystem/src/main/scala/org/apache/hadoop/metadata/typesystem/builders/InstanceBuilder.scala (new file, 0 → 100644)
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.metadata.typesystem.builders

import org.apache.hadoop.metadata.typesystem.{IReferenceableInstance, IStruct, Struct, Referenceable}

import scala.collection.JavaConversions._
import scala.collection.JavaConverters._
import scala.collection.mutable.ArrayBuffer
import scala.language.dynamics
import scala.language.implicitConversions
import scala.util.DynamicVariable

class InstanceBuilder extends Dynamic {

  private val references: ArrayBuffer[Referenceable] = new ArrayBuffer[Referenceable]()

  val context = new DynamicVariable[DynamicStruct](null)

  def struct(typeName: String): DynamicStruct = {
    context.value = new DynamicStruct(this, new Struct(typeName))
    context.value
  }

  def instance(typeName: String, traitNames: String*)(f: => Unit): DynamicReference = {
    val r = new Referenceable(typeName, traitNames: _*)
    references.append(r)
    val dr = new DynamicReference(this, r)
    context.withValue(dr) { f }
    dr
  }

  def create(f: => Unit): java.util.List[Referenceable] = {
    f
    references.asJava
  }

  def applyDynamic(name: String)(value: Any): Any = {
    context.value.updateDynamic(name)(value)
  }

  implicit def symbolToDynamicStruct(s: Symbol): DynamicValue =
    new DynamicValue(this, s.name, if (context.value == null) null else context.value.s)
}

object DynamicValue {

  private[builders] def transformOut(s: IStruct, attr: String, v: Any)(implicit ib: InstanceBuilder): DynamicValue =
    v match {
      case r: Referenceable => new DynamicReference(ib, r)
      case s: Struct => new DynamicStruct(ib, s)
      case jL: java.util.List[_] => {
        if (s != null) {
          new DynamicCollection(ib, attr, s)
        } else {
          new DynamicValue(ib, attr, s, jL.map { e => transformOut(null, null, e) }.toSeq)
        }
      }
      case jM: java.util.Map[_, _] => {
        if (s != null) {
          new DynamicMap(ib, attr, s)
        } else {
          new DynamicValue(ib, attr, s, jM.map { case (k, v) => k -> transformOut(null, null, v) }.toMap)
        }
      }
      case x => {
        if (s != null) {
          new DynamicValue(ib, attr, s)
        } else {
          new DynamicValue(ib, attr, s, x)
        }
      }
    }

  private[builders] def transformIn(v: Any): Any = v match {
    case dr: DynamicReference => dr.r
    case ds: DynamicStruct => ds.s
    case dv: DynamicValue => dv.get
    case l: Seq[_] => l.map { e => transformIn(e) }.asJava
    case m: Map[_, _] => m.map { case (k, v) => k -> transformIn(v) }.asJava
    case x => x
  }
}

class DynamicValue(val ib: InstanceBuilder, val attrName: String, val s: IStruct, var value: Any = null) extends Dynamic {
  import DynamicValue._

  implicit val iib: InstanceBuilder = ib

  def ~(v: Any): Unit = {
    if (s != null) {
      s.set(attrName, transformIn(v))
    } else {
      value = v
    }
  }

  def get: Any = if (s != null) s.get(attrName) else value

  def selectDynamic(name: String): DynamicValue = {
    throw new UnsupportedOperationException()
  }

  def update(key: Any, value: Object): Unit = {
    throw new UnsupportedOperationException()
  }

  def apply(key: Any): DynamicValue = {
    if (s != null && s.isInstanceOf[Referenceable] && key.isInstanceOf[String]) {
      val r = s.asInstanceOf[Referenceable]
      if (r.getTraits contains attrName) {
        val traitAttr = key.asInstanceOf[String]
        return new DynamicStruct(ib, r.getTrait(attrName)).selectDynamic(traitAttr)
      }
    }
    throw new UnsupportedOperationException()
  }
}

class DynamicCollection(ib: InstanceBuilder, attrName: String, s: IStruct) extends DynamicValue(ib, attrName, s) {
  import DynamicValue._

  override def update(key: Any, value: Object): Unit = {
    var jL = s.get(attrName)
    val idx = key.asInstanceOf[Int]
    if (jL == null) {
      val l = new java.util.ArrayList[Object]()
      l.ensureCapacity(idx)
      jL = l
    }
    val nJL = new java.util.ArrayList[Object](jL.asInstanceOf[java.util.List[Object]])
    nJL.asInstanceOf[java.util.List[Object]].set(idx, transformIn(value).asInstanceOf[Object])
    s.set(attrName, nJL)
  }

  override def apply(key: Any): DynamicValue = {
    var jL = s.get(attrName)
    val idx = key.asInstanceOf[Int]
    if (jL == null) {
      null
    } else {
      transformOut(null, null, jL.asInstanceOf[java.util.List[Object]].get(idx))
    }
  }
}

class DynamicMap(ib: InstanceBuilder, attrName: String, s: IStruct) extends DynamicValue(ib, attrName, s) {
  import DynamicValue._

  override def update(key: Any, value: Object): Unit = {
    var jM = s.get(attrName)
    if (jM == null) {
      jM = new java.util.HashMap[Object, Object]()
    }
    jM.asInstanceOf[java.util.Map[Object, Object]].put(key.asInstanceOf[AnyRef], value)
  }

  override def apply(key: Any): DynamicValue = {
    var jM = s.get(attrName)
    if (jM == null) {
      null
    } else {
      transformOut(null, null, jM.asInstanceOf[java.util.Map[Object, Object]].get(key))
    }
  }
}

class DynamicStruct(ib: InstanceBuilder, s: IStruct) extends DynamicValue(ib, null, s) {
  import DynamicValue._

  override def selectDynamic(name: String): DynamicValue = {
    transformOut(s, name, s.get(name))
  }

  def updateDynamic(name: String)(value: Any) {
    s.set(name, transformIn(value))
  }

  override def ~(v: Any): Unit = { throw new UnsupportedOperationException() }

  override def get: Any = s
}

class DynamicReference(ib: InstanceBuilder, val r: IReferenceableInstance) extends DynamicStruct(ib, r) {

  private def _trait(name: String) = new DynamicStruct(ib, r.getTrait(name))

  override def selectDynamic(name: String): DynamicValue = {
    if (r.getTraits contains name) {
      _trait(name)
    } else {
      super.selectDynamic(name)
    }
  }
}
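For orientation, and not part of the commit itself: besides instance(...), the builder exposes a struct(typeName) entry point that the tests below never exercise. A minimal sketch of how it could be used, relying only on the members defined above:

// Illustrative sketch only -- not part of this commit.
// struct(...) points the builder's context at a new DynamicStruct; the implicit
// symbolToDynamicStruct conversion then lets Symbols address its attributes.
val b = new InstanceBuilder
import b._

val sd = struct("StorageDesc")      // context.value now wraps a new Struct("StorageDesc")
'inputFormat ~ "TextInputFormat"    // ~ sets the attribute on the current context struct
'outputFormat ~ "TextOutputFormat"
println(sd.get)                     // DynamicStruct.get returns the underlying IStruct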
typesystem/src/test/scala/org/apache/hadoop/metadata/typesystem/builders/BuilderTest.scala (new file, 0 → 100644)
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.metadata.typesystem.builders

import org.apache.hadoop.metadata.typesystem.TypesDef
import org.apache.hadoop.metadata.typesystem.types.TypeSystem
import org.scalatest.{BeforeAndAfter, FunSuite}

abstract class BuilderTest extends FunSuite with BeforeAndAfter {

  var tDef: TypesDef = null

  before {
    TypeSystem.getInstance().reset()

    val b = new TypesBuilder
    import b._

    tDef = types {

      _trait("Dimension") {}
      _trait("PII") {}
      _trait("Metric") {}
      _trait("ETL") {}
      _trait("JdbcAccess") {}

      _class("DB") {
        "name" ~ (string, required, indexed, unique)
        "owner" ~ (string)
        "createTime" ~ (int)
      }

      _class("StorageDesc") {
        "inputFormat" ~ (string, required)
        "outputFormat" ~ (string, required)
      }

      _class("Column") {
        "name" ~ (string, required)
        "dataType" ~ (string, required)
        "sd" ~ ("StorageDesc", required)
      }

      _class("Table", List()) {
        "name" ~ (string, required, indexed)
        "db" ~ ("DB", required)
        "sd" ~ ("StorageDesc", required)
      }

      _class("LoadProcess") {
        "name" ~ (string, required)
        "inputTables" ~ (array("Table"), collection)
        "outputTable" ~ ("Table", required)
      }

      _class("View") {
        "name" ~ (string, required)
        "inputTables" ~ (array("Table"), collection)
      }
    }
  }
}
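BuilderTest is deliberately abstract: the before block rebuilds tDef for every test, and concrete suites register it with the TypeSystem themselves, as the two test classes that follow do. A minimal sketch of such a subclass (the class and test names here are hypothetical, for illustration only):

// Hypothetical subclass, for illustration only -- not part of this commit.
import org.apache.hadoop.metadata.typesystem.types.TypeSystem

class BuilderSmokeTest extends BuilderTest {
  test("shared fixture types register cleanly") {
    // tDef was populated by BuilderTest's before { ... } block
    TypeSystem.getInstance().defineTypes(tDef)
  }
}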
typesystem/src/test/scala/org/apache/hadoop/metadata/typesystem/builders/InstanceBuilderTest.scala (new file, 0 → 100644)
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.metadata.typesystem.builders

import org.apache.hadoop.metadata.typesystem.types.{Multiplicity, ClassType, TypeSystem}

class InstanceBuilderTest extends BuilderTest {

  test("test1") {
    TypeSystem.getInstance().defineTypes(tDef)

    val b = new InstanceBuilder
    import b._

    val instances = b create {

      val salesDB = instance("DB") {   // use instance to create Referenceables. use closure to
                                       // set attributes of instance
        'name ~ "Sales"                // use '~' to set attributes. Use a Symbol (names starting with ') for
                                       // attribute names.
        'owner ~ "John ETL"
        'createTime ~ 1000
      }

      val salesFact = instance("Table") {
        'name ~ "sales_fact"
        'db ~ salesDB

        val sd = instance("StorageDesc") {   // any valid scala allowed in closure.
          'inputFormat ~ "TextIputFormat"
          'outputFormat ~ "TextOutputFormat"
        }

        'sd ~ sd                             // use ~ to set references, collections and maps.

        val columns = Seq(
          instance("Column") {
            'name ~ "time_id"
            'dataType ~ "int"
            'sd ~ sd
          },
          instance("Column") {
            'name ~ "product_id"
            'dataType ~ "int"
            'sd ~ sd
          },
          instance("Column") {
            'name ~ "customer_id"
            'dataType ~ "int"
            'sd ~ sd
          },
          instance("Column", "Metric") {
            'name ~ "sales"
            'dataType ~ "int"
            'sd ~ sd
            'Metric("x") ~ 1                 // use 'TraitName("attrName") to set values on traits.
          }
        )

        'columns ~ columns
      }

      salesFact.sd.inputFormat ~ "TextInputFormat"   // use dot navigation to alter attributes in the object graph.
                                                     // here I am fixing the typo in "TextInputFormat"

      // dot navigation also works for arrays.
      // here I am fixing column(3). Metric trait has no attributes.
      val c = salesFact.columns
      c(3) = instance("Column", "Metric") {
        'name ~ "sales"
        'dataType ~ "int"
        'sd ~ salesFact.sd
      }
    }

    val ts = TypeSystem.getInstance()

    import scala.collection.JavaConversions._
    val typedInstances = instances.map { i =>
      val iTyp = ts.getDataType(classOf[ClassType], i.getTypeName)
      iTyp.convert(i, Multiplicity.REQUIRED)
    }

    typedInstances.foreach { i =>
      println(i)
    }
  }
}
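The test above only writes through dot navigation; reads go through the same path (selectDynamic, then DynamicValue.get). A sketch of lines that could be added inside the create { ... } block above, after the "TextInputFormat" fix, reusing the salesDB and salesFact values already in scope (illustrative only, not part of the commit):

// Illustrative sketch only -- assumes it sits inside the b create { ... } block
// above, after the "TextInputFormat" fix, where salesDB and salesFact are in scope.
val dbName = salesDB.name.get                // selectDynamic("name") then get => "Sales"
val inputFmt = salesFact.sd.inputFormat.get  // follows the 'sd reference, then reads the attribute
println(s"$dbName / $inputFmt")              // prints: Sales / TextInputFormat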
typesystem/src/test/scala/org/apache/hadoop/metadata/typesystem/builders/TypesBuilderTest.scala (modified, +6 -50)
@@ -20,59 +20,15 @@ package org.apache.hadoop.metadata.typesystem.builders

 import org.apache.hadoop.metadata.typesystem.json.TypesSerialization
 import org.apache.hadoop.metadata.typesystem.types.{TypeSystem, BaseTest}
 import org.junit.Test
 import org.junit.runner.RunWith
 import org.scalatest.{BeforeAndAfter, FunSuite}
 import org.scalatest.junit.JUnitRunner

-class TypesBuilderTest {
+@RunWith(classOf[JUnitRunner])
+class TypesBuilderTest extends BuilderTest {

-  @Test
-  def test1: Unit = {
-    val b = new TypesBuilder
-    import b._
-
-    val tDef = types {
-
-      _trait("Dimension") {}
-      _trait("PII") {}
-      _trait("Metric") {}
-      _trait("ETL") {}
-      _trait("JdbcAccess") {}
-
-      _class("DB") {
-        "name" ~ (string, required, indexed, unique)
-        "owner" ~ (string)
-        "createTime" ~ (int)
-      }
-
-      _class("StorageDesc") {
-        "inputFormat" ~ (string, required)
-        "outputFormat" ~ (string, required)
-      }
-
-      _class("Column") {
-        "name" ~ (string, required)
-        "dataType" ~ (string, required)
-        "sd" ~ ("StorageDesc", required)
-      }
-
-      _class("Table", List()) {
-        "name" ~ (string, required, indexed)
-        "db" ~ ("DB", required)
-        "sd" ~ ("StorageDesc", required)
-      }
-
-      _class("LoadProcess") {
-        "name" ~ (string, required)
-        "inputTables" ~ (array("Table"), collection)
-        "outputTable" ~ ("Table", required)
-      }
-
-      _class("View") {
-        "name" ~ (string, required)
-        "inputTables" ~ (array("Table"), collection)
-      }
-    }
+  test("test1") {
     TypeSystem.getInstance().defineTypes(tDef)

     println(TypesSerialization.toJson(TypeSystem.getInstance(), x => true))
...