Commit 1d85e95f (dataplatform / atlas)
Authored by ashutoshm on Feb 17, 2017; committed by Madhan Neethiraj on Feb 17, 2017
ATLAS-1503: optimization of import API implementation
Signed-off-by: Madhan Neethiraj <madhan@apache.org>
Parent: 852a7118
Showing 14 changed files with 270 additions and 133 deletions (+270 -133):

intg/src/main/java/org/apache/atlas/AtlasErrorCode.java (+1 -0)
intg/src/main/java/org/apache/atlas/model/impexp/AtlasImportResult.java (+11 -0)
intg/src/main/java/org/apache/atlas/model/instance/EntityMutations.java (+1 -1)
intg/src/test/java/org/apache/atlas/TestUtilsV2.java (+1 -0)
repository/src/main/java/org/apache/atlas/repository/store/graph/AtlasEntityStore.java (+9 -0)
repository/src/main/java/org/apache/atlas/repository/store/graph/v1/AtlasEntityStoreV1.java (+66 -11)
repository/src/main/java/org/apache/atlas/repository/store/graph/v1/AtlasEntityStream.java (+10 -4)
repository/src/main/java/org/apache/atlas/repository/store/graph/v1/AtlasEntityStreamForImport.java (+30 -0)
repository/src/main/java/org/apache/atlas/repository/store/graph/v1/EntityGraphMapper.java (+8 -18)
repository/src/test/java/org/apache/atlas/repository/store/graph/v1/AtlasEntityStoreV1Test.java (+37 -28)
webapp/src/main/java/org/apache/atlas/web/resources/AdminResource.java (+33 -1)
webapp/src/main/java/org/apache/atlas/web/resources/ExportService.java (+7 -4)
webapp/src/main/java/org/apache/atlas/web/resources/ImportService.java (+18 -11)
webapp/src/main/java/org/apache/atlas/web/resources/ZipSource.java (+38 -55)
intg/src/main/java/org/apache/atlas/AtlasErrorCode.java

@@ -88,6 +88,7 @@ public enum AtlasErrorCode {
     INDEX_CREATION_FAILED(500, "ATLAS5002E", "Index creation failed for {0}"),
     INDEX_ROLLBACK_FAILED(500, "ATLAS5003E", "Index rollback failed for {0}"),
     FAILED_TO_OBTAIN_TYPE_UPDATE_LOCK(500, "ATLAS5004E", "Failed to get the lock; another type update might be in progress. Please try again"),
+    FAILED_TO_OBTAIN_IMPORT_EXPORT_LOCK(500, "ATLAS5005E", "Another import or export is in progress. Please try again"),
     NOTIFICATION_FAILED(500, "ATLAS5005E", "Failed to notify for change {0}");

     private String errorCode;
intg/src/main/java/org/apache/atlas/model/impexp/AtlasImportResult.java

@@ -26,7 +26,9 @@ import javax.xml.bind.annotation.XmlAccessType;
 import javax.xml.bind.annotation.XmlAccessorType;
 import javax.xml.bind.annotation.XmlRootElement;

+import java.util.ArrayList;
 import java.util.HashMap;
+import java.util.List;
 import java.util.Map;

 import static org.codehaus.jackson.annotate.JsonAutoDetect.Visibility.NONE;

@@ -50,6 +52,7 @@ public class AtlasImportResult {
     private String hostName;
     private long timeStamp;
     private Map<String, Integer> metrics;
+    private List<String> processedEntities;
     private OperationStatus operationStatus;

     public AtlasImportResult() {

@@ -65,6 +68,7 @@ public class AtlasImportResult {
         this.timeStamp = timeStamp;
         this.metrics = new HashMap<>();
         this.operationStatus = OperationStatus.FAIL;
+        this.processedEntities = new ArrayList<>();
     }

     public AtlasImportRequest getRequest() {

@@ -133,6 +137,10 @@ public class AtlasImportResult {
         metrics.put(key, currentValue + incrementBy);
     }

+    public void setProcessedEntities(List<String> processedEntities) { this.processedEntities = processedEntities; }
+
+    public List<String> getProcessedEntities() { return this.processedEntities; }
+
     public StringBuilder toString(StringBuilder sb) {
         if (sb == null) {
             sb = new StringBuilder();

@@ -149,6 +157,9 @@ public class AtlasImportResult {
         sb.append("}");
         sb.append(", operationStatus='").append(operationStatus).append("'");
+        sb.append(", processedEntities=[");
+        AtlasBaseTypeDef.dumpObjects(processedEntities, sb);
+        sb.append("]");
         sb.append("}");

         return sb;
intg/src/main/java/org/apache/atlas/model/instance/EntityMutations.java

@@ -46,7 +46,7 @@ public class EntityMutations implements Serializable {
         CREATE,
         UPDATE,
         PARTIAL_UPDATE,
-        DELETE,
+        DELETE
     }

     public static final class EntityMutation implements Serializable {
intg/src/test/java/org/apache/atlas/TestUtilsV2.java

@@ -562,6 +562,7 @@ public final class TestUtilsV2 {
                 AtlasTypeUtil.createUniqueRequiredAttrDef(NAME, "string"),
                 AtlasTypeUtil.createOptionalAttrDef("isReplicated", "boolean"),
                 AtlasTypeUtil.createOptionalAttrDef("created", "string"),
+                AtlasTypeUtil.createOptionalAttrDef("parameters", "map<string,string>"),
                 AtlasTypeUtil.createRequiredAttrDef("description", "string"));
repository/src/main/java/org/apache/atlas/repository/store/graph/AtlasEntityStore.java

@@ -19,6 +19,7 @@ package org.apache.atlas.repository.store.graph;

 import org.apache.atlas.exception.AtlasBaseException;
+import org.apache.atlas.model.impexp.AtlasImportResult;
 import org.apache.atlas.model.instance.AtlasClassification;
 import org.apache.atlas.model.instance.AtlasEntity;
 import org.apache.atlas.model.instance.AtlasEntity.AtlasEntityWithExtInfo;

@@ -69,6 +70,14 @@ public interface AtlasEntityStore {
     EntityMutationResponse createOrUpdate(EntityStream entityStream, boolean isPartialUpdate) throws AtlasBaseException;

+    /**
+     * Create or update entities in the stream using repeated commits of connected entities
+     * @param entityStream AtlasEntityStream
+     * @return EntityMutationResponse Entity mutations operations with the corresponding set of entities on which these operations were performed
+     * @throws AtlasBaseException
+     */
+    EntityMutationResponse bulkImport(EntityStream entityStream, AtlasImportResult importResult) throws AtlasBaseException;
+
     /**
      * Update a single entity
      * @param entityType     type of the entity
     * @param uniqAttributes Attributes that uniquely identify the entity
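For reference, a minimal sketch of how the new bulkImport entry point is driven. The class name and wiring below are illustrative only, and EntityStream is assumed to live in the v1 store package alongside the classes changed in this commit:

    import org.apache.atlas.exception.AtlasBaseException;
    import org.apache.atlas.model.impexp.AtlasImportResult;
    import org.apache.atlas.model.instance.EntityMutationResponse;
    import org.apache.atlas.repository.store.graph.AtlasEntityStore;
    import org.apache.atlas.repository.store.graph.v1.EntityStream;   // assumed package

    public class BulkImportDriverSketch {
        private final AtlasEntityStore entityStore;

        public BulkImportDriverSketch(AtlasEntityStore entityStore) {
            this.entityStore = entityStore;
        }

        public EntityMutationResponse importAll(EntityStream source, AtlasImportResult result) throws AtlasBaseException {
            // bulkImport() walks the stream, commits one connected entity at a time,
            // and records per-type metrics plus the processed GUIDs on 'result'.
            EntityMutationResponse response = entityStore.bulkImport(source, result);

            // result.getProcessedEntities() now holds the GUIDs that were imported.
            return response;
        }
    }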
repository/src/main/java/org/apache/atlas/repository/store/graph/v1/AtlasEntityStoreV1.java

@@ -24,13 +24,10 @@ import org.apache.atlas.AtlasErrorCode;
 import org.apache.atlas.GraphTransaction;
 import org.apache.atlas.RequestContextV1;
 import org.apache.atlas.exception.AtlasBaseException;
-import org.apache.atlas.model.instance.AtlasClassification;
-import org.apache.atlas.model.instance.AtlasEntity;
+import org.apache.atlas.model.impexp.AtlasImportResult;
+import org.apache.atlas.model.instance.*;
 import org.apache.atlas.model.instance.AtlasEntity.AtlasEntityWithExtInfo;
 import org.apache.atlas.model.instance.AtlasEntity.AtlasEntitiesWithExtInfo;
-import org.apache.atlas.model.instance.AtlasObjectId;
-import org.apache.atlas.model.instance.EntityMutationResponse;
-import org.apache.atlas.model.instance.EntityMutations;
 import org.apache.atlas.repository.graphdb.AtlasVertex;
 import org.apache.atlas.repository.store.graph.AtlasEntityStore;
 import org.apache.atlas.repository.store.graph.EntityGraphDiscovery;

@@ -43,10 +40,9 @@ import org.apache.commons.lang3.StringUtils;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;

-import java.util.ArrayList;
-import java.util.Collection;
-import java.util.List;
-import java.util.Map;
+import java.util.*;
+
+import static org.apache.atlas.model.instance.EntityMutations.EntityOperation.*;

 @Singleton

@@ -130,6 +126,65 @@ public class AtlasEntityStoreV1 implements AtlasEntityStore {
     }

     @Override
+    public EntityMutationResponse bulkImport(EntityStream entityStream, AtlasImportResult importResult) throws AtlasBaseException {
+        if (LOG.isDebugEnabled()) {
+            LOG.debug("==> bulkImport()");
+        }
+
+        if (entityStream == null || !entityStream.hasNext()) {
+            throw new AtlasBaseException(AtlasErrorCode.INVALID_PARAMETERS, "no entities to create/update.");
+        }
+
+        EntityMutationResponse ret = new EntityMutationResponse();
+        ret.setGuidAssignments(new HashMap<String, String>());
+
+        Set<String> processedGuids          = new HashSet<>();
+        int         progressReportedAtCount = 0;
+
+        while (entityStream.hasNext()) {
+            AtlasEntity entity = entityStream.next();
+
+            if (processedGuids.contains(entity.getGuid())) {
+                continue;
+            }
+
+            AtlasEntityStreamForImport oneEntityStream = new AtlasEntityStreamForImport(entity, entityStream);
+
+            EntityMutationResponse resp = createOrUpdate(oneEntityStream, false);
+
+            updateImportMetrics("entity:%s:created", resp.getCreatedEntities(), processedGuids, importResult);
+            updateImportMetrics("entity:%s:updated", resp.getUpdatedEntities(), processedGuids, importResult);
+            updateImportMetrics("entity:%s:deleted", resp.getDeletedEntities(), processedGuids, importResult);
+
+            if ((processedGuids.size() - progressReportedAtCount) > 10) {
+                progressReportedAtCount = processedGuids.size();
+
+                LOG.info("bulkImport(): in progress.. number of entities imported: {}", progressReportedAtCount);
+            }
+
+            if (resp.getGuidAssignments() != null) {
+                ret.getGuidAssignments().putAll(resp.getGuidAssignments());
+            }
+        }
+
+        importResult.getProcessedEntities().addAll(processedGuids);
+
+        LOG.info("bulkImport(): done. Number of entities imported: {}", processedGuids.size());
+
+        return ret;
+    }
+
+    private void updateImportMetrics(String prefix, List<AtlasEntityHeader> list, Set<String> processedGuids, AtlasImportResult importResult) {
+        if (list == null) {
+            return;
+        }
+
+        for (AtlasEntityHeader h : list) {
+            processedGuids.add(h.getGuid());
+            importResult.incrementMeticsCounter(String.format(prefix, h.getTypeName()));
+        }
+    }
+
+    @Override
     @GraphTransaction
     public EntityMutationResponse createOrUpdate(EntityStream entityStream, boolean isPartialUpdate) throws AtlasBaseException {
         if (LOG.isDebugEnabled()) {

@@ -323,11 +378,11 @@ public class AtlasEntityStoreV1 implements AtlasEntityStore {
         deleteHandler.deleteEntities(deletionCandidates);

         RequestContextV1 req = RequestContextV1.get();

         for (AtlasObjectId id : req.getDeletedEntityIds()) {
-            response.addEntity(EntityMutations.EntityOperation.DELETE, EntityGraphMapper.constructHeader(id));
+            response.addEntity(DELETE, EntityGraphMapper.constructHeader(id));
         }

         for (AtlasObjectId id : req.getUpdatedEntityIds()) {
-            response.addEntity(EntityMutations.EntityOperation.UPDATE, EntityGraphMapper.constructHeader(id));
+            response.addEntity(UPDATE, EntityGraphMapper.constructHeader(id));
         }

         return response;
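A quick standalone illustration of the metric-key convention used by updateImportMetrics above; the type name hive_table is only an example value, not taken from this commit:

    public class ImportMetricKeySketch {
        public static void main(String[] args) {
            // updateImportMetrics() formats the prefix with each entity's type name,
            // so every created/updated/deleted entity bumps a per-type counter.
            String prefix   = "entity:%s:created";
            String typeName = "hive_table";   // example value only

            System.out.println(String.format(prefix, typeName));   // prints: entity:hive_table:created
        }
    }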
repository/src/main/java/org/apache/atlas/repository/store/graph/v1/AtlasEntityStream.java

@@ -24,11 +24,10 @@ import org.apache.atlas.model.instance.AtlasEntity.AtlasEntitiesWithExtInfo;
 import java.util.Iterator;

 public class AtlasEntityStream implements EntityStream {
-    private AtlasEntitiesWithExtInfo entitiesWithExtInfo = new AtlasEntitiesWithExtInfo();
+    private final AtlasEntitiesWithExtInfo entitiesWithExtInfo;
+    private final EntityStream             entityStream;

     private Iterator<AtlasEntity> iterator;

-    public AtlasEntityStream() {
-    }
-
     public AtlasEntityStream(AtlasEntity entity) {
         this(new AtlasEntitiesWithExtInfo(entity));

@@ -41,6 +40,13 @@ public class AtlasEntityStream implements EntityStream {
     public AtlasEntityStream(AtlasEntitiesWithExtInfo entitiesWithExtInfo) {
         this.entitiesWithExtInfo = entitiesWithExtInfo;
         this.iterator            = this.entitiesWithExtInfo.getEntities().iterator();
+        this.entityStream        = null;
+    }
+
+    public AtlasEntityStream(AtlasEntity entity, EntityStream entityStream) {
+        this.entitiesWithExtInfo = new AtlasEntitiesWithExtInfo(entity);
+        this.iterator            = this.entitiesWithExtInfo.getEntities().iterator();
+        this.entityStream        = entityStream;
     }

     @Override

@@ -60,7 +66,7 @@ public class AtlasEntityStream implements EntityStream {
     @Override
     public AtlasEntity getByGuid(String guid) {
-        return entitiesWithExtInfo.getEntity(guid);
+        return entityStream != null ? entityStream.getByGuid(guid) : entitiesWithExtInfo.getEntity(guid);
     }

     @Override
repository/src/main/java/org/apache/atlas/repository/store/graph/v1/AtlasEntityStreamForImport.java (new file, mode 100644)

/**
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements. See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership. The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License. You may obtain a copy of the License at
 * <p>
 * http://www.apache.org/licenses/LICENSE-2.0
 * <p>
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.atlas.repository.store.graph.v1;

import org.apache.atlas.model.instance.AtlasEntity;

public class AtlasEntityStreamForImport extends AtlasEntityStream implements EntityImportStream {
    public AtlasEntityStreamForImport(AtlasEntity entity) {
        super(entity);
    }

    public AtlasEntityStreamForImport(AtlasEntity entity, EntityStream entityStream) {
        super(entity, entityStream);
    }
}
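A short sketch of why the new two-argument constructor matters: the wrapper iterates over a single entity, so createOrUpdate() commits one connected entity per pass, while getByGuid() delegates to the full import stream so that referenced entities can still be resolved. The sketch assumes it sits in the same v1 package; variable names and the example GUID are illustrative only:

    package org.apache.atlas.repository.store.graph.v1;

    import org.apache.atlas.exception.AtlasBaseException;
    import org.apache.atlas.model.instance.AtlasEntity;

    public class SingleEntityStreamSketch {
        static AtlasEntity processNext(EntityStream importSource) throws AtlasBaseException {
            // Wrap one entity from the import source: iteration yields just this entity.
            AtlasEntity                entity          = importSource.next();
            AtlasEntityStreamForImport oneEntityStream = new AtlasEntityStreamForImport(entity, importSource);

            // Lookups by GUID fall through to the underlying import stream,
            // so entities referenced by 'entity' can still be fetched.
            return oneEntityStream.getByGuid("guid-of-a-referenced-entity");   // illustrative GUID
        }
    }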
repository/src/main/java/org/apache/atlas/repository/store/graph/v1/EntityGraphMapper.java

@@ -739,28 +739,18 @@ public class EntityGraphMapper {

     private AtlasEntityHeader constructHeader(AtlasEntity entity, final AtlasEntityType type, AtlasVertex vertex) {
-        //TODO - enhance to return only selective attributes
-        AtlasEntityHeader header = new AtlasEntityHeader(entity.getTypeName());
-
-        final Map<String, AtlasStructType.AtlasAttribute> allAttributes = type.getAllAttributes();
-
-        header.setGuid(AtlasGraphUtilsV1.getIdFromVertex(vertex));
-
-        for (String attribute : allAttributes.keySet()) {
-            AtlasType attributeType = allAttributes.get(attribute).getAttributeType();
-            AtlasAttributeDef attributeDef = allAttributes.get(attribute).getAttributeDef();
-
-            if (header.getAttribute(attribute) == null && (TypeCategory.PRIMITIVE == attributeType.getTypeCategory())) {
-                if (attributeDef.getIsOptional()) {
-                    header.setAttribute(attribute, attributeType.createOptionalDefaultValue());
-                } else {
-                    header.setAttribute(attribute, attributeType.createDefaultValue());
-                }
-            }
+        AtlasEntityHeader header = new AtlasEntityHeader(entity.getTypeName(), AtlasGraphUtilsV1.getIdFromVertex(vertex), entity.getAttributes());
+
+        for (AtlasAttribute attribute : type.getUniqAttributes().values()) {
+            header.setAttribute(attribute.getName(), entity.getAttribute(attribute.getName()));
         }
+
         return header;
     }

     public static AtlasEntityHeader constructHeader(AtlasObjectId id) {
-        AtlasEntityHeader entity = new AtlasEntityHeader(id.getTypeName());
-
-        entity.setGuid(id.getGuid());
-
-        return entity;
+        return new AtlasEntityHeader(id.getTypeName(), id.getGuid(), id.getUniqueAttributes());
     }
 }
repository/src/test/java/org/apache/atlas/repository/store/graph/v1/AtlasEntityStoreV1Test.java

(diff collapsed; not shown)
webapp/src/main/java/org/apache/atlas/web/resources/AdminResource.java

@@ -20,7 +20,7 @@ package org.apache.atlas.web.resources;

 import com.google.inject.Inject;
 import org.apache.atlas.AtlasClient;
-import org.apache.atlas.AtlasException;
+import org.apache.atlas.AtlasErrorCode;
 import org.apache.atlas.exception.AtlasBaseException;
 import org.apache.atlas.model.impexp.AtlasExportRequest;
 import org.apache.atlas.model.impexp.AtlasExportResult;

@@ -59,6 +59,7 @@ import javax.ws.rs.core.Response;
 import java.io.ByteArrayInputStream;
 import java.io.IOException;
 import java.util.*;
+import java.util.concurrent.locks.ReentrantLock;

 import static org.apache.atlas.repository.converters.AtlasInstanceConverter.toAtlasBaseException;

@@ -76,6 +77,8 @@ public class AdminResource {
     @Context
     private HttpServletResponse httpServletResponse;

+    private final ReentrantLock importExportOperationLock;
+
     private static final String isCSRF_ENABLED = "atlas.rest-csrf.enabled";
     private static final String BROWSER_USER_AGENT_PARAM = "atlas.rest-csrf.browser-useragents-regex";
     private static final String CUSTOM_METHODS_TO_IGNORE_PARAM = "atlas.rest-csrf.methods-to-ignore";

@@ -102,6 +105,7 @@ public class AdminResource {
         this.typeRegistry = typeRegistry;
         this.typesDefStore = typeDefStore;
         this.entityStore = entityStore;
+        this.importExportOperationLock = new ReentrantLock();
     }

     /**

@@ -275,6 +279,10 @@ public class AdminResource {
         return metrics;
     }

+    private void releaseExportImportLock() {
+        importExportOperationLock.unlock();
+    }
+
     @POST
     @Path("/export")
     @Consumes(Servlets.JSON_MEDIA_TYPE)

@@ -283,6 +291,8 @@ public class AdminResource {
             LOG.debug("==> AdminResource.export()");
         }

+        acquireExportImportLock("export");
+
         ZipSink exportSink = null;

         try {
             exportSink = new ZipSink();

@@ -308,6 +318,8 @@ public class AdminResource {
             throw new AtlasBaseException(excp);
         } finally {
+            releaseExportImportLock();
+
             if (exportSink != null) {
                 exportSink.close();
             }

@@ -327,6 +339,8 @@ public class AdminResource {
             LOG.debug("==> AdminResource.importData(bytes.length={})", bytes.length);
         }

+        acquireExportImportLock("import");
+
         AtlasImportResult result;

         try {

@@ -344,6 +358,8 @@ public class AdminResource {
             throw new AtlasBaseException(excp);
         } finally {
+            releaseExportImportLock();
+
             if (LOG.isDebugEnabled()) {
                 LOG.debug("<== AdminResource.importData(binary)");
             }

@@ -360,6 +376,8 @@ public class AdminResource {
             LOG.debug("==> AdminResource.importFile()");
         }

+        acquireExportImportLock("importFile");
+
         AtlasImportResult result;

         try {

@@ -374,6 +392,8 @@ public class AdminResource {
             throw new AtlasBaseException(excp);
         } finally {
+            releaseExportImportLock();
+
             if (LOG.isDebugEnabled()) {
                 LOG.debug("<== AdminResource.importFile()");
             }

@@ -407,4 +427,15 @@ public class AdminResource {
         return ret;
     }
+
+    private void acquireExportImportLock(String activity) throws AtlasBaseException {
+        boolean alreadyLocked = importExportOperationLock.isLocked();
+        if (alreadyLocked) {
+            LOG.warn("Another export or import is currently in progress..aborting this " + activity, Thread.currentThread().getName());
+
+            throw new AtlasBaseException(AtlasErrorCode.FAILED_TO_OBTAIN_IMPORT_EXPORT_LOCK);
+        }
+
+        importExportOperationLock.lock();
+    }
 }
\ No newline at end of file
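For comparison, a minimal standalone sketch of the single-operation guard added above. This is not the commit's code: it expresses the "reject if an export or import is already running" check with ReentrantLock.tryLock() in one step, whereas AdminResource checks isLocked() and then calls lock():

    import java.util.concurrent.locks.ReentrantLock;

    public class SingleOperationGuardSketch {
        private final ReentrantLock lock = new ReentrantLock();

        // Returns normally only if the caller now owns the lock.
        private void acquire(String activity) {
            if (!lock.tryLock()) {
                throw new IllegalStateException("Another export or import is in progress; rejecting " + activity);
            }
        }

        private void release() {
            lock.unlock();
        }

        // Usage mirrors AdminResource: acquire up front, release in finally.
        public void runExport(Runnable export) {
            acquire("export");
            try {
                export.run();
            } finally {
                release();
            }
        }
    }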
webapp/src/main/java/org/apache/atlas/web/resources/ExportService.java

@@ -75,7 +75,7 @@ public class ExportService {
     public AtlasExportResult run(ZipSink exportSink, AtlasExportRequest request, String userName, String hostName,
                                  String requestingIP) throws AtlasBaseException {
+        long startTimestamp = System.currentTimeMillis();
         ExportContext context = new ExportContext(new AtlasExportResult(request, userName, hostName, requestingIP,
                                                                         System.currentTimeMillis()), exportSink);

@@ -90,6 +90,9 @@ public class ExportService {
             context.sink.setTypesDef(context.result.getData().getTypesDef());
             context.result.setData(null);
             context.result.setOperationStatus(AtlasExportResult.OperationStatus.SUCCESS);
+
+            long endTimestamp = System.currentTimeMillis();
+            context.result.incrementMeticsCounter("duration", (int) (endTimestamp - startTimestamp));
             context.sink.setResult(context.result);
         } catch (Exception ex) {
             LOG.error("Operation failed: ", ex);

@@ -175,7 +178,7 @@ public class ExportService {
             context.sink.add(entity);

             context.result.incrementMeticsCounter(String.format("entity:%s", entity.getTypeName()));
-            context.result.incrementMeticsCounter("Entities");
+            context.result.incrementMeticsCounter("entities");

             if (context.guidsProcessed.size() % 10 == 0) {
                 LOG.info("export(): in progress.. number of entities exported: {}", context.guidsProcessed.size());

@@ -195,7 +198,7 @@ public class ExportService {
                 AtlasClassificationDef cd = typeRegistry.getClassificationDefByName(c.getTypeName());

                 typesDef.getClassificationDefs().add(cd);
-                result.incrementMeticsCounter("Classification");
+                result.incrementMeticsCounter("typedef:classification");
             }
         }
     }

@@ -208,7 +211,7 @@ public class ExportService {
             AtlasEntityDef typeDefinition = typeRegistry.getEntityDefByName(typeName);

             typesDef.getEntityDefs().add(typeDefinition);
-            result.incrementMeticsCounter("Type(s)");
+            result.incrementMeticsCounter("typedef:" + typeDefinition.getName());
         }
     }
webapp/src/main/java/org/apache/atlas/web/resources/ImportService.java

@@ -20,6 +20,7 @@ package org.apache.atlas.web.resources;

 import org.apache.atlas.AtlasErrorCode;
 import org.apache.atlas.model.impexp.AtlasImportRequest;
 import org.apache.atlas.model.impexp.AtlasImportResult;
+import org.apache.atlas.model.instance.EntityMutationResponse;
 import org.apache.atlas.model.typedef.*;
 import org.apache.atlas.repository.store.graph.AtlasEntityStore;
 import org.apache.commons.io.FileUtils;

@@ -31,6 +32,7 @@ import org.slf4j.LoggerFactory;

 import java.io.*;
 import java.util.List;
+import java.util.concurrent.TimeUnit;

 public class ImportService {

@@ -39,6 +41,9 @@ public class ImportService {
     private final AtlasTypeDefStore typeDefStore;
     private final AtlasEntityStore entityStore;

+    private long startTimestamp;
+    private long endTimestamp;
+
     public ImportService(final AtlasTypeDefStore typeDefStore, final AtlasEntityStore entityStore) {
         this.typeDefStore = typeDefStore;

@@ -52,6 +57,7 @@ public class ImportService {
         try {
             LOG.info("==> import(user={}, from={})", userName, requestingIP);

+            startTimestamp = System.currentTimeMillis();
             processTypes(source.getTypesDef(), result);
             processEntities(source, result);

@@ -65,12 +71,7 @@ public class ImportService {
             throw new AtlasBaseException(excp);
         } finally {
-            try {
-                source.close();
-            } catch (IOException e) {
-                // ignore
-            }
-
+            source.close();
             LOG.info("<== import(user={}, from={}): status={}", userName, requestingIP, result.getOperationStatus());
         }

@@ -118,10 +119,14 @@ public class ImportService {
         setGuidToEmpty(typeDefinitionMap.getEntityDefs());
         typeDefStore.updateTypesDef(typeDefinitionMap);

-        result.incrementMeticsCounter("Enum(s)", typeDefinitionMap.getEnumDefs().size());
-        result.incrementMeticsCounter("Struct(s)", typeDefinitionMap.getStructDefs().size());
-        result.incrementMeticsCounter("Classification(s)", typeDefinitionMap.getClassificationDefs().size());
-        result.incrementMeticsCounter("Entity definition(s)", typeDefinitionMap.getEntityDefs().size());
+        updateMetricsForTypesDef(typeDefinitionMap, result);
+    }
+
+    private void updateMetricsForTypesDef(AtlasTypesDef typeDefinitionMap, AtlasImportResult result) {
+        result.incrementMeticsCounter("typedef:classification", typeDefinitionMap.getClassificationDefs().size());
+        result.incrementMeticsCounter("typedef:enum", typeDefinitionMap.getEnumDefs().size());
+        result.incrementMeticsCounter("typedef:entitydef", typeDefinitionMap.getEntityDefs().size());
+        result.incrementMeticsCounter("typedef:struct", typeDefinitionMap.getStructDefs().size());
     }

     private void setGuidToEmpty(List<AtlasEntityDef> entityDefList) {

@@ -131,7 +136,9 @@ public class ImportService {
     }

     private void processEntities(ZipSource importSource, AtlasImportResult result) throws AtlasBaseException {
-        this.entityStore.createOrUpdate(importSource, false);
+        this.entityStore.bulkImport(importSource, result);

-        result.incrementMeticsCounter("Entities", importSource.getCreationOrder().size());
+        endTimestamp = System.currentTimeMillis();
+        result.incrementMeticsCounter("Duration", (int) (this.endTimestamp - this.startTimestamp));
     }
 }
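As a quick illustration of the keys recorded by updateMetricsForTypesDef above, here is a self-contained sketch using a plain map in place of AtlasImportResult's internal metrics; the counts are made-up example values:

    import java.util.HashMap;
    import java.util.Map;

    public class TypesDefMetricsSketch {
        public static void main(String[] args) {
            Map<String, Integer> metrics = new HashMap<>();   // stand-in for AtlasImportResult metrics

            // One "typedef:" counter per category, incremented by the number of defs imported.
            increment(metrics, "typedef:classification", 3);
            increment(metrics, "typedef:enum", 2);
            increment(metrics, "typedef:entitydef", 5);
            increment(metrics, "typedef:struct", 1);

            System.out.println(metrics);   // e.g. {typedef:struct=1, typedef:enum=2, typedef:entitydef=5, typedef:classification=3}
        }

        private static void increment(Map<String, Integer> metrics, String key, int by) {
            metrics.put(key, metrics.getOrDefault(key, 0) + by);
        }
    }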
webapp/src/main/java/org/apache/atlas/web/resources/ZipSource.java

@@ -19,7 +19,6 @@ package org.apache.atlas.web.resources;

 import org.codehaus.jackson.type.TypeReference;
 import org.apache.atlas.exception.AtlasBaseException;
-import org.apache.atlas.model.impexp.AtlasExportResult;
 import org.apache.atlas.model.instance.AtlasEntity;
 import org.apache.atlas.model.typedef.AtlasTypesDef;
 import org.apache.atlas.repository.store.graph.v1.EntityImportStream;

@@ -28,8 +27,10 @@ import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;

 import java.io.*;
+import java.util.HashMap;
 import java.util.Iterator;
 import java.util.List;
+import java.util.Map;
 import java.util.zip.ZipEntry;
 import java.util.zip.ZipInputStream;

@@ -40,100 +41,70 @@ public class ZipSource implements EntityImportStream {
     private final ByteArrayInputStream inputStream;
     private List<String> creationOrder;
     private Iterator<String> iterator;
+    private Map<String, String> guidEntityJsonMap;

-    public ZipSource(ByteArrayInputStream inputStream) {
+    public ZipSource(ByteArrayInputStream inputStream) throws IOException {
         this.inputStream = inputStream;
+        guidEntityJsonMap = new HashMap<>();
+
+        updateGuidZipEntryMap();
         this.setCreationOrder();
     }

     public AtlasTypesDef getTypesDef() throws AtlasBaseException {
         final String fileName = ZipExportFileNames.ATLAS_TYPESDEF_NAME.toString();

-        try {
-            String s = get(fileName);
-            return convertFromJson(AtlasTypesDef.class, s);
-        } catch (IOException e) {
-            LOG.error(String.format("Error retrieving '%s' from zip.", fileName), e);
-            return null;
-        }
-    }
-
-    public AtlasExportResult getExportResult() throws AtlasBaseException {
-        String fileName = ZipExportFileNames.ATLAS_EXPORT_INFO_NAME.toString();
-
-        try {
-            String s = get(fileName);
-            return convertFromJson(AtlasExportResult.class, s);
-        } catch (IOException e) {
-            LOG.error(String.format("Error retrieving '%s' from zip.", fileName), e);
-            return null;
-        }
+        String s = getFromCache(fileName);
+        return convertFromJson(AtlasTypesDef.class, s);
     }

     private void setCreationOrder() {
         String fileName = ZipExportFileNames.ATLAS_EXPORT_ORDER_NAME.toString();

         try {
-            String s = get(fileName);
+            String s = getFromCache(fileName);
             this.creationOrder = convertFromJson(new TypeReference<List<String>>(){}, s);
             this.iterator = this.creationOrder.iterator();
-        } catch (IOException e) {
-            LOG.error(String.format("Error retrieving '%s' from zip.", fileName), e);
         } catch (AtlasBaseException e) {
             LOG.error(String.format("Error retrieving '%s' from zip.", fileName), e);
         }
     }

-    public List<String> getCreationOrder() throws AtlasBaseException {
-        return this.creationOrder;
-    }
-
-    public AtlasEntity getEntity(String guid) throws AtlasBaseException {
-        try {
-            String s = get(guid);
-            return convertFromJson(AtlasEntity.class, s);
-        } catch (IOException e) {
-            LOG.error(String.format("Error retrieving '%s' from zip.", guid), e);
-            return null;
-        }
-    }
-
-    private String get(String entryName) throws IOException {
-        String ret = "";
-
+    private void updateGuidZipEntryMap() throws IOException {
         inputStream.reset();

         ZipInputStream zipInputStream = new ZipInputStream(inputStream);
         ZipEntry zipEntry = zipInputStream.getNextEntry();

-        entryName = entryName + ".json";
-
         while (zipEntry != null) {
-            if (zipEntry.getName().equals(entryName)) {
-                break;
-            }
-
-            zipEntry = zipInputStream.getNextEntry();
-        }
-
-        if (zipEntry != null) {
-            ByteArrayOutputStream os = new ByteArrayOutputStream();
+            String entryName = zipEntry.getName().replace(".json", "");
+
+            if (guidEntityJsonMap.containsKey(entryName)) continue;
+            if (zipEntry == null) continue;
+
             byte[] buf = new byte[1024];

             int n = 0;
+            ByteArrayOutputStream bos = new ByteArrayOutputStream();

             while ((n = zipInputStream.read(buf, 0, 1024)) > -1) {
-                os.write(buf, 0, n);
+                bos.write(buf, 0, n);
             }

-            ret = os.toString();
-        } else {
-            LOG.warn("{}: no such entry in zip file", entryName);
+            guidEntityJsonMap.put(entryName, bos.toString());
+
+            zipEntry = zipInputStream.getNextEntry();
         }

         zipInputStream.close();
+    }

-        return ret;
+    public List<String> getCreationOrder() throws AtlasBaseException {
+        return this.creationOrder;
+    }
+
+    public AtlasEntity getEntity(String guid) throws AtlasBaseException {
+        String s = getFromCache(guid);
+        return convertFromJson(AtlasEntity.class, s);
     }

     private <T> T convertFromJson(TypeReference clazz, String jsonData) throws AtlasBaseException {

@@ -158,8 +129,20 @@ public class ZipSource implements EntityImportStream {
         }
     }

-    public void close() throws IOException {
-        inputStream.close();
-    }
+    private String getFromCache(String entryName) {
+        if (!guidEntityJsonMap.containsKey(entryName)) return "";
+
+        return guidEntityJsonMap.get(entryName).toString();
+    }
+
+    public void close() {
+        try {
+            inputStream.close();
+            guidEntityJsonMap.clear();
+        } catch (IOException ex) {
+            LOG.warn("{}: Error closing streams.");
+        }
+    }

     @Override
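The heart of the ZipSource change is reading the archive once into a guid-to-JSON map instead of rescanning the zip for every lookup. Below is a self-contained sketch of that single-pass cache using only the JDK; the ".json" entry-name convention follows the code above, everything else is illustrative:

    import java.io.ByteArrayInputStream;
    import java.io.ByteArrayOutputStream;
    import java.io.IOException;
    import java.util.HashMap;
    import java.util.Map;
    import java.util.zip.ZipEntry;
    import java.util.zip.ZipInputStream;

    public class ZipCacheSketch {
        // Reads every "<name>.json" entry once and caches its content by name, so later
        // lookups (typesdef, creation order, individual entities) never reopen the zip.
        static Map<String, String> buildCache(ByteArrayInputStream inputStream) throws IOException {
            Map<String, String> cache = new HashMap<>();

            inputStream.reset();
            ZipInputStream zipInputStream = new ZipInputStream(inputStream);

            for (ZipEntry entry = zipInputStream.getNextEntry(); entry != null; entry = zipInputStream.getNextEntry()) {
                String name = entry.getName().replace(".json", "");

                ByteArrayOutputStream bos = new ByteArrayOutputStream();
                byte[] buf = new byte[1024];

                for (int n; (n = zipInputStream.read(buf, 0, 1024)) > -1; ) {
                    bos.write(buf, 0, n);
                }

                cache.put(name, bos.toString());
            }

            zipInputStream.close();
            return cache;
        }
    }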