dataplatform / atlas
Commit ef402516 authored 5 years ago by Sarath Subramanian
ATLAS-3321: Introduce atlas metadata namespace
parent 25e2e461
Showing 22 changed files with 254 additions and 236 deletions (+254, -236):
import-hbase.sh (addons/hbase-bridge/src/bin/import-hbase.sh): +1 -1
HBaseAtlasHook.java (...in/java/org/apache/atlas/hbase/bridge/HBaseAtlasHook.java): +21 -32
HBaseBridge.java (.../main/java/org/apache/atlas/hbase/bridge/HBaseBridge.java): +30 -21
import-hive.sh (addons/hive-bridge/src/bin/import-hive.sh): +1 -1
HiveMetaStoreBridge.java (...ava/org/apache/atlas/hive/bridge/HiveMetaStoreBridge.java): +50 -38
AtlasHiveHookContext.java (...java/org/apache/atlas/hive/hook/AtlasHiveHookContext.java): +5 -5
HiveHook.java (...ge/src/main/java/org/apache/atlas/hive/hook/HiveHook.java): +0 -9
BaseHiveEvent.java (...java/org/apache/atlas/hive/hook/events/BaseHiveEvent.java): +23 -20
HiveITBase.java (...ridge/src/test/java/org/apache/atlas/hive/HiveITBase.java): +1 -1
HiveMetaStoreBridgeTest.java (...org/apache/atlas/hive/bridge/HiveMetaStoreBridgeTest.java): +21 -22
AtlasImpalaHookContext.java (.../org/apache/atlas/impala/hook/AtlasImpalaHookContext.java): +6 -7
ImpalaLineageHook.java (.../java/org/apache/atlas/impala/hook/ImpalaLineageHook.java): +0 -8
BaseImpalaEvent.java (.../org/apache/atlas/impala/hook/events/BaseImpalaEvent.java): +1 -1
ImpalaLineageITBase.java (...est/java/org/apache/atlas/impala/ImpalaLineageITBase.java): +2 -2
ImpalaLineageToolIT.java (...est/java/org/apache/atlas/impala/ImpalaLineageToolIT.java): +9 -9
ImpalaLineageHookIT.java (...ava/org/apache/atlas/impala/hook/ImpalaLineageHookIT.java): +1 -1
import-kafka.sh (addons/kafka-bridge/src/bin/import-kafka.sh): +1 -1
KafkaBridge.java (.../main/java/org/apache/atlas/kafka/bridge/KafkaBridge.java): +17 -8
SqoopHook.java (.../src/main/java/org/apache/atlas/sqoop/hook/SqoopHook.java): +20 -12
StormAtlasHook.java (...main/java/org/apache/atlas/storm/hook/StormAtlasHook.java): +21 -31
AtlasConstants.java (common/src/main/java/org/apache/atlas/AtlasConstants.java): +6 -6
AtlasHook.java (...cation/src/main/java/org/apache/atlas/hook/AtlasHook.java): +17 -0
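The change follows one pattern across the bridges and hooks below: the attribute that used to carry the cluster name now carries a metadata namespace, read from the new atlas.metadata.namespace property and falling back to atlas.cluster.name (and finally to "primary") when it is not set. A minimal sketch of that resolution order, using Apache Commons Configuration as the bridges do; the class and key constants here are illustrative, not part of the commit:

import org.apache.commons.configuration.Configuration;

public class MetadataNamespaceResolver {
    // Property keys used by this commit (see HBaseBridge and HiveMetaStoreBridge below)
    private static final String METADATA_NAMESPACE_KEY = "atlas.metadata.namespace";
    private static final String CLUSTER_NAME_KEY       = "atlas.cluster.name";
    private static final String DEFAULT_CLUSTER_NAME   = "primary";

    // Resolution order: atlas.metadata.namespace -> atlas.cluster.name -> "primary"
    public static String getMetadataNamespace(Configuration config) {
        return config.getString(METADATA_NAMESPACE_KEY,
                config.getString(CLUSTER_NAME_KEY, DEFAULT_CLUSTER_NAME));
    }
}

Because of the fallback, deployments that only set atlas.cluster.name keep producing the same qualified names as before.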
addons/hbase-bridge/src/bin/import-hbase.sh
@@ -114,7 +114,7 @@ else
     exit 1
 fi
-CP="${ATLASCPPATH}:${HBASE_CP}:${HADOOP_CP}"
+CP="${HBASE_CP}:${HADOOP_CP}:${ATLASCPPATH}"
 # If running in cygwin, convert pathnames and classpath to Windows format.
 if [ "${CYGWIN}" == "true" ]
addons/hbase-bridge/src/main/java/org/apache/atlas/hbase/bridge/HBaseAtlasHook.java
@@ -30,7 +30,6 @@ import org.apache.atlas.model.notification.HookNotification.EntityDeleteRequestV
 import org.apache.atlas.model.notification.HookNotification.EntityUpdateRequestV2;
 import org.apache.atlas.type.AtlasTypeUtil;
 import org.apache.commons.collections.CollectionUtils;
-import org.apache.commons.configuration.Configuration;
 import org.apache.hadoop.hbase.TableName;
 import org.apache.hadoop.hbase.NamespaceDescriptor;
 import org.apache.hadoop.hbase.client.ColumnFamilyDescriptor;

@@ -55,11 +54,8 @@ public class HBaseAtlasHook extends AtlasHook {
     private static final Logger LOG = LoggerFactory.getLogger(HBaseAtlasHook.class);

-    public static final String HBASE_CLUSTER_NAME   = "atlas.cluster.name";
-    public static final String DEFAULT_CLUSTER_NAME = "primary";
     public static final String ATTR_DESCRIPTION     = "description";
     public static final String ATTR_ATLAS_ENDPOINT  = "atlas.rest.address";
     public static final String ATTR_COMMENT         = "comment";
     public static final String ATTR_PARAMETERS      = "parameters";
     public static final String ATTR_URI             = "uri";
     public static final String ATTR_NAMESPACE       = "namespace";

@@ -106,7 +102,6 @@ public class HBaseAtlasHook extends AtlasHook {
     public static final String HBASE_COLUMN_FAMILY_QUALIFIED_NAME_FORMAT = "%s:%s.%s@%s";

     private static final String REFERENCEABLE_ATTRIBUTE_NAME = "qualifiedName";

-    private String clusterName = null;
     private static volatile HBaseAtlasHook me;

@@ -141,7 +136,7 @@ public class HBaseAtlasHook extends AtlasHook {
                 ret = me;

                 if (ret == null) {
-                    me = ret = new HBaseAtlasHook(atlasProperties);
+                    me = ret = new HBaseAtlasHook();
                 }
             }
         } catch (Exception e) {

@@ -152,15 +147,9 @@ public class HBaseAtlasHook extends AtlasHook {
         return ret;
     }

-    public HBaseAtlasHook(Configuration atlasProperties) {
-        this(atlasProperties.getString(HBASE_CLUSTER_NAME, DEFAULT_CLUSTER_NAME));
+    public HBaseAtlasHook() {
     }

-    public HBaseAtlasHook(String clusterName) {
-        this.clusterName = clusterName;
-    }
-
     public void createAtlasInstances(HBaseOperationContext hbaseOperationContext) {
         OPERATION operation = hbaseOperationContext.getOperation();

@@ -210,7 +199,7 @@ public class HBaseAtlasHook extends AtlasHook {
     }

     private void deleteNameSpaceInstance(HBaseOperationContext hbaseOperationContext) {
-        String nameSpaceQName = getNameSpaceQualifiedName(clusterName, hbaseOperationContext.getNameSpace());
+        String nameSpaceQName = getNameSpaceQualifiedName(getMetadataNamespace(), hbaseOperationContext.getNameSpace());
         AtlasObjectId nameSpaceId = new AtlasObjectId(HBaseDataTypes.HBASE_NAMESPACE.getName(), REFERENCEABLE_ATTRIBUTE_NAME, nameSpaceQName);

         LOG.info("Delete NameSpace {}", nameSpaceQName);

@@ -259,7 +248,7 @@ public class HBaseAtlasHook extends AtlasHook {
         }

         String tableNameStr = tableName.getNameAsString();
-        String tableQName = getTableQualifiedName(clusterName, nameSpaceName, tableNameStr);
+        String tableQName = getTableQualifiedName(getMetadataNamespace(), nameSpaceName, tableNameStr);
         AtlasObjectId tableId = new AtlasObjectId(HBaseDataTypes.HBASE_TABLE.getName(), REFERENCEABLE_ATTRIBUTE_NAME, tableQName);

         LOG.info("Delete Table {}", tableQName);

@@ -302,7 +291,7 @@ public class HBaseAtlasHook extends AtlasHook {
         String tableNameStr = tableName.getNameAsString();
         String columnFamilyName = hbaseOperationContext.getColummFamily();
-        String columnFamilyQName = getColumnFamilyQualifiedName(clusterName, nameSpaceName, tableNameStr, columnFamilyName);
+        String columnFamilyQName = getColumnFamilyQualifiedName(getMetadataNamespace(), nameSpaceName, tableNameStr, columnFamilyName);
         AtlasObjectId columnFamilyId = new AtlasObjectId(HBaseDataTypes.HBASE_COLUMN_FAMILY.getName(), REFERENCEABLE_ATTRIBUTE_NAME, columnFamilyQName);

         LOG.info("Delete ColumnFamily {}", columnFamilyQName);

@@ -314,48 +303,48 @@ public class HBaseAtlasHook extends AtlasHook {
     /**
      * Construct the qualified name used to uniquely identify a ColumnFamily instance in Atlas.
      *
-     * @param clusterName Name of the cluster to which the HBase component belongs
+     * @param metadataNamespace Metadata namespace of the cluster to which the HBase component belongs
      * @param nameSpace Name of the HBase database to which the Table belongs
      * @param tableName Name of the HBase table
      * @param columnFamily Name of the ColumnFamily
      * @return Unique qualified name to identify the Table instance in Atlas.
      */
-    public static String getColumnFamilyQualifiedName(String clusterName, String nameSpace, String tableName, String columnFamily) {
-        if (clusterName == null || nameSpace == null || tableName == null || columnFamily == null) {
+    public static String getColumnFamilyQualifiedName(String metadataNamespace, String nameSpace, String tableName, String columnFamily) {
+        if (metadataNamespace == null || nameSpace == null || tableName == null || columnFamily == null) {
             return null;
         } else {
-            return String.format(HBASE_COLUMN_FAMILY_QUALIFIED_NAME_FORMAT, nameSpace.toLowerCase(), stripNameSpace(tableName.toLowerCase()), columnFamily.toLowerCase(), clusterName);
+            return String.format(HBASE_COLUMN_FAMILY_QUALIFIED_NAME_FORMAT, nameSpace.toLowerCase(), stripNameSpace(tableName.toLowerCase()), columnFamily.toLowerCase(), metadataNamespace);
         }
     }

     /**
      * Construct the qualified name used to uniquely identify a Table instance in Atlas.
      *
-     * @param clusterName Name of the cluster to which the HBase component belongs
+     * @param metadataNamespace Metadata namespace of the cluster to which the HBase component belongs
      * @param nameSpace Name of the HBase database to which the Table belongs
      * @param tableName Name of the HBase table
      * @return Unique qualified name to identify the Table instance in Atlas.
      */
-    public static String getTableQualifiedName(String clusterName, String nameSpace, String tableName) {
-        if (clusterName == null || nameSpace == null || tableName == null) {
+    public static String getTableQualifiedName(String metadataNamespace, String nameSpace, String tableName) {
+        if (metadataNamespace == null || nameSpace == null || tableName == null) {
             return null;
         } else {
-            return String.format(HBASE_TABLE_QUALIFIED_NAME_FORMAT, nameSpace.toLowerCase(), stripNameSpace(tableName.toLowerCase()), clusterName);
+            return String.format(HBASE_TABLE_QUALIFIED_NAME_FORMAT, nameSpace.toLowerCase(), stripNameSpace(tableName.toLowerCase()), metadataNamespace);
         }
     }

     /**
      * Construct the qualified name used to uniquely identify a HBase NameSpace instance in Atlas.
      *
-     * @param clusterName Name of the cluster to which the HBase component belongs
+     * @param metadataNamespace Metadata namespace of the cluster to which the HBase component belongs
      * @param nameSpace
      * @return Unique qualified name to identify the HBase NameSpace instance in Atlas.
      */
-    public static String getNameSpaceQualifiedName(String clusterName, String nameSpace) {
-        if (clusterName == null || nameSpace == null) {
+    public static String getNameSpaceQualifiedName(String metadataNamespace, String nameSpace) {
+        if (metadataNamespace == null || nameSpace == null) {
             return null;
         } else {
-            return String.format(HBASE_NAMESPACE_QUALIFIED_NAME, nameSpace.toLowerCase(), clusterName);
+            return String.format(HBASE_NAMESPACE_QUALIFIED_NAME, nameSpace.toLowerCase(), metadataNamespace);
         }
     }

@@ -375,8 +364,8 @@ public class HBaseAtlasHook extends AtlasHook {
         Date now = new Date(System.currentTimeMillis());

         nameSpace.setAttribute(ATTR_NAME, nameSpaceName);
-        nameSpace.setAttribute(REFERENCEABLE_ATTRIBUTE_NAME, getNameSpaceQualifiedName(clusterName, nameSpaceName));
-        nameSpace.setAttribute(AtlasConstants.CLUSTER_NAME_ATTRIBUTE, clusterName);
+        nameSpace.setAttribute(REFERENCEABLE_ATTRIBUTE_NAME, getNameSpaceQualifiedName(getMetadataNamespace(), nameSpaceName));
+        nameSpace.setAttribute(AtlasConstants.CLUSTER_NAME_ATTRIBUTE, getMetadataNamespace());
         nameSpace.setAttribute(ATTR_DESCRIPTION, nameSpaceName);
         nameSpace.setAttribute(ATTR_PARAMETERS, hbaseOperationContext.getHbaseConf());
         nameSpace.setAttribute(ATTR_OWNER, hbaseOperationContext.getOwner());

@@ -393,7 +382,7 @@ public class HBaseAtlasHook extends AtlasHook {
         AtlasEntity table = new AtlasEntity(HBaseDataTypes.HBASE_TABLE.getName());
         String tableName = getTableName(hbaseOperationContext);
         String nameSpaceName = (String) nameSpace.getAttribute(ATTR_NAME);
-        String tableQName = getTableQualifiedName(clusterName, nameSpaceName, tableName);
+        String tableQName = getTableQualifiedName(getMetadataNamespace(), nameSpaceName, tableName);
         OPERATION operation = hbaseOperationContext.getOperation();
         Date now = new Date(System.currentTimeMillis());

@@ -455,7 +444,7 @@ public class HBaseAtlasHook extends AtlasHook {
         String columnFamilyName = columnFamilyDescriptor.getNameAsString();
         String tableName = (String) table.getAttribute(ATTR_NAME);
         String nameSpaceName = (String) nameSpace.getAttribute(ATTR_NAME);
-        String columnFamilyQName = getColumnFamilyQualifiedName(clusterName, nameSpaceName, tableName, columnFamilyName);
+        String columnFamilyQName = getColumnFamilyQualifiedName(getMetadataNamespace(), nameSpaceName, tableName, columnFamilyName);
         Date now = new Date(System.currentTimeMillis());

         columnFamily.setAttribute(ATTR_NAME, columnFamilyName);
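Only the value after the '@' separator changes meaning in the formats above: it is now the metadata namespace rather than the cluster name. With hypothetical inputs and a metadata namespace of "prod", the public helpers would produce names like the following (this assumes the Atlas hbase-bridge module on the classpath; the table format "%s:%s@%s" is the one shown in HBaseBridge below):

// Hypothetical example values, for illustration only
String tableQName = HBaseAtlasHook.getTableQualifiedName("prod", "default", "customers");
// -> "default:customers@prod"

String cfQName = HBaseAtlasHook.getColumnFamilyQualifiedName("prod", "default", "customers", "cf1");
// -> "default:customers.cf1@prod"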
addons/hbase-bridge/src/main/java/org/apache/atlas/hbase/bridge/HBaseBridge.java
@@ -69,8 +69,9 @@ public class HBaseBridge {
     private static final int    EXIT_CODE_FAILED         = 1;
     private static final String ATLAS_ENDPOINT           = "atlas.rest.address";
     private static final String DEFAULT_ATLAS_URL        = "http://localhost:21000/";
-    private static final String HBASE_CLUSTER_NAME       = "atlas.cluster.name";
+    private static final String CLUSTER_NAME_KEY         = "atlas.cluster.name";
     private static final String DEFAULT_CLUSTER_NAME     = "primary";
+    private static final String HBASE_METADATA_NAMESPACE = "atlas.metadata.namespace";
     private static final String QUALIFIED_NAME           = "qualifiedName";
     private static final String NAME                     = "name";
     private static final String URI                      = "uri";

@@ -115,7 +116,7 @@ public class HBaseBridge {
     private static final String HBASE_TABLE_QUALIFIED_NAME_FORMAT         = "%s:%s@%s";
     private static final String HBASE_COLUMN_FAMILY_QUALIFIED_NAME_FORMAT = "%s:%s.%s@%s";

-    private final String clusterName;
+    private final String metadataNamespace;
     private final AtlasClientV2 atlasClientV2;
     private final Admin hbaseAdmin;

@@ -205,7 +206,7 @@ public class HBaseBridge {
     public HBaseBridge(Configuration atlasConf, AtlasClientV2 atlasClientV2) throws Exception {
         this.atlasClientV2 = atlasClientV2;
-        this.clusterName = atlasConf.getString(HBASE_CLUSTER_NAME, DEFAULT_CLUSTER_NAME);
+        this.metadataNamespace = getMetadataNamespace(atlasConf);

         org.apache.hadoop.conf.Configuration conf = HBaseConfiguration.create();

@@ -220,6 +221,14 @@ public class HBaseBridge {
         hbaseAdmin = conn.getAdmin();
     }

+    private String getMetadataNamespace(Configuration config) {
+        return config.getString(HBASE_METADATA_NAMESPACE, getClusterName(config));
+    }
+
+    private String getClusterName(Configuration config) {
+        return config.getString(CLUSTER_NAME_KEY, DEFAULT_CLUSTER_NAME);
+    }
+
     private boolean importHBaseEntities(String namespaceToImport, String tableToImport) throws Exception {
         boolean ret = false;

@@ -367,7 +376,7 @@ public class HBaseBridge {
     protected AtlasEntityWithExtInfo createOrUpdateNameSpace(NamespaceDescriptor namespaceDescriptor) throws Exception {
         String nsName = namespaceDescriptor.getName();
-        String nsQualifiedName = getNameSpaceQualifiedName(clusterName, nsName);
+        String nsQualifiedName = getNameSpaceQualifiedName(metadataNamespace, nsName);
         AtlasEntityWithExtInfo nsEntity = findNameSpaceEntityInAtlas(nsQualifiedName);

         if (nsEntity == null) {

@@ -390,7 +399,7 @@ public class HBaseBridge {
     protected AtlasEntityWithExtInfo createOrUpdateTable(String nameSpace, String tableName, AtlasEntity nameSapceEntity, TableDescriptor htd, ColumnFamilyDescriptor[] hcdts) throws Exception {
         String owner = htd.getOwnerString();
-        String tblQualifiedName = getTableQualifiedName(clusterName, nameSpace, tableName);
+        String tblQualifiedName = getTableQualifiedName(metadataNamespace, nameSpace, tableName);
         AtlasEntityWithExtInfo ret = findTableEntityInAtlas(tblQualifiedName);

         if (ret == null) {

@@ -436,7 +445,7 @@ public class HBaseBridge {
         for (ColumnFamilyDescriptor columnFamilyDescriptor : hcdts) {
             String cfName = columnFamilyDescriptor.getNameAsString();
-            String cfQualifiedName = getColumnFamilyQualifiedName(clusterName, nameSpace, tableName, cfName);
+            String cfQualifiedName = getColumnFamilyQualifiedName(metadataNamespace, nameSpace, tableName, cfName);
             AtlasEntityWithExtInfo cfEntity = findColumnFamiltyEntityInAtlas(cfQualifiedName);

             if (cfEntity == null) {

@@ -516,10 +525,10 @@ public class HBaseBridge {
             ret = nsEtity;
         }

-        String qualifiedName = getNameSpaceQualifiedName(clusterName, nameSpace);
+        String qualifiedName = getNameSpaceQualifiedName(metadataNamespace, nameSpace);

         ret.setAttribute(QUALIFIED_NAME, qualifiedName);
-        ret.setAttribute(CLUSTERNAME, clusterName);
+        ret.setAttribute(CLUSTERNAME, metadataNamespace);
         ret.setAttribute(NAME, nameSpace);
         ret.setAttribute(DESCRIPTION_ATTR, nameSpace);

@@ -535,10 +544,10 @@ public class HBaseBridge {
             ret = atlasEntity;
         }

-        String tableQualifiedName = getTableQualifiedName(clusterName, nameSpace, tableName);
+        String tableQualifiedName = getTableQualifiedName(metadataNamespace, nameSpace, tableName);

         ret.setAttribute(QUALIFIED_NAME, tableQualifiedName);
-        ret.setAttribute(CLUSTERNAME, clusterName);
+        ret.setAttribute(CLUSTERNAME, metadataNamespace);
         ret.setAttribute(NAMESPACE, AtlasTypeUtil.getAtlasObjectId(nameSpaceEntity));
         ret.setAttribute(NAME, tableName);
         ret.setAttribute(DESCRIPTION_ATTR, tableName);

@@ -564,10 +573,10 @@ public class HBaseBridge {
         }

         String cfName = hcdt.getNameAsString();
-        String cfQualifiedName = getColumnFamilyQualifiedName(clusterName, nameSpace, tableName, cfName);
+        String cfQualifiedName = getColumnFamilyQualifiedName(metadataNamespace, nameSpace, tableName, cfName);

         ret.setAttribute(QUALIFIED_NAME, cfQualifiedName);
-        ret.setAttribute(CLUSTERNAME, clusterName);
+        ret.setAttribute(CLUSTERNAME, metadataNamespace);
         ret.setAttribute(TABLE, tableId);
         ret.setAttribute(NAME, cfName);
         ret.setAttribute(DESCRIPTION_ATTR, cfName);

@@ -637,37 +646,37 @@ public class HBaseBridge {
     /**
      * Construct the qualified name used to uniquely identify a ColumnFamily instance in Atlas.
-     * @param clusterName Name of the cluster to which the Hbase component belongs
+     * @param metadataNamespace Metadata namespace of the cluster to which the Hbase component belongs
      * @param nameSpace Name of the Hbase database to which the Table belongs
      * @param tableName Name of the Hbase table
      * @param columnFamily Name of the ColumnFamily
      * @return Unique qualified name to identify the Table instance in Atlas.
      */
-    private static String getColumnFamilyQualifiedName(String clusterName, String nameSpace, String tableName, String columnFamily) {
+    private static String getColumnFamilyQualifiedName(String metadataNamespace, String nameSpace, String tableName, String columnFamily) {
         tableName = stripNameSpace(tableName.toLowerCase());

-        return String.format(HBASE_COLUMN_FAMILY_QUALIFIED_NAME_FORMAT, nameSpace.toLowerCase(), tableName, columnFamily.toLowerCase(), clusterName);
+        return String.format(HBASE_COLUMN_FAMILY_QUALIFIED_NAME_FORMAT, nameSpace.toLowerCase(), tableName, columnFamily.toLowerCase(), metadataNamespace);
     }

     /**
      * Construct the qualified name used to uniquely identify a Table instance in Atlas.
-     * @param clusterName Name of the cluster to which the Hbase component belongs
+     * @param metadataNamespace Metadata namespace of the cluster to which the Hbase component belongs
      * @param nameSpace Name of the Hbase database to which the Table belongs
      * @param tableName Name of the Hbase table
      * @return Unique qualified name to identify the Table instance in Atlas.
      */
-    private static String getTableQualifiedName(String clusterName, String nameSpace, String tableName) {
+    private static String getTableQualifiedName(String metadataNamespace, String nameSpace, String tableName) {
         tableName = stripNameSpace(tableName.toLowerCase());

-        return String.format(HBASE_TABLE_QUALIFIED_NAME_FORMAT, nameSpace.toLowerCase(), tableName, clusterName);
+        return String.format(HBASE_TABLE_QUALIFIED_NAME_FORMAT, nameSpace.toLowerCase(), tableName, metadataNamespace);
     }

     /**
      * Construct the qualified name used to uniquely identify a Hbase NameSpace instance in Atlas.
-     * @param clusterName Name of the cluster to which the Hbase component belongs
+     * @param metadataNamespace Metadata namespace of the cluster to which the Hbase component belongs
      * @param nameSpace Name of the NameSpace
      * @return Unique qualified name to identify the HBase NameSpace instance in Atlas.
      */
-    private static String getNameSpaceQualifiedName(String clusterName, String nameSpace) {
-        return String.format(HBASE_NAMESPACE_QUALIFIED_NAME, nameSpace.toLowerCase(), clusterName);
+    private static String getNameSpaceQualifiedName(String metadataNamespace, String nameSpace) {
+        return String.format(HBASE_NAMESPACE_QUALIFIED_NAME, nameSpace.toLowerCase(), metadataNamespace);
     }

     private static String stripNameSpace(String tableName){
addons/hive-bridge/src/bin/import-hive.sh
@@ -109,7 +109,7 @@ else
     exit 1
 fi
-CP="${ATLASCPPATH}:${HIVE_CP}:${HADOOP_CP}"
+CP="${HIVE_CP}:${HADOOP_CP}:${ATLASCPPATH}"
 # If running in cygwin, convert pathnames and classpath to Windows format.
 if [ "${CYGWIN}" == "true" ]
addons/hive-bridge/src/main/java/org/apache/atlas/hive/bridge/HiveMetaStoreBridge.java
@@ -83,7 +83,8 @@ public class HiveMetaStoreBridge {
     private static final Logger LOG = LoggerFactory.getLogger(HiveMetaStoreBridge.class);

     public static final String CONF_PREFIX = "atlas.hook.hive.";
-    public static final String HIVE_CLUSTER_NAME = "atlas.cluster.name";
+    public static final String CLUSTER_NAME_KEY = "atlas.cluster.name";
+    public static final String HIVE_METADATA_NAMESPACE = "atlas.metadata.namespace";
     public static final String HDFS_PATH_CONVERT_TO_LOWER_CASE = CONF_PREFIX + "hdfs_path.convert_to_lowercase";
     public static final String DEFAULT_CLUSTER_NAME = "primary";
     public static final String TEMP_TABLE_PREFIX = "_temp-";

@@ -95,7 +96,7 @@ public class HiveMetaStoreBridge {
     private static final int    EXIT_CODE_FAILED  = 1;
     private static final String DEFAULT_ATLAS_URL = "http://localhost:21000/";

-    private final String clusterName;
+    private final String metadataNamespace;
     private final Hive hiveClient;
     private final AtlasClientV2 atlasClientV2;
     private final boolean convertHdfsPathToLowerCase;

@@ -209,7 +210,10 @@ public class HiveMetaStoreBridge {
      * @param hiveConf {@link HiveConf} for Hive component in the cluster
      */
     public HiveMetaStoreBridge(Configuration atlasProperties, HiveConf hiveConf, AtlasClientV2 atlasClientV2) throws Exception {
-        this(atlasProperties.getString(HIVE_CLUSTER_NAME, DEFAULT_CLUSTER_NAME), Hive.get(hiveConf), atlasClientV2, atlasProperties.getBoolean(HDFS_PATH_CONVERT_TO_LOWER_CASE, false));
+        this.metadataNamespace          = getMetadataNamespace(atlasProperties);
+        this.hiveClient                 = Hive.get(hiveConf);
+        this.atlasClientV2              = atlasClientV2;
+        this.convertHdfsPathToLowerCase = atlasProperties.getBoolean(HDFS_PATH_CONVERT_TO_LOWER_CASE, false);
     }

     /**

@@ -220,19 +224,27 @@ public class HiveMetaStoreBridge {
         this(atlasProperties, hiveConf, null);
     }

-    HiveMetaStoreBridge(String clusterName, Hive hiveClient, AtlasClientV2 atlasClientV2) {
-        this(clusterName, hiveClient, atlasClientV2, true);
+    HiveMetaStoreBridge(String metadataNamespace, Hive hiveClient, AtlasClientV2 atlasClientV2) {
+        this(metadataNamespace, hiveClient, atlasClientV2, true);
     }

-    HiveMetaStoreBridge(String clusterName, Hive hiveClient, AtlasClientV2 atlasClientV2, boolean convertHdfsPathToLowerCase) {
-        this.clusterName                = clusterName;
+    HiveMetaStoreBridge(String metadataNamespace, Hive hiveClient, AtlasClientV2 atlasClientV2, boolean convertHdfsPathToLowerCase) {
+        this.metadataNamespace          = metadataNamespace;
         this.hiveClient                 = hiveClient;
         this.atlasClientV2              = atlasClientV2;
         this.convertHdfsPathToLowerCase = convertHdfsPathToLowerCase;
     }

-    public String getClusterName() {
-        return clusterName;
+    public String getMetadataNamespace(Configuration config) {
+        return config.getString(HIVE_METADATA_NAMESPACE, getClusterName(config));
+    }
+
+    private String getClusterName(Configuration config) {
+        return config.getString(CLUSTER_NAME_KEY, DEFAULT_CLUSTER_NAME);
+    }
+
+    public String getMetadataNamespace() {
+        return metadataNamespace;
     }

     public Hive getHiveClient() {

@@ -337,7 +349,7 @@ public class HiveMetaStoreBridge {
         AtlasEntityWithExtInfo tableEntity = registerTable(dbEntity, table);

         if (table.getTableType() == TableType.EXTERNAL_TABLE) {
-            String processQualifiedName = getTableProcessQualifiedName(clusterName, table);
+            String processQualifiedName = getTableProcessQualifiedName(metadataNamespace, table);
             AtlasEntityWithExtInfo processEntity = findProcessEntity(processQualifiedName);

             if (processEntity == null) {

@@ -350,7 +362,7 @@ public class HiveMetaStoreBridge {
                 processInst.setAttribute(ATTRIBUTE_QUALIFIED_NAME, processQualifiedName);
                 processInst.setAttribute(ATTRIBUTE_NAME, query);
-                processInst.setAttribute(ATTRIBUTE_CLUSTER_NAME, clusterName);
+                processInst.setAttribute(ATTRIBUTE_CLUSTER_NAME, metadataNamespace);
                 processInst.setAttribute(ATTRIBUTE_INPUTS, Collections.singletonList(BaseHiveEvent.getObjectId(pathInst)));
                 processInst.setAttribute(ATTRIBUTE_OUTPUTS, Collections.singletonList(BaseHiveEvent.getObjectId(tableInst)));
                 processInst.setAttribute(ATTRIBUTE_USER_NAME, table.getOwner());

@@ -396,7 +408,7 @@ public class HiveMetaStoreBridge {
         Database db = hiveClient.getDatabase(databaseName);

         if (db != null) {
-            ret = findDatabase(clusterName, databaseName);
+            ret = findDatabase(metadataNamespace, databaseName);

             if (ret == null) {
                 ret = registerInstance(new AtlasEntityWithExtInfo(toDbEntity(db)));

@@ -542,12 +554,12 @@ public class HiveMetaStoreBridge {
         String dbName = hiveDB.getName().toLowerCase();

-        dbEntity.setAttribute(ATTRIBUTE_QUALIFIED_NAME, getDBQualifiedName(clusterName, dbName));
+        dbEntity.setAttribute(ATTRIBUTE_QUALIFIED_NAME, getDBQualifiedName(metadataNamespace, dbName));
         dbEntity.setAttribute(ATTRIBUTE_NAME, dbName);
         dbEntity.setAttribute(ATTRIBUTE_DESCRIPTION, hiveDB.getDescription());
         dbEntity.setAttribute(ATTRIBUTE_OWNER, hiveDB.getOwnerName());

-        dbEntity.setAttribute(ATTRIBUTE_CLUSTER_NAME, clusterName);
+        dbEntity.setAttribute(ATTRIBUTE_CLUSTER_NAME, metadataNamespace);
         dbEntity.setAttribute(ATTRIBUTE_LOCATION, HdfsNameServiceResolver.getPathWithNameServiceID(hiveDB.getLocationUri()));
         dbEntity.setAttribute(ATTRIBUTE_PARAMETERS, hiveDB.getParameters());

@@ -574,7 +586,7 @@ public class HiveMetaStoreBridge {
         }

         AtlasEntity tableEntity = table.getEntity();
-        String tableQualifiedName = getTableQualifiedName(clusterName, hiveTable);
+        String tableQualifiedName = getTableQualifiedName(metadataNamespace, hiveTable);
         long createTime = BaseHiveEvent.getTableCreateTime(hiveTable);
         long lastAccessTime = hiveTable.getLastAccessTime() > 0 ? hiveTable.getLastAccessTime() : createTime;

@@ -705,7 +717,7 @@ public class HiveMetaStoreBridge {
         Path path = new Path(pathUri);

         ret.setAttribute(ATTRIBUTE_NAME, Path.getPathWithoutSchemeAndAuthority(path).toString());
-        ret.setAttribute(ATTRIBUTE_CLUSTER_NAME, clusterName);
+        ret.setAttribute(ATTRIBUTE_CLUSTER_NAME, metadataNamespace);

         if (StringUtils.isNotEmpty(nameServiceID)) {
             // Name service resolution is successful, now get updated HDFS path where the host port info is replaced by resolved name service

@@ -717,7 +729,7 @@ public class HiveMetaStoreBridge {
         } else {
             ret.setAttribute(ATTRIBUTE_PATH, pathUri);

-            // Only append clusterName for the HDFS path
+            // Only append metadataNamespace for the HDFS path
             if (pathUri.startsWith(HdfsNameServiceResolver.HDFS_SCHEME)) {
                 ret.setAttribute(ATTRIBUTE_QUALIFIED_NAME, getHdfsPathQualifiedName(pathUri));
             } else {

@@ -731,18 +743,18 @@ public class HiveMetaStoreBridge {
     /**
      * Gets the atlas entity for the database
      * @param databaseName  database Name
-     * @param clusterName  cluster name
+     * @param metadataNamespace  cluster name
      * @return AtlasEntity for database if exists, else null
      * @throws Exception
      */
-    private AtlasEntityWithExtInfo findDatabase(String clusterName, String databaseName) throws Exception {
+    private AtlasEntityWithExtInfo findDatabase(String metadataNamespace, String databaseName) throws Exception {
         if (LOG.isDebugEnabled()) {
             LOG.debug("Searching Atlas for database {}", databaseName);
         }

         String typeName = HiveDataTypes.HIVE_DB.getName();

-        return findEntity(typeName, getDBQualifiedName(clusterName, databaseName));
+        return findEntity(typeName, getDBQualifiedName(metadataNamespace, databaseName));
     }

@@ -758,7 +770,7 @@ public class HiveMetaStoreBridge {
         }

         String typeName = HiveDataTypes.HIVE_TABLE.getName();
-        String tblQualifiedName = getTableQualifiedName(getClusterName(), hiveTable.getDbName(), hiveTable.getTableName());
+        String tblQualifiedName = getTableQualifiedName(getMetadataNamespace(), hiveTable.getDbName(), hiveTable.getTableName());

         return findEntity(typeName, tblQualifiedName);
     }

@@ -822,37 +834,37 @@ public class HiveMetaStoreBridge {
     /**
      * Construct the qualified name used to uniquely identify a Table instance in Atlas.
-     * @param clusterName Name of the cluster to which the Hive component belongs
+     * @param metadataNamespace Metadata namespace of the cluster to which the Hive component belongs
      * @param table hive table for which the qualified name is needed
      * @return Unique qualified name to identify the Table instance in Atlas.
      */
-    private static String getTableQualifiedName(String clusterName, Table table) {
-        return getTableQualifiedName(clusterName, table.getDbName(), table.getTableName(), table.isTemporary());
+    private static String getTableQualifiedName(String metadataNamespace, Table table) {
+        return getTableQualifiedName(metadataNamespace, table.getDbName(), table.getTableName(), table.isTemporary());
     }

     private String getHdfsPathQualifiedName(String hdfsPath) {
-        return String.format("%s@%s", hdfsPath, clusterName);
+        return String.format("%s@%s", hdfsPath, metadataNamespace);
     }

     /**
      * Construct the qualified name used to uniquely identify a Database instance in Atlas.
-     * @param clusterName Name of the cluster to which the Hive component belongs
+     * @param metadataNamespace Name of the cluster to which the Hive component belongs
      * @param dbName Name of the Hive database
      * @return Unique qualified name to identify the Database instance in Atlas.
      */
-    public static String getDBQualifiedName(String clusterName, String dbName) {
-        return String.format("%s@%s", dbName.toLowerCase(), clusterName);
+    public static String getDBQualifiedName(String metadataNamespace, String dbName) {
+        return String.format("%s@%s", dbName.toLowerCase(), metadataNamespace);
     }

     /**
      * Construct the qualified name used to uniquely identify a Table instance in Atlas.
-     * @param clusterName Name of the cluster to which the Hive component belongs
+     * @param metadataNamespace Name of the cluster to which the Hive component belongs
      * @param dbName Name of the Hive database to which the Table belongs
      * @param tableName Name of the Hive table
      * @param isTemporaryTable is this a temporary table
      * @return Unique qualified name to identify the Table instance in Atlas.
      */
-    public static String getTableQualifiedName(String clusterName, String dbName, String tableName, boolean isTemporaryTable) {
+    public static String getTableQualifiedName(String metadataNamespace, String dbName, String tableName, boolean isTemporaryTable) {
         String tableTempName = tableName;

         if (isTemporaryTable) {

@@ -863,11 +875,11 @@ public class HiveMetaStoreBridge {
             }
         }

-        return String.format("%s.%s@%s", dbName.toLowerCase(), tableTempName.toLowerCase(), clusterName);
+        return String.format("%s.%s@%s", dbName.toLowerCase(), tableTempName.toLowerCase(), metadataNamespace);
     }

-    public static String getTableProcessQualifiedName(String clusterName, Table table) {
-        String tableQualifiedName = getTableQualifiedName(clusterName, table);
+    public static String getTableProcessQualifiedName(String metadataNamespace, Table table) {
+        String tableQualifiedName = getTableQualifiedName(metadataNamespace, table);
         long createdTime = getTableCreatedTime(table);

         return tableQualifiedName + SEP + createdTime;

@@ -876,13 +888,13 @@ public class HiveMetaStoreBridge {
     /**
      * Construct the qualified name used to uniquely identify a Table instance in Atlas.
-     * @param clusterName Name of the cluster to which the Hive component belongs
+     * @param metadataNamespace Metadata namespace of the cluster to which the Hive component belongs
      * @param dbName Name of the Hive database to which the Table belongs
      * @param tableName Name of the Hive table
      * @return Unique qualified name to identify the Table instance in Atlas.
      */
-    public static String getTableQualifiedName(String clusterName, String dbName, String tableName) {
-        return getTableQualifiedName(clusterName, dbName, tableName, false);
+    public static String getTableQualifiedName(String metadataNamespace, String dbName, String tableName) {
+        return getTableQualifiedName(metadataNamespace, dbName, tableName, false);
     }

     public static String getStorageDescQFName(String tableQualifiedName) {
         return tableQualifiedName + "_storage";

@@ -891,9 +903,9 @@ public class HiveMetaStoreBridge {
     public static String getColumnQualifiedName(final String tableQualifiedName, final String colName) {
         final String[] parts = tableQualifiedName.split("@");
         final String tableName = parts[0];
-        final String clusterName = parts[1];
+        final String metadataNamespace = parts[1];

-        return String.format("%s.%s@%s", tableName, colName.toLowerCase(), clusterName);
+        return String.format("%s.%s@%s", tableName, colName.toLowerCase(), metadataNamespace);
     }

     public static long getTableCreatedTime(Table table) {
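The Hive qualified-name shapes are unchanged ("%s@%s" for databases, "%s.%s@%s" for tables, plus a create-time suffix for table processes); only the trailing component is now the metadata namespace. With hypothetical values, and assuming the Atlas hive-bridge module on the classpath:

// Hypothetical example values, for illustration only
String dbQName    = HiveMetaStoreBridge.getDBQualifiedName("prod", "Sales");              // -> "sales@prod"
String tableQName = HiveMetaStoreBridge.getTableQualifiedName("prod", "Sales", "Orders"); // -> "sales.orders@prod"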
addons/hive-bridge/src/main/java/org/apache/atlas/hive/hook/AtlasHiveHookContext.java
@@ -38,7 +38,7 @@ import static org.apache.atlas.hive.hook.events.BaseHiveEvent.toTable;
 public class AtlasHiveHookContext {
-    public static final char   QNAME_SEP_CLUSTER_NAME       = '@';
+    public static final char   QNAME_SEP_METADATA_NAMESPACE = '@';
     public static final char   QNAME_SEP_ENTITY_NAME        = '.';
     public static final char   QNAME_SEP_PROCESS            = ':';
     public static final String TEMP_TABLE_PREFIX            = "_temp-";

@@ -157,8 +157,8 @@ public class AtlasHiveHookContext {
     public Collection<AtlasEntity> getEntities() { return qNameEntityMap.values(); }

-    public String getClusterName() { return hook.getClusterName(); }
+    public String getMetadataNamespace() { return hook.getMetadataNamespace(); }

     public String getHostName() { return hook.getHostName(); }

@@ -192,7 +192,7 @@ public class AtlasHiveHookContext {
     }

     public String getQualifiedName(Database db) {
-        return (db.getName() + QNAME_SEP_CLUSTER_NAME).toLowerCase() + getClusterName();
+        return (db.getName() + QNAME_SEP_METADATA_NAMESPACE).toLowerCase() + getMetadataNamespace();
     }

     public String getQualifiedName(Table table) {

@@ -206,7 +206,7 @@ public class AtlasHiveHookContext {
             }
         }

-        return (table.getDbName() + QNAME_SEP_ENTITY_NAME + tableName + QNAME_SEP_CLUSTER_NAME).toLowerCase() + getClusterName();
+        return (table.getDbName() + QNAME_SEP_ENTITY_NAME + tableName + QNAME_SEP_METADATA_NAMESPACE).toLowerCase() + getMetadataNamespace();
     }

     public boolean isKnownDatabase(String dbQualifiedName) {
addons/hive-bridge/src/main/java/org/apache/atlas/hive/hook/HiveHook.java
@@ -55,7 +55,6 @@ public class HiveHook extends AtlasHook implements ExecuteWithHookContext {
     public enum PreprocessAction { NONE, IGNORE, PRUNE }

     public static final String CONF_PREFIX = "atlas.hook.hive.";
-    public static final String CONF_CLUSTER_NAME = "atlas.cluster.name";
     public static final String HDFS_PATH_CONVERT_TO_LOWER_CASE = CONF_PREFIX + "hdfs_path.convert_to_lowercase";
     public static final String HOOK_NAME_CACHE_ENABLED = CONF_PREFIX + "name.cache.enabled";
     public static final String HOOK_NAME_CACHE_DATABASE_COUNT = CONF_PREFIX + "name.cache.database.count";

@@ -66,13 +65,10 @@ public class HiveHook extends AtlasHook implements ExecuteWithHookContext {
     public static final String HOOK_HIVE_TABLE_IGNORE_PATTERN = CONF_PREFIX + "hive_table.ignore.pattern";
     public static final String HOOK_HIVE_TABLE_PRUNE_PATTERN = CONF_PREFIX + "hive_table.prune.pattern";
     public static final String HOOK_HIVE_TABLE_CACHE_SIZE = CONF_PREFIX + "hive_table.cache.size";
-    public static final String DEFAULT_CLUSTER_NAME = "primary";
     public static final String DEFAULT_HOST_NAME = "localhost";

     private static final Map<String, HiveOperation> OPERATION_MAP = new HashMap<>();

-    private static final String clusterName;
     private static final boolean convertHdfsPathToLowerCase;
     private static final boolean nameCacheEnabled;
     private static final int nameCacheDatabaseMaxCount;

@@ -96,7 +92,6 @@ public class HiveHook extends AtlasHook implements ExecuteWithHookContext {
             OPERATION_MAP.put(hiveOperation.getOperationName(), hiveOperation);
         }

-        clusterName = atlasProperties.getString(CONF_CLUSTER_NAME, DEFAULT_CLUSTER_NAME);
         convertHdfsPathToLowerCase = atlasProperties.getBoolean(HDFS_PATH_CONVERT_TO_LOWER_CASE, false);
         nameCacheEnabled = atlasProperties.getBoolean(HOOK_NAME_CACHE_ENABLED, true);
         nameCacheDatabaseMaxCount = atlasProperties.getInt(HOOK_NAME_CACHE_DATABASE_COUNT, 10000);

@@ -253,10 +248,6 @@ public class HiveHook extends AtlasHook implements ExecuteWithHookContext {
         }
     }

-    public String getClusterName() {
-        return clusterName;
-    }
-
     public boolean isConvertHdfsPathToLowerCase() {
         return convertHdfsPathToLowerCase;
     }
addons/hive-bridge/src/main/java/org/apache/atlas/hive/hook/events/BaseHiveEvent.java
@@ -62,7 +62,7 @@ import java.util.List;
 import java.util.Map;
 import java.util.Set;

-import static org.apache.atlas.hive.hook.AtlasHiveHookContext.QNAME_SEP_CLUSTER_NAME;
+import static org.apache.atlas.hive.hook.AtlasHiveHookContext.QNAME_SEP_METADATA_NAMESPACE;
 import static org.apache.atlas.hive.hook.AtlasHiveHookContext.QNAME_SEP_ENTITY_NAME;
 import static org.apache.atlas.hive.hook.AtlasHiveHookContext.QNAME_SEP_PROCESS;

@@ -350,7 +350,7 @@ public abstract class BaseHiveEvent {
         ret.setAttribute(ATTRIBUTE_DESCRIPTION, db.getDescription());
         ret.setAttribute(ATTRIBUTE_OWNER, db.getOwnerName());

-        ret.setAttribute(ATTRIBUTE_CLUSTER_NAME, getClusterName());
+        ret.setAttribute(ATTRIBUTE_CLUSTER_NAME, getMetadataNamespace());
         ret.setAttribute(ATTRIBUTE_LOCATION, HdfsNameServiceResolver.getPathWithNameServiceID(db.getLocationUri()));
         ret.setAttribute(ATTRIBUTE_PARAMETERS, db.getParameters());

@@ -597,6 +597,7 @@ public abstract class BaseHiveEvent {
     protected AtlasEntity getPathEntity(Path path, AtlasEntityExtInfo extInfo) {
         AtlasEntity ret;
         String strPath = path.toString();
+        String metadataNamespace = getMetadataNamespace();

         if (strPath.startsWith(HDFS_PATH_PREFIX) && context.isConvertHdfsPathToLowerCase()) {
             strPath = strPath.toLowerCase();

@@ -604,8 +605,8 @@ public abstract class BaseHiveEvent {
         if (isS3Path(strPath)) {
             String bucketName = path.toUri().getAuthority();
-            String bucketQualifiedName = (path.toUri().getScheme() + SCHEME_SEPARATOR + path.toUri().getAuthority() + QNAME_SEP_CLUSTER_NAME).toLowerCase() + getClusterName();
-            String pathQualifiedName = (strPath + QNAME_SEP_CLUSTER_NAME).toLowerCase() + getClusterName();
+            String bucketQualifiedName = (path.toUri().getScheme() + SCHEME_SEPARATOR + path.toUri().getAuthority() + QNAME_SEP_METADATA_NAMESPACE).toLowerCase() + metadataNamespace;
+            String pathQualifiedName = (strPath + QNAME_SEP_METADATA_NAMESPACE).toLowerCase() + metadataNamespace;
             AtlasEntity bucketEntity = context.getEntity(bucketQualifiedName);

             ret = context.getEntity(pathQualifiedName);

@@ -654,7 +655,7 @@ public abstract class BaseHiveEvent {
                 ret.setAttribute(ATTRIBUTE_PATH, attrPath);
                 ret.setAttribute(ATTRIBUTE_QUALIFIED_NAME, pathQualifiedName);
                 ret.setAttribute(ATTRIBUTE_NAME, name);
-                ret.setAttribute(ATTRIBUTE_CLUSTER_NAME, getClusterName());
+                ret.setAttribute(ATTRIBUTE_CLUSTER_NAME, metadataNamespace);

                 context.putEntity(pathQualifiedName, ret);
             }

@@ -751,8 +752,8 @@ public abstract class BaseHiveEvent {
         return hiveDDL;
     }

-    protected String getClusterName() {
-        return context.getClusterName();
+    protected String getMetadataNamespace() {
+        return context.getMetadataNamespace();
     }

     protected Database getDatabases(String dbName) throws Exception {

@@ -870,7 +871,7 @@ public abstract class BaseHiveEvent {
     protected String getQualifiedName(Table table, FieldSchema column) {
         String tblQualifiedName = getQualifiedName(table);

-        int sepPos = tblQualifiedName.lastIndexOf(QNAME_SEP_CLUSTER_NAME);
+        int sepPos = tblQualifiedName.lastIndexOf(QNAME_SEP_METADATA_NAMESPACE);

         if (sepPos == -1) {
             return tblQualifiedName + QNAME_SEP_ENTITY_NAME + column.getName().toLowerCase();

@@ -891,16 +892,17 @@ public abstract class BaseHiveEvent {
         String dbName = column.getTabAlias().getTable().getDbName();
         String tableName = column.getTabAlias().getTable().getTableName();
         String colName = column.getColumn() != null ? column.getColumn().getName() : null;
+        String metadataNamespace = getMetadataNamespace();

         if (colName == null) {
-            return (dbName + QNAME_SEP_ENTITY_NAME + tableName + QNAME_SEP_CLUSTER_NAME).toLowerCase() + getClusterName();
+            return (dbName + QNAME_SEP_ENTITY_NAME + tableName + QNAME_SEP_METADATA_NAMESPACE).toLowerCase() + metadataNamespace;
         } else {
-            return (dbName + QNAME_SEP_ENTITY_NAME + tableName + QNAME_SEP_ENTITY_NAME + colName + QNAME_SEP_CLUSTER_NAME).toLowerCase() + getClusterName();
+            return (dbName + QNAME_SEP_ENTITY_NAME + tableName + QNAME_SEP_ENTITY_NAME + colName + QNAME_SEP_METADATA_NAMESPACE).toLowerCase() + metadataNamespace;
         }
     }

     protected String getQualifiedName(String dbName, String tableName, String colName) {
-        return (dbName + QNAME_SEP_ENTITY_NAME + tableName + QNAME_SEP_ENTITY_NAME + colName + QNAME_SEP_CLUSTER_NAME).toLowerCase() + getClusterName();
+        return (dbName + QNAME_SEP_ENTITY_NAME + tableName + QNAME_SEP_ENTITY_NAME + colName + QNAME_SEP_METADATA_NAMESPACE).toLowerCase() + getMetadataNamespace();
     }

     protected String getQualifiedName(URI location) {

@@ -918,14 +920,14 @@ public abstract class BaseHiveEvent {
     protected String getQualifiedName(String path) {
         if (path.startsWith(HdfsNameServiceResolver.HDFS_SCHEME)) {
-            return path + QNAME_SEP_CLUSTER_NAME + getClusterName();
+            return path + QNAME_SEP_METADATA_NAMESPACE + getMetadataNamespace();
         }

         return path.toLowerCase();
     }

     protected String getColumnQualifiedName(String tblQualifiedName, String columnName) {
-        int sepPos = tblQualifiedName.lastIndexOf(QNAME_SEP_CLUSTER_NAME);
+        int sepPos = tblQualifiedName.lastIndexOf(QNAME_SEP_METADATA_NAMESPACE);

         if (sepPos == -1) {
             return tblQualifiedName + QNAME_SEP_ENTITY_NAME + columnName.toLowerCase();

@@ -984,12 +986,13 @@ public abstract class BaseHiveEvent {
         HBaseTableInfo hBaseTableInfo = new HBaseTableInfo(table);
         String hbaseNameSpace = hBaseTableInfo.getHbaseNameSpace();
         String hbaseTableName = hBaseTableInfo.getHbaseTableName();
+        String metadataNamespace = getMetadataNamespace();

         if (hbaseTableName != null) {
             AtlasEntity nsEntity = new AtlasEntity(HBASE_TYPE_NAMESPACE);
             nsEntity.setAttribute(ATTRIBUTE_NAME, hbaseNameSpace);
-            nsEntity.setAttribute(ATTRIBUTE_CLUSTER_NAME, getClusterName());
-            nsEntity.setAttribute(ATTRIBUTE_QUALIFIED_NAME, getHBaseNameSpaceQualifiedName(getClusterName(), hbaseNameSpace));
+            nsEntity.setAttribute(ATTRIBUTE_CLUSTER_NAME, metadataNamespace);
+            nsEntity.setAttribute(ATTRIBUTE_QUALIFIED_NAME, getHBaseNameSpaceQualifiedName(metadataNamespace, hbaseNameSpace));

             ret = new AtlasEntity(HBASE_TYPE_TABLE);

@@ -999,7 +1002,7 @@ public abstract class BaseHiveEvent {
             AtlasRelatedObjectId objIdRelatedObject = new AtlasRelatedObjectId(getObjectId(nsEntity), RELATIONSHIP_HBASE_TABLE_NAMESPACE);

             ret.setRelationshipAttribute(ATTRIBUTE_NAMESPACE, objIdRelatedObject);
-            ret.setAttribute(ATTRIBUTE_QUALIFIED_NAME, getHBaseTableQualifiedName(getClusterName(), hbaseNameSpace, hbaseTableName));
+            ret.setAttribute(ATTRIBUTE_QUALIFIED_NAME, getHBaseTableQualifiedName(metadataNamespace, hbaseNameSpace, hbaseTableName));

             entities.addReferredEntity(nsEntity);
             entities.addEntity(ret);

@@ -1021,12 +1024,12 @@ public abstract class BaseHiveEvent {
         return ret;
     }

-    private static String getHBaseTableQualifiedName(String clusterName, String nameSpace, String tableName) {
-        return String.format("%s:%s@%s", nameSpace.toLowerCase(), tableName.toLowerCase(), clusterName);
+    private static String getHBaseTableQualifiedName(String metadataNamespace, String nameSpace, String tableName) {
+        return String.format("%s:%s@%s", nameSpace.toLowerCase(), tableName.toLowerCase(), metadataNamespace);
     }

-    private static String getHBaseNameSpaceQualifiedName(String clusterName, String nameSpace) {
-        return String.format("%s@%s", nameSpace.toLowerCase(), clusterName);
+    private static String getHBaseNameSpaceQualifiedName(String metadataNamespace, String nameSpace) {
+        return String.format("%s@%s", nameSpace.toLowerCase(), metadataNamespace);
     }

     private boolean ignoreHDFSPathsinProcessQualifiedName() {
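The column-level helper in BaseHiveEvent concatenates database, table, and column with '.' separators, lower-cases that prefix, and appends the metadata namespace after '@' (the namespace value itself is left as-is). A small self-contained illustration with made-up names:

public class QualifiedNameExample {
    public static void main(String[] args) {
        // Mirrors getQualifiedName(dbName, tableName, colName) from the hunk above,
        // with the metadata namespace inlined for illustration.
        String metadataNamespace = "prod";
        String colQualifiedName  = ("Sales" + '.' + "Orders" + '.' + "Order_ID" + '@').toLowerCase()
                + metadataNamespace;

        System.out.println(colQualifiedName); // prints: sales.orders.order_id@prod
    }
}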
addons/hive-bridge/src/test/java/org/apache/atlas/hive/HiveITBase.java
@@ -519,7 +519,7 @@ public class HiveITBase {
         Table outTable = entity.getTable();
         //refresh table
         outTable = dgiBridge.getHiveClient().getTable(outTable.getDbName(), outTable.getTableName());

-        return HiveMetaStoreBridge.getTableProcessQualifiedName(dgiBridge.getClusterName(), outTable);
+        return HiveMetaStoreBridge.getTableProcessQualifiedName(dgiBridge.getMetadataNamespace(), outTable);
     }
 }
addons/hive-bridge/src/test/java/org/apache/atlas/hive/bridge/HiveMetaStoreBridgeTest.java
View file @
ef402516
...
...
@@ -53,9 +53,8 @@ import static org.mockito.Mockito.verify;
import
static
org
.
mockito
.
Mockito
.
when
;
public
class
HiveMetaStoreBridgeTest
{
private
static
final
String
TEST_DB_NAME
=
"default"
;
public
static
final
String
CLUSTER_NAM
E
=
"primary"
;
public
static
final
String
METADATA_NAMESPAC
E
=
"primary"
;
public
static
final
String
TEST_TABLE_NAME
=
"test_table"
;
@Mock
...
...
@@ -90,13 +89,13 @@ public class HiveMetaStoreBridgeTest {
when
(
hiveClient
.
getDatabase
(
TEST_DB_NAME
)).
thenReturn
(
db
);
when
(
hiveClient
.
getAllTables
(
TEST_DB_NAME
)).
thenReturn
(
Arrays
.
asList
(
new
String
[]{}));
returnExistingDatabase
(
TEST_DB_NAME
,
atlasClientV2
,
CLUSTER_NAM
E
);
returnExistingDatabase
(
TEST_DB_NAME
,
atlasClientV2
,
METADATA_NAMESPAC
E
);
when
(
atlasEntityWithExtInfo
.
getEntity
(
"72e06b34-9151-4023-aa9d-b82103a50e76"
))
.
thenReturn
((
new
AtlasEntity
.
AtlasEntityWithExtInfo
(
getEntity
(
HiveDataTypes
.
HIVE_DB
.
getName
(),
AtlasClient
.
GUID
,
"72e06b34-9151-4023-aa9d-b82103a50e76"
))).
getEntity
());
HiveMetaStoreBridge
bridge
=
new
HiveMetaStoreBridge
(
CLUSTER_NAM
E
,
hiveClient
,
atlasClientV2
);
HiveMetaStoreBridge
bridge
=
new
HiveMetaStoreBridge
(
METADATA_NAMESPAC
E
,
hiveClient
,
atlasClientV2
);
bridge
.
importHiveMetadata
(
null
,
null
,
true
);
// verify update is called
...
...
@@ -109,7 +108,7 @@ public class HiveMetaStoreBridgeTest {
List
<
Table
>
hiveTables
=
setupTables
(
hiveClient
,
TEST_DB_NAME
,
TEST_TABLE_NAME
);
returnExistingDatabase
(
TEST_DB_NAME
,
atlasClientV2
,
CLUSTER_NAM
E
);
returnExistingDatabase
(
TEST_DB_NAME
,
atlasClientV2
,
METADATA_NAMESPAC
E
);
// return existing table
...
...
@@ -119,7 +118,7 @@ public class HiveMetaStoreBridgeTest {
when
(
atlasClientV2
.
getEntityByAttribute
(
HiveDataTypes
.
HIVE_TABLE
.
getName
(),
Collections
.
singletonMap
(
AtlasClient
.
REFERENCEABLE_ATTRIBUTE_NAME
,
HiveMetaStoreBridge
.
getTableQualifiedName
(
CLUSTER_NAM
E
,
TEST_DB_NAME
,
TEST_TABLE_NAME
))))
HiveMetaStoreBridge
.
getTableQualifiedName
(
METADATA_NAMESPAC
E
,
TEST_DB_NAME
,
TEST_TABLE_NAME
))))
.
thenReturn
(
new
AtlasEntity
.
AtlasEntityWithExtInfo
(
getEntity
(
HiveDataTypes
.
HIVE_TABLE
.
getName
(),
AtlasClient
.
GUID
,
"82e06b34-9151-4023-aa9d-b82103a50e77"
)));
...
...
@@ -127,7 +126,7 @@ public class HiveMetaStoreBridgeTest {
.
thenReturn
(
createTableReference
());
Table
testTable
=
hiveTables
.
get
(
0
);
String
processQualifiedName
=
HiveMetaStoreBridge
.
getTableProcessQualifiedName
(
CLUSTER_NAM
E
,
testTable
);
String
processQualifiedName
=
HiveMetaStoreBridge
.
getTableProcessQualifiedName
(
METADATA_NAMESPAC
E
,
testTable
);
when
(
atlasClientV2
.
getEntityByAttribute
(
HiveDataTypes
.
HIVE_PROCESS
.
getName
(),
Collections
.
singletonMap
(
AtlasClient
.
REFERENCEABLE_ATTRIBUTE_NAME
,
...
...
@@ -136,7 +135,7 @@ public class HiveMetaStoreBridgeTest {
getEntity
(
HiveDataTypes
.
HIVE_PROCESS
.
getName
(),
AtlasClient
.
GUID
,
"82e06b34-9151-4023-aa9d-b82103a50e77"
)));
HiveMetaStoreBridge
bridge
=
new
HiveMetaStoreBridge
(
CLUSTER_NAM
E
,
hiveClient
,
atlasClientV2
);
HiveMetaStoreBridge
bridge
=
new
HiveMetaStoreBridge
(
METADATA_NAMESPAC
E
,
hiveClient
,
atlasClientV2
);
bridge
.
importHiveMetadata
(
null
,
null
,
true
);
// verify update is called on table
...
...
@@ -144,13 +143,13 @@ public class HiveMetaStoreBridgeTest {
}
private
void
returnExistingDatabase
(
String
databaseName
,
AtlasClientV2
atlasClientV2
,
String
clusterNam
e
)
private
void
returnExistingDatabase
(
String
databaseName
,
AtlasClientV2
atlasClientV2
,
String
metadataNamespac
e
)
throws
AtlasServiceException
{
//getEntity(HiveDataTypes.HIVE_DB.getName(), AtlasClient.GUID, "72e06b34-9151-4023-aa9d-b82103a50e76");
when
(
atlasClientV2
.
getEntityByAttribute
(
HiveDataTypes
.
HIVE_DB
.
getName
(),
Collections
.
singletonMap
(
AtlasClient
.
REFERENCEABLE_ATTRIBUTE_NAME
,
HiveMetaStoreBridge
.
getDBQualifiedName
(
CLUSTER_NAM
E
,
TEST_DB_NAME
))))
HiveMetaStoreBridge
.
getDBQualifiedName
(
METADATA_NAMESPAC
E
,
TEST_DB_NAME
))))
.
thenReturn
((
new
AtlasEntity
.
AtlasEntityWithExtInfo
(
getEntity
(
HiveDataTypes
.
HIVE_DB
.
getName
(),
AtlasClient
.
GUID
,
"72e06b34-9151-4023-aa9d-b82103a50e76"
))));
...
...
@@ -179,16 +178,16 @@ public class HiveMetaStoreBridgeTest {
List
<
Table
>
hiveTables
=
setupTables
(
hiveClient
,
TEST_DB_NAME
,
TEST_TABLE_NAME
);
Table
hiveTable
=
hiveTables
.
get
(
0
);
returnExistingDatabase
(
TEST_DB_NAME
,
atlasClientV2
,
CLUSTER_NAM
E
);
returnExistingDatabase
(
TEST_DB_NAME
,
atlasClientV2
,
METADATA_NAMESPAC
E
);
when
(
atlasClientV2
.
getEntityByAttribute
(
HiveDataTypes
.
HIVE_TABLE
.
getName
(),
Collections
.
singletonMap
(
AtlasClient
.
REFERENCEABLE_ATTRIBUTE_NAME
,
HiveMetaStoreBridge
.
getTableQualifiedName
(
CLUSTER_NAM
E
,
TEST_DB_NAME
,
TEST_TABLE_NAME
))))
HiveMetaStoreBridge
.
getTableQualifiedName
(
METADATA_NAMESPAC
E
,
TEST_DB_NAME
,
TEST_TABLE_NAME
))))
.
thenReturn
(
new
AtlasEntity
.
AtlasEntityWithExtInfo
(
getEntity
(
HiveDataTypes
.
HIVE_TABLE
.
getName
(),
AtlasClient
.
GUID
,
"82e06b34-9151-4023-aa9d-b82103a50e77"
)));
String
processQualifiedName
=
HiveMetaStoreBridge
.
getTableProcessQualifiedName
(
CLUSTER_NAM
E
,
hiveTable
);
String
processQualifiedName
=
HiveMetaStoreBridge
.
getTableProcessQualifiedName
(
METADATA_NAMESPAC
E
,
hiveTable
);
when
(
atlasClientV2
.
getEntityByAttribute
(
HiveDataTypes
.
HIVE_PROCESS
.
getName
(),
Collections
.
singletonMap
(
AtlasClient
.
REFERENCEABLE_ATTRIBUTE_NAME
,
...
...
@@ -206,7 +205,7 @@ public class HiveMetaStoreBridgeTest {
when
(
hiveClient
.
getPartitions
(
hiveTable
)).
thenReturn
(
Arrays
.
asList
(
new
Partition
[]{
partition
}));
HiveMetaStoreBridge
bridge
=
new
HiveMetaStoreBridge
(
CLUSTER_NAM
E
,
hiveClient
,
atlasClientV2
);
HiveMetaStoreBridge
bridge
=
new
HiveMetaStoreBridge
(
METADATA_NAMESPAC
E
,
hiveClient
,
atlasClientV2
);
try
{
bridge
.
importHiveMetadata
(
null
,
null
,
true
);
}
catch
(
Exception
e
)
{
...
...
@@ -220,12 +219,12 @@ public class HiveMetaStoreBridgeTest {
final
String
table2Name
=
TEST_TABLE_NAME
+
"_1"
;
List
<
Table
>
hiveTables
=
setupTables
(
hiveClient
,
TEST_DB_NAME
,
TEST_TABLE_NAME
,
table2Name
);
returnExistingDatabase
(
TEST_DB_NAME
,
atlasClientV2
,
CLUSTER_NAM
E
);
returnExistingDatabase
(
TEST_DB_NAME
,
atlasClientV2
,
METADATA_NAMESPAC
E
);
when
(
hiveClient
.
getTable
(
TEST_DB_NAME
,
TEST_TABLE_NAME
)).
thenThrow
(
new
RuntimeException
(
"Timeout while reading data from hive metastore"
));
when
(
atlasClientV2
.
getEntityByAttribute
(
HiveDataTypes
.
HIVE_TABLE
.
getName
(),
Collections
.
singletonMap
(
AtlasClient
.
REFERENCEABLE_ATTRIBUTE_NAME
,
HiveMetaStoreBridge
.
getTableQualifiedName
(
CLUSTER_NAM
E
,
TEST_DB_NAME
,
TEST_TABLE_NAME
))))
HiveMetaStoreBridge
.
getTableQualifiedName
(
METADATA_NAMESPAC
E
,
TEST_DB_NAME
,
TEST_TABLE_NAME
))))
.
thenReturn
(
new
AtlasEntity
.
AtlasEntityWithExtInfo
(
getEntity
(
HiveDataTypes
.
HIVE_TABLE
.
getName
(),
AtlasClient
.
GUID
,
"82e06b34-9151-4023-aa9d-b82103a50e77"
)));
...
...
@@ -233,7 +232,7 @@ public class HiveMetaStoreBridgeTest {
.
thenReturn
(
createTableReference
());
Table
testTable
=
hiveTables
.
get
(
1
);
String
processQualifiedName
=
HiveMetaStoreBridge
.
getTableProcessQualifiedName
(
CLUSTER_NAM
E
,
testTable
);
String
processQualifiedName
=
HiveMetaStoreBridge
.
getTableProcessQualifiedName
(
METADATA_NAMESPAC
E
,
testTable
);
when
(
atlasClientV2
.
getEntityByAttribute
(
HiveDataTypes
.
HIVE_PROCESS
.
getName
(),
Collections
.
singletonMap
(
AtlasClient
.
REFERENCEABLE_ATTRIBUTE_NAME
,
...
...
@@ -241,7 +240,7 @@ public class HiveMetaStoreBridgeTest {
.
thenReturn
(
new
AtlasEntity
.
AtlasEntityWithExtInfo
(
getEntity
(
HiveDataTypes
.
HIVE_PROCESS
.
getName
(),
AtlasClient
.
GUID
,
"82e06b34-9151-4023-aa9d-b82103a50e77"
)));
HiveMetaStoreBridge
bridge
=
new
HiveMetaStoreBridge
(
CLUSTER_NAM
E
,
hiveClient
,
atlasClientV2
);
HiveMetaStoreBridge
bridge
=
new
HiveMetaStoreBridge
(
METADATA_NAMESPAC
E
,
hiveClient
,
atlasClientV2
);
try
{
bridge
.
importHiveMetadata
(
null
,
null
,
false
);
}
catch
(
Exception
e
)
{
...
...
@@ -255,13 +254,13 @@ public class HiveMetaStoreBridgeTest {
        final String table2Name = TEST_TABLE_NAME + "_1";
        List<Table>  hiveTables = setupTables(hiveClient, TEST_DB_NAME, TEST_TABLE_NAME, table2Name);

-        returnExistingDatabase(TEST_DB_NAME, atlasClientV2, CLUSTER_NAME);
+        returnExistingDatabase(TEST_DB_NAME, atlasClientV2, METADATA_NAMESPACE);

        when(hiveClient.getTable(TEST_DB_NAME, TEST_TABLE_NAME)).thenThrow(new RuntimeException("Timeout while reading data from hive metastore"));

        when(atlasClientV2.getEntityByAttribute(HiveDataTypes.HIVE_TABLE.getName(), Collections.singletonMap(AtlasClient.REFERENCEABLE_ATTRIBUTE_NAME,
-                HiveMetaStoreBridge.getTableQualifiedName(CLUSTER_NAME, TEST_DB_NAME, TEST_TABLE_NAME))))
+                HiveMetaStoreBridge.getTableQualifiedName(METADATA_NAMESPACE, TEST_DB_NAME, TEST_TABLE_NAME))))
                .thenReturn(new AtlasEntity.AtlasEntityWithExtInfo(getEntity(HiveDataTypes.HIVE_TABLE.getName(), AtlasClient.GUID, "82e06b34-9151-4023-aa9d-b82103a50e77")));
...
...
@@ -270,7 +269,7 @@ public class HiveMetaStoreBridgeTest {
                .thenReturn(createTableReference());

        Table  testTable            = hiveTables.get(1);
-        String processQualifiedName = HiveMetaStoreBridge.getTableProcessQualifiedName(CLUSTER_NAME, testTable);
+        String processQualifiedName = HiveMetaStoreBridge.getTableProcessQualifiedName(METADATA_NAMESPACE, testTable);

        when(atlasClientV2.getEntityByAttribute(HiveDataTypes.HIVE_PROCESS.getName(), Collections.singletonMap(AtlasClient.REFERENCEABLE_ATTRIBUTE_NAME,
...
...
@@ -278,7 +277,7 @@ public class HiveMetaStoreBridgeTest {
                .thenReturn(new AtlasEntity.AtlasEntityWithExtInfo(getEntity(HiveDataTypes.HIVE_PROCESS.getName(), AtlasClient.GUID, "82e06b34-9151-4023-aa9d-b82103a50e77")));

-        HiveMetaStoreBridge bridge = new HiveMetaStoreBridge(CLUSTER_NAME, hiveClient, atlasClientV2);
+        HiveMetaStoreBridge bridge = new HiveMetaStoreBridge(METADATA_NAMESPACE, hiveClient, atlasClientV2);

        try {
            bridge.importHiveMetadata(null, null, true);
            Assert.fail("Table registration is supposed to fail");
...
...
addons/impala-bridge/src/main/java/org/apache/atlas/impala/hook/AtlasImpalaHookContext.java
View file @
ef402516
...
...
@@ -33,7 +33,7 @@ import org.apache.commons.lang.StringUtils;
 * Contain the info related to an linear record from Impala
 */
public class AtlasImpalaHookContext {
-    public static final char QNAME_SEP_CLUSTER_NAME       = '@';
+    public static final char QNAME_SEP_METADATA_NAMESPACE = '@';
    public static final char QNAME_SEP_ENTITY_NAME        = '.';
    public static final char QNAME_SEP_PROCESS            = ':';
...
...
@@ -69,8 +69,8 @@ public class AtlasImpalaHookContext {
    public Collection<AtlasEntity> getEntities() { return qNameEntityMap.values(); }

-    public String getClusterName() {
-        return hook.getClusterName();
+    public String getMetadataNamespace() {
+        return hook.getMetadataNamespace();
    }

    public String getHostName() {
...
...
@@ -82,7 +82,7 @@ public class AtlasImpalaHookContext {
    }

    public String getQualifiedNameForDb(String dbName) {
-        return (dbName + QNAME_SEP_CLUSTER_NAME).toLowerCase() + getClusterName();
+        return (dbName + QNAME_SEP_METADATA_NAMESPACE).toLowerCase() + getMetadataNamespace();
    }

    public String getQualifiedNameForTable(String fullTableName) throws IllegalArgumentException {
...
...
@@ -100,8 +100,7 @@ public class AtlasImpalaHookContext {
    }

    public String getQualifiedNameForTable(String dbName, String tableName) {
-        return (dbName + QNAME_SEP_ENTITY_NAME + tableName + QNAME_SEP_CLUSTER_NAME).toLowerCase() + getClusterName();
+        return (dbName + QNAME_SEP_ENTITY_NAME + tableName + QNAME_SEP_METADATA_NAMESPACE).toLowerCase() + getMetadataNamespace();
    }

    public String getQualifiedNameForColumn(LineageVertex vertex) {
...
...
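Note: the qualified-name helpers in this file change only which accessor supplies the suffix; the db.table.column@suffix layout stays the same, and the suffix itself is not lowercased. A small illustrative sketch, with "cm" standing in for the configured metadata namespace and made-up entity names:

// Illustrative only (not part of the commit): mirrors the concatenation in
// AtlasImpalaHookContext after this change.
public class ImpalaQualifiedNameSketch {
    public static void main(String[] args) {
        char   sepNamespace = '@';   // QNAME_SEP_METADATA_NAMESPACE
        char   sepEntity    = '.';   // QNAME_SEP_ENTITY_NAME
        String namespace    = "cm";  // value returned by getMetadataNamespace(), illustrative

        String dbQName    = ("SALES" + sepNamespace).toLowerCase() + namespace;
        String tableQName = ("SALES" + sepEntity + "ORDERS" + sepNamespace).toLowerCase() + namespace;
        String colQName   = ("SALES" + sepEntity + "ORDERS" + sepEntity + "ID" + sepNamespace).toLowerCase() + namespace;

        System.out.println(dbQName);    // sales@cm
        System.out.println(tableQName); // sales.orders@cm
        System.out.println(colQName);   // sales.orders.id@cm
    }
}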
@@ -179,7 +178,7 @@ public class AtlasImpalaHookContext {
    public String getQualifiedNameForColumn(String dbName, String tableName, String columnName) {
        return (dbName + QNAME_SEP_ENTITY_NAME + tableName + QNAME_SEP_ENTITY_NAME +
-                columnName + QNAME_SEP_CLUSTER_NAME).toLowerCase() + getClusterName();
+                columnName + QNAME_SEP_METADATA_NAMESPACE).toLowerCase() + getMetadataNamespace();
    }

    public String getUserName() {
        return lineageQuery.getUser();
    }
...
...
addons/impala-bridge/src/main/java/org/apache/atlas/impala/hook/ImpalaLineageHook.java    View file @ ef402516
...
...
@@ -18,7 +18,6 @@
package org.apache.atlas.impala.hook;

-import static org.apache.atlas.AtlasConstants.DEFAULT_CLUSTER_NAME;
import java.net.InetAddress;
import java.net.UnknownHostException;
import com.google.common.collect.Sets;
...
...
@@ -42,18 +41,15 @@ public class ImpalaLineageHook extends AtlasHook {
    public static final String ATLAS_ENDPOINT                  = "atlas.rest.address";
    public static final String REALM_SEPARATOR                 = "@";
    public static final String CONF_PREFIX                     = "atlas.hook.impala.";
-    public static final String CONF_CLUSTER_NAME               = "atlas.cluster.name";
    public static final String CONF_REALM_NAME                 = "atlas.realm.name";
    public static final String HDFS_PATH_CONVERT_TO_LOWER_CASE = CONF_PREFIX + "hdfs_path.convert_to_lowercase";
    public static final String DEFAULT_HOST_NAME               = "localhost";

-    private static final String  clusterName;
    private static final String  realm;
    private static final boolean convertHdfsPathToLowerCase;
    private static       String  hostName;

    static {
-        clusterName                = atlasProperties.getString(CONF_CLUSTER_NAME, DEFAULT_CLUSTER_NAME);
        realm                      = atlasProperties.getString(CONF_REALM_NAME, DEFAULT_CLUSTER_NAME); // what should default be ??
        convertHdfsPathToLowerCase = atlasProperties.getBoolean(HDFS_PATH_CONVERT_TO_LOWER_CASE, false);
...
...
@@ -143,10 +139,6 @@ public class ImpalaLineageHook extends AtlasHook {
        return UserGroupInformation.getUGIFromSubject(userSubject);
    }

-    public String getClusterName() {
-        return clusterName;
-    }
-
    public String getRealm() {
        return realm;
    }
...
...
addons/impala-bridge/src/main/java/org/apache/atlas/impala/hook/events/BaseImpalaEvent.java    View file @ ef402516
...
...
@@ -340,7 +340,7 @@ public abstract class BaseImpalaEvent {
            ret.setAttribute(ATTRIBUTE_QUALIFIED_NAME, dbQualifiedName);
            ret.setAttribute(ATTRIBUTE_NAME, dbName.toLowerCase());
-            ret.setAttribute(ATTRIBUTE_CLUSTER_NAME, context.getClusterName());
+            ret.setAttribute(ATTRIBUTE_CLUSTER_NAME, context.getMetadataNamespace());

            context.putEntity(dbQualifiedName, ret);
        }
...
...
addons/impala-bridge/src/test/java/org/apache/atlas/impala/ImpalaLineageITBase.java    View file @ ef402516
...
...
@@ -293,7 +293,7 @@ public class ImpalaLineageITBase {
    protected String assertDatabaseIsRegistered(String dbName, AssertPredicate assertPredicate) throws Exception {
        LOG.debug("Searching for database: {}", dbName);

-        String dbQualifiedName = dbName + AtlasImpalaHookContext.QNAME_SEP_CLUSTER_NAME +
+        String dbQualifiedName = dbName + AtlasImpalaHookContext.QNAME_SEP_METADATA_NAMESPACE +
            CLUSTER_NAME;

        dbQualifiedName = dbQualifiedName.toLowerCase();
...
...
@@ -320,7 +320,7 @@ public class ImpalaLineageITBase {
    protected String assertTableIsRegistered(String fullTableName, AssertPredicate assertPredicate, boolean isTemporary) throws Exception {
        LOG.debug("Searching for table {}", fullTableName);

-        String tableQualifiedName = (fullTableName + AtlasImpalaHookContext.QNAME_SEP_CLUSTER_NAME).toLowerCase() +
+        String tableQualifiedName = (fullTableName + AtlasImpalaHookContext.QNAME_SEP_METADATA_NAMESPACE).toLowerCase() +
            CLUSTER_NAME;

        return assertEntityIsRegistered(HIVE_TYPE_TABLE, REFERENCEABLE_ATTRIBUTE_NAME, tableQualifiedName,
...
addons/impala-bridge/src/test/java/org/apache/atlas/impala/ImpalaLineageToolIT.java    View file @ ef402516
...
...
@@ -77,7 +77,7 @@ public class ImpalaLineageToolIT extends ImpalaLineageITBase {
        // the value is from info in IMPALA_3
        String createTime    = new Long((long)(1554750072)*1000).toString();
-        String processQFName = "db_1.view_1" + AtlasImpalaHookContext.QNAME_SEP_CLUSTER_NAME +
+        String processQFName = "db_1.view_1" + AtlasImpalaHookContext.QNAME_SEP_METADATA_NAMESPACE +
            CLUSTER_NAME + AtlasImpalaHookContext.QNAME_SEP_PROCESS + createTime;

        processQFName = processQFName.toLowerCase();
...
...
@@ -140,7 +140,7 @@ public class ImpalaLineageToolIT extends ImpalaLineageITBase {
        Long   afterCreateTime           = System.currentTimeMillis() / BaseImpalaEvent.MILLIS_CONVERT_FACTOR;
-        String processQFNameWithoutTime = dbName + "." + targetTableName + AtlasImpalaHookContext.QNAME_SEP_CLUSTER_NAME +
+        String processQFNameWithoutTime = dbName + "." + targetTableName + AtlasImpalaHookContext.QNAME_SEP_METADATA_NAMESPACE +
            CLUSTER_NAME + AtlasImpalaHookContext.QNAME_SEP_PROCESS;

        processQFNameWithoutTime = processQFNameWithoutTime.toLowerCase();
...
...
@@ -210,7 +210,7 @@ public class ImpalaLineageToolIT extends ImpalaLineageITBase {
        // the value is from info in IMPALA_4.
        String createTime    = new Long(TABLE_CREATE_TIME*1000).toString();
-        String processQFName = dbName + "." + targetTableName + AtlasImpalaHookContext.QNAME_SEP_CLUSTER_NAME +
+        String processQFName = dbName + "." + targetTableName + AtlasImpalaHookContext.QNAME_SEP_METADATA_NAMESPACE +
            CLUSTER_NAME + AtlasImpalaHookContext.QNAME_SEP_PROCESS + createTime;

        processQFName = processQFName.toLowerCase();
...
...
@@ -266,7 +266,7 @@ public class ImpalaLineageToolIT extends ImpalaLineageITBase {
        // the value is from info in IMPALA_4.
        String createTime    = new Long(TABLE_CREATE_TIME*1000).toString();
-        String processQFName = dbName + "." + targetTableName + AtlasImpalaHookContext.QNAME_SEP_CLUSTER_NAME +
+        String processQFName = dbName + "." + targetTableName + AtlasImpalaHookContext.QNAME_SEP_METADATA_NAMESPACE +
            CLUSTER_NAME + AtlasImpalaHookContext.QNAME_SEP_PROCESS + createTime;

        processQFName = processQFName.toLowerCase();
...
...
@@ -322,9 +322,9 @@ public class ImpalaLineageToolIT extends ImpalaLineageITBase {
        // the value is from info in IMPALA_4.
        String createTime1 = new Long(TABLE_CREATE_TIME_SOURCE * 1000).toString();
        String createTime2 = new Long(TABLE_CREATE_TIME * 1000).toString();
-        String sourceQFName = dbName + "." + sourceTableName + AtlasImpalaHookContext.QNAME_SEP_CLUSTER_NAME +
+        String sourceQFName = dbName + "." + sourceTableName + AtlasImpalaHookContext.QNAME_SEP_METADATA_NAMESPACE +
            CLUSTER_NAME + AtlasImpalaHookContext.QNAME_SEP_PROCESS + createTime1;
-        String targetQFName = dbName + "." + targetTableName + AtlasImpalaHookContext.QNAME_SEP_CLUSTER_NAME +
+        String targetQFName = dbName + "." + targetTableName + AtlasImpalaHookContext.QNAME_SEP_METADATA_NAMESPACE +
            CLUSTER_NAME + AtlasImpalaHookContext.QNAME_SEP_PROCESS + createTime2;
        String processQFName = "QUERY:" + sourceQFName.toLowerCase() + "->:INSERT:" + targetQFName.toLowerCase();
...
...
@@ -385,9 +385,9 @@ public class ImpalaLineageToolIT extends ImpalaLineageITBase {
        // the value is from info in IMPALA_4.
        String createTime1 = new Long(TABLE_CREATE_TIME_SOURCE * 1000).toString();
        String createTime2 = new Long(TABLE_CREATE_TIME * 1000).toString();
-        String sourceQFName = dbName + "." + sourceTableName + AtlasImpalaHookContext.QNAME_SEP_CLUSTER_NAME +
+        String sourceQFName = dbName + "." + sourceTableName + AtlasImpalaHookContext.QNAME_SEP_METADATA_NAMESPACE +
            CLUSTER_NAME + AtlasImpalaHookContext.QNAME_SEP_PROCESS + createTime1;
-        String targetQFName = dbName + "." + targetTableName + AtlasImpalaHookContext.QNAME_SEP_CLUSTER_NAME +
+        String targetQFName = dbName + "." + targetTableName + AtlasImpalaHookContext.QNAME_SEP_METADATA_NAMESPACE +
            CLUSTER_NAME + AtlasImpalaHookContext.QNAME_SEP_PROCESS + createTime2;
        String processQFName = "QUERY:" + sourceQFName.toLowerCase() + "->:INSERT:" + targetQFName.toLowerCase();
...
...
@@ -454,7 +454,7 @@ public class ImpalaLineageToolIT extends ImpalaLineageITBase {
        // the value is from info in IMPALA_4.
        String createTime    = new Long((long)1560885039*1000).toString();
-        String processQFName = dbName + "." + targetTableName + AtlasImpalaHookContext.QNAME_SEP_CLUSTER_NAME +
+        String processQFName = dbName + "." + targetTableName + AtlasImpalaHookContext.QNAME_SEP_METADATA_NAMESPACE +
            CLUSTER_NAME + AtlasImpalaHookContext.QNAME_SEP_PROCESS + createTime;

        processQFName = processQFName.toLowerCase();
...
...
addons/impala-bridge/src/test/java/org/apache/atlas/impala/hook/ImpalaLineageHookIT.java    View file @ ef402516
...
...
@@ -137,7 +137,7 @@ public class ImpalaLineageHookIT extends ImpalaLineageITBase {
        impalaHook.process(queryObj);

        String createTime    = new Long(BaseImpalaEvent.getTableCreateTime(vertex5)).toString();
-        String processQFName = vertex5.getVertexId() + AtlasImpalaHookContext.QNAME_SEP_CLUSTER_NAME +
+        String processQFName = vertex5.getVertexId() + AtlasImpalaHookContext.QNAME_SEP_METADATA_NAMESPACE +
            CLUSTER_NAME + AtlasImpalaHookContext.QNAME_SEP_PROCESS + createTime;

        processQFName = processQFName.toLowerCase();
...
...
addons/kafka-bridge/src/bin/import-kafka.sh    View file @ ef402516
...
...
@@ -117,7 +117,7 @@ else
    exit 1
fi

-CP="${KAFKA_CP}:${ATLASCPPATH}:${HADOOP_CP}"
+CP="${ATLASCPPATH}:${HADOOP_CP}:${KAFKA_CP}"

# If running in cygwin, convert pathnames and classpath to Windows format.
if [ "${CYGWIN}" == "true" ]
...
...
addons/kafka-bridge/src/main/java/org/apache/atlas/kafka/bridge/KafkaBridge.java    View file @ ef402516
...
...
@@ -62,7 +62,8 @@ public class KafkaBridge {
    private static final int    EXIT_CODE_FAILED         = 1;
    private static final String ATLAS_ENDPOINT           = "atlas.rest.address";
    private static final String DEFAULT_ATLAS_URL        = "http://localhost:21000/";
-    private static final String KAFKA_CLUSTER_NAME       = "atlas.cluster.name";
+    private static final String CLUSTER_NAME_KEY         = "atlas.cluster.name";
+    private static final String KAFKA_METADATA_NAMESPACE = "atlas.metadata.namespace";
    private static final String DEFAULT_CLUSTER_NAME     = "primary";
    private static final String ATTRIBUTE_QUALIFIED_NAME = "qualifiedName";
    private static final String DESCRIPTION_ATTR         = "description";
...
...
@@ -81,7 +82,7 @@ public class KafkaBridge {
    private static final int DEFAULT_ZOOKEEPER_CONNECTION_TIMEOUT_MS = 10 * 1000;

    private final List<String>  availableTopics;
-    private final String        clusterName;
+    private final String        metadataNamespace;
    private final AtlasClientV2 atlasClientV2;
    private final ZkUtils       zkUtils;
...
...
@@ -164,11 +165,19 @@ public class KafkaBridge {
        ZkClient zkClient = new ZkClient(zookeeperConnect, sessionTimeOutMs, connectionTimeOutMs, ZKStringSerializer$.MODULE$);

        this.atlasClientV2     = atlasClientV2;
-        this.clusterName       = atlasConf.getString(KAFKA_CLUSTER_NAME, DEFAULT_CLUSTER_NAME);
+        this.metadataNamespace = getMetadataNamespace(atlasConf);
        this.zkUtils           = new ZkUtils(zkClient, new ZkConnection(zookeeperConnect), JaasUtils.isZkSecurityEnabled());
        this.availableTopics   = scala.collection.JavaConversions.seqAsJavaList(zkUtils.getAllTopics());
    }

+    private String getMetadataNamespace(Configuration config) {
+        return config.getString(KAFKA_METADATA_NAMESPACE, getClusterName(config));
+    }
+
+    private String getClusterName(Configuration config) {
+        return config.getString(CLUSTER_NAME_KEY, DEFAULT_CLUSTER_NAME);
+    }
+
    public void importTopic(String topicToImport) throws Exception {
        List<String> topics = availableTopics;
...
...
@@ -191,7 +200,7 @@ public class KafkaBridge {
    @VisibleForTesting
    AtlasEntityWithExtInfo createOrUpdateTopic(String topic) throws Exception {
-        String                 topicQualifiedName = getTopicQualifiedName(clusterName, topic);
+        String                 topicQualifiedName = getTopicQualifiedName(metadataNamespace, topic);
        AtlasEntityWithExtInfo topicEntity        = findTopicEntityInAtlas(topicQualifiedName);

        if (topicEntity == null) {
...
...
@@ -225,10 +234,10 @@ public class KafkaBridge {
            ret = topicEntity;
        }

-        String qualifiedName = getTopicQualifiedName(clusterName, topic);
+        String qualifiedName = getTopicQualifiedName(metadataNamespace, topic);

        ret.setAttribute(ATTRIBUTE_QUALIFIED_NAME, qualifiedName);
-        ret.setAttribute(CLUSTERNAME, clusterName);
+        ret.setAttribute(CLUSTERNAME, metadataNamespace);
        ret.setAttribute(TOPIC, topic);
        ret.setAttribute(NAME, topic);
        ret.setAttribute(DESCRIPTION_ATTR, topic);
...
...
@@ -239,8 +248,8 @@ public class KafkaBridge {
    }

    @VisibleForTesting
-    static String getTopicQualifiedName(String clusterName, String topic) {
-        return String.format(FORMAT_KAKFA_TOPIC_QUALIFIED_NAME, topic.toLowerCase(), clusterName);
+    static String getTopicQualifiedName(String metadataNamespace, String topic) {
+        return String.format(FORMAT_KAKFA_TOPIC_QUALIFIED_NAME, topic.toLowerCase(), metadataNamespace);
    }

    private AtlasEntityWithExtInfo findTopicEntityInAtlas(String topicQualifiedName) {
...
...
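Note: getTopicQualifiedName() keeps using the existing FORMAT_KAKFA_TOPIC_QUALIFIED_NAME constant; only the meaning of its first argument changes. A small illustrative sketch, assuming that constant is the usual "%s@%s" (topic@namespace) pattern, which this commit does not touch:

// Illustrative only: mirrors getTopicQualifiedName(metadataNamespace, topic),
// assuming FORMAT_KAKFA_TOPIC_QUALIFIED_NAME is "%s@%s" (not shown in this diff).
public class KafkaTopicQualifiedNameSketch {
    public static void main(String[] args) {
        String metadataNamespace = "prod-ns";     // resolved from atlas.metadata.namespace, else atlas.cluster.name
        String topic             = "ClickStream"; // made-up topic name

        System.out.println(String.format("%s@%s", topic.toLowerCase(), metadataNamespace)); // clickstream@prod-ns
    }
}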
addons/sqoop-bridge/src/main/java/org/apache/atlas/sqoop/hook/SqoopHook.java    View file @ ef402516
...
...
@@ -52,7 +52,8 @@ import java.util.Date;
public class SqoopHook extends SqoopJobDataPublisher {
    private static final Logger LOG = LoggerFactory.getLogger(SqoopHook.class);

-    public static final String ATLAS_CLUSTER_NAME       = "atlas.cluster.name";
+    public static final String CLUSTER_NAME_KEY         = "atlas.cluster.name";
+    public static final String ATLAS_METADATA_NAMESPACE = "atlas.metadata.namespace";
    public static final String DEFAULT_CLUSTER_NAME     = "primary";
    public static final String USER                     = "userName";
...
...
@@ -81,11 +82,13 @@ public class SqoopHook extends SqoopJobDataPublisher {
    public void publish(SqoopJobDataPublisher.Data data) throws AtlasHookException {
        try {
            Configuration atlasProperties = ApplicationProperties.get();
-            String        clusterName       = atlasProperties.getString(ATLAS_CLUSTER_NAME, DEFAULT_CLUSTER_NAME);
+            String        metadataNamespace = atlasProperties.getString(ATLAS_METADATA_NAMESPACE, getClusterName(atlasProperties));

            AtlasEntity entDbStore   = toSqoopDBStoreEntity(data);
-            AtlasEntity entHiveDb    = toHiveDatabaseEntity(clusterName, data.getHiveDB());
+            AtlasEntity entHiveDb    = toHiveDatabaseEntity(metadataNamespace, data.getHiveDB());
            AtlasEntity entHiveTable = data.getHiveTable() != null ? toHiveTableEntity(entHiveDb, data.getHiveTable()) : null;
-            AtlasEntity entProcess   = toSqoopProcessEntity(entDbStore, entHiveDb, entHiveTable, data, clusterName);
+            AtlasEntity entProcess   = toSqoopProcessEntity(entDbStore, entHiveDb, entHiveTable, data, metadataNamespace);

            AtlasEntitiesWithExtInfo entities = new AtlasEntitiesWithExtInfo(entProcess);
...
...
@@ -105,11 +108,15 @@ public class SqoopHook extends SqoopJobDataPublisher {
        }
    }

-    private AtlasEntity toHiveDatabaseEntity(String clusterName, String dbName) {
+    private String getClusterName(Configuration config) {
+        return config.getString(CLUSTER_NAME_KEY, DEFAULT_CLUSTER_NAME);
+    }
+
+    private AtlasEntity toHiveDatabaseEntity(String metadataNamespace, String dbName) {
        AtlasEntity entHiveDb     = new AtlasEntity(HiveDataTypes.HIVE_DB.getName());
-        String      qualifiedName = HiveMetaStoreBridge.getDBQualifiedName(clusterName, dbName);
+        String      qualifiedName = HiveMetaStoreBridge.getDBQualifiedName(metadataNamespace, dbName);

-        entHiveDb.setAttribute(AtlasConstants.CLUSTER_NAME_ATTRIBUTE, clusterName);
+        entHiveDb.setAttribute(AtlasConstants.CLUSTER_NAME_ATTRIBUTE, metadataNamespace);
        entHiveDb.setAttribute(AtlasClient.NAME, dbName);
        entHiveDb.setAttribute(AtlasClient.REFERENCEABLE_ATTRIBUTE_NAME, qualifiedName);
...
...
@@ -153,9 +160,10 @@ public class SqoopHook extends SqoopJobDataPublisher {
        return entDbStore;
    }

-    private AtlasEntity toSqoopProcessEntity(AtlasEntity entDbStore, AtlasEntity entHiveDb, AtlasEntity entHiveTable, SqoopJobDataPublisher.Data data, String clusterName) {
+    private AtlasEntity toSqoopProcessEntity(AtlasEntity entDbStore, AtlasEntity entHiveDb, AtlasEntity entHiveTable, SqoopJobDataPublisher.Data data, String metadataNamespace) {
        AtlasEntity         entProcess       = new AtlasEntity(SqoopDataTypes.SQOOP_PROCESS.getName());
-        String              sqoopProcessName = getSqoopProcessName(data, clusterName);
+        String              sqoopProcessName = getSqoopProcessName(data, metadataNamespace);
        Map<String, String> sqoopOptionsMap  = new HashMap<>();
        Properties          options          = data.getOptions();
...
...
@@ -190,7 +198,7 @@ public class SqoopHook extends SqoopJobDataPublisher {
        return data.getOperation().toLowerCase().equals("import");
    }

-    static String getSqoopProcessName(Data data, String clusterName) {
+    static String getSqoopProcessName(Data data, String metadataNamespace) {
        StringBuilder name = new StringBuilder(String.format("sqoop %s --connect %s", data.getOperation(), data.getUrl()));

        if (StringUtils.isNotEmpty(data.getHiveTable())) {
...
...
@@ -204,9 +212,9 @@ public class SqoopHook extends SqoopJobDataPublisher {
        }

        if (data.getHiveTable() != null) {
-            name.append(String.format(" --hive-%s --hive-database %s --hive-table %s --hive-cluster %s", data.getOperation(), data.getHiveDB().toLowerCase(), data.getHiveTable().toLowerCase(), clusterName));
+            name.append(String.format(" --hive-%s --hive-database %s --hive-table %s --hive-cluster %s", data.getOperation(), data.getHiveDB().toLowerCase(), data.getHiveTable().toLowerCase(), metadataNamespace));
        } else {
-            name.append(String.format("--hive-%s --hive-database %s --hive-cluster %s", data.getOperation(), data.getHiveDB(), clusterName));
+            name.append(String.format("--hive-%s --hive-database %s --hive-cluster %s", data.getOperation(), data.getHiveDB(), metadataNamespace));
        }

        return name.toString();
...
...
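Note: only the source of the --hive-cluster argument changes here; the generated process-name format is untouched. A rough sketch of the name getSqoopProcessName() builds for a Hive import, with made-up values and the intermediate --table/--username options omitted:

// Illustrative only: reproduces the two String.format calls shown above;
// "prod-ns" stands in for the resolved metadata namespace.
public class SqoopProcessNameSketch {
    public static void main(String[] args) {
        String operation = "import";
        String url       = "jdbc:mysql://db-host/sales";
        String hiveDb    = "SALES";
        String hiveTable = "ORDERS";
        String namespace = "prod-ns";

        StringBuilder name = new StringBuilder(String.format("sqoop %s --connect %s", operation, url));
        name.append(String.format(" --hive-%s --hive-database %s --hive-table %s --hive-cluster %s",
                operation, hiveDb.toLowerCase(), hiveTable.toLowerCase(), namespace));

        // sqoop import --connect jdbc:mysql://db-host/sales --hive-import --hive-database sales --hive-table orders --hive-cluster prod-ns
        System.out.println(name);
    }
}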
addons/storm-bridge/src/main/java/org/apache/atlas/storm/hook/StormAtlasHook.java    View file @ ef402516
...
...
@@ -118,7 +118,7 @@ public class StormAtlasHook extends AtlasHook implements ISubmitterHook {
        topology.setAttribute(AtlasClient.REFERENCEABLE_ATTRIBUTE_NAME, topologyInfo.get_name());
        topology.setAttribute(AtlasClient.OWNER, owner);
        topology.setAttribute("startTime", new Date(System.currentTimeMillis()));
-        topology.setAttribute(AtlasConstants.CLUSTER_NAME_ATTRIBUTE, getClusterName(stormConf));
+        topology.setAttribute(AtlasConstants.CLUSTER_NAME_ATTRIBUTE, getMetadataNamespace());

        return topology;
    }
...
...
@@ -167,8 +167,8 @@ public class StormAtlasHook extends AtlasHook implements ISubmitterHook {
    private AtlasEntity addDataSet(String dataSetType, String topologyOwner, Serializable instance, Map stormConf, AtlasEntityExtInfo entityExtInfo) {
        Map<String, String> config = StormTopologyUtil.getFieldValues(instance, true, null);
-        String      clusterName = null;
        AtlasEntity ret         = null;
+        String      metadataNamespace = getMetadataNamespace();

        // todo: need to redo this with a config driven approach
        switch (dataSetType) {
...
...
@@ -188,8 +188,6 @@ public class StormAtlasHook extends AtlasHook implements ISubmitterHook {
                    topologyOwner = ANONYMOUS_OWNER;
                }

-                clusterName = getClusterName(stormConf);
-
                if (topicName == null) {
                    LOG.error("Kafka topic name not found");
                } else {
...
...
@@ -198,7 +196,7 @@ public class StormAtlasHook extends AtlasHook implements ISubmitterHook {
                    ret.setAttribute("topic", topicName);
                    ret.setAttribute("uri", uri);
                    ret.setAttribute(AtlasClient.OWNER, topologyOwner);
-                    ret.setAttribute(AtlasClient.REFERENCEABLE_ATTRIBUTE_NAME, getKafkaTopicQualifiedName(clusterName, topicName));
+                    ret.setAttribute(AtlasClient.REFERENCEABLE_ATTRIBUTE_NAME, getKafkaTopicQualifiedName(metadataNamespace, topicName));
                    ret.setAttribute(AtlasClient.NAME, topicName);
                }
            }
...
...
@@ -212,7 +210,7 @@ public class StormAtlasHook extends AtlasHook implements ISubmitterHook {
                    uri = hbaseTableName;
                }

-                clusterName = extractComponentClusterName(HBaseConfiguration.create(), stormConf);
+                metadataNamespace = extractComponentMetadataNamespace(HBaseConfiguration.create(), stormConf);

                if (hbaseTableName == null) {
                    LOG.error("HBase table name not found");
...
...
@@ -223,7 +221,7 @@ public class StormAtlasHook extends AtlasHook implements ISubmitterHook {
                    ret.setAttribute(AtlasClient.NAME, uri);
                    ret.setAttribute(AtlasClient.OWNER, stormConf.get("storm.kerberos.principal"));
                    //TODO - Hbase Namespace is hardcoded to 'default'. need to check how to get this or is it already part of tableName
-                    ret.setAttribute(AtlasClient.REFERENCEABLE_ATTRIBUTE_NAME, getHbaseTableQualifiedName(clusterName, HBASE_NAMESPACE_DEFAULT, hbaseTableName));
+                    ret.setAttribute(AtlasClient.REFERENCEABLE_ATTRIBUTE_NAME, getHbaseTableQualifiedName(metadataNamespace, HBASE_NAMESPACE_DEFAULT, hbaseTableName));
                }
            }
            break;
...
...
@@ -234,11 +232,9 @@ public class StormAtlasHook extends AtlasHook implements ISubmitterHook {
                final Path   hdfsPath      = new Path(hdfsPathStr);
                final String nameServiceID = HdfsNameServiceResolver.getNameServiceIDForPath(hdfsPathStr);

-                clusterName = getClusterName(stormConf);
-
                ret = new AtlasEntity(HiveMetaStoreBridge.HDFS_PATH);

-                ret.setAttribute(AtlasConstants.CLUSTER_NAME_ATTRIBUTE, getClusterName(stormConf));
+                ret.setAttribute(AtlasConstants.CLUSTER_NAME_ATTRIBUTE, metadataNamespace);
                ret.setAttribute(AtlasClient.OWNER, stormConf.get("hdfs.kerberos.principal"));
                ret.setAttribute(AtlasClient.NAME, Path.getPathWithoutSchemeAndAuthority(hdfsPath).toString().toLowerCase());
...
...
@@ -247,16 +243,16 @@ public class StormAtlasHook extends AtlasHook implements ISubmitterHook {
                    ret.setAttribute("path", updatedPath);
                    ret.setAttribute("nameServiceId", nameServiceID);
-                    ret.setAttribute(AtlasClient.REFERENCEABLE_ATTRIBUTE_NAME, getHdfsPathQualifiedName(clusterName, updatedPath));
+                    ret.setAttribute(AtlasClient.REFERENCEABLE_ATTRIBUTE_NAME, getHdfsPathQualifiedName(metadataNamespace, updatedPath));
                } else {
                    ret.setAttribute("path", hdfsPathStr);
-                    ret.setAttribute(AtlasClient.REFERENCEABLE_ATTRIBUTE_NAME, getHdfsPathQualifiedName(clusterName, hdfsPathStr));
+                    ret.setAttribute(AtlasClient.REFERENCEABLE_ATTRIBUTE_NAME, getHdfsPathQualifiedName(metadataNamespace, hdfsPathStr));
                }
            }
            break;

            case "HiveBolt": {
-                clusterName = extractComponentClusterName(new HiveConf(), stormConf);
+                metadataNamespace = extractComponentMetadataNamespace(new HiveConf(), stormConf);

                final String dbName  = config.get("HiveBolt.options.databaseName");
                final String tblName = config.get("HiveBolt.options.tableName");
...
...
@@ -267,8 +263,8 @@ public class StormAtlasHook extends AtlasHook implements ISubmitterHook {
                AtlasEntity dbEntity = new AtlasEntity("hive_db");

                dbEntity.setAttribute(AtlasClient.NAME, dbName);
-                dbEntity.setAttribute(AtlasClient.REFERENCEABLE_ATTRIBUTE_NAME, HiveMetaStoreBridge.getDBQualifiedName(getClusterName(stormConf), dbName));
-                dbEntity.setAttribute(AtlasConstants.CLUSTER_NAME_ATTRIBUTE, getClusterName(stormConf));
+                dbEntity.setAttribute(AtlasClient.REFERENCEABLE_ATTRIBUTE_NAME, HiveMetaStoreBridge.getDBQualifiedName(metadataNamespace, dbName));
+                dbEntity.setAttribute(AtlasConstants.CLUSTER_NAME_ATTRIBUTE, metadataNamespace);

                entityExtInfo.addReferredEntity(dbEntity);
...
...
@@ -277,7 +273,7 @@ public class StormAtlasHook extends AtlasHook implements ISubmitterHook {
                ret.setAttribute(AtlasClient.NAME, tblName);
                ret.setAttribute(ATTRIBUTE_DB, AtlasTypeUtil.getAtlasObjectId(dbEntity));
-                ret.setAttribute(AtlasClient.REFERENCEABLE_ATTRIBUTE_NAME, HiveMetaStoreBridge.getTableQualifiedName(clusterName, dbName, tblName));
+                ret.setAttribute(AtlasClient.REFERENCEABLE_ATTRIBUTE_NAME, HiveMetaStoreBridge.getTableQualifiedName(metadataNamespace, dbName, tblName));
            }
        }
        break;
...
...
@@ -384,30 +380,25 @@ public class StormAtlasHook extends AtlasHook implements ISubmitterHook {
        }
    }

-    public static String getKafkaTopicQualifiedName(String clusterName, String topicName) {
-        return String.format("%s@%s", topicName.toLowerCase(), clusterName);
+    public static String getKafkaTopicQualifiedName(String metadataNamespace, String topicName) {
+        return String.format("%s@%s", topicName.toLowerCase(), metadataNamespace);
    }

-    public static String getHbaseTableQualifiedName(String clusterName, String nameSpace, String tableName) {
-        return String.format("%s.%s@%s", nameSpace.toLowerCase(), tableName.toLowerCase(), clusterName);
+    public static String getHbaseTableQualifiedName(String metadataNamespace, String nameSpace, String tableName) {
+        return String.format("%s.%s@%s", nameSpace.toLowerCase(), tableName.toLowerCase(), metadataNamespace);
    }

-    public static String getHdfsPathQualifiedName(String clusterName, String hdfsPath) {
-        return String.format("%s@%s", hdfsPath.toLowerCase(), clusterName);
+    public static String getHdfsPathQualifiedName(String metadataNamespace, String hdfsPath) {
+        return String.format("%s@%s", hdfsPath.toLowerCase(), metadataNamespace);
    }

-    private String getClusterName(Map stormConf) {
-        return atlasProperties.getString(AtlasConstants.CLUSTER_NAME_KEY, AtlasConstants.DEFAULT_CLUSTER_NAME);
-    }
-
-    private String extractComponentClusterName(Configuration configuration, Map stormConf) {
-        String clusterName = configuration.get(AtlasConstants.CLUSTER_NAME_KEY, null);
+    private String extractComponentMetadataNamespace(Configuration configuration, Map stormConf) {
+        String clusterName = configuration.get(CLUSTER_NAME_KEY, null);

        if (clusterName == null) {
-            clusterName = getClusterName(stormConf);
+            clusterName = getMetadataNamespace();
        }

        return clusterName;
    }
}
\ No newline at end of file
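Note: the three static helpers keep their "%s@%s" and "%s.%s@%s" formats, so a deployment whose metadata namespace equals its old cluster name produces identical qualified names. A small illustrative sketch, with "prod-ns" standing in for the namespace and made-up entity names:

// Illustrative only: same String.format patterns as the helpers above.
public class StormQualifiedNameSketch {
    public static void main(String[] args) {
        String ns = "prod-ns";

        System.out.println(String.format("%s@%s", "clicks".toLowerCase(), ns));                              // clicks@prod-ns (Kafka topic)
        System.out.println(String.format("%s.%s@%s", "default".toLowerCase(), "Events".toLowerCase(), ns));  // default.events@prod-ns (HBase table)
        System.out.println(String.format("%s@%s", "/data/landing".toLowerCase(), ns));                       // /data/landing@prod-ns (HDFS path)
    }
}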
common/src/main/java/org/apache/atlas/AtlasConstants.java    View file @ ef402516
...
...
@@ -26,15 +26,15 @@ public final class AtlasConstants {
    }

    public static final String CLUSTER_NAME_KEY              = "atlas.cluster.name";
    public static final String DEFAULT_CLUSTER_NAME          = "primary";
-    public static final String CLUSTER_NAME_ATTRIBUTE        = "clusterName";
    public static final String SYSTEM_PROPERTY_APP_PORT      = "atlas.app.port";
-    public static final String DEFAULT_APP_PORT_STR          = "21000";
    public static final String ATLAS_REST_ADDRESS_KEY        = "atlas.rest.address";
-    public static final String ATLAS_MIGRATION_MODE_FILENAME = "atlas.migration.data.filename";
-    public static final String ATLAS_SERVICES_ENABLED        = "atlas.services.enabled";
+    public static final String CLUSTER_NAME_ATTRIBUTE        = "clusterName";
+    public static final String DEFAULT_APP_PORT_STR          = "21000";
    public static final String DEFAULT_ATLAS_REST_ADDRESS    = "http://localhost:21000";
-    public static final int    ATLAS_SHUTDOWN_HOOK_PRIORITY  = 30;
    public static final String DEFAULT_TYPE_VERSION          = "1.0";
+    public static final int    ATLAS_SHUTDOWN_HOOK_PRIORITY  = 30;
+    public static final String ATLAS_MIGRATION_MODE_FILENAME = "atlas.migration.data.filename";
+    public static final String ATLAS_SERVICES_ENABLED        = "atlas.services.enabled";
}
notification/src/main/java/org/apache/atlas/hook/AtlasHook.java    View file @ ef402516
...
...
@@ -59,10 +59,14 @@ public abstract class AtlasHook {
    public static final String ATLAS_NOTIFICATION_FAILED_MESSAGES_FILENAME_KEY    = "atlas.notification.failed.messages.filename";
    public static final String ATLAS_NOTIFICATION_LOG_FAILED_MESSAGES_ENABLED_KEY = "atlas.notification.log.failed.messages";
    public static final String ATLAS_HOOK_FAILED_MESSAGES_LOG_DEFAULT_NAME        = "atlas_hook_failed_messages.log";
+    public static final String CONF_METADATA_NAMESPACE                            = "atlas.metadata.namespace";
+    public static final String CLUSTER_NAME_KEY                                   = "atlas.cluster.name";
+    public static final String DEFAULT_CLUSTER_NAME                               = "primary";

    protected static Configuration         atlasProperties;
    protected static NotificationInterface notificationInterface;

+    private static final String               metadataNamespace;
    private static final int                  SHUTDOWN_HOOK_WAIT_TIME_MS = 3000;
    private static final boolean              logFailedMessages;
    private static final FailedMessagesLogger failedMessagesLogger;
...
@@ -95,6 +99,7 @@ public abstract class AtlasHook {
}
}
metadataNamespace
=
getMetadataNamespace
(
atlasProperties
);
notificationMaxRetries
=
atlasProperties
.
getInt
(
ATLAS_NOTIFICATION_MAX_RETRIES
,
3
);
notificationRetryInterval
=
atlasProperties
.
getInt
(
ATLAS_NOTIFICATION_RETRY_INTERVAL
,
1000
);
notificationInterface
=
NotificationProvider
.
get
();
...
...
@@ -306,4 +311,15 @@ public abstract class AtlasHook {
        return ret;
    }

+    private static String getMetadataNamespace(Configuration config) {
+        return config.getString(CONF_METADATA_NAMESPACE, getClusterName(config));
+    }
+
+    private static String getClusterName(Configuration config) {
+        return config.getString(CLUSTER_NAME_KEY, DEFAULT_CLUSTER_NAME);
+    }
+
+    public String getMetadataNamespace() {
+        return metadataNamespace;
+    }
}
\ No newline at end of file
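Note: the namespace is resolved once in the static initializer and exposed to every hook via getMetadataNamespace(), so existing deployments that only set atlas.cluster.name keep producing the same qualified names. A minimal sketch of the fallback order, using java.util.Properties in place of Atlas' Configuration object:

// Sketch (not part of the commit) of the lookup order the two new private helpers
// encode: atlas.metadata.namespace first, then atlas.cluster.name, then "primary".
import java.util.Properties;

public class MetadataNamespaceResolutionSketch {
    static String getMetadataNamespace(Properties conf) {
        String clusterName = conf.getProperty("atlas.cluster.name", "primary"); // CLUSTER_NAME_KEY, DEFAULT_CLUSTER_NAME
        return conf.getProperty("atlas.metadata.namespace", clusterName);       // CONF_METADATA_NAMESPACE wins when set
    }

    public static void main(String[] args) {
        Properties conf = new Properties();
        System.out.println(getMetadataNamespace(conf));   // primary

        conf.setProperty("atlas.cluster.name", "cl1");
        System.out.println(getMetadataNamespace(conf));   // cl1 (old configs keep working)

        conf.setProperty("atlas.metadata.namespace", "ns1");
        System.out.println(getMetadataNamespace(conf));   // ns1
    }
}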