Commit b05b8556 by rmani Committed by apoorvnaik

ATLAS-1805: Provide an Atlas hook to send HBase Namespace/Table/column family metadata to Atlas

parent d9b2bd06
<?xml version="1.0" encoding="UTF-8"?>
<!--
~ Licensed to the Apache Software Foundation (ASF) under one
~ or more contributor license agreements. See the NOTICE file
~ distributed with this work for additional information
~ regarding copyright ownership. The ASF licenses this file
~ to you under the Apache License, Version 2.0 (the
~ "License"); you may not use this file except in compliance
~ with the License. You may obtain a copy of the License at
~
~ http://www.apache.org/licenses/LICENSE-2.0
~
~ Unless required by applicable law or agreed to in writing, software
~ distributed under the License is distributed on an "AS IS" BASIS,
~ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
~ See the License for the specific language governing permissions and
~ limitations under the License.
-->
<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
<modelVersion>4.0.0</modelVersion>
<parent>
<artifactId>apache-atlas</artifactId>
<groupId>org.apache.atlas</groupId>
<version>0.8.2-SNAPSHOT</version>
<relativePath>../../</relativePath>
</parent>
<artifactId>hbase-bridge-shim</artifactId>
<description>Apache Atlas HBase Bridge Shim Module</description>
<name>Apache Atlas HBase Bridge Shim</name>
<packaging>jar</packaging>
<dependencies>
<!-- Logging -->
<dependency>
<groupId>org.apache.atlas</groupId>
<artifactId>atlas-plugin-classloader</artifactId>
</dependency>
<dependency>
<groupId>org.apache.hbase</groupId>
<artifactId>hbase-server</artifactId>
<version>${hbase.version}</version>
</dependency>
</dependencies>
</project>
This source diff could not be displayed because it is too large. You can view the blob instead.
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
* <p/>
* http://www.apache.org/licenses/LICENSE-2.0
* <p/>
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.atlas.hbase.hook;
import org.apache.atlas.hbase.bridge.HBaseAtlasHook;
import org.apache.hadoop.hbase.HRegionInfo;
import org.apache.hadoop.hbase.HTableDescriptor;
import org.apache.hadoop.hbase.HColumnDescriptor;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.NamespaceDescriptor;
import org.apache.hadoop.hbase.coprocessor.MasterCoprocessorEnvironment;
import org.apache.hadoop.hbase.coprocessor.ObserverContext;
import org.apache.hadoop.hbase.util.Bytes;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.io.IOException;
/**
 * HBase master coprocessor that forwards DDL events (namespace, table and
 * column-family create/alter/delete) to Atlas via {@link HBaseAtlasHook}.
 *
 * Each post-hook logs an entry/exit trace at DEBUG level and delegates to the
 * singleton hook; no state is kept beyond the hook reference.
 */
public class HBaseAtlasCoprocessor extends HBaseAtlasCoprocessorBase {
    private static final Logger LOG = LoggerFactory.getLogger(HBaseAtlasCoprocessor.class);

    // Singleton hook that serializes and sends notifications to Atlas.
    final HBaseAtlasHook hbaseAtlasHook;

    public HBaseAtlasCoprocessor() {
        hbaseAtlasHook = HBaseAtlasHook.getInstance();
    }

    @Override
    public void postCreateTable(ObserverContext<MasterCoprocessorEnvironment> observerContext, HTableDescriptor hTableDescriptor, HRegionInfo[] hRegionInfos) throws IOException {
        if (LOG.isDebugEnabled()) {
            // fixed: trace messages previously said "HBaseAtlasCoprocessoror"
            LOG.debug("==> HBaseAtlasCoprocessor.postCreateTable()");
        }

        hbaseAtlasHook.sendHBaseTableOperation(hTableDescriptor, null, HBaseAtlasHook.OPERATION.CREATE_TABLE);

        if (LOG.isDebugEnabled()) {
            LOG.debug("<== HBaseAtlasCoprocessor.postCreateTable()");
        }
    }

    @Override
    public void postDeleteTable(ObserverContext<MasterCoprocessorEnvironment> observerContext, TableName tableName) throws IOException {
        if (LOG.isDebugEnabled()) {
            LOG.debug("==> HBaseAtlasCoprocessor.postDeleteTable()");
        }

        // Table descriptor is no longer available after delete; only the name is sent.
        hbaseAtlasHook.sendHBaseTableOperation(null, tableName, HBaseAtlasHook.OPERATION.DELETE_TABLE);

        if (LOG.isDebugEnabled()) {
            LOG.debug("<== HBaseAtlasCoprocessor.postDeleteTable()");
        }
    }

    @Override
    public void postModifyTable(ObserverContext<MasterCoprocessorEnvironment> observerContext, TableName tableName, HTableDescriptor hTableDescriptor) throws IOException {
        if (LOG.isDebugEnabled()) {
            LOG.debug("==> HBaseAtlasCoprocessor.postModifyTable()");
        }

        hbaseAtlasHook.sendHBaseTableOperation(hTableDescriptor, tableName, HBaseAtlasHook.OPERATION.ALTER_TABLE);

        if (LOG.isDebugEnabled()) {
            LOG.debug("<== HBaseAtlasCoprocessor.postModifyTable()");
        }
    }

    @Override
    public void postAddColumn(ObserverContext<MasterCoprocessorEnvironment> observerContext, TableName tableName, HColumnDescriptor hColumnDescriptor) throws IOException {
        if (LOG.isDebugEnabled()) {
            LOG.debug("==> HBaseAtlasCoprocessor.postAddColumn()");
        }

        hbaseAtlasHook.sendHBaseColumnFamilyOperation(hColumnDescriptor, tableName, null, HBaseAtlasHook.OPERATION.CREATE_COLUMN_FAMILY);

        if (LOG.isDebugEnabled()) {
            LOG.debug("<== HBaseAtlasCoprocessor.postAddColumn()");
        }
    }

    @Override
    public void postModifyColumn(ObserverContext<MasterCoprocessorEnvironment> observerContext, TableName tableName, HColumnDescriptor hColumnDescriptor) throws IOException {
        if (LOG.isDebugEnabled()) {
            LOG.debug("==> HBaseAtlasCoprocessor.postModifyColumn()");
        }

        hbaseAtlasHook.sendHBaseColumnFamilyOperation(hColumnDescriptor, tableName, null, HBaseAtlasHook.OPERATION.ALTER_COLUMN_FAMILY);

        if (LOG.isDebugEnabled()) {
            LOG.debug("<== HBaseAtlasCoprocessor.postModifyColumn()");
        }
    }

    @Override
    public void postDeleteColumn(ObserverContext<MasterCoprocessorEnvironment> observerContext, TableName tableName, byte[] bytes) throws IOException {
        if (LOG.isDebugEnabled()) {
            LOG.debug("==> HBaseAtlasCoprocessor.postDeleteColumn()");
        }

        // Only the family name (as raw bytes) is available on delete.
        String columnFamily = Bytes.toString(bytes);

        hbaseAtlasHook.sendHBaseColumnFamilyOperation(null, tableName, columnFamily, HBaseAtlasHook.OPERATION.DELETE_COLUMN_FAMILY);

        if (LOG.isDebugEnabled()) {
            LOG.debug("<== HBaseAtlasCoprocessor.postDeleteColumn()");
        }
    }

    @Override
    public void postCreateNamespace(ObserverContext<MasterCoprocessorEnvironment> observerContext, NamespaceDescriptor namespaceDescriptor) throws IOException {
        if (LOG.isDebugEnabled()) {
            LOG.debug("==> HBaseAtlasCoprocessor.postCreateNamespace()");
        }

        hbaseAtlasHook.sendHBaseNameSpaceOperation(namespaceDescriptor, null, HBaseAtlasHook.OPERATION.CREATE_NAMESPACE);

        if (LOG.isDebugEnabled()) {
            LOG.debug("<== HBaseAtlasCoprocessor.postCreateNamespace()");
        }
    }

    @Override
    public void postDeleteNamespace(ObserverContext<MasterCoprocessorEnvironment> observerContext, String s) throws IOException {
        if (LOG.isDebugEnabled()) {
            LOG.debug("==> HBaseAtlasCoprocessor.postDeleteNamespace()");
        }

        hbaseAtlasHook.sendHBaseNameSpaceOperation(null, s, HBaseAtlasHook.OPERATION.DELETE_NAMESPACE);

        if (LOG.isDebugEnabled()) {
            // fixed: exit trace previously used the entry marker "==>"
            LOG.debug("<== HBaseAtlasCoprocessor.postDeleteNamespace()");
        }
    }

    @Override
    public void postModifyNamespace(ObserverContext<MasterCoprocessorEnvironment> observerContext, NamespaceDescriptor namespaceDescriptor) throws IOException {
        if (LOG.isDebugEnabled()) {
            LOG.debug("==> HBaseAtlasCoprocessor.postModifyNamespace()");
        }

        hbaseAtlasHook.sendHBaseNameSpaceOperation(namespaceDescriptor, null, HBaseAtlasHook.OPERATION.ALTER_NAMESPACE);

        if (LOG.isDebugEnabled()) {
            LOG.debug("<== HBaseAtlasCoprocessor.postModifyNamespace()");
        }
    }
}
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.atlas.hbase.model;
/**
* HBASE Data Types for model and bridge.
*/
/**
 * Atlas type names for the HBase model entities (namespace, table,
 * column family, column).
 *
 * {@link #getName()} derives the Atlas type name from the enum constant,
 * e.g. {@code HBASE_TABLE -> "hbase_table"}.
 */
public enum HBaseDataTypes {
    // Classes
    HBASE_NAMESPACE,
    HBASE_TABLE,
    HBASE_COLUMN_FAMILY,
    HBASE_COLUMN;

    /**
     * Returns the lowercase Atlas type name for this constant.
     *
     * Uses Locale.ROOT so the mapping is locale-independent: with the default
     * locale, a Turkish JVM would lowercase 'I' to dotless 'ı' and corrupt
     * names such as "hbase_column_famıly".
     */
    public String getName() {
        return name().toLowerCase(java.util.Locale.ROOT);
    }
}
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
* <p>
* http://www.apache.org/licenses/LICENSE-2.0
* <p>
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.atlas.hbase.model;
import org.apache.atlas.hbase.bridge.HBaseAtlasHook;
import org.apache.atlas.notification.hook.HookNotification;
import org.apache.hadoop.hbase.NamespaceDescriptor;
import org.apache.hadoop.hbase.HTableDescriptor;
import org.apache.hadoop.hbase.HColumnDescriptor;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.security.UserGroupInformation;
import java.util.ArrayList;
import java.util.List;
import java.util.Map;
public class HBaseOperationContext {
private final UserGroupInformation ugi;
private final Map<String, String> hbaseConf;
private final HBaseAtlasHook.OPERATION operation;
private final String user;
private final NamespaceDescriptor namespaceDescriptor;
private final HTableDescriptor hTableDescriptor;
private final HColumnDescriptor[] hColumnDescriptors;
private final TableName tableName;
private final String nameSpace;
private final String columnFamily;
private final String owner;
private final HColumnDescriptor hColumnDescriptor;
public HBaseOperationContext(NamespaceDescriptor namespaceDescriptor, String nameSpace, HTableDescriptor hTableDescriptor, TableName tableName, HColumnDescriptor[] hColumnDescriptors,
HColumnDescriptor hColumnDescriptor, String columnFamily, HBaseAtlasHook.OPERATION operation, UserGroupInformation ugi , String user, String owner,
Map<String, String> hbaseConf) {
this.namespaceDescriptor = namespaceDescriptor;
this.nameSpace = nameSpace;
this.hTableDescriptor = hTableDescriptor;
this.tableName = tableName;
this.hColumnDescriptors = hColumnDescriptors;
this.hColumnDescriptor = hColumnDescriptor;
this.columnFamily = columnFamily;
this.operation = operation;
this.ugi = ugi;
this.user = user;
this.owner = owner;
this.hbaseConf = hbaseConf;
}
public HBaseOperationContext(NamespaceDescriptor namespaceDescriptor, String nameSpace, HBaseAtlasHook.OPERATION operation, UserGroupInformation ugi , String user, String owner) {
this(namespaceDescriptor, nameSpace, null, null, null, null, null, operation, ugi, user, owner, null);
}
public HBaseOperationContext(String nameSpace, HTableDescriptor hTableDescriptor, TableName tableName, HColumnDescriptor[] hColumnDescriptor, HBaseAtlasHook.OPERATION operation, UserGroupInformation ugi, String user, String owner, Map<String,String> hbaseConf) {
this(null, nameSpace, hTableDescriptor, tableName, hColumnDescriptor, null, null, operation, ugi, user, owner, hbaseConf);
}
public HBaseOperationContext(String nameSpace, TableName tableName, HColumnDescriptor hColumnDescriptor, String columnFamily, HBaseAtlasHook.OPERATION operation, UserGroupInformation ugi, String user, String owner, Map<String,String> hbaseConf) {
this(null, nameSpace, null, tableName, null, hColumnDescriptor, columnFamily, operation, ugi, user, owner, hbaseConf);
}
private List<HookNotification.HookNotificationMessage> messages = new ArrayList<>();
public UserGroupInformation getUgi() {
return ugi;
}
public Map<String, String> getHbaseConf() {
return hbaseConf;
}
public String getUser() {
return user;
}
public HBaseAtlasHook.OPERATION getOperation() {
return operation;
}
public NamespaceDescriptor getNamespaceDescriptor() {
return namespaceDescriptor;
}
public HTableDescriptor gethTableDescriptor() {
return hTableDescriptor;
}
public HColumnDescriptor[] gethColumnDescriptors() {
return hColumnDescriptors;
}
public TableName getTableName() {
return tableName;
}
public String getNameSpace() {
return nameSpace;
}
public HColumnDescriptor gethColumnDescriptor() {
return hColumnDescriptor;
}
public String getColummFamily() {
return columnFamily;
}
public void addMessage(HookNotification.HookNotificationMessage message) {
messages.add(message);
}
public String getOwner() {
return owner;
}
public List<HookNotification.HookNotificationMessage> getMessages() {
return messages;
}
@Override
public String toString() {
StringBuilder sb = new StringBuilder();
toString(sb);
return sb.toString();
}
public StringBuilder toString(StringBuilder sb) {
sb.append("HBaseOperationContext={");
sb.append("Operation={").append(operation).append("} ");
sb.append("User ={").append(user).append("} ");
if (nameSpace != null ) {
sb.append("NameSpace={").append(nameSpace).append("}");
} else {
if (namespaceDescriptor != null) {
sb.append("NameSpace={").append(namespaceDescriptor.toString()).append("}");
}
}
if (tableName != null ) {
sb.append("Table={").append(tableName).append("}");
} else {
if ( hColumnDescriptor != null) {
sb.append("Table={").append(hTableDescriptor.toString()).append("}");
}
}
if (columnFamily != null ) {
sb.append("Columm Family={").append(columnFamily).append("}");
} else {
if ( hColumnDescriptor != null) {
sb.append("Columm Family={").append(hColumnDescriptor.toString()).append("}");
}
}
sb.append("Message ={").append(getMessages()).append("} ");
sb.append(" }");
return sb;
}
}
<?xml version="1.0" encoding="UTF-8" ?>
<!--
~ Licensed to the Apache Software Foundation (ASF) under one
~ or more contributor license agreements. See the NOTICE file
~ distributed with this work for additional information
~ regarding copyright ownership. The ASF licenses this file
~ to you under the Apache License, Version 2.0 (the
~ "License"); you may not use this file except in compliance
~ with the License. You may obtain a copy of the License at
~
~ http://www.apache.org/licenses/LICENSE-2.0
~
~ Unless required by applicable law or agreed to in writing, software
~ distributed under the License is distributed on an "AS IS" BASIS,
~ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
~ See the License for the specific language governing permissions and
~ limitations under the License.
-->
<!DOCTYPE log4j:configuration SYSTEM "log4j.dtd">
<log4j:configuration xmlns:log4j="http://jakarta.apache.org/log4j/">
<appender name="console" class="org.apache.log4j.ConsoleAppender">
<param name="Target" value="System.out"/>
<layout class="org.apache.log4j.PatternLayout">
<param name="ConversionPattern" value="%d %-5p - [%t:%x] ~ %m (%C{1}:%L)%n"/>
</layout>
</appender>
<appender name="FILE" class="org.apache.log4j.DailyRollingFileAppender">
<param name="File" value="${atlas.log.dir}/${atlas.log.file}"/>
<param name="Append" value="true"/>
<layout class="org.apache.log4j.PatternLayout">
<param name="ConversionPattern" value="%d %-5p - [%t:%x] ~ %m (%C{1}:%L)%n"/>
</layout>
</appender>
<logger name="org.apache.atlas" additivity="false">
<level value="info"/>
<appender-ref ref="FILE"/>
</logger>
<!-- to avoid logs - The configuration log.flush.interval.messages = 1 was supplied but isn't a known config -->
<logger name="org.apache.kafka.common.config.AbstractConfig" additivity="false">
<level value="error"/>
<appender-ref ref="FILE"/>
</logger>
<root>
<priority value="info"/>
<appender-ref ref="FILE"/>
</root>
</log4j:configuration>
......@@ -4,6 +4,47 @@
"classificationDefs": [],
"entityDefs": [
{
"name": "hbase_namespace",
"superTypes": [
"DataSet"
],
"typeVersion": "1.0",
"attributeDefs": [
{
"name": "clusterName",
"typeName": "string",
"cardinality": "SINGLE",
"isIndexable": true,
"isOptional": false,
"isUnique": false
},
{
"name": "parameters",
"typeName": "map<string,string>",
"cardinality": "SINGLE",
"isIndexable": false,
"isOptional": true,
"isUnique": false
},
{
"name": "createTime",
"typeName": "date",
"cardinality": "SINGLE",
"isIndexable": false,
"isOptional": true,
"isUnique": false
},
{
"name": "modifiedTime",
"typeName": "date",
"cardinality": "SINGLE",
"isIndexable": false,
"isOptional": true,
"isUnique": false
}
]
},
{
"name": "hbase_table",
"superTypes": [
"DataSet"
......
{
"patches": [
{
"action": "ADD_ATTRIBUTE",
"typeName": "hbase_table",
"applyToVersion": "1.1",
"updateToVersion": "1.2",
"params": null,
"attributeDefs": [
{
"name": "namespace",
"typeName": "hbase_namespace",
"cardinality": "SINGLE",
"isIndexable": false,
"isOptional": true,
"isUnique": false
},
{
"name": "parameters",
"typeName": "map<string,string>",
"cardinality": "SINGLE",
"isIndexable": false,
"isOptional": true,
"isUnique": false
},
{
"name": "createTime",
"typeName": "date",
"cardinality": "SINGLE",
"isIndexable": false,
"isOptional": true,
"isUnique": false
},
{
"name": "modifiedTime",
"typeName": "date",
"cardinality": "SINGLE",
"isIndexable": false,
"isOptional": true,
"isUnique": false
}
]
},
{
"action": "ADD_ATTRIBUTE",
"typeName": "hbase_column_family",
"applyToVersion": "1.0",
"updateToVersion": "1.1",
"params": null,
"attributeDefs": [
{
"name": "createTime",
"typeName": "date",
"cardinality": "SINGLE",
"isIndexable": false,
"isOptional": true,
"isUnique": false
},
{
"name": "modifiedTime",
"typeName": "date",
"cardinality": "SINGLE",
"isIndexable": false,
"isOptional": true,
"isUnique": false
}
]
}
]
}
\ No newline at end of file
......@@ -130,6 +130,17 @@
<outputDirectory>hook</outputDirectory>
</fileSet>
<!-- addons/hbase -->
<fileSet>
<directory>../addons/hbase-bridge/target/dependency/bridge</directory>
<outputDirectory>bridge</outputDirectory>
</fileSet>
<fileSet>
<directory>../addons/hbase-bridge/target/dependency/hook</directory>
<outputDirectory>hook</outputDirectory>
</fileSet>
<!-- addons/falcon -->
<fileSet>
<directory>../addons/falcon-bridge/target/dependency/hook</directory>
......
......@@ -679,7 +679,8 @@
<module>addons/sqoop-bridge</module>
<module>addons/storm-bridge-shim</module>
<module>addons/storm-bridge</module>
<module>addons/hbase-bridge-shim</module>
<module>addons/hbase-bridge</module>
<module>distro</module>
<module>build-tools</module>
</modules>
......@@ -1519,6 +1520,19 @@
<version>${project.version}</version>
</dependency>
<dependency>
<groupId>org.apache.atlas</groupId>
<artifactId>hbase-bridge</artifactId>
<version>${project.version}</version>
</dependency>
<dependency>
<groupId>org.apache.atlas</groupId>
<artifactId>hbase-bridge-shim</artifactId>
<version>${project.version}</version>
</dependency>
<!--Scala dependencies-->
<dependency>
<groupId>org.scala-lang</groupId>
......
Markdown is supported
0% or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or to comment