Commit 99cfa510 by a760104
parents 36c2d35d 2412d7cc
...@@ -33,40 +33,12 @@
<name>Apache Metadata Commons</name>
<packaging>jar</packaging>
<profiles>
<profile>
<id>hadoop-2</id>
<activation>
<activeByDefault>true</activeByDefault>
</activation>
<dependencies>
<dependency>
<groupId>org.apache.hadoop</groupId>
<artifactId>hadoop-client</artifactId>
</dependency>
<dependency>
<groupId>org.apache.hadoop</groupId>
<artifactId>hadoop-hdfs</artifactId>
</dependency>
<dependency>
<groupId>org.apache.hadoop</groupId>
<artifactId>hadoop-hdfs</artifactId>
<classifier>tests</classifier>
</dependency>
<dependency>
<groupId>org.apache.hadoop</groupId>
<artifactId>hadoop-common</artifactId>
<classifier>tests</classifier>
</dependency>
<dependency>
<groupId>org.apache.hadoop</groupId>
<artifactId>hadoop-common</artifactId>
</dependency>
</dependencies>
</profile>
</profiles>
<dependencies>
<dependency>
<groupId>commons-el</groupId>
<artifactId>commons-el</artifactId>
......
<?xml version="1.0" encoding="UTF-8"?>
<!--
~ Licensed to the Apache Software Foundation (ASF) under one
~ or more contributor license agreements. See the NOTICE file
~ distributed with this work for additional information
~ regarding copyright ownership. The ASF licenses this file
~ to you under the Apache License, Version 2.0 (the
~ "License"); you may not use this file except in compliance
~ with the License. You may obtain a copy of the License at
~
~ http://www.apache.org/licenses/LICENSE-2.0
~
~ Unless required by applicable law or agreed to in writing, software
~ distributed under the License is distributed on an "AS IS" BASIS,
~ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
~ See the License for the specific language governing permissions and
~ limitations under the License.
-->
<project xmlns="http://maven.apache.org/POM/4.0.0"
xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
<parent>
<artifactId>metadata-governance</artifactId>
<groupId>org.apache.hadoop.metadata</groupId>
<version>0.1-incubating-SNAPSHOT</version>
</parent>
<modelVersion>4.0.0</modelVersion>
<artifactId>metadata-falcontypes</artifactId>
<dependencies>
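<!-- falcon-client is used by the importer to fetch entity definitions from a Falcon server -->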
<dependency>
<groupId>org.apache.falcon</groupId>
<artifactId>falcon-client</artifactId>
</dependency>
<dependency>
<groupId>org.apache.hadoop.metadata</groupId>
<artifactId>metadata-typesystem</artifactId>
</dependency>
<dependency>
<groupId>org.apache.hadoop</groupId>
<artifactId>hadoop-client</artifactId>
</dependency>
<dependency>
<groupId>log4j</groupId>
<artifactId>log4j</artifactId>
</dependency>
<dependency>
<groupId>org.testng</groupId>
<artifactId>testng</artifactId>
</dependency>
</dependencies>
</project>
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.metadata.falcon;
import com.google.inject.Inject;
import org.apache.falcon.client.FalconCLIException;
import org.apache.falcon.client.FalconClient;
import org.apache.falcon.entity.v0.EntityType;
import org.apache.falcon.entity.v0.cluster.Cluster;
import org.apache.falcon.entity.v0.cluster.Interface;
import org.apache.falcon.entity.v0.cluster.Location;
import org.apache.falcon.entity.v0.cluster.Properties;
import org.apache.falcon.entity.v0.cluster.Property;
import org.apache.falcon.resource.EntityList;
import org.apache.hadoop.metadata.ITypedInstance;
import org.apache.hadoop.metadata.ITypedReferenceableInstance;
import org.apache.hadoop.metadata.MetadataException;
import org.apache.hadoop.metadata.Referenceable;
import org.apache.hadoop.metadata.Struct;
import org.apache.hadoop.metadata.storage.IRepository;
import org.apache.hadoop.metadata.types.Multiplicity;
import org.apache.hadoop.metadata.types.StructType;
import org.parboiled.common.StringUtils;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
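/**
* Imports metadata about Falcon entities into the metadata repository:
* entities are fetched through the Falcon client, converted into typed
* instances using the types registered by {@link FalconTypeSystem}, and
* persisted via {@link IRepository}.
*/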
public class FalconImporter {
private final FalconTypeSystem typeSystem;
private final FalconClient client;
private final IRepository repository;
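// Collaborators are wired in via Guice (hence the com.google.inject.Inject import).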
@Inject
public FalconImporter(FalconTypeSystem typeSystem, FalconClient client, IRepository repo) {
this.typeSystem = typeSystem;
this.client = client;
this.repository = repo;
}
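/**
* Imports all Falcon CLUSTER entities. For each cluster, the common entity
* attributes (name, ACL, tags, properties) are stored as an ENTITY instance;
* locations and interfaces are converted into CLUSTER structs. The CLUSTER
* instance itself is not yet persisted, and the method currently returns null.
*/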
private ITypedReferenceableInstance importClusters() throws FalconCLIException, MetadataException {
EntityList clusters = client.getEntityList(EntityType.CLUSTER.name(), null, null, null, null, null, null, null, null);
for (EntityList.EntityElement element : clusters.getElements()) {
Cluster cluster = (Cluster) client.getDefinition(EntityType.CLUSTER.name(), element.name);
Referenceable entityRef = new Referenceable(FalconTypeSystem.DefinedTypes.ENTITY.name());
entityRef.set("name", cluster.getName());
if (cluster.getACL() != null) {
Struct acl = new Struct(FalconTypeSystem.DefinedTypes.ACL.name());
acl.set("owner", cluster.getACL().getOwner());
acl.set("group", cluster.getACL().getGroup());
acl.set("permission", cluster.getACL().getPermission());
StructType aclType = (StructType) typeSystem.getDataType(FalconTypeSystem.DefinedTypes.ACL.name());
entityRef.set("acl", aclType.convert(acl, Multiplicity.REQUIRED));
}
if (StringUtils.isNotEmpty(cluster.getTags())) {
entityRef.set("tags", getMap(cluster.getTags()));
}
if (cluster.getProperties() != null) {
entityRef.set("properties", getMap(cluster.getProperties()));
}
repository.create(entityRef);
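// TODO: the CLUSTER instance below is built but not yet persisted or
// linked back to the ENTITY instance created above.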
Referenceable clusterRef = new Referenceable(FalconTypeSystem.DefinedTypes.CLUSTER.name());
if (cluster.getLocations() != null) {
List<ITypedInstance> locations = new ArrayList<>();
for (Location loc : cluster.getLocations().getLocations()) {
Struct location = new Struct(FalconTypeSystem.DefinedTypes.CLUSTER_LOCATION.name());
location.set("type", loc.getName());
location.set("path", loc.getPath());
StructType type = (StructType) typeSystem.getDataType(FalconTypeSystem.DefinedTypes.CLUSTER_LOCATION.name());
locations.add(type.convert(location, Multiplicity.REQUIRED));
}
clusterRef.set("locations", locations);
}
if (cluster.getInterfaces() != null) {
List<ITypedInstance> interfaces = new ArrayList<>();
for (Interface interfaceFld : cluster.getInterfaces().getInterfaces()) {
Struct interfaceStruct = new Struct(FalconTypeSystem.DefinedTypes.CLUSTER_INTERFACE.name());
interfaceStruct.set("type", interfaceFld.getType().name());
interfaceStruct.set("endpoint", interfaceFld.getEndpoint());
interfaceStruct.set("version", interfaceFld.getVersion());
StructType type = (StructType) typeSystem.getDataType(FalconTypeSystem.DefinedTypes.CLUSTER_INTERFACE.name());
interfaces.add(type.convert(interfaceStruct, Multiplicity.REQUIRED));
}
clusterRef.set("interfaces", interfaces);
}
}
return null;
}
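/** Flattens a Falcon Properties block into a plain name/value map. */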
private Map<String, String> getMap(Properties properties) {
Map<String, String> map = new HashMap<>();
for (Property property : properties.getProperties()) {
map.put(property.getName().trim(), property.getValue().trim());
}
return map;
}
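/**
* Parses Falcon tags of the form "key1=value1,key2=value2" into a map;
* pairs without an '=' are skipped rather than failing the import.
*/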
private Map<String, String> getMap(String tags) {
Map<String, String> map = new HashMap<>();
String[] parts = tags.split(",");
for (String part : parts) {
// limit the split so values containing '=' stay intact; skip malformed pairs
String[] kv = part.trim().split("=", 2);
if (kv.length == 2) {
map.put(kv[0].trim(), kv[1].trim());
}
}
return map;
}
}
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.metadata.falcon;
import com.google.common.collect.ImmutableList;
import org.apache.hadoop.metadata.MetadataException;
import org.apache.hadoop.metadata.types.AttributeDefinition;
import org.apache.hadoop.metadata.types.ClassType;
import org.apache.hadoop.metadata.types.DataTypes;
import org.apache.hadoop.metadata.types.EnumTypeDefinition;
import org.apache.hadoop.metadata.types.EnumValue;
import org.apache.hadoop.metadata.types.HierarchicalTypeDefinition;
import org.apache.hadoop.metadata.types.IDataType;
import org.apache.hadoop.metadata.types.Multiplicity;
import org.apache.hadoop.metadata.types.StructTypeDefinition;
import org.apache.hadoop.metadata.types.TypeSystem;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.util.HashMap;
import java.util.Map;
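/**
* Registers the Falcon data model (entity, cluster, ACL, location and
* interface types) with the metadata {@link TypeSystem} so Falcon entities
* can be expressed as typed instances.
*/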
public class FalconTypeSystem {
public static final Logger LOG = LoggerFactory.getLogger(FalconTypeSystem.class);
private static volatile FalconTypeSystem INSTANCE;
public static final TypeSystem TYPE_SYSTEM = TypeSystem.getInstance();
private final Map<String, IDataType> typeMap = new HashMap<>();
private Map<String, EnumTypeDefinition> enumTypeDefinitionMap = new HashMap<>();
private Map<String, StructTypeDefinition> structTypeDefinitionMap = new HashMap<>();
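/** Returns the singleton, lazily creating and registering the types on first use. */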
public static FalconTypeSystem getInstance() throws MetadataException {
if (INSTANCE == null) {
synchronized (FalconTypeSystem.class) {
if (INSTANCE == null) {
INSTANCE = new FalconTypeSystem();
}
}
}
return INSTANCE;
}
private FalconTypeSystem() throws MetadataException {
defineEntity();
HierarchicalTypeDefinition<ClassType> cluster = defineCluster();
//TODO define feed and process
typeMap.putAll(
TYPE_SYSTEM.defineTypes(ImmutableList.copyOf(structTypeDefinitionMap.values()), null,
ImmutableList.of(cluster)));
}
private HierarchicalTypeDefinition<ClassType> defineCluster() throws MetadataException {
defineClusterInterface();
defineClusterLocation();
AttributeDefinition[] attributeDefinitions = new AttributeDefinition[]{
new AttributeDefinition("locations", TYPE_SYSTEM.defineMapType(DataTypes.STRING_TYPE, DataTypes.STRING_TYPE).getName(), Multiplicity.COLLECTION, false, null),
new AttributeDefinition("interfaces", DefinedTypes.CLUSTER_INTERFACE.name(), Multiplicity.COLLECTION, false, null),
};
HierarchicalTypeDefinition<ClassType> cluster =
new HierarchicalTypeDefinition<>(ClassType.class, DefinedTypes.CLUSTER.name(), ImmutableList.of(DefinedTypes.ENTITY.name()), attributeDefinitions);
LOG.debug("Created definition for " + DefinedTypes.CLUSTER.name());
return cluster;
}
private StructTypeDefinition defineClusterLocation() {
EnumValue[] values = {
new EnumValue("WORKING", 1),
new EnumValue("STAGING", 2),
new EnumValue("TEMP", 3),
};
LOG.debug("Created definition for " + DefinedTypes.CLUSTER_LOCATION_TYPE.name());
EnumTypeDefinition locationType = new EnumTypeDefinition(DefinedTypes.CLUSTER_LOCATION_TYPE.name(), values);
enumTypeDefinitionMap.put(locationType.name, locationType);
AttributeDefinition[] attributeDefinitions = new AttributeDefinition[]{
new AttributeDefinition("type", DefinedTypes.CLUSTER_LOCATION_TYPE.name(), Multiplicity.REQUIRED, false, null),
new AttributeDefinition("path", DataTypes.STRING_TYPE.getName(), Multiplicity.REQUIRED, false, null),
};
LOG.debug("Created definition for " + DefinedTypes.CLUSTER_LOCATION.name());
StructTypeDefinition location = new StructTypeDefinition(DefinedTypes.CLUSTER_LOCATION.name(), attributeDefinitions);
structTypeDefinitionMap.put(location.typeName, location);
return location;
}
private StructTypeDefinition defineClusterInterface() {
EnumValue[] values = {
new EnumValue("READ_ONLY", 1),
new EnumValue("WRITE", 2),
new EnumValue("EXECUTE", 3),
new EnumValue("WORKFLOW", 4),
new EnumValue("MESSAGING", 5),
new EnumValue("REGISTRY", 6),
};
LOG.debug("Created definition for " + DefinedTypes.CLUSTER_INTERFACE_TYPE.name());
EnumTypeDefinition interfaceType = new EnumTypeDefinition(DefinedTypes.CLUSTER_INTERFACE_TYPE.name(), values);
enumTypeDefinitionMap.put(interfaceType.name, interfaceType);
AttributeDefinition[] attributeDefinitions = new AttributeDefinition[]{
new AttributeDefinition("type", DefinedTypes.CLUSTER_INTERFACE_TYPE.name(), Multiplicity.REQUIRED, false, null),
new AttributeDefinition("endpoint", DataTypes.STRING_TYPE.getName(), Multiplicity.REQUIRED, false, null),
new AttributeDefinition("version", DataTypes.STRING_TYPE.getName(), Multiplicity.REQUIRED, false, null),
};
LOG.debug("Created definition for " + DefinedTypes.CLUSTER_INTERFACE.name());
StructTypeDefinition interfaceEntity = new StructTypeDefinition(DefinedTypes.CLUSTER_INTERFACE.name(), attributeDefinitions);
structTypeDefinitionMap.put(interfaceEntity.typeName, interfaceEntity);
return interfaceEntity;
}
private StructTypeDefinition defineEntity() throws MetadataException {
defineACL();
AttributeDefinition[] attributeDefinitions = new AttributeDefinition[]{
new AttributeDefinition("name", DataTypes.STRING_TYPE.getName(), Multiplicity.REQUIRED, false, null),
new AttributeDefinition("acl", DefinedTypes.ACL.name(), Multiplicity.OPTIONAL, false, null),
new AttributeDefinition("tags", TYPE_SYSTEM.defineMapType(DataTypes.STRING_TYPE, DataTypes.STRING_TYPE).getName(), Multiplicity.OPTIONAL, false, null),
new AttributeDefinition("properties", TYPE_SYSTEM.defineMapType(DataTypes.STRING_TYPE, DataTypes.STRING_TYPE).getName(), Multiplicity.OPTIONAL, false, null),
};
LOG.debug("Created definition for " + DefinedTypes.ENTITY.name());
StructTypeDefinition entity = new StructTypeDefinition(DefinedTypes.ENTITY.name(), attributeDefinitions);
structTypeDefinitionMap.put(entity.typeName, entity);
return entity;
}
public enum DefinedTypes {
ACL,
ENTITY,
CLUSTER,
CLUSTER_INTERFACE,
CLUSTER_INTERFACE_TYPE,
CLUSTER_LOCATION,
CLUSTER_LOCATION_TYPE;
}
private StructTypeDefinition defineACL() {
AttributeDefinition[] attributeDefinitions = new AttributeDefinition[]{
new AttributeDefinition("owner", DataTypes.STRING_TYPE.getName(), Multiplicity.REQUIRED, false, null),
new AttributeDefinition("group", DataTypes.STRING_TYPE.getName(), Multiplicity.REQUIRED, false, null),
new AttributeDefinition("permission", DataTypes.STRING_TYPE.getName(), Multiplicity.OPTIONAL, false, null),
};
LOG.debug("Created definition for " + DefinedTypes.ACL.name());
StructTypeDefinition acl = new StructTypeDefinition(DefinedTypes.ACL.name(), attributeDefinitions);
structTypeDefinitionMap.put(acl.typeName, acl);
return acl;
}
public IDataType getDataType(String typeName) {
return typeMap.get(typeName);
}
}
...@@ -28,115 +28,27 @@
<modelVersion>4.0.0</modelVersion>
<artifactId>metadata-hivetypes</artifactId>
<profiles>
<profile>
<id>hadoop-2</id>
<activation>
<activeByDefault>true</activeByDefault>
</activation>
<dependencies>
<dependency>
<groupId>org.apache.hadoop</groupId>
<artifactId>hadoop-client</artifactId>
</dependency>
<dependency>
<groupId>org.apache.hadoop</groupId>
<artifactId>hadoop-common</artifactId>
</dependency>
<dependency>
<groupId>org.apache.hadoop</groupId>
<artifactId>hadoop-mapreduce-client-common</artifactId>
</dependency>
</dependencies>
</profile>
</profiles>
<dependencies>
<dependency>
<groupId>org.apache.hive</groupId>
<artifactId>hive-metastore</artifactId>
<version>0.14.0</version>
<exclusions>
<exclusion>
<groupId>org.apache.calcite</groupId>
<artifactId>calcite-core</artifactId>
</exclusion>
<exclusion>
<groupId>org.apache.calcite</groupId>
<artifactId>calcite-avatica</artifactId>
</exclusion>
</exclusions>
</dependency>
<dependency>
<groupId>org.apache.hive</groupId>
<artifactId>hive-exec</artifactId>
<version>0.14.0</version>
<exclusions>
<exclusion>
<groupId>org.apache.calcite</groupId>
<artifactId>calcite-core</artifactId>
</exclusion>
<exclusion>
<groupId>org.apache.calcite</groupId>
<artifactId>calcite-avatica</artifactId>
</exclusion>
</exclusions>
</dependency>
<dependency>
<groupId>org.apache.hive</groupId>
<artifactId>hive-common</artifactId>
<version>0.14.0</version>
<exclusions>
<exclusion>
<groupId>org.apache.calcite</groupId>
<artifactId>calcite-core</artifactId>
</exclusion>
<exclusion>
<groupId>org.apache.calcite</groupId>
<artifactId>calcite-avatica</artifactId>
</exclusion>
</exclusions>
</dependency>
<dependency>
<groupId>org.apache.hive</groupId>
<artifactId>hive-cli</artifactId>
<version>0.14.0</version>
<exclusions>
<exclusion>
<groupId>org.apache.calcite</groupId>
<artifactId>calcite-core</artifactId>
</exclusion>
<exclusion>
<groupId>org.apache.calcite</groupId>
<artifactId>calcite-avatica</artifactId>
</exclusion>
</exclusions>
</dependency>
<dependency>
<groupId>org.apache.hive</groupId>
<artifactId>hive-jdbc</artifactId>
<version>0.14.0</version>
<exclusions>
<exclusion>
<groupId>org.apache.calcite</groupId>
<artifactId>calcite-core</artifactId>
</exclusion>
<exclusion>
<groupId>org.apache.calcite</groupId>
<artifactId>calcite-avatica</artifactId>
</exclusion>
</exclusions>
</dependency>
<dependency>
<groupId>org.apache.calcite</groupId>
<artifactId>calcite-avatica</artifactId>
<version>0.9.2-incubating</version>
</dependency>
<dependency>
<groupId>org.apache.calcite</groupId>
<artifactId>calcite-core</artifactId>
<version>0.9.2-incubating</version>
</dependency>
<dependency>
<groupId>com.google.code.gson</groupId>
<artifactId>gson</artifactId>
...@@ -152,17 +64,9 @@
</dependency>
<dependency>
<groupId>org.apache.hadoop</groupId>
<artifactId>hadoop-common</artifactId>
</dependency>
<dependency>
<groupId>org.apache.hadoop</groupId>
<artifactId>hadoop-client</artifactId>
</dependency>
<dependency>
<groupId>org.apache.hadoop</groupId>
<artifactId>hadoop-mapreduce-client-common</artifactId>
</dependency>
<dependency>
<groupId>log4j</groupId>
<artifactId>log4j</artifactId>
</dependency>
......
...@@ -18,7 +18,6 @@
package org.apache.hadoop.metadata.hivetypes;
import org.apache.hadoop.hive.cli.CliDriver;
import org.apache.hadoop.hive.metastore.HiveMetaStoreClient;
import org.apache.hadoop.hive.metastore.api.Database;
import org.apache.hadoop.hive.metastore.api.FieldSchema;
...@@ -29,7 +28,6 @@ import org.apache.hadoop.hive.metastore.api.Partition;
import org.apache.hadoop.hive.metastore.api.SerDeInfo;
import org.apache.hadoop.hive.metastore.api.StorageDescriptor;
import org.apache.hadoop.hive.metastore.api.Table;
import org.apache.hadoop.hive.service.HiveClient;
import org.apache.hadoop.metadata.ITypedReferenceableInstance;
import org.apache.hadoop.metadata.ITypedStruct;
import org.apache.hadoop.metadata.MetadataException;
......
...@@ -23,11 +23,8 @@ import com.tinkerpop.blueprints.Edge;
import com.tinkerpop.blueprints.Graph;
import com.tinkerpop.blueprints.Vertex;
import org.apache.commons.configuration.ConfigurationException;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hive.cli.CliDriver;
import org.apache.hadoop.hive.conf.HiveConf;
import org.apache.hadoop.hive.metastore.HiveMetaStoreClient;
import org.apache.hadoop.hive.service.HiveClient;
import org.apache.hadoop.metadata.ITypedReferenceableInstance;
import org.apache.hadoop.metadata.MetadataException;
import org.apache.hadoop.metadata.repository.graph.GraphBackedMetadataRepository;
...@@ -43,13 +40,8 @@ import org.testng.annotations.Test;
import java.io.BufferedWriter;
import java.io.File;
import java.io.FileOutputStream;
import java.io.FileWriter;
import java.io.IOException;
import java.io.OutputStream;
import java.net.URL;
import java.util.List;
import java.util.Properties;
@Test (enabled = false)
public class HiveGraphRepositoryTest {
......
...@@ -21,15 +21,11 @@ package org.apache.hadoop.metadata.hivetypes;
import org.apache.hadoop.hive.conf.HiveConf;
import org.apache.hadoop.hive.metastore.HiveMetaStoreClient;
import org.apache.hadoop.hive.metastore.api.MetaException;
import org.apache.hadoop.hive.ql.plan.api.QueryPlan;
import org.apache.hadoop.hive.service.HiveClient;
import org.apache.hadoop.metadata.ITypedReferenceableInstance;
import org.apache.hadoop.metadata.MetadataException;
import org.apache.hadoop.metadata.storage.Id;
import org.apache.hadoop.metadata.storage.memory.MemRepository;
import org.apache.hadoop.metadata.types.TypeSystem;
import org.apache.thrift.protocol.TBinaryProtocol;
import org.apache.thrift.transport.TSocket;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.testng.annotations.BeforeClass;
......
...@@ -68,139 +68,12 @@
<jersey.version>1.9</jersey.version>
<tinkerpop.version>2.5.0</tinkerpop.version>
<titan.version>0.5.3</titan.version>
</properties>
<profiles>
<profile>
<id>hadoop-2</id>
<activation>
<activeByDefault>true</activeByDefault>
</activation>
<properties>
<hadoop.version>2.5.0</hadoop.version>
<hive.version>0.14.0</hive.version>
<falcon.version>0.7-SNAPSHOT</falcon.version>
</properties>
<dependencyManagement>
<dependencies>
<dependency>
<groupId>org.apache.hadoop</groupId>
<artifactId>hadoop-client</artifactId>
<version>${hadoop.version}</version>
<scope>provided</scope>
<exclusions>
<exclusion>
<groupId>com.sun.jersey</groupId>
<artifactId>jersey-server</artifactId>
</exclusion>
<exclusion>
<groupId>com.sun.jersey</groupId>
<artifactId>jersey-core</artifactId>
</exclusion>
<exclusion>
<groupId>com.sun.jersey</groupId>
<artifactId>jersey-json</artifactId>
</exclusion>
<exclusion>
<groupId>org.glassfish</groupId>
<artifactId>javax.servlet</artifactId>
</exclusion>
<exclusion>
<groupId>org.apache.hadoop</groupId>
<artifactId>hadoop-annotations</artifactId>
</exclusion>
</exclusions>
</dependency>
<dependency>
<groupId>org.apache.hadoop</groupId>
<artifactId>hadoop-hdfs</artifactId>
<version>${hadoop.version}</version>
<scope>provided</scope>
<exclusions>
<exclusion>
<groupId>org.apache.hadoop</groupId>
<artifactId>hadoop-annotations</artifactId>
</exclusion>
</exclusions>
</dependency>
<dependency>
<groupId>org.apache.hadoop</groupId>
<artifactId>hadoop-common</artifactId>
<version>${hadoop.version}</version>
<scope>provided</scope>
<exclusions>
<exclusion>
<groupId>org.apache.hadoop</groupId>
<artifactId>hadoop-annotations</artifactId>
</exclusion>
</exclusions>
</dependency>
<dependency>
<groupId>org.apache.hadoop</groupId>
<artifactId>hadoop-mapreduce-client-common</artifactId>
<version>${hadoop.version}</version>
<scope>provided</scope>
<exclusions>
<exclusion>
<groupId>org.apache.hadoop</groupId>
<artifactId>hadoop-annotations</artifactId>
</exclusion>
</exclusions>
</dependency>
<dependency>
<groupId>org.apache.hadoop</groupId>
<artifactId>hadoop-hdfs</artifactId>
<version>${hadoop.version}</version>
<classifier>tests</classifier>
<scope>test</scope>
</dependency>
<dependency>
<groupId>org.apache.hadoop</groupId>
<artifactId>hadoop-common</artifactId>
<version>${hadoop.version}</version>
<classifier>tests</classifier>
<scope>test</scope>
</dependency>
<dependency>
<groupId>org.apache.hadoop</groupId>
<artifactId>hadoop-yarn-server-resourcemanager</artifactId>
<version>${hadoop.version}</version>
<scope>provided</scope>
</dependency>
<dependency>
<groupId>org.apache.hadoop</groupId>
<artifactId>hadoop-yarn-server-nodemanager</artifactId>
<version>${hadoop.version}</version>
<scope>provided</scope>
</dependency>
<dependency>
<groupId>org.apache.hadoop</groupId>
<artifactId>hadoop-auth</artifactId>
<version>${hadoop.version}</version>
<scope>provided</scope>
<exclusions>
<exclusion>
<groupId>org.apache.hadoop</groupId>
<artifactId>hadoop-annotations</artifactId>
</exclusion>
</exclusions>
</dependency>
<dependency>
<groupId>org.apache.hadoop</groupId>
<artifactId>hadoop-distcp</artifactId>
<version>${hadoop.version}</version>
<scope>provided</scope>
</dependency>
</dependencies>
</dependencyManagement>
</profile>
<profiles>
<profile>
<id>test-patch</id>
<build>
...@@ -311,6 +184,7 @@
<module>webapp</module>
<module>docs</module>
<module>hivetypes</module>
<module>falcontypes</module>
<module>metadata-bridge-parent</module>
</modules>
...@@ -367,6 +241,87 @@
<dependencyManagement>
<dependencies>
<!-- hadoop -->
<dependency>
<groupId>org.apache.hadoop</groupId>
<artifactId>hadoop-client</artifactId>
<version>${hadoop.version}</version>
<scope>provided</scope>
<exclusions>
<exclusion>
<groupId>com.sun.jersey</groupId>
<artifactId>jersey-server</artifactId>
</exclusion>
<exclusion>
<groupId>com.sun.jersey</groupId>
<artifactId>jersey-core</artifactId>
</exclusion>
<exclusion>
<groupId>com.sun.jersey</groupId>
<artifactId>jersey-json</artifactId>
</exclusion>
<exclusion>
<groupId>org.glassfish</groupId>
<artifactId>javax.servlet</artifactId>
</exclusion>
<exclusion>
<groupId>org.apache.hadoop</groupId>
<artifactId>hadoop-annotations</artifactId>
</exclusion>
</exclusions>
</dependency>
<!-- hive -->
<dependency>
<groupId>org.apache.hive</groupId>
<artifactId>hive-metastore</artifactId>
<version>${hive.version}</version>
<exclusions>
<exclusion>
<groupId>org.apache.calcite</groupId>
<artifactId>calcite-core</artifactId>
</exclusion>
<exclusion>
<groupId>org.apache.calcite</groupId>
<artifactId>calcite-avatica</artifactId>
</exclusion>
</exclusions>
</dependency>
<dependency>
<groupId>org.apache.hive</groupId>
<artifactId>hive-exec</artifactId>
<version>${hive.version}</version>
<exclusions>
<exclusion>
<groupId>org.apache.calcite</groupId>
<artifactId>calcite-core</artifactId>
</exclusion>
<exclusion>
<groupId>org.apache.calcite</groupId>
<artifactId>calcite-avatica</artifactId>
</exclusion>
</exclusions>
</dependency>
<dependency>
<groupId>org.apache.calcite</groupId>
<artifactId>calcite-avatica</artifactId>
<version>0.9.2-incubating</version>
</dependency>
<dependency>
<groupId>org.apache.calcite</groupId>
<artifactId>calcite-core</artifactId>
<version>0.9.2-incubating</version>
</dependency>
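<!-- falcon -->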
<dependency>
<groupId>org.apache.falcon</groupId>
<artifactId>falcon-client</artifactId>
<version>${falcon.version}</version>
</dependency>
<!-- Logging -->
<dependency>
<groupId>org.slf4j</groupId>
...@@ -761,7 +716,6 @@
<forkMode>always</forkMode>
<argLine>-Djava.security.krb5.realm= -Djava.security.krb5.kdc=
-Dhadoop.tmp.dir=${project.build.directory}/tmp-hadoop-${user.name}</argLine>
<excludedGroups>${excluded.test.groups}</excludedGroups>
</configuration>
<executions>
<execution>
......
...@@ -32,40 +32,12 @@
<name>Apache Metadata Repository</name>
<packaging>jar</packaging>
<profiles>
<profile>
<id>hadoop-2</id>
<activation>
<activeByDefault>true</activeByDefault>
</activation>
<dependencies>
<dependency>
<groupId>org.apache.hadoop</groupId>
<artifactId>hadoop-client</artifactId>
</dependency>
<dependency>
<groupId>org.apache.hadoop</groupId>
<artifactId>hadoop-hdfs</artifactId>
</dependency>
<dependency>
<groupId>org.apache.hadoop</groupId>
<artifactId>hadoop-hdfs</artifactId>
<classifier>tests</classifier>
</dependency>
<dependency>
<groupId>org.apache.hadoop</groupId>
<artifactId>hadoop-common</artifactId>
<classifier>tests</classifier>
</dependency>
<dependency>
<groupId>org.apache.hadoop</groupId>
<artifactId>hadoop-common</artifactId>
</dependency>
</dependencies>
</profile>
</profiles>
<dependencies>
<dependency>
<groupId>org.apache.hadoop.metadata</groupId>
<artifactId>metadata-common</artifactId>
......