diff --git a/.gitignore b/.gitignore new file mode 100644 index 0000000..f823497 --- /dev/null +++ b/.gitignore @@ -0,0 +1,39 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +# Maven +target + +# IntelliJ +*.iml +*.ipr +*.iws +.idea + +# Eclipse +.classpath +.project +.settings +.externalToolBuilders +maven-eclipse.xml + +# ActiveMQ +activemq-data +build + +# log files +logs +*.log \ No newline at end of file diff --git a/InstallationSteps.txt b/InstallationSteps.txt new file mode 100644 index 0000000..b9425e4 --- /dev/null +++ b/InstallationSteps.txt @@ -0,0 +1,56 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +A. Building & Installing Metadata +================================= + +0. Prerequisites +------------------ + +You will need the following installed: + +* JDK 1.7 +* Maven 3.x + + +1. Building Metadata +-------------------- + +Building metadata from the source repository +~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + +* git clone git@github.com:hortonworks/metadata.git metadata +* cd metadata +* export MAVEN_OPTS="-Xmx1024m -XX:MaxPermSize=256m" && mvn clean verify + + +2. Deploying Metadata +--------------------- + +Once the build successfully completes, artifacts can be packaged for deployment. + +* mvn clean assembly:assembly -DskipTests -DskipITs + +The tar file can be found at {project dir}/target/apache-metadata-${project.version}-bin.tar.gz + +The tar file is structured as follows: + +TBD + +3. Installing & running Metadata +-------------------------------- + +TBD \ No newline at end of file diff --git a/README b/README new file mode 100644 index 0000000..128fde6 --- /dev/null +++ b/README @@ -0,0 +1,24 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License.
You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +Metadata and Governance Overview + +This aims to provide a simple repository for storing entities and associated +relationships among other entities. + +The goal is to capture lineage for both entities and their associated instances. +It also captures provenance, lineage, classification, etc. associated with each +of the entities in the metadata repository. diff --git a/common/pom.xml b/common/pom.xml new file mode 100644 index 0000000..cbdbbbe --- /dev/null +++ b/common/pom.xml @@ -0,0 +1,153 @@ +<?xml version="1.0" encoding="UTF-8"?> + +<!-- + ~ Licensed to the Apache Software Foundation (ASF) under one + ~ or more contributor license agreements. See the NOTICE file + ~ distributed with this work for additional information + ~ regarding copyright ownership. The ASF licenses this file + ~ to you under the Apache License, Version 2.0 (the + ~ "License"); you may not use this file except in compliance + ~ with the License. You may obtain a copy of the License at + ~ + ~ http://www.apache.org/licenses/LICENSE-2.0 + ~ + ~ Unless required by applicable law or agreed to in writing, software + ~ distributed under the License is distributed on an "AS IS" BASIS, + ~ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + ~ See the License for the specific language governing permissions and + ~ limitations under the License. + --> + +<project xmlns="http://maven.apache.org/POM/4.0.0" + xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" + xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/maven-v4_0_0.xsd"> + + <modelVersion>4.0.0</modelVersion> + <parent> + <groupId>org.apache.hadoop.metadata</groupId> + <artifactId>metadata-governance</artifactId> + <version>0.1-incubating-SNAPSHOT</version> + </parent> + <artifactId>metadata-common</artifactId> + <description>Apache Metadata Common Module</description> + <name>Apache Metadata Commons</name> + <packaging>jar</packaging> + + <profiles> + <profile> + <id>hadoop-2</id> + <activation> + <activeByDefault>true</activeByDefault> + </activation> + <dependencies> + <dependency> + <groupId>org.apache.hadoop</groupId> + <artifactId>hadoop-client</artifactId> + </dependency> + <dependency> + <groupId>org.apache.hadoop</groupId> + <artifactId>hadoop-hdfs</artifactId> + </dependency> + <dependency> + <groupId>org.apache.hadoop</groupId> + <artifactId>hadoop-hdfs</artifactId> + <classifier>tests</classifier> + </dependency> + <dependency> + <groupId>org.apache.hadoop</groupId> + <artifactId>hadoop-common</artifactId> + <classifier>tests</classifier> + </dependency> + <dependency> + <groupId>org.apache.hadoop</groupId> + <artifactId>hadoop-common</artifactId> + </dependency> + </dependencies> + </profile> + </profiles> + + <dependencies> + <dependency> + <groupId>commons-el</groupId> + <artifactId>commons-el</artifactId> + </dependency> + + <dependency> + <groupId>javax.servlet.jsp</groupId> + <artifactId>jsp-api</artifactId> + </dependency> + + <dependency> + <groupId>org.testng</groupId> + <artifactId>testng</artifactId> + </dependency> + + <dependency> + <groupId>org.slf4j</groupId> +
<artifactId>slf4j-log4j12</artifactId> + </dependency> + + <dependency> + <groupId>org.slf4j</groupId> + <artifactId>slf4j-api</artifactId> + </dependency> + + <dependency> + <groupId>org.codehaus.jettison</groupId> + <artifactId>jettison</artifactId> + </dependency> + + <dependency> + <groupId>org.mockito</groupId> + <artifactId>mockito-all</artifactId> + </dependency> + + <dependency> + <groupId>net.sourceforge.findbugs</groupId> + <artifactId>annotations</artifactId> + </dependency> + + <dependency> + <groupId>com.googlecode.json-simple</groupId> + <artifactId>json-simple</artifactId> + </dependency> + + <dependency> + <groupId>com.tinkerpop.blueprints</groupId> + <artifactId>blueprints-core</artifactId> + </dependency> + + <dependency> + <groupId>com.thinkaurelius.titan</groupId> + <artifactId>titan-core</artifactId> + </dependency> + + <dependency> + <groupId>com.thinkaurelius.titan</groupId> + <artifactId>titan-berkeleyje</artifactId> + </dependency> + </dependencies> + + <build> + <plugins> + <plugin> + <groupId>org.apache.maven.plugins</groupId> + <artifactId>maven-compiler-plugin</artifactId> + <configuration> + <source>1.7</source> + <target>1.7</target> + </configuration> + </plugin> + <plugin> + <groupId>org.apache.maven.plugins</groupId> + <artifactId>maven-jar-plugin</artifactId> + <version>2.4</version> + <configuration> + <excludes> + <exclude>**/log4j.xml</exclude> + </excludes> + </configuration> + </plugin> + </plugins> + </build> +</project> diff --git a/common/src/main/java/org/apache/hadoop/metadata/MetadataException.java b/common/src/main/java/org/apache/hadoop/metadata/MetadataException.java new file mode 100644 index 0000000..8d32b08 --- /dev/null +++ b/common/src/main/java/org/apache/hadoop/metadata/MetadataException.java @@ -0,0 +1,69 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.hadoop.metadata; + +public class MetadataException extends Exception { + /** + * Constructs a new exception with the specified detail message. The + * cause is not initialized, and may subsequently be initialized by + * a call to {@link #initCause}. + * + * @param message the detail message. The detail message is saved for + * later retrieval by the {@link #getMessage()} method. + */ + public MetadataException(String message) { + super(message); + } + + /** + * Constructs a new exception with the specified detail message and + * cause. <p>Note that the detail message associated with + * {@code cause} is <i>not</i> automatically incorporated in + * this exception's detail message. + * + * @param message the detail message (which is saved for later retrieval + * by the {@link #getMessage()} method). + * @param cause the cause (which is saved for later retrieval by the + * {@link #getCause()} method). 
(A <tt>null</tt> value is + * permitted, and indicates that the cause is nonexistent or + * unknown.) + * @since 1.4 + */ + public MetadataException(String message, Throwable cause) { + super(message, cause); + } + + /** + * Constructs a new exception with the specified cause and a detail + * message of <tt>(cause==null ? null : cause.toString())</tt> (which + * typically contains the class and detail message of <tt>cause</tt>). + * This constructor is useful for exceptions that are little more than + * wrappers for other throwables (for example, {@link + * java.security.PrivilegedActionException}). + * + * @param cause the cause (which is saved for later retrieval by the + * {@link #getCause()} method). (A <tt>null</tt> value is + * permitted, and indicates that the cause is nonexistent or + * unknown.) + * @since 1.4 + */ + public MetadataException(Throwable cause) { + super(cause); + } +} diff --git a/common/src/main/java/org/apache/hadoop/metadata/service/Service.java b/common/src/main/java/org/apache/hadoop/metadata/service/Service.java new file mode 100644 index 0000000..23b4e5c --- /dev/null +++ b/common/src/main/java/org/apache/hadoop/metadata/service/Service.java @@ -0,0 +1,56 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.hadoop.metadata.service; + +import java.io.Closeable; +import java.io.IOException; + +/** + * Service interface that's initialized at startup. + */ +public interface Service extends Closeable { + + /** + * Name of the service. + * + * @return name of the service + */ + String getName(); + + /** + * Starts the service. This method blocks until the service has completely started. + * + * @throws Exception + */ + void start() throws Exception; + + /** + * Stops the service. This method blocks until the service has completely shut down. + */ + void stop(); + + /** + * A version of stop() that is designed to be usable in Java7 closure + * clauses. + * Implementation classes MUST relay this directly to {@link #stop()} + * @throws java.io.IOException never + * @throws RuntimeException on any failure during the stop operation + */ + void close() throws IOException; +} diff --git a/common/src/main/java/org/apache/hadoop/metadata/service/ServiceInitializer.java b/common/src/main/java/org/apache/hadoop/metadata/service/ServiceInitializer.java new file mode 100644 index 0000000..1120634 --- /dev/null +++ b/common/src/main/java/org/apache/hadoop/metadata/service/ServiceInitializer.java @@ -0,0 +1,76 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. 
The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.hadoop.metadata.service; + +import org.apache.commons.configuration.ConfigurationException; +import org.apache.commons.configuration.PropertiesConfiguration; +import org.apache.hadoop.metadata.MetadataException; +import org.apache.hadoop.metadata.util.ReflectionUtils; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +/** + * Initializer used at startup to bring up all the Metadata services. + */ +public class ServiceInitializer { + + private static final Logger LOG = LoggerFactory.getLogger(ServiceInitializer.class); + private final Services services = Services.get(); + + public void initialize() throws MetadataException { + String[] serviceClassNames; + try { + PropertiesConfiguration configuration = + new PropertiesConfiguration("application.properties"); + serviceClassNames = configuration.getStringArray("application.services"); + } catch (ConfigurationException e) { + throw new RuntimeException("unable to get server properties", e); + } + + for (String serviceClassName : serviceClassNames) { + serviceClassName = serviceClassName.trim(); + if (serviceClassName.isEmpty()) { + continue; + } + Service service = ReflectionUtils.getInstanceByClassName(serviceClassName); + services.register(service); + LOG.info("Initializing service: {}", serviceClassName); + try { + service.start(); + } catch (Throwable t) { + LOG.error("Failed to initialize service {}", serviceClassName, t); + throw new MetadataException(t); + } + LOG.info("Service initialized: {}", serviceClassName); + } + } + + public void destroy() throws MetadataException { + for (Service service : services) { + LOG.info("Destroying service: {}", service.getClass().getName()); + try { + service.stop(); + } catch (Throwable t) { + LOG.error("Failed to destroy service {}", service.getClass().getName(), t); + throw new MetadataException(t); + } + LOG.info("Service destroyed: {}", service.getClass().getName()); + } + } +} diff --git a/common/src/main/java/org/apache/hadoop/metadata/service/Services.java b/common/src/main/java/org/apache/hadoop/metadata/service/Services.java new file mode 100644 index 0000000..3079373 --- /dev/null +++ b/common/src/main/java/org/apache/hadoop/metadata/service/Services.java @@ -0,0 +1,98 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.hadoop.metadata.service; + +import org.apache.commons.configuration.ConfigurationException; +import org.apache.commons.configuration.PropertiesConfiguration; +import org.apache.hadoop.metadata.MetadataException; +import org.apache.hadoop.metadata.util.ReflectionUtils; + +import java.util.Iterator; +import java.util.LinkedHashMap; +import java.util.Map; +import java.util.NoSuchElementException; + +/** + * Repository of services initialized at startup. + */ +public final class Services implements Iterable<Service> { + + private static final Services INSTANCE = new Services(); + + private Services() { + } + + public static Services get() { + return INSTANCE; + } + + private final Map<String, Service> services = + new LinkedHashMap<String, Service>(); + + public synchronized void register(Service service) throws MetadataException { + + if (services.containsKey(service.getName())) { + throw new MetadataException("Service " + service.getName() + " already registered"); + } else { + services.put(service.getName(), service); + } + } + + @SuppressWarnings("unchecked") + public <T extends Service> T getService(String serviceName) { + if (services.containsKey(serviceName)) { + return (T) services.get(serviceName); + } else { + throw new NoSuchElementException( + "Service " + serviceName + " not registered with registry"); + } + } + + public boolean isRegistered(String serviceName) { + return services.containsKey(serviceName); + } + + @Override + public Iterator<Service> iterator() { + return services.values().iterator(); + } + + public Service init(String serviceName) throws MetadataException { + if (isRegistered(serviceName)) { + throw new MetadataException("Service is already initialized " + serviceName); + } + + String serviceClassName; + try { + PropertiesConfiguration configuration = + new PropertiesConfiguration("application.properties"); + serviceClassName = configuration.getString(serviceName + ".impl"); + } catch (ConfigurationException e) { + throw new MetadataException("unable to get server properties"); + } + + Service service = ReflectionUtils.getInstanceByClassName(serviceClassName); + register(service); + return service; + } + + public void reset() { + services.clear(); + } +} diff --git a/common/src/main/java/org/apache/hadoop/metadata/util/DateTimeHelper.java b/common/src/main/java/org/apache/hadoop/metadata/util/DateTimeHelper.java new file mode 100644 index 0000000..dd842d9 --- /dev/null +++ b/common/src/main/java/org/apache/hadoop/metadata/util/DateTimeHelper.java @@ -0,0 +1,120 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.apache.hadoop.metadata.util; + +import java.text.DateFormat; +import java.text.ParseException; +import java.text.SimpleDateFormat; +import java.util.Date; +import java.util.TimeZone; +import java.util.regex.Matcher; +import java.util.regex.Pattern; + +/** + * Support functions to parse and format dates in xsd string format. + */ +public final class DateTimeHelper { + + private static final String DATE_PATTERN = + "(2\\d\\d\\d|19\\d\\d)-(0[1-9]|1[012])-(0[1-9]|1[0-9]|2[0-9]|3[01])T([0-1][0-9]|2[0-3]):([0-5][0-9])Z"; + private static final Pattern PATTERN = Pattern.compile(DATE_PATTERN); + + public static final String ISO8601_FORMAT = "yyyy-MM-dd'T'HH:mm'Z'"; + + private DateTimeHelper() {} + + public static String getTimeZoneId(TimeZone tz) { + return tz.getID(); + } + + public static DateFormat getDateFormat() { + DateFormat dateFormat = new SimpleDateFormat(ISO8601_FORMAT); + dateFormat.setTimeZone(TimeZone.getTimeZone("UTC")); + return dateFormat; + } + + public static String formatDateUTC(Date date) { + return (date != null) ? getDateFormat().format(date) : null; + } + + public static Date parseDateUTC(String dateStr) { + if (!validate(dateStr)) { + throw new IllegalArgumentException(dateStr + " is not a valid UTC string"); + } + try { + return getDateFormat().parse(dateStr); + } catch (ParseException e) { + throw new RuntimeException(e); + } + } + + public static String formatDateUTCToISO8601(final String dateString, final String dateStringFormat) { + + try { + DateFormat dateFormat = new SimpleDateFormat(dateStringFormat.substring(0, dateString.length())); + dateFormat.setTimeZone(TimeZone.getTimeZone("UTC")); + return DateTimeHelper.formatDateUTC(dateFormat.parse(dateString)); + } catch (ParseException e) { + throw new RuntimeException(e); + } + } + + /** + * Validate date format with regular expression. + * + * @param date date string to validate + * @return true if the date format is valid, false otherwise + */ + public static boolean validate(final String date) { + + Matcher matcher = PATTERN.matcher(date); + + if (matcher.matches()) { + + matcher.reset(); + + if (matcher.find()) { + + int year = Integer.parseInt(matcher.group(1)); + String month = matcher.group(2); + String day = matcher.group(3); + + if (day.equals("31") + && (month.equals("4") || month.equals("6") + || month.equals("9") || month.equals("11") + || month.equals("04") || month.equals("06") || month.equals("09"))) { + return false; // only months 1,3,5,7,8,10,12 have 31 days + } else if (month.equals("2") || month.equals("02")) { + // leap year + if (year % 4 == 0) { + return !(day.equals("30") || day.equals("31")); + } else { + return !(day.equals("29") || day.equals("30") || day.equals("31")); + } + } else { + return true; + } + } else { + return false; + } + } else { + return false; + } + } +} \ No newline at end of file diff --git a/common/src/main/java/org/apache/hadoop/metadata/util/ReflectionUtils.java b/common/src/main/java/org/apache/hadoop/metadata/util/ReflectionUtils.java new file mode 100644 index 0000000..c9673d8 --- /dev/null +++ b/common/src/main/java/org/apache/hadoop/metadata/util/ReflectionUtils.java @@ -0,0 +1,68 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership.
The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.hadoop.metadata.util; + +import org.apache.hadoop.metadata.MetadataException; + +import java.lang.reflect.Constructor; +import java.lang.reflect.Method; + +/** + * Helper methods for class instantiation through reflection. + */ +public final class ReflectionUtils { + + private ReflectionUtils() {} + + @SuppressWarnings("unchecked") + public static <T> T getInstanceByClassName(String clazzName) throws MetadataException { + try { + Class<T> clazz = (Class<T>) ReflectionUtils.class.getClassLoader().loadClass(clazzName); + try { + return clazz.newInstance(); + } catch (IllegalAccessException e) { + Method method = clazz.getMethod("get"); + return (T) method.invoke(null); + } + } catch (Exception e) { + throw new MetadataException("Unable to get instance for " + clazzName, e); + } + } + + /** + * Invokes constructor with one argument. + * @param clazzName - classname + * @param argCls - Class of the argument + * @param arg - constructor argument + * @param <T> - instance type + * @return Class instance + * @throws MetadataException + */ + @SuppressWarnings("unchecked") + public static <T> T getInstanceByClassName(String clazzName, Class<?> argCls, + Object arg) throws MetadataException { + try { + Class<T> clazz = (Class<T>) ReflectionUtils.class.getClassLoader().loadClass(clazzName); + Constructor<T> constructor = clazz.getConstructor(argCls); + return constructor.newInstance(arg); + } catch (Exception e) { + throw new MetadataException("Unable to get instance for " + clazzName, e); + } + } +} diff --git a/common/src/main/resources/log4j.xml b/common/src/main/resources/log4j.xml new file mode 100644 index 0000000..2c73fe8 --- /dev/null +++ b/common/src/main/resources/log4j.xml @@ -0,0 +1,63 @@ +<?xml version="1.0" encoding="UTF-8" ?> +<!-- + ~ Licensed to the Apache Software Foundation (ASF) under one + ~ or more contributor license agreements. See the NOTICE file + ~ distributed with this work for additional information + ~ regarding copyright ownership. The ASF licenses this file + ~ to you under the Apache License, Version 2.0 (the + ~ "License"); you may not use this file except in compliance + ~ with the License. You may obtain a copy of the License at + ~ + ~ http://www.apache.org/licenses/LICENSE-2.0 + ~ + ~ Unless required by applicable law or agreed to in writing, software + ~ distributed under the License is distributed on an "AS IS" BASIS, + ~ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + ~ See the License for the specific language governing permissions and + ~ limitations under the License. 
+ --> + +<!DOCTYPE log4j:configuration SYSTEM "log4j.dtd"> + +<log4j:configuration xmlns:log4j="http://jakarta.apache.org/log4j/"> + <appender name="console" class="org.apache.log4j.ConsoleAppender"> + <param name="Target" value="System.out"/> + <layout class="org.apache.log4j.PatternLayout"> + <param name="ConversionPattern" value="%d %-5p - [%t:%x] ~ %m (%c{1}:%L)%n"/> + </layout> + </appender> + + <appender name="FILE" class="org.apache.log4j.DailyRollingFileAppender"> + <param name="File" value="${user.dir}/target/logs/application.log"/> + <param name="Append" value="true"/> + <param name="Threshold" value="debug"/> + <layout class="org.apache.log4j.PatternLayout"> + <param name="ConversionPattern" value="%d %-5p - [%t:%x] ~ %m (%c{1}:%L)%n"/> + </layout> + </appender> + + <appender name="AUDIT" class="org.apache.log4j.DailyRollingFileAppender"> + <param name="File" value="${user.dir}/target/logs/audit.log"/> + <param name="Append" value="true"/> + <param name="Threshold" value="debug"/> + <layout class="org.apache.log4j.PatternLayout"> + <param name="ConversionPattern" value="%d %x %m%n"/> + </layout> + </appender> + + <logger name="org.apache.hadoop.metadata" additivity="false"> + <level value="debug"/> + <appender-ref ref="FILE"/> + </logger> + + <logger name="AUDIT"> + <level value="info"/> + <appender-ref ref="AUDIT"/> + </logger> + + <root> + <priority value="info"/> + <appender-ref ref="console"/> + </root> + +</log4j:configuration> diff --git a/common/src/test/resources/log4j.xml b/common/src/test/resources/log4j.xml new file mode 100644 index 0000000..853766d --- /dev/null +++ b/common/src/test/resources/log4j.xml @@ -0,0 +1,68 @@ +<?xml version="1.0" encoding="UTF-8" ?> +<!-- + ~ Licensed to the Apache Software Foundation (ASF) under one + ~ or more contributor license agreements. See the NOTICE file + ~ distributed with this work for additional information + ~ regarding copyright ownership. The ASF licenses this file + ~ to you under the Apache License, Version 2.0 (the + ~ "License"); you may not use this file except in compliance + ~ with the License. You may obtain a copy of the License at + ~ + ~ http://www.apache.org/licenses/LICENSE-2.0 + ~ + ~ Unless required by applicable law or agreed to in writing, software + ~ distributed under the License is distributed on an "AS IS" BASIS, + ~ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + ~ See the License for the specific language governing permissions and + ~ limitations under the License. 
+ --> + +<!DOCTYPE log4j:configuration SYSTEM "log4j.dtd"> + +<log4j:configuration xmlns:log4j="http://jakarta.apache.org/log4j/"> + <appender name="console" class="org.apache.log4j.ConsoleAppender"> + <param name="Target" value="System.out"/> + <layout class="org.apache.log4j.PatternLayout"> + <param name="ConversionPattern" value="%d %-5p - [%t:%x] ~ %m (%c{1}:%L)%n"/> + </layout> + </appender> + + <appender name="AUDIT" class="org.apache.log4j.DailyRollingFileAppender"> + <param name="File" value="${user.dir}/target/logs/audit.log"/> + <param name="Append" value="true"/> + <param name="Threshold" value="debug"/> + <layout class="org.apache.log4j.PatternLayout"> + <param name="ConversionPattern" value="%d %x %m%n"/> + </layout> + </appender> + + <appender name="METRIC" class="org.apache.log4j.DailyRollingFileAppender"> + <param name="File" value="${user.dir}/target/logs/metric.log"/> + <param name="Append" value="true"/> + <param name="Threshold" value="debug"/> + <layout class="org.apache.log4j.PatternLayout"> + <param name="ConversionPattern" value="%d %m%n"/> + </layout> + </appender> + + <logger name="AUDIT" additivity="false"> + <level value="debug"/> + <appender-ref ref="console"/> + </logger> + + <logger name="METRIC" additivity="false"> + <level value="debug"/> + <appender-ref ref="console"/> + </logger> + + <logger name="org.apache.hadoop.metadata" additivity="false"> + <level value="debug"/> + <appender-ref ref="console"/> + </logger> + + <root> + <priority value="debug"/> + <appender-ref ref="console"/> + </root> + +</log4j:configuration> diff --git a/docs/pom.xml b/docs/pom.xml new file mode 100644 index 0000000..39c26d0 --- /dev/null +++ b/docs/pom.xml @@ -0,0 +1,63 @@ +<?xml version="1.0" encoding="UTF-8"?> +<!-- + ~ Licensed to the Apache Software Foundation (ASF) under one + ~ or more contributor license agreements. See the NOTICE file + ~ distributed with this work for additional information + ~ regarding copyright ownership. The ASF licenses this file + ~ to you under the Apache License, Version 2.0 (the + ~ "License"); you may not use this file except in compliance + ~ with the License. You may obtain a copy of the License at + ~ + ~ http://www.apache.org/licenses/LICENSE-2.0 + ~ + ~ Unless required by applicable law or agreed to in writing, software + ~ distributed under the License is distributed on an "AS IS" BASIS, + ~ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + ~ See the License for the specific language governing permissions and + ~ limitations under the License. 
+ --> +<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" + xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/maven-v4_0_0.xsd"> + <modelVersion>4.0.0</modelVersion> + <parent> + <groupId>org.apache.hadoop.metadata</groupId> + <artifactId>metadata-governance</artifactId> + <version>0.1-incubating-SNAPSHOT</version> + </parent> + <artifactId>metadata-docs</artifactId> + <description>Apache Metadata Documentation</description> + <name>Apache Metadata Documentation</name> + + <properties> + <skipTests>true</skipTests> + </properties> + + <build> + <plugins> + <plugin> + <groupId>org.apache.maven.plugins</groupId> + <artifactId>maven-site-plugin</artifactId> + <dependencies> + <dependency> + <groupId>org.apache.maven.doxia</groupId> + <artifactId>doxia-module-twiki</artifactId> + <version>1.3</version> + </dependency> + </dependencies> + <executions> + <execution> + <goals> + <goal>site</goal> + </goals> + <phase>prepare-package</phase> + </execution> + </executions> + <configuration> + <generateProjectInfo>false</generateProjectInfo> + <generateReports>false</generateReports> + <skip>false</skip> + </configuration> + </plugin> + </plugins> + </build> +</project> diff --git a/docs/src/site/resources/images/accessories-text-editor.png b/docs/src/site/resources/images/accessories-text-editor.png new file mode 100644 index 0000000..abc3366 Binary files /dev/null and b/docs/src/site/resources/images/accessories-text-editor.png differ diff --git a/docs/src/site/resources/images/add.gif b/docs/src/site/resources/images/add.gif new file mode 100644 index 0000000..1cb3dbf Binary files /dev/null and b/docs/src/site/resources/images/add.gif differ diff --git a/docs/src/site/resources/images/apache-incubator-logo.png b/docs/src/site/resources/images/apache-incubator-logo.png new file mode 100644 index 0000000..81fb31e Binary files /dev/null and b/docs/src/site/resources/images/apache-incubator-logo.png differ diff --git a/docs/src/site/resources/images/apache-maven-project-2.png b/docs/src/site/resources/images/apache-maven-project-2.png new file mode 100644 index 0000000..6c096ec Binary files /dev/null and b/docs/src/site/resources/images/apache-maven-project-2.png differ diff --git a/docs/src/site/resources/images/application-certificate.png b/docs/src/site/resources/images/application-certificate.png new file mode 100644 index 0000000..cc6aff6 Binary files /dev/null and b/docs/src/site/resources/images/application-certificate.png differ diff --git a/docs/src/site/resources/images/contact-new.png b/docs/src/site/resources/images/contact-new.png new file mode 100644 index 0000000..ebc4316 Binary files /dev/null and b/docs/src/site/resources/images/contact-new.png differ diff --git a/docs/src/site/resources/images/document-properties.png b/docs/src/site/resources/images/document-properties.png new file mode 100644 index 0000000..34c2409 Binary files /dev/null and b/docs/src/site/resources/images/document-properties.png differ diff --git a/docs/src/site/resources/images/drive-harddisk.png b/docs/src/site/resources/images/drive-harddisk.png new file mode 100644 index 0000000..d7ce475 Binary files /dev/null and b/docs/src/site/resources/images/drive-harddisk.png differ diff --git a/docs/src/site/resources/images/falcon-logo.png b/docs/src/site/resources/images/falcon-logo.png new file mode 100644 index 0000000..0a9f6cf Binary files /dev/null and b/docs/src/site/resources/images/falcon-logo.png differ diff --git 
a/docs/src/site/resources/images/fix.gif b/docs/src/site/resources/images/fix.gif new file mode 100644 index 0000000..b7eb3dc Binary files /dev/null and b/docs/src/site/resources/images/fix.gif differ diff --git a/docs/src/site/resources/images/icon_error_sml.gif b/docs/src/site/resources/images/icon_error_sml.gif new file mode 100644 index 0000000..12e9a01 Binary files /dev/null and b/docs/src/site/resources/images/icon_error_sml.gif differ diff --git a/docs/src/site/resources/images/icon_help_sml.gif b/docs/src/site/resources/images/icon_help_sml.gif new file mode 100644 index 0000000..aaf20e6 Binary files /dev/null and b/docs/src/site/resources/images/icon_help_sml.gif differ diff --git a/docs/src/site/resources/images/icon_info_sml.gif b/docs/src/site/resources/images/icon_info_sml.gif new file mode 100644 index 0000000..b776326 Binary files /dev/null and b/docs/src/site/resources/images/icon_info_sml.gif differ diff --git a/docs/src/site/resources/images/icon_success_sml.gif b/docs/src/site/resources/images/icon_success_sml.gif new file mode 100644 index 0000000..0a19527 Binary files /dev/null and b/docs/src/site/resources/images/icon_success_sml.gif differ diff --git a/docs/src/site/resources/images/icon_warning_sml.gif b/docs/src/site/resources/images/icon_warning_sml.gif new file mode 100644 index 0000000..ac6ad6a Binary files /dev/null and b/docs/src/site/resources/images/icon_warning_sml.gif differ diff --git a/docs/src/site/resources/images/image-x-generic.png b/docs/src/site/resources/images/image-x-generic.png new file mode 100644 index 0000000..ab49efb Binary files /dev/null and b/docs/src/site/resources/images/image-x-generic.png differ diff --git a/docs/src/site/resources/images/internet-web-browser.png b/docs/src/site/resources/images/internet-web-browser.png new file mode 100644 index 0000000..307d6ac Binary files /dev/null and b/docs/src/site/resources/images/internet-web-browser.png differ diff --git a/docs/src/site/resources/images/logos/build-by-maven-black.png b/docs/src/site/resources/images/logos/build-by-maven-black.png new file mode 100644 index 0000000..919fd0f Binary files /dev/null and b/docs/src/site/resources/images/logos/build-by-maven-black.png differ diff --git a/docs/src/site/resources/images/logos/build-by-maven-white.png b/docs/src/site/resources/images/logos/build-by-maven-white.png new file mode 100644 index 0000000..7d44c9c Binary files /dev/null and b/docs/src/site/resources/images/logos/build-by-maven-white.png differ diff --git a/docs/src/site/resources/images/logos/maven-feather.png b/docs/src/site/resources/images/logos/maven-feather.png new file mode 100644 index 0000000..b5ada83 Binary files /dev/null and b/docs/src/site/resources/images/logos/maven-feather.png differ diff --git a/docs/src/site/resources/images/network-server.png b/docs/src/site/resources/images/network-server.png new file mode 100644 index 0000000..1d12e19 Binary files /dev/null and b/docs/src/site/resources/images/network-server.png differ diff --git a/docs/src/site/resources/images/package-x-generic.png b/docs/src/site/resources/images/package-x-generic.png new file mode 100644 index 0000000..8b7e9e6 Binary files /dev/null and b/docs/src/site/resources/images/package-x-generic.png differ diff --git a/docs/src/site/resources/images/profiles/pre-release.png b/docs/src/site/resources/images/profiles/pre-release.png new file mode 100644 index 0000000..d448e85 Binary files /dev/null and b/docs/src/site/resources/images/profiles/pre-release.png differ diff --git 
a/docs/src/site/resources/images/profiles/retired.png b/docs/src/site/resources/images/profiles/retired.png new file mode 100644 index 0000000..f89f6a2 Binary files /dev/null and b/docs/src/site/resources/images/profiles/retired.png differ diff --git a/docs/src/site/resources/images/profiles/sandbox.png b/docs/src/site/resources/images/profiles/sandbox.png new file mode 100644 index 0000000..f88b362 Binary files /dev/null and b/docs/src/site/resources/images/profiles/sandbox.png differ diff --git a/docs/src/site/resources/images/remove.gif b/docs/src/site/resources/images/remove.gif new file mode 100644 index 0000000..fc65631 Binary files /dev/null and b/docs/src/site/resources/images/remove.gif differ diff --git a/docs/src/site/resources/images/rss.png b/docs/src/site/resources/images/rss.png new file mode 100644 index 0000000..a9850ee Binary files /dev/null and b/docs/src/site/resources/images/rss.png differ diff --git a/docs/src/site/resources/images/update.gif b/docs/src/site/resources/images/update.gif new file mode 100644 index 0000000..b2a6d0b Binary files /dev/null and b/docs/src/site/resources/images/update.gif differ diff --git a/docs/src/site/resources/images/window-new.png b/docs/src/site/resources/images/window-new.png new file mode 100644 index 0000000..0e12ef9 Binary files /dev/null and b/docs/src/site/resources/images/window-new.png differ diff --git a/docs/src/site/site.xml b/docs/src/site/site.xml new file mode 100644 index 0000000..be4465f --- /dev/null +++ b/docs/src/site/site.xml @@ -0,0 +1,69 @@ +<?xml version="1.0" encoding="UTF-8"?> +<!-- + ~ Licensed to the Apache Software Foundation (ASF) under one + ~ or more contributor license agreements. See the NOTICE file + ~ distributed with this work for additional information + ~ regarding copyright ownership. The ASF licenses this file + ~ to you under the Apache License, Version 2.0 (the + ~ "License"); you may not use this file except in compliance + ~ with the License. You may obtain a copy of the License at + ~ + ~ http://www.apache.org/licenses/LICENSE-2.0 + ~ + ~ Unless required by applicable law or agreed to in writing, software + ~ distributed under the License is distributed on an "AS IS" BASIS, + ~ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + ~ See the License for the specific language governing permissions and + ~ limitations under the License. 
+ --> +<project name="Metadata and Governance" xmlns="http://maven.apache.org/DECORATION/1.3.0" + xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" + xsi:schemaLocation="http://maven.apache.org/DECORATION/1.3.0 http://maven.apache.org/xsd/decoration-1.3.0.xsd"> + + <skin> + <groupId>org.apache.maven.skins</groupId> + <artifactId>maven-fluido-skin</artifactId> + <version>1.3.0</version> + </skin> + + <custom> + <fluidoSkin> + <project>Apache Metadata and Governance</project> + <sideBarEnabled>false</sideBarEnabled> + </fluidoSkin> + </custom> + + <bannerLeft> + <name>DGC - Metadata</name> + <src>./images/metadata-logo.png</src> + <width>200px</width> + <height>45px</height> + </bannerLeft> + + <bannerRight> + <name>Apache Incubator</name> + <src>./images/apache-incubator-logo.png</src> + <href>http://incubator.apache.org</href> + </bannerRight> + + <publishDate position="right"/> + <version position="right"/> + + <body> + <head> + <script type="text/javascript"> + $( document ).ready( function() { $( '.carousel' ).carousel( { interval: 3500 } ) } ); + </script> + </head> + + <breadcrumbs position="left"> + <item name="MetadataGovernance" title="Apache Metadata and Governance" href="index.html"/> + </breadcrumbs> + + <footer> + © 2011-2012 The Apache Software Foundation. Apache Metadata and Governance, Apache, + the Apache feather logo, and the Apache Metadata and Governance project logo are + trademarks of The Apache Software Foundation. + </footer> + </body> +</project> \ No newline at end of file diff --git a/docs/src/site/twiki/index.twiki b/docs/src/site/twiki/index.twiki new file mode 100644 index 0000000..0c467ae --- /dev/null +++ b/docs/src/site/twiki/index.twiki @@ -0,0 +1,18 @@ +---+ Data Governance and Metadata platform for Hadoop + + +---++ Why? + + * + + * Captures Lineage information for data sets and processes + +---+ Getting Started + + +#LicenseInfo +---+ Licensing Information + +Metadata (DGC) is distributed under [[http://www.apache.org/licenses/LICENSE-2.0][Apache License 2.0]]. + + diff --git a/pom.xml b/pom.xml new file mode 100644 index 0000000..cc8967e --- /dev/null +++ b/pom.xml @@ -0,0 +1,772 @@ +<!-- + ~ Licensed to the Apache Software Foundation (ASF) under one + ~ or more contributor license agreements. See the NOTICE file + ~ distributed with this work for additional information + ~ regarding copyright ownership. The ASF licenses this file + ~ to you under the Apache License, Version 2.0 (the + ~ "License"); you may not use this file except in compliance + ~ with the License. You may obtain a copy of the License at + ~ + ~ http://www.apache.org/licenses/LICENSE-2.0 + ~ + ~ Unless required by applicable law or agreed to in writing, software + ~ distributed under the License is distributed on an "AS IS" BASIS, + ~ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + ~ See the License for the specific language governing permissions and + ~ limitations under the License. 
+ --> + +<project xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/maven-v4_0_0.xsd" + xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xmlns="http://maven.apache.org/POM/4.0.0"> + + <parent> + <groupId>org.apache</groupId> + <artifactId>apache</artifactId> + <version>13</version> + </parent> + + <modelVersion>4.0.0</modelVersion> + <groupId>org.apache.hadoop.metadata</groupId> + <artifactId>metadata-governance</artifactId> + <version>0.1-incubating-SNAPSHOT</version> + <description>Metadata Management and Data Governance Platform over Hadoop</description> + <name>metadata-governance</name> + <packaging>pom</packaging> + <url>http://www.apache.org/</url> + + <licenses> + <license> + <name>The Apache Software License, Version 2.0</name> + <url>http://www.apache.org/licenses/LICENSE-2.0.txt</url> + </license> + </licenses> + + <organization> + <name>Apache Software Foundation</name> + <url>http://www.apache.org</url> + </organization> + + <issueManagement> + <system>JIRA</system> + <url>https://issues.apache.org/jira/browse/XXX</url> + </issueManagement> + + <ciManagement> + <system>Jenkins</system> + <url>https://builds.apache.org/job/XXX</url> + </ciManagement> + + <inceptionYear>2014</inceptionYear> + + <properties> + <!-- platform encoding override --> + <project.build.sourceEncoding>UTF-8</project.build.sourceEncoding> + <project.reporting.outputEncoding>UTF-8</project.reporting.outputEncoding> + + <slf4j.version>1.7.7</slf4j.version> + <jetty.version>6.1.26</jetty.version> + <jersey.version>1.9</jersey.version> + <blueprints-core.version>2.5.0</blueprints-core.version> + <titan.version>0.5.2</titan.version> + </properties> + + <profiles> + <profile> + <id>hadoop-2</id> + <activation> + <activeByDefault>true</activeByDefault> + </activation> + <properties> + <hadoop.version>2.5.0</hadoop.version> + </properties> + <dependencyManagement> + <dependencies> + <dependency> + <groupId>org.apache.hadoop</groupId> + <artifactId>hadoop-client</artifactId> + <version>${hadoop.version}</version> + <scope>provided</scope> + <exclusions> + <exclusion> + <groupId>com.sun.jersey</groupId> + <artifactId>jersey-server</artifactId> + </exclusion> + <exclusion> + <groupId>com.sun.jersey</groupId> + <artifactId>jersey-core</artifactId> + </exclusion> + <exclusion> + <groupId>com.sun.jersey</groupId> + <artifactId>jersey-json</artifactId> + </exclusion> + <exclusion> + <groupId>org.glassfish</groupId> + <artifactId>javax.servlet</artifactId> + </exclusion> + <exclusion> + <groupId>org.apache.hadoop</groupId> + <artifactId>hadoop-annotations</artifactId> + </exclusion> + </exclusions> + </dependency> + <dependency> + <groupId>org.apache.hadoop</groupId> + <artifactId>hadoop-hdfs</artifactId> + <version>${hadoop.version}</version> + <scope>provided</scope> + <exclusions> + <exclusion> + <groupId>org.apache.hadoop</groupId> + <artifactId>hadoop-annotations</artifactId> + </exclusion> + </exclusions> + </dependency> + + <dependency> + <groupId>org.apache.hadoop</groupId> + <artifactId>hadoop-common</artifactId> + <version>${hadoop.version}</version> + <scope>provided</scope> + <exclusions> + <exclusion> + <groupId>org.apache.hadoop</groupId> + <artifactId>hadoop-annotations</artifactId> + </exclusion> + </exclusions> + </dependency> + + <dependency> + <groupId>org.apache.hadoop</groupId> + <artifactId>hadoop-mapreduce-client-common</artifactId> + <version>${hadoop.version}</version> + <scope>provided</scope> + <exclusions> + <exclusion> + <groupId>org.apache.hadoop</groupId> + 
<artifactId>hadoop-annotations</artifactId> + </exclusion> + </exclusions> + </dependency> + + <dependency> + <groupId>org.apache.hadoop</groupId> + <artifactId>hadoop-hdfs</artifactId> + <version>${hadoop.version}</version> + <classifier>tests</classifier> + <scope>test</scope> + </dependency> + + <dependency> + <groupId>org.apache.hadoop</groupId> + <artifactId>hadoop-common</artifactId> + <version>${hadoop.version}</version> + <classifier>tests</classifier> + <scope>test</scope> + </dependency> + + <dependency> + <groupId>org.apache.hadoop</groupId> + <artifactId>hadoop-yarn-server-resourcemanager</artifactId> + <version>${hadoop.version}</version> + <scope>provided</scope> + </dependency> + + <dependency> + <groupId>org.apache.hadoop</groupId> + <artifactId>hadoop-yarn-server-nodemanager</artifactId> + <version>${hadoop.version}</version> + <scope>provided</scope> + </dependency> + + <dependency> + <groupId>org.apache.hadoop</groupId> + <artifactId>hadoop-auth</artifactId> + <version>${hadoop.version}</version> + <scope>provided</scope> + <exclusions> + <exclusion> + <groupId>org.apache.hadoop</groupId> + <artifactId>hadoop-annotations</artifactId> + </exclusion> + </exclusions> + </dependency> + + <dependency> + <groupId>org.apache.hadoop</groupId> + <artifactId>hadoop-distcp</artifactId> + <version>${hadoop.version}</version> + <scope>provided</scope> + </dependency> + </dependencies> + </dependencyManagement> + </profile> + + <profile> + <id>test-patch</id> + <build> + <plugins> + <plugin> + <groupId>org.apache.rat</groupId> + <artifactId>apache-rat-plugin</artifactId> + <configuration> + <useDefaultExcludes>true</useDefaultExcludes> + <useMavenDefaultExcludes>true</useMavenDefaultExcludes> + <useIdeaDefaultExcludes>true</useIdeaDefaultExcludes> + <useEclipseDefaultExcludes>true</useEclipseDefaultExcludes> + <excludeSubProjects>true</excludeSubProjects> + <excludes> + <exclude>*.txt</exclude> + <exclude>**/*.txt</exclude> + <exclude>.git/**</exclude> + <exclude>**/.idea/**</exclude> + <exclude>**/*.twiki</exclude> + <exclude>**/*.iml</exclude> + <exclude>**/target/**</exclude> + <exclude>**/activemq-data/**</exclude> + <exclude>**/build/**</exclude> + <exclude>**/*.patch</exclude> + <exclude>derby.log</exclude> + <exclude>**/logs/**</exclude> + <exclude>**/.classpath</exclude> + <exclude>**/.project</exclude> + <exclude>**/.settings/**</exclude> + <exclude>**/test-output/**</exclude> + <exclude>**/data.txt</exclude> + <exclude>**/maven-eclipse.xml</exclude> + <exclude>**/.externalToolBuilders/**</exclude> + </excludes> + </configuration> + <executions> + <execution> + <id>rat-check</id> + <goals> + <goal>check</goal> + </goals> + <phase>verify</phase> + </execution> + </executions> + </plugin> + + <plugin> + <groupId>org.apache.maven.plugins</groupId> + <artifactId>maven-checkstyle-plugin</artifactId> +<!-- + <dependencies> + <dependency> + <groupId>org.apache.hadoop.metadata</groupId> + <artifactId>checkstyle</artifactId> + <version>${project.version}</version> + </dependency> + </dependencies> +--> + <executions> + <execution> + <id>checkstyle-check</id> + <goals> + <goal>check</goal> + </goals> + <phase>verify</phase> + <configuration> + <consoleOutput>true</consoleOutput> + <includeTestSourceDirectory>true</includeTestSourceDirectory> + <configLocation>build/checkstyle.xml</configLocation> + <headerLocation>build/checkstyle-java-header.txt</headerLocation> + <failOnViolation>true</failOnViolation> + </configuration> + </execution> + </executions> + </plugin> + + <plugin> + 
<groupId>org.codehaus.mojo</groupId> + <artifactId>findbugs-maven-plugin</artifactId> + <configuration> + <!--debug>true</debug --> + <xmlOutput>true</xmlOutput> + <excludeFilterFile>${basedir}/../build/checkstyle/findbugs-exclude.xml</excludeFilterFile> + <failOnError>true</failOnError> + </configuration> + <executions> + <execution> + <id>findbugs-check</id> + <goals> + <goal>check</goal> + </goals> + <phase>verify</phase> + </execution> + </executions> + </plugin> + <!-- Source code metrics: mvn javancss:report or mvn site --> + <plugin> + <groupId>org.codehaus.mojo</groupId> + <artifactId>javancss-maven-plugin</artifactId> + </plugin> + </plugins> + </build> + <properties> + <excluded.test.groups/> + </properties> + </profile> + </profiles> + + <modules> + <module>common</module> + <module>repository</module> + <module>webapp</module> + <module>docs</module> + </modules> + + <repositories> + <repository> + <id>central</id> + <url>http://repo1.maven.org/maven2</url> + <snapshots> + <enabled>false</enabled> + </snapshots> + </repository> + <repository> + <id>hortonworks.repo</id> + <url>http://repo.hortonworks.com/content/repositories/releases</url> + <name>Hortonworks Repo</name> + <snapshots> + <enabled>false</enabled> + </snapshots> + </repository> + <repository> + <id>Codehaus repository</id> + <url>http://repository.codehaus.org/</url> + <snapshots> + <enabled>false</enabled> + </snapshots> + </repository> + <repository> + <id>apache.snapshots.repo</id> + <url>https://repository.apache.org/content/groups/snapshots</url> + <name>Apache Snapshots Repository</name> + <snapshots> + <enabled>true</enabled> + </snapshots> + </repository> + <repository> + <id>apache-staging</id> + <url>https://repository.apache.org/content/groups/staging/</url> + </repository> + <repository> + <id>default</id> + <url>https://repository.apache.org/content/groups/public/</url> + </repository> + <repository> + <id>java.net-Public</id> + <name>Maven Java Net Snapshots and Releases</name> + <url>https://maven.java.net/content/groups/public/</url> + </repository> + <repository> + <id>repository.jboss.org-public</id> + <name>JBoss repository</name> + <url>https://repository.jboss.org/nexus/content/groups/public</url> + </repository> + </repositories> + + <dependencyManagement> + <dependencies> + <!-- Logging --> + <dependency> + <groupId>org.slf4j</groupId> + <artifactId>slf4j-api</artifactId> + <version>${slf4j.version}</version> + </dependency> + + <dependency> + <groupId>org.slf4j</groupId> + <artifactId>slf4j-log4j12</artifactId> + <version>${slf4j.version}</version> + </dependency> + + <dependency> + <groupId>log4j</groupId> + <artifactId>log4j</artifactId> + <version>1.2.17</version> + <scope>compile</scope> + <exclusions> + <exclusion> + <groupId>com.sun.jdmk</groupId> + <artifactId>jmxtools</artifactId> + </exclusion> + <exclusion> + <groupId>com.sun.jmx</groupId> + <artifactId>jmxri</artifactId> + </exclusion> + <exclusion> + <groupId>javax.mail</groupId> + <artifactId>mail</artifactId> + </exclusion> + <exclusion> + <groupId>javax.jms</groupId> + <artifactId>jmx</artifactId> + </exclusion> + <exclusion> + <groupId>javax.jms</groupId> + <artifactId>jms</artifactId> + </exclusion> + </exclusions> + </dependency> + + <!-- commons --> + <dependency> + <groupId>commons-configuration</groupId> + <artifactId>commons-configuration</artifactId> + <version>1.10</version> + </dependency> + + <dependency> + <groupId>commons-cli</groupId> + <artifactId>commons-cli</artifactId> + <version>1.2</version> + 
</dependency> + + <dependency> + <groupId>commons-el</groupId> + <artifactId>commons-el</artifactId> + <version>1.0</version> + </dependency> + + <!-- utilities --> + <dependency> + <groupId>com.google.inject</groupId> + <artifactId>guice</artifactId> + <version>3.0</version> + </dependency> + + <dependency> + <groupId>joda-time</groupId> + <artifactId>joda-time</artifactId> + <version>2.5</version> + </dependency> + + <!-- Jersey --> + <dependency> + <groupId>com.sun.jersey</groupId> + <artifactId>jersey-client</artifactId> + <version>${jersey.version}</version> + </dependency> + + <dependency> + <groupId>com.sun.jersey</groupId> + <artifactId>jersey-json</artifactId> + <version>${jersey.version}</version> + </dependency> + + <dependency> + <groupId>com.sun.jersey</groupId> + <artifactId>jersey-server</artifactId> + <version>${jersey.version}</version> + </dependency> + + <dependency> + <groupId>com.sun.jersey</groupId> + <artifactId>jersey-core</artifactId> + <version>${jersey.version}</version> + </dependency> + + <dependency> + <groupId>javax.servlet.jsp</groupId> + <artifactId>jsp-api</artifactId> + <version>2.0</version> + </dependency> + + <!-- JSON --> + <dependency> + <groupId>org.codehaus.jackson</groupId> + <artifactId>jackson-core-asl</artifactId> + <version>1.5.2</version> + </dependency> + + <dependency> + <groupId>org.codehaus.jackson</groupId> + <artifactId>jackson-mapper-asl</artifactId> + <version>1.5.2</version> + </dependency> + + <dependency> + <groupId>org.codehaus.jettison</groupId> + <artifactId>jettison</artifactId> + <version>1.3</version> + </dependency> + + <dependency> + <groupId>com.googlecode.json-simple</groupId> + <artifactId>json-simple</artifactId> + <version>1.1.1</version> + <exclusions> + <exclusion> + <groupId>junit</groupId> + <artifactId>junit</artifactId> + </exclusion> + </exclusions> + </dependency> + + <!-- Jetty --> + <dependency> + <groupId>org.mortbay.jetty</groupId> + <artifactId>jetty</artifactId> + <version>${jetty.version}</version> + <scope>compile</scope> + </dependency> + + <dependency> + <groupId>org.mortbay.jetty</groupId> + <artifactId>jetty-plus</artifactId> + <version>${jetty.version}</version> + <scope>compile</scope> + </dependency> + + <dependency> + <groupId>net.sourceforge.findbugs</groupId> + <artifactId>annotations</artifactId> + <version>1.3.2</version> + </dependency> + + <!-- Graph DB --> + <dependency> + <groupId>com.tinkerpop.blueprints</groupId> + <artifactId>blueprints-core</artifactId> + <version>${blueprints-core.version}</version> + </dependency> + + <dependency> + <groupId>com.thinkaurelius.titan</groupId> + <artifactId>titan-core</artifactId> + <version>${titan.version}</version> + <exclusions> + <!-- rexster does not work with servlet-api --> + <exclusion> + <groupId>com.tinkerpop.rexster</groupId> + <artifactId>rexster-core</artifactId> + </exclusion> + <exclusion> + <groupId>com.tinkerpop.rexster</groupId> + <artifactId>rexster-server</artifactId> + </exclusion> + <!-- asm 4.0 does not work with jersey asm 3.1 --> + <exclusion> + <groupId>com.tinkerpop</groupId> + <artifactId>frames</artifactId> + </exclusion> + <exclusion> + <groupId>com.esotericsoftware.reflectasm</groupId> + <artifactId>reflectasm</artifactId> + </exclusion> + <exclusion> + <groupId>org.ow2.asm</groupId> + <artifactId>asm</artifactId> + </exclusion> + </exclusions> + </dependency> + + <dependency> + <groupId>com.thinkaurelius.titan</groupId> + <artifactId>titan-berkeleyje</artifactId> + <version>${titan.version}</version> + </dependency> + 
+ <dependency> + <groupId>com.thinkaurelius.titan</groupId> + <artifactId>titan-es</artifactId> + <version>${titan.version}</version> + </dependency> + + <!-- metadata modules --> + <dependency> + <groupId>org.apache.hadoop.metadata</groupId> + <artifactId>metadata-common</artifactId> + <version>${project.version}</version> + </dependency> + + <dependency> + <groupId>org.apache.hadoop.metadata</groupId> + <artifactId>metadata-repository</artifactId> + <version>${project.version}</version> + </dependency> + + <!--Test dependencies--> + <dependency> + <groupId>org.testng</groupId> + <artifactId>testng</artifactId> + <version>6.1.1</version> + <scope>test</scope> + </dependency> + + <dependency> + <groupId>org.easymock</groupId> + <artifactId>easymock</artifactId> + <version>2.4</version> + <scope>test</scope> + </dependency> + + <dependency> + <groupId>org.mockito</groupId> + <artifactId>mockito-all</artifactId> + <version>1.8.5</version> + <scope>provided</scope> + </dependency> + </dependencies> + </dependencyManagement> + + <build> + <finalName>metadata-governance</finalName> + <pluginManagement> + <plugins> + <plugin> + <groupId>org.codehaus.mojo</groupId> + <artifactId>buildnumber-maven-plugin</artifactId> + <version>1.0</version> + </plugin> + + <plugin> + <groupId>org.apache.maven.plugins</groupId> + <artifactId>maven-compiler-plugin</artifactId> + <version>2.3.2</version> + </plugin> + + <plugin> + <groupId>org.apache.maven.plugins</groupId> + <artifactId>maven-source-plugin</artifactId> + <version>2.2.1</version> + </plugin> + + <plugin> + <groupId>org.apache.maven.plugins</groupId> + <artifactId>maven-javadoc-plugin</artifactId> + <version>2.8.1</version> + </plugin> + + <plugin> + <groupId>org.apache.maven.plugins</groupId> + <artifactId>maven-surefire-plugin</artifactId> + <version>2.16</version> + </plugin> + + <plugin> + <groupId>org.apache.maven.plugins</groupId> + <artifactId>maven-failsafe-plugin</artifactId> + <version>2.16</version> + </plugin> + + <plugin> + <groupId>org.apache.maven.plugins</groupId> + <artifactId>maven-deploy-plugin</artifactId> + <version>2.7</version> + </plugin> + + <plugin> + <groupId>org.apache.maven.plugins</groupId> + <artifactId>maven-war-plugin</artifactId> + <version>2.1.1</version> + </plugin> + + <plugin> + <groupId>org.apache.rat</groupId> + <artifactId>apache-rat-plugin</artifactId> + <version>0.7</version> + </plugin> + + <plugin> + <groupId>org.apache.maven.plugins</groupId> + <artifactId>maven-checkstyle-plugin</artifactId> + <version>2.9.1</version> + </plugin> + + <plugin> + <groupId>org.apache.maven.plugins</groupId> + <artifactId>maven-site-plugin</artifactId> + <version>3.2</version> + </plugin> + + <plugin> + <groupId>org.codehaus.mojo</groupId> + <artifactId>findbugs-maven-plugin</artifactId> + <version>2.5.2</version> + </plugin> + <!-- Source code metrics: mvn javancss:report or mvn site --> + <plugin> + <groupId>org.codehaus.mojo</groupId> + <artifactId>javancss-maven-plugin</artifactId> + <version>2.0</version> + </plugin> + + <plugin> + <groupId>org.apache.maven.plugins</groupId> + <artifactId>maven-dependency-plugin</artifactId> + <version>2.8</version> + </plugin> + </plugins> + </pluginManagement> + <plugins> + <plugin> + <groupId>org.apache.maven.plugins</groupId> + <artifactId>maven-compiler-plugin</artifactId> + <version>3.1</version> + <configuration> + <source>1.7</source> + <target>1.7</target> + <optimize>true</optimize> + </configuration> + </plugin> + + <plugin> + <groupId>org.apache.maven.plugins</groupId> + 
<artifactId>maven-surefire-plugin</artifactId> + <version>2.7.2</version> + <configuration> + <systemPropertyVariables> + <tapestry.execution-mode>Qa</tapestry.execution-mode> + </systemPropertyVariables> + </configuration> + </plugin> + + <plugin> + <groupId>org.apache.maven.plugins</groupId> + <artifactId>maven-failsafe-plugin</artifactId> + <version>2.16</version> + <configuration> + <redirectTestOutputToFile>true</redirectTestOutputToFile> + <forkMode>always</forkMode> + <argLine>-Djava.security.krb5.realm= -Djava.security.krb5.kdc= + -Dhadoop.tmp.dir=${project.build.directory}/tmp-hadoop-${user.name}</argLine> + <excludedGroups>${excluded.test.groups}</excludedGroups> + </configuration> + <executions> + <execution> + <id>integration-test</id> + <goals> + <goal>integration-test</goal> + </goals> + </execution> + <execution> + <id>verify</id> + <goals> + <goal>verify</goal> + </goals> + </execution> + </executions> + </plugin> + + <!-- Run the application using "mvn jetty:run" --> + <plugin> + <groupId>org.mortbay.jetty</groupId> + <artifactId>maven-jetty-plugin</artifactId> + <version>6.1.16</version> + <configuration> + <!-- Log to the console. --> + <requestLog implementation="org.mortbay.jetty.NCSARequestLog"> + <!-- This doesn't do anything for Jetty, but is a workaround for a Maven bug + that prevents the requestLog from being set. --> + <append>true</append> + </requestLog> + </configuration> + </plugin> + </plugins> + </build> + + <reporting/> +</project> diff --git a/repository/pom.xml b/repository/pom.xml new file mode 100644 index 0000000..1a2270f --- /dev/null +++ b/repository/pom.xml @@ -0,0 +1,153 @@ +<?xml version="1.0" encoding="UTF-8"?> + +<!-- + ~ Licensed to the Apache Software Foundation (ASF) under one + ~ or more contributor license agreements. See the NOTICE file + ~ distributed with this work for additional information + ~ regarding copyright ownership. The ASF licenses this file + ~ to you under the Apache License, Version 2.0 (the + ~ "License"); you may not use this file except in compliance + ~ with the License. You may obtain a copy of the License at + ~ + ~ http://www.apache.org/licenses/LICENSE-2.0 + ~ + ~ Unless required by applicable law or agreed to in writing, software + ~ distributed under the License is distributed on an "AS IS" BASIS, + ~ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + ~ See the License for the specific language governing permissions and + ~ limitations under the License. 
+ --> + +<project xmlns="http://maven.apache.org/POM/4.0.0" + xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" + xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/maven-v4_0_0.xsd"> + + <modelVersion>4.0.0</modelVersion> + <parent> + <groupId>org.apache.hadoop.metadata</groupId> + <artifactId>metadata-governance</artifactId> + <version>0.1-incubating-SNAPSHOT</version> + </parent> + <artifactId>metadata-repository</artifactId> + <description>Apache Metadata Repository Module</description> + <name>Apache Metadata Repository</name> + <packaging>jar</packaging> + + <profiles> + <profile> + <id>hadoop-2</id> + <activation> + <activeByDefault>true</activeByDefault> + </activation> + <dependencies> + <dependency> + <groupId>org.apache.hadoop</groupId> + <artifactId>hadoop-client</artifactId> + </dependency> + <dependency> + <groupId>org.apache.hadoop</groupId> + <artifactId>hadoop-hdfs</artifactId> + </dependency> + <dependency> + <groupId>org.apache.hadoop</groupId> + <artifactId>hadoop-hdfs</artifactId> + <classifier>tests</classifier> + </dependency> + <dependency> + <groupId>org.apache.hadoop</groupId> + <artifactId>hadoop-common</artifactId> + <classifier>tests</classifier> + </dependency> + <dependency> + <groupId>org.apache.hadoop</groupId> + <artifactId>hadoop-common</artifactId> + </dependency> + </dependencies> + </profile> + </profiles> + + <dependencies> + <dependency> + <groupId>org.apache.hadoop.metadata</groupId> + <artifactId>metadata-common</artifactId> + </dependency> + + <dependency> + <groupId>org.slf4j</groupId> + <artifactId>slf4j-log4j12</artifactId> + </dependency> + + <dependency> + <groupId>org.slf4j</groupId> + <artifactId>slf4j-api</artifactId> + </dependency> + + <dependency> + <groupId>com.google.inject</groupId> + <artifactId>guice</artifactId> + </dependency> + + <dependency> + <groupId>org.codehaus.jettison</groupId> + <artifactId>jettison</artifactId> + </dependency> + + <dependency> + <groupId>com.googlecode.json-simple</groupId> + <artifactId>json-simple</artifactId> + </dependency> + + <dependency> + <groupId>com.tinkerpop.blueprints</groupId> + <artifactId>blueprints-core</artifactId> + </dependency> + + <dependency> + <groupId>com.thinkaurelius.titan</groupId> + <artifactId>titan-core</artifactId> + </dependency> + + <dependency> + <groupId>com.thinkaurelius.titan</groupId> + <artifactId>titan-berkeleyje</artifactId> + </dependency> + + <dependency> + <groupId>com.thinkaurelius.titan</groupId> + <artifactId>titan-es</artifactId> + </dependency> + + <dependency> + <groupId>org.testng</groupId> + <artifactId>testng</artifactId> + </dependency> + + <dependency> + <groupId>org.mockito</groupId> + <artifactId>mockito-all</artifactId> + </dependency> + </dependencies> + + <build> + <plugins> + <plugin> + <groupId>org.apache.maven.plugins</groupId> + <artifactId>maven-compiler-plugin</artifactId> + <configuration> + <source>1.7</source> + <target>1.7</target> + </configuration> + </plugin> + <plugin> + <groupId>org.apache.maven.plugins</groupId> + <artifactId>maven-jar-plugin</artifactId> + <version>2.4</version> + <configuration> + <excludes> + <exclude>**/log4j.xml</exclude> + </excludes> + </configuration> + </plugin> + </plugins> + </build> +</project> diff --git a/repository/src/main/java/org/apache/hadoop/metadata/RepositoryMetadataModule.java b/repository/src/main/java/org/apache/hadoop/metadata/RepositoryMetadataModule.java new file mode 100644 index 0000000..231b10f --- /dev/null +++ 
b/repository/src/main/java/org/apache/hadoop/metadata/RepositoryMetadataModule.java @@ -0,0 +1,35 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +/* + * Created by IntelliJ IDEA. + * User: seetharam + * Date: 12/1/14 + * Time: 2:21 PM + */ +package org.apache.hadoop.metadata; + +/** + * Guice module for Repository module. + */ +public class RepositoryMetadataModule extends com.google.inject.AbstractModule { + + protected void configure() { + // add configuration logic here + } +} diff --git a/repository/src/main/java/org/apache/hadoop/metadata/services/GraphBackedMetadataRepositoryService.java b/repository/src/main/java/org/apache/hadoop/metadata/services/GraphBackedMetadataRepositoryService.java new file mode 100644 index 0000000..f5d6286 --- /dev/null +++ b/repository/src/main/java/org/apache/hadoop/metadata/services/GraphBackedMetadataRepositoryService.java @@ -0,0 +1,151 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.hadoop.metadata.services; + +import com.google.common.base.Preconditions; +import com.tinkerpop.blueprints.Graph; +import com.tinkerpop.blueprints.TransactionalGraph; +import com.tinkerpop.blueprints.Vertex; +import org.apache.hadoop.metadata.service.Services; +import org.apache.hadoop.metadata.util.GraphUtils; +import org.json.simple.JSONValue; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import java.io.IOException; +import java.util.Collections; +import java.util.List; +import java.util.Map; +import java.util.UUID; + +/** + * An implementation backed by Titan Graph DB. + */ +public class GraphBackedMetadataRepositoryService implements MetadataRepositoryService { + + private static final Logger LOG = + LoggerFactory.getLogger(GraphBackedMetadataRepositoryService.class); + public static final String NAME = GraphBackedMetadataRepositoryService.class.getSimpleName(); + + private GraphService graphService; + + /** + * Name of the service. 
+ * + * @return name of the service + */ + @Override + public String getName() { + return NAME; + } + + /** + * Starts the service. This method blocks until the service has completely started. + * + * @throws Exception + */ + @Override + public void start() throws Exception { + if (Services.get().isRegistered(TitanGraphService.NAME)) { + graphService = Services.get().getService(TitanGraphService.NAME); + } else { + throw new RuntimeException("graph service is not initialized"); + } + } + + /** + * Stops the service. This method blocks until the service has completely shut down. + */ + @Override + public void stop() { + // do nothing + graphService = null; + } + + /** + * A version of stop() that is designed to be usable in Java7 closure + * clauses. + * Implementation classes MUST relay this directly to {@link #stop()} + * + * @throws java.io.IOException never + * @throws RuntimeException on any failure during the stop operation + */ + @Override + public void close() throws IOException { + stop(); + } + + private Graph getBlueprintsGraph() { + return graphService.getBlueprintsGraph(); + } + + private TransactionalGraph getTransactionalGraph() { + return graphService.getTransactionalGraph(); + } + + @Override + public String submitEntity(String entity, String entityType) { + LOG.info("adding entity={} type={}", entity, entityType); + @SuppressWarnings("unchecked") + Map<String, String> properties = (Map<String, String>) JSONValue.parse(entity); + + final String entityName = properties.get("entityName"); + Preconditions.checkNotNull(entityName, "entity name cannot be null"); + + // todo check if this is a duplicate + + final String guid = UUID.randomUUID().toString(); + final TransactionalGraph transactionalGraph = getTransactionalGraph(); + try { + transactionalGraph.rollback(); + + Vertex entityVertex = transactionalGraph.addVertex(null); + entityVertex.setProperty("guid", guid); + entityVertex.setProperty("entityName", entityName); + entityVertex.setProperty("entityType", entityType); + for (Map.Entry<String, String> entry : properties.entrySet()) { + entityVertex.setProperty(entry.getKey(), entry.getValue()); + } + } catch (Exception e) { + transactionalGraph.rollback(); + } finally { + transactionalGraph.commit(); + } + + return guid; + } + + @Override + public String getEntityDefinition(String entityName, String entityType) { + LOG.info("Retrieving entity name={} type={}", entityName, entityType); + Vertex entityVertex = GraphUtils.findVertex(getBlueprintsGraph(), entityName, entityType); + if (entityVertex == null) { + return null; + } + + Map<String, String> properties = GraphUtils.extractProperties(entityVertex); + return JSONValue.toJSONString(properties); + } + + @Override + public List<String> getEntityList(String entityType) { + LOG.info("Retrieving entity list for type={}", entityType); + return Collections.emptyList(); + } +} diff --git a/repository/src/main/java/org/apache/hadoop/metadata/services/GraphService.java b/repository/src/main/java/org/apache/hadoop/metadata/services/GraphService.java new file mode 100644 index 0000000..92654d8 --- /dev/null +++ b/repository/src/main/java/org/apache/hadoop/metadata/services/GraphService.java @@ -0,0 +1,47 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. 
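Note on the submitEntity implementation above: the catch block rolls the transaction back but the finally block then commits, and the generated guid is returned even when the write failed, so callers cannot tell a persisted entity from a swallowed error. A more conventional Blueprints pattern commits only on success and surfaces the failure; the sketch below follows that pattern (the helper class name and the wrapping RuntimeException are illustrative, not part of this patch).

import com.tinkerpop.blueprints.TransactionalGraph;
import com.tinkerpop.blueprints.Vertex;

import java.util.Map;
import java.util.UUID;

public final class EntityVertexWriter {

    private EntityVertexWriter() {
    }

    /**
     * Adds one entity vertex, committing only after every property is set
     * and rolling back (and rethrowing) on any failure.
     */
    public static String addEntityVertex(TransactionalGraph graph, String entityType,
                                         Map<String, String> properties) {
        final String guid = UUID.randomUUID().toString();
        try {
            Vertex entityVertex = graph.addVertex(null);
            entityVertex.setProperty("guid", guid);
            entityVertex.setProperty("entityType", entityType);
            for (Map.Entry<String, String> entry : properties.entrySet()) {
                entityVertex.setProperty(entry.getKey(), entry.getValue());
            }
            graph.commit();          // commit only on success
            return guid;
        } catch (Exception e) {
            graph.rollback();        // undo the partial vertex instead of committing it
            throw new RuntimeException("Unable to persist entity of type " + entityType, e);
        }
    }
}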
The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.hadoop.metadata.services; + +import com.tinkerpop.blueprints.Graph; +import com.tinkerpop.blueprints.KeyIndexableGraph; +import com.tinkerpop.blueprints.TransactionalGraph; +import org.apache.hadoop.metadata.service.Service; + +import java.util.Set; + +/** + * A Blueprints-based graph service. + */ +public interface GraphService extends Service { + + /** + * Returns a handle to the graph db. + * + * @return a handle to the graph db + */ + Graph getBlueprintsGraph(); + + KeyIndexableGraph getIndexableGraph(); + + TransactionalGraph getTransactionalGraph(); + + Set<String> getVertexIndexedKeys(); + + Set<String> getEdgeIndexedKeys(); +} diff --git a/repository/src/main/java/org/apache/hadoop/metadata/services/MetadataRepositoryService.java b/repository/src/main/java/org/apache/hadoop/metadata/services/MetadataRepositoryService.java new file mode 100644 index 0000000..7ce4e6c --- /dev/null +++ b/repository/src/main/java/org/apache/hadoop/metadata/services/MetadataRepositoryService.java @@ -0,0 +1,35 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.hadoop.metadata.services; + +import org.apache.hadoop.metadata.service.Service; + +import java.util.List; + +/** + * An interface for persisting metadata into a Blueprints-enabled graph db. + */ +public interface MetadataRepositoryService extends Service { + + String submitEntity(String entity, String entityType); + + String getEntityDefinition(String entityName, String entityType); + + List<String> getEntityList(String entityType); +} diff --git a/repository/src/main/java/org/apache/hadoop/metadata/services/TitanGraphService.java b/repository/src/main/java/org/apache/hadoop/metadata/services/TitanGraphService.java new file mode 100644 index 0000000..469d37e --- /dev/null +++ b/repository/src/main/java/org/apache/hadoop/metadata/services/TitanGraphService.java @@ -0,0 +1,170 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership.
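For callers of MetadataRepositoryService, the entity argument is a flat JSON object of string properties; the graph-backed implementation above requires an entityName key, stamps the supplied entityType onto the vertex, and returns a generated guid that also appears in the definition read back. A minimal caller sketch, assuming a repository service that has already been started, and reusing the hive-table values from the tests:

import org.apache.hadoop.metadata.services.MetadataRepositoryService;
import org.json.simple.JSONValue;

import java.util.HashMap;
import java.util.Map;

public final class EntitySubmissionExample {

    private EntitySubmissionExample() {
    }

    public static String submitHiveTable(MetadataRepositoryService repository) {
        Map<String, String> entity = new HashMap<String, String>();
        entity.put("entityName", "clicks-table");   // mandatory: submitEntity rejects a null name
        entity.put("database", "ads");               // any additional flat string properties
        entity.put("table", "clicks-table");

        // submitEntity returns the generated guid; the stored definition carries it as well
        String guid = repository.submitEntity(JSONValue.toJSONString(entity), "hive-table");
        String definition = repository.getEntityDefinition("clicks-table", "hive-table");
        System.out.println("guid=" + guid + ", definition=" + definition);
        return guid;
    }
}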
The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.hadoop.metadata.services; + +import com.thinkaurelius.titan.core.TitanFactory; +import com.thinkaurelius.titan.core.TitanGraph; +import com.tinkerpop.blueprints.Edge; +import com.tinkerpop.blueprints.Graph; +import com.tinkerpop.blueprints.KeyIndexableGraph; +import com.tinkerpop.blueprints.TransactionalGraph; +import com.tinkerpop.blueprints.Vertex; +import org.apache.commons.configuration.Configuration; +import org.apache.commons.configuration.ConfigurationException; +import org.apache.commons.configuration.PropertiesConfiguration; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import java.io.IOException; +import java.util.Iterator; +import java.util.Set; + +/** + * Default implementation for Graph service backed by Titan. + */ +public class TitanGraphService implements GraphService { + + private static final Logger LOG = LoggerFactory.getLogger(TitanGraphService.class); + public static final String NAME = TitanGraphService.class.getSimpleName(); + + /** + * Constant for the configuration property that indicates the prefix. + */ + private static final String METADATA_PREFIX = "metadata.graph."; + + private TitanGraph titanGraph; + private Set<String> vertexIndexedKeys; + private Set<String> edgeIndexedKeys; + + /** + * Name of the service. + * + * @return name of the service + */ + @Override + public String getName() { + return NAME; + } + + /** + * Starts the service. This method blocks until the service has completely started. 
+ * + * @throws Exception + */ + @Override + public void start() throws Exception { + Configuration graphConfig = getConfiguration(); + titanGraph = initializeGraphDB(graphConfig); + + createIndicesForVertexKeys(); + // todo - create Edge Cardinality Constraints + LOG.info("Initialized titanGraph db: {}", titanGraph); + + vertexIndexedKeys = getIndexableGraph().getIndexedKeys(Vertex.class); + LOG.info("Init vertex property keys: {}", vertexIndexedKeys); + + edgeIndexedKeys = getIndexableGraph().getIndexedKeys(Edge.class); + LOG.info("Init edge property keys: {}", edgeIndexedKeys); + } + + private static Configuration getConfiguration() throws ConfigurationException { + PropertiesConfiguration configProperties = + new PropertiesConfiguration("application.properties"); + + Configuration graphConfig = new PropertiesConfiguration(); + final Iterator<String> iterator = configProperties.getKeys(); + while (iterator.hasNext()) { + String key = iterator.next(); + if (key.startsWith(METADATA_PREFIX)) { + String value = (String) configProperties.getProperty(key); + key = key.substring(METADATA_PREFIX.length()); + graphConfig.setProperty(key, value); + } + } + + return graphConfig; + } + + protected TitanGraph initializeGraphDB(Configuration graphConfig) { + LOG.info("Initializing titanGraph db"); + return TitanFactory.open(graphConfig); + } + + protected void createIndicesForVertexKeys() { + if (!titanGraph.getIndexedKeys(Vertex.class).isEmpty()) { + LOG.info("Indexes already exist for titanGraph"); + return; + } + + LOG.info("Indexes do not exist, creating indexes for titanGraph"); + // todo - add index for vertex and edge property keys + } + + /** + * Stops the service. This method blocks until the service has completely shut down. + */ + @Override + public void stop() { + if (titanGraph != null) { + titanGraph.shutdown(); + } + } + + /** + * A version of stop() that is designed to be usable in Java7 closure + * clauses. + * Implementation classes MUST relay this directly to {@link #stop()} + * + * @throws java.io.IOException never + * @throws RuntimeException on any failure during the stop operation + */ + @Override + public void close() throws IOException { + stop(); + } + + @Override + public Graph getBlueprintsGraph() { + return titanGraph; + } + + @Override + public KeyIndexableGraph getIndexableGraph() { + return titanGraph; + } + + @Override + public TransactionalGraph getTransactionalGraph() { + return titanGraph; + } + + public TitanGraph getTitanGraph() { + return titanGraph; + } + + @Override + public Set<String> getVertexIndexedKeys() { + return vertexIndexedKeys; + } + + @Override + public Set<String> getEdgeIndexedKeys() { + return edgeIndexedKeys; + } +} diff --git a/repository/src/main/java/org/apache/hadoop/metadata/util/GraphUtils.java b/repository/src/main/java/org/apache/hadoop/metadata/util/GraphUtils.java new file mode 100644 index 0000000..a949c56 --- /dev/null +++ b/repository/src/main/java/org/apache/hadoop/metadata/util/GraphUtils.java @@ -0,0 +1,64 @@ +package org.apache.hadoop.metadata.util; + +import com.tinkerpop.blueprints.Direction; +import com.tinkerpop.blueprints.Edge; +import com.tinkerpop.blueprints.Graph; +import com.tinkerpop.blueprints.GraphQuery; +import com.tinkerpop.blueprints.Vertex; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import java.util.HashMap; +import java.util.Iterator; +import java.util.Map; + +/** + * Utility class for graph operations.
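TitanGraphService.getConfiguration() above strips the metadata.graph. prefix, so a property such as metadata.graph.storage.backend=berkeleyje reaches Titan as storage.backend. The sketch below opens the same kind of local graph with those keys set directly, and shows one plausible way the createIndicesForVertexKeys() todo could be completed with the Blueprints key-index API; the choice of indexed keys is an assumption based on the lookups in GraphUtils, not something this patch defines:

import com.thinkaurelius.titan.core.TitanFactory;
import com.thinkaurelius.titan.core.TitanGraph;
import com.tinkerpop.blueprints.Vertex;
import org.apache.commons.configuration.BaseConfiguration;
import org.apache.commons.configuration.Configuration;

public final class TitanBootstrapExample {

    private TitanBootstrapExample() {
    }

    public static TitanGraph openLocalGraph() {
        // Same storage keys as application.properties once the metadata.graph. prefix is removed
        Configuration conf = new BaseConfiguration();
        conf.setProperty("storage.backend", "berkeleyje");
        conf.setProperty("storage.directory", "target/data/berkeley");

        TitanGraph graph = TitanFactory.open(conf);

        // One possible shape of the "add index for vertex ... property keys" todo:
        // index the properties that GraphUtils.findVertex queries on.
        if (graph.getIndexedKeys(Vertex.class).isEmpty()) {
            graph.createKeyIndex("entityName", Vertex.class);
            graph.createKeyIndex("entityType", Vertex.class);
        }
        return graph;
    }
}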
+ */ +public final class GraphUtils { + + private static final Logger LOG = LoggerFactory.getLogger(GraphUtils.class); + + private GraphUtils() { + } + + public static Vertex findVertex(Graph blueprintsGraph, + String entityName, String entityType) { + LOG.debug("Finding vertex for: name={}, type={}", entityName, entityType); + + GraphQuery query = blueprintsGraph.query() + .has("entityName", entityName) + .has("entityType", entityType); + Iterator<Vertex> results = query.vertices().iterator(); + // returning one since name/type is unique + return results.hasNext() ? results.next() : null; + } + + public static Map<String, String> extractProperties(Vertex entityVertex) { + Map<String, String> properties = new HashMap<>(); + for (String key : entityVertex.getPropertyKeys()) { + properties.put(key, String.valueOf(entityVertex.getProperty(key))); + } + + return properties; + } + + public static String vertexString(final Vertex vertex) { + StringBuilder properties = new StringBuilder(); + for (String propertyKey : vertex.getPropertyKeys()) { + properties.append(propertyKey) + .append("=").append(vertex.getProperty(propertyKey)) + .append(", "); + } + + return "v[" + vertex.getId() + "], Properties[" + properties + "]"; + } + + public static String edgeString(final Edge edge) { + return "e[" + edge.getLabel() + "], [" + + edge.getVertex(Direction.OUT).getProperty("name") + + " -> " + edge.getLabel() + " -> " + + edge.getVertex(Direction.IN).getProperty("name") + + "]"; + } +} \ No newline at end of file diff --git a/repository/src/main/resources/log4j.xml b/repository/src/main/resources/log4j.xml new file mode 100644 index 0000000..6af85cc --- /dev/null +++ b/repository/src/main/resources/log4j.xml @@ -0,0 +1,77 @@ +<?xml version="1.0" encoding="UTF-8" ?> +<!-- + ~ Licensed to the Apache Software Foundation (ASF) under one + ~ or more contributor license agreements. See the NOTICE file + ~ distributed with this work for additional information + ~ regarding copyright ownership. The ASF licenses this file + ~ to you under the Apache License, Version 2.0 (the + ~ "License"); you may not use this file except in compliance + ~ with the License. You may obtain a copy of the License at + ~ + ~ http://www.apache.org/licenses/LICENSE-2.0 + ~ + ~ Unless required by applicable law or agreed to in writing, software + ~ distributed under the License is distributed on an "AS IS" BASIS, + ~ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + ~ See the License for the specific language governing permissions and + ~ limitations under the License. 
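GraphUtils above centralises vertex lookup by the entityName/entityType pair and provides vertexString/edgeString helpers for readable log output. A short usage sketch, assuming a graph that already holds submitted entities:

import com.tinkerpop.blueprints.Graph;
import com.tinkerpop.blueprints.Vertex;
import org.apache.hadoop.metadata.util.GraphUtils;

import java.util.Map;

public final class GraphLookupExample {

    private GraphLookupExample() {
    }

    public static void printEntity(Graph graph, String entityName, String entityType) {
        Vertex vertex = GraphUtils.findVertex(graph, entityName, entityType);
        if (vertex == null) {
            System.out.println("No vertex for name=" + entityName + ", type=" + entityType);
            return;
        }

        // vertexString renders the id and all properties; extractProperties returns them as a map
        System.out.println(GraphUtils.vertexString(vertex));
        Map<String, String> properties = GraphUtils.extractProperties(vertex);
        System.out.println("guid=" + properties.get("guid"));
    }
}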
+ --> + +<!DOCTYPE log4j:configuration SYSTEM "log4j.dtd"> + +<log4j:configuration xmlns:log4j="http://jakarta.apache.org/log4j/"> + <appender name="console" class="org.apache.log4j.ConsoleAppender"> + <param name="Target" value="System.out"/> + <layout class="org.apache.log4j.PatternLayout"> + <param name="ConversionPattern" value="%d %-5p - [%t:%x] ~ %m (%c{1}:%L)%n"/> + </layout> + </appender> + + <appender name="FILE" class="org.apache.log4j.DailyRollingFileAppender"> + <param name="File" value="${user.dir}/target/logs/application.log"/> + <param name="Append" value="true"/> + <param name="Threshold" value="debug"/> + <layout class="org.apache.log4j.PatternLayout"> + <param name="ConversionPattern" value="%d %-5p - [%t:%x] ~ %m (%c{1}:%L)%n"/> + </layout> + </appender> + + <appender name="AUDIT" class="org.apache.log4j.DailyRollingFileAppender"> + <param name="File" value="${user.dir}/target/logs/audit.log"/> + <param name="Append" value="true"/> + <param name="Threshold" value="debug"/> + <layout class="org.apache.log4j.PatternLayout"> + <param name="ConversionPattern" value="%d %x %m%n"/> + </layout> + </appender> + + <appender name="METRIC" class="org.apache.log4j.DailyRollingFileAppender"> + <param name="File" value="${user.dir}/target/logs/metric.log"/> + <param name="Append" value="true"/> + <param name="Threshold" value="debug"/> + <layout class="org.apache.log4j.PatternLayout"> + <param name="ConversionPattern" value="%d %m%n"/> + </layout> + </appender> + + <logger name="AUDIT" additivity="false"> + <level value="debug"/> + <appender-ref ref="console"/> + </logger> + + <logger name="METRIC" additivity="false"> + <level value="debug"/> + <appender-ref ref="console"/> + </logger> + + <logger name="org.apache.hadoop.metadata" additivity="false"> + <level value="debug"/> + <appender-ref ref="FILE"/> + </logger> + + <root> + <priority value="debug"/> + <appender-ref ref="console"/> + </root> + +</log4j:configuration> diff --git a/repository/src/test/java/org/apache/hadoop/metadata/services/GraphBackedMetadataRepositoryServiceTest.java b/repository/src/test/java/org/apache/hadoop/metadata/services/GraphBackedMetadataRepositoryServiceTest.java new file mode 100644 index 0000000..8958aa0 --- /dev/null +++ b/repository/src/test/java/org/apache/hadoop/metadata/services/GraphBackedMetadataRepositoryServiceTest.java @@ -0,0 +1,103 @@ +package org.apache.hadoop.metadata.services; + +import org.apache.hadoop.metadata.service.Services; +import org.json.simple.JSONValue; +import org.testng.Assert; +import org.testng.annotations.AfterClass; +import org.testng.annotations.BeforeClass; +import org.testng.annotations.Test; + +import java.util.HashMap; +import java.util.List; +import java.util.Map; + +public class GraphBackedMetadataRepositoryServiceTest { + + private static final String ENTITY_NAME = "clicks-table"; + private static final String ENTITY_TYPE = "hive-table"; + private static final String DATABASE_NAME = "ads"; + private static final String TABLE_NAME = "clicks-table"; + + private TitanGraphService titanGraphService; + private GraphBackedMetadataRepositoryService repositoryService; + + @BeforeClass + public void setUp() throws Exception { + titanGraphService = new TitanGraphService(); + titanGraphService.start(); + Services.get().register(titanGraphService); + + repositoryService = new GraphBackedMetadataRepositoryService(); + repositoryService.start(); + Services.get().register(repositoryService); + } + + @AfterClass + public void tearDown() throws Exception { + 
Services.get().getService(GraphBackedMetadataRepositoryService.NAME).close(); + Services.get().getService(TitanGraphService.NAME).close(); + Services.get().reset(); + } + + @Test + public void testGetName() throws Exception { + Assert.assertEquals(GraphBackedMetadataRepositoryService.NAME, + GraphBackedMetadataRepositoryService.class.getSimpleName()); + Assert.assertEquals(repositoryService.getName(), GraphBackedMetadataRepositoryService.NAME); + } + + @Test + public void testSubmitEntity() throws Exception { + String entityStream = getTestEntityJSON(); + String guid = repositoryService.submitEntity(entityStream, ENTITY_TYPE); + Assert.assertNotNull(guid); + } + + private String getTestEntityJSON() { + Map<String, String> props = new HashMap<>(); + props.put("entityName", ENTITY_NAME); + props.put("entityType", ENTITY_TYPE); + props.put("database", DATABASE_NAME); + props.put("table", TABLE_NAME); + return JSONValue.toJSONString(props); + } + + @Test (dependsOnMethods = "testSubmitEntity") + public void testGetEntityDefinition() throws Exception { + String entity = repositoryService.getEntityDefinition(ENTITY_NAME, ENTITY_TYPE); + Map<String, String> entityProperties = + (Map<String, String>) JSONValue.parseWithException(entity); + Assert.assertNotNull(entityProperties.get("guid")); + Assert.assertEquals(entityProperties.get("entityName"), ENTITY_NAME); + Assert.assertEquals(entityProperties.get("entityType"), ENTITY_TYPE); + Assert.assertEquals(entityProperties.get("database"), DATABASE_NAME); + Assert.assertEquals(entityProperties.get("table"), TABLE_NAME); + } + + @Test + public void testGetEntityDefinitionNonExistent() throws Exception { + String entity = repositoryService.getEntityDefinition("blah", "blah"); + Assert.assertNull(entity); + } + + @Test + public void testGetEntityList() throws Exception { + List<String> entityList = repositoryService.getEntityList(ENTITY_TYPE); + Assert.assertNotNull(entityList); + Assert.assertEquals(entityList.size(), 0); // as this is not implemented yet + } + + @Test (expectedExceptions = RuntimeException.class) + public void testStartWithOutGraphServiceRegistration() throws Exception { + try { + Services.get().reset(); + GraphBackedMetadataRepositoryService repositoryService = new + GraphBackedMetadataRepositoryService(); + repositoryService.start(); + Assert.fail("This should have thrown an exception"); + } finally { + Services.get().register(titanGraphService); + Services.get().register(repositoryService); + } + } +} diff --git a/repository/src/test/java/org/apache/hadoop/metadata/services/TitanGraphServiceTest.java b/repository/src/test/java/org/apache/hadoop/metadata/services/TitanGraphServiceTest.java new file mode 100644 index 0000000..58a1b24 --- /dev/null +++ b/repository/src/test/java/org/apache/hadoop/metadata/services/TitanGraphServiceTest.java @@ -0,0 +1,68 @@ +package org.apache.hadoop.metadata.services; + +import org.testng.Assert; +import org.testng.annotations.AfterClass; +import org.testng.annotations.BeforeClass; +import org.testng.annotations.Test; + +/** + * Unit test for TitanGraphService. 
+ */ +public class TitanGraphServiceTest { + + private TitanGraphService titanGraphService; + + @BeforeClass + public void setUp() throws Exception { + titanGraphService = new TitanGraphService(); + titanGraphService.start(); + } + + @AfterClass + public void tearDown() throws Exception { + titanGraphService.close(); + } + + @Test + public void testGetName() throws Exception { + Assert.assertEquals(TitanGraphService.NAME, TitanGraphService.class.getSimpleName()); + Assert.assertEquals(titanGraphService.getName(), TitanGraphService.NAME); + } + + @Test + public void testStart() throws Exception { + Assert.assertNotNull(titanGraphService.getBlueprintsGraph()); + } + + @Test + public void testGetBlueprintsGraph() throws Exception { + Assert.assertNotNull(titanGraphService.getBlueprintsGraph()); + } + + @Test + public void testGetIndexableGraph() throws Exception { + Assert.assertNotNull(titanGraphService.getIndexableGraph()); + } + + @Test + public void testGetTransactionalGraph() throws Exception { + Assert.assertNotNull(titanGraphService.getTransactionalGraph()); + } + + @Test + public void testGetTitanGraph() throws Exception { + Assert.assertNotNull(titanGraphService.getTitanGraph()); + } + + @Test + public void testGetVertexIndexedKeys() throws Exception { + Assert.assertNotNull(titanGraphService.getVertexIndexedKeys()); + Assert.assertEquals(titanGraphService.getVertexIndexedKeys().size(), 0); + } + + @Test + public void testGetEdgeIndexedKeys() throws Exception { + Assert.assertNotNull(titanGraphService.getEdgeIndexedKeys()); + Assert.assertEquals(titanGraphService.getEdgeIndexedKeys().size(), 0); + } +} \ No newline at end of file diff --git a/repository/src/test/resources/application.properties b/repository/src/test/resources/application.properties new file mode 100644 index 0000000..2c88054 --- /dev/null +++ b/repository/src/test/resources/application.properties @@ -0,0 +1,36 @@ +# +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# + +application.services=org.apache.hadoop.metadata.services.TitanGraphService,\ + org.apache.hadoop.metadata.services.GraphBackedMetadataRepositoryService + + +# Graph implementation +#metadata.graph.blueprints.graph=com.thinkaurelius.titan.core.TitanFactory + +# Graph Storage +metadata.graph.storage.backend=berkeleyje +metadata.graph.storage.directory=target/data/berkeley + +# Graph Search Index +metadata.graph.index.search.backend=elasticsearch +metadata.graph.index.search.directory=target/data/es +metadata.graph.index.search.elasticsearch.client-only=false +metadata.graph.index.search.elasticsearch.local-mode=true + +metadata.enableTLS=false diff --git a/webapp/pom.xml b/webapp/pom.xml new file mode 100644 index 0000000..bfb99f3 --- /dev/null +++ b/webapp/pom.xml @@ -0,0 +1,298 @@ +<?xml version="1.0" encoding="UTF-8"?> + +<!-- ~ Licensed to the Apache Software Foundation (ASF) under one ~ or more + contributor license agreements. See the NOTICE file ~ distributed with this + work for additional information ~ regarding copyright ownership. The ASF + licenses this file ~ to you under the Apache License, Version 2.0 (the ~ + "License"); you may not use this file except in compliance ~ with the License. + You may obtain a copy of the License at ~ ~ http://www.apache.org/licenses/LICENSE-2.0 + ~ ~ Unless required by applicable law or agreed to in writing, software ~ + distributed under the License is distributed on an "AS IS" BASIS, ~ WITHOUT + WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. ~ See the + License for the specific language governing permissions and ~ limitations + under the License. --> + +<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" + xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/maven-v4_0_0.xsd"> + + <modelVersion>4.0.0</modelVersion> + <parent> + <groupId>org.apache.hadoop.metadata</groupId> + <artifactId>metadata-governance</artifactId> + <version>0.1-incubating-SNAPSHOT</version> + </parent> + <artifactId>metadata-webapp</artifactId> + <description>Apache Metadata Web Application</description> + <name>Apache Metadata Web Application</name> + <packaging>war</packaging> + + <profiles> + <profile> + <id>hadoop-2</id> + <activation> + <activeByDefault>true</activeByDefault> + </activation> + <dependencies> + <dependency> + <groupId>org.apache.hadoop</groupId> + <artifactId>hadoop-client</artifactId> + <scope>test</scope> + </dependency> + </dependencies> + </profile> + </profiles> + + <dependencies> + <dependency> + <groupId>org.apache.hadoop.metadata</groupId> + <artifactId>metadata-common</artifactId> + </dependency> + + <dependency> + <groupId>org.apache.hadoop.metadata</groupId> + <artifactId>metadata-repository</artifactId> + </dependency> + + <dependency> + <groupId>joda-time</groupId> + <artifactId>joda-time</artifactId> + </dependency> + + <dependency> + <groupId>commons-configuration</groupId> + <artifactId>commons-configuration</artifactId> + </dependency> + + <dependency> + <groupId>commons-cli</groupId> + <artifactId>commons-cli</artifactId> + </dependency> + + <dependency> + <groupId>com.tinkerpop.blueprints</groupId> + <artifactId>blueprints-core</artifactId> + </dependency> + + <dependency> + <groupId>com.thinkaurelius.titan</groupId> + <artifactId>titan-core</artifactId> + </dependency> + + <dependency> + <groupId>com.googlecode.json-simple</groupId> + <artifactId>json-simple</artifactId> + </dependency> + + <dependency> + 
<groupId>javax.servlet.jsp</groupId> + <artifactId>jsp-api</artifactId> + </dependency> + + <dependency> + <groupId>org.mortbay.jetty</groupId> + <artifactId>jetty</artifactId> + </dependency> + + <dependency> + <groupId>org.mortbay.jetty</groupId> + <artifactId>jetty-plus</artifactId> + </dependency> + + <dependency> + <groupId>com.sun.jersey</groupId> + <artifactId>jersey-core</artifactId> + </dependency> + + <dependency> + <groupId>com.sun.jersey</groupId> + <artifactId>jersey-server</artifactId> + </dependency> + + <dependency> + <groupId>com.sun.jersey</groupId> + <artifactId>jersey-client</artifactId> + </dependency> + + <dependency> + <groupId>com.sun.jersey</groupId> + <artifactId>jersey-json</artifactId> + </dependency> + + <dependency> + <groupId>org.mockito</groupId> + <artifactId>mockito-all</artifactId> + </dependency> + + <dependency> + <groupId>org.testng</groupId> + <artifactId>testng</artifactId> + </dependency> + </dependencies> + + <build> + <pluginManagement> + <plugins> + <plugin> + <groupId>org.apache.maven.plugins</groupId> + <artifactId>maven-javadoc-plugin</artifactId> + <executions> + <execution> + <id>uber-javadocs</id> + <phase>site</phase> + <goals> + <goal>javadoc</goal> + <goal>jar</goal> + </goals> + <configuration> + <includeTransitiveDependencySources>false</includeTransitiveDependencySources> + <includeDependencySources>true</includeDependencySources> + <dependencySourceIncludes> + <dependencySourceInclude>org.apache.hadoop.metadata:*</dependencySourceInclude> + </dependencySourceIncludes> + </configuration> + </execution> + </executions> + </plugin> + + <plugin> + <groupId>org.apache.maven.plugins</groupId> + <artifactId>maven-war-plugin</artifactId> + <version>2.4</version> + <configuration> + <webResources> + <resource> + <directory>src/main/webapp/WEB-INF</directory> + <targetPath>WEB-INF</targetPath> + </resource> + </webResources> + </configuration> + </plugin> + + <plugin> + <artifactId>maven-resources-plugin</artifactId> + <version>2.6</version> + <executions> + <execution> + <id>copy-resources</id> + <phase>validate</phase> + <goals> + <goal>copy-resources</goal> + </goals> + <configuration> + <outputDirectory>${project.build.directory}/webapps</outputDirectory> + <resources> + <resource> + <directory>src/conf</directory> + <filtering>true</filtering> + </resource> + </resources> + </configuration> + </execution> + </executions> + </plugin> + + <plugin> + <groupId>org.codehaus.mojo</groupId> + <artifactId>keytool-maven-plugin</artifactId> + <executions> + <execution> + <phase>generate-resources</phase> + <id>clean-server</id> + <goals> + <goal>clean</goal> + </goals> + </execution> + <execution> + <phase>generate-resources</phase> + <id>server</id> + <goals> + <goal>generateKeyPair</goal> + </goals> + </execution> + </executions> + <configuration> + <dname>cn=metadata.incubator.apache.org</dname> + <keystore>${project.build.directory}/metadata.keystore</keystore> + <keypass>metadata-passwd</keypass> + <storepass>metadata-passwd</storepass> + <alias>metadata</alias> + <keyalg>RSA</keyalg> + <validity>100000</validity> + </configuration> + </plugin> + + <plugin> + <groupId>org.mortbay.jetty</groupId> + <artifactId>maven-jetty-plugin</artifactId> + <version>${jetty.version}</version> + <configuration> + <skip>${skipITs}</skip> <!--only skip int tests --> + <connectors> + <connector implementation="org.mortbay.jetty.security.SslSocketConnector"> + <port>21443</port> + <maxIdleTime>60000</maxIdleTime> + 
<keystore>${project.build.directory}/../../webapp/target/metadata.keystore</keystore> + <keyPassword>metadata-passwd</keyPassword> + <password>metadata-passwd</password> + </connector> + <connector implementation="org.mortbay.jetty.nio.SelectChannelConnector"> + <port>21000</port> + <maxIdleTime>60000</maxIdleTime> + </connector> + </connectors> + <webApp>${project.build.directory}/metadata-webapp-${project.version}</webApp> + <useTestClasspath>true</useTestClasspath> + <systemProperties> + <systemProperty> + <name>hadoop.conf.dir</name> + <value>${project.build.directory}/webapps/hadoop/conf</value> + </systemProperty> + <systemProperty> + <name>hadoop.tmp.dir</name> + <value>${project.build.directory}/tmp-hadoop-${user.name}</value> + </systemProperty> + <systemProperty> + <name>hadoop.log.dir</name> + <value>${project.build.directory}/logs</value> + </systemProperty> + <systemProperty> + <name>system.lib.location</name> + <value>${project.build.directory}/dependency</value> + </systemProperty> + <systemProperty> + <name>keystore.file</name> + <value>${project.build.directory}/../../webapp/target/metadata.keystore + </value> + </systemProperty> + <systemProperty> + <name>truststore.file</name> + <value>${project.build.directory}/../../webapp/target/metadata.keystore</value> + </systemProperty> + </systemProperties> + <stopKey>metadata-stop</stopKey> + <stopPort>41001</stopPort> + </configuration> + <executions> + <execution> + <id>start-jetty</id> + <phase>pre-integration-test</phase> + <goals> + <goal>run</goal> + </goals> + <configuration> + <daemon>true</daemon> + </configuration> + </execution> + <execution> + <id>stop-jetty</id> + <phase>post-integration-test</phase> + <goals> + <goal>stop</goal> + </goals> + </execution> + </executions> + </plugin> + </plugins> + </pluginManagement> + </build> +</project> diff --git a/webapp/src/main/java/org/apache/hadoop/metadata/Main.java b/webapp/src/main/java/org/apache/hadoop/metadata/Main.java new file mode 100644 index 0000000..5581462 --- /dev/null +++ b/webapp/src/main/java/org/apache/hadoop/metadata/Main.java @@ -0,0 +1,111 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.hadoop.metadata; + +import org.apache.commons.cli.CommandLine; +import org.apache.commons.cli.GnuParser; +import org.apache.commons.cli.Option; +import org.apache.commons.cli.Options; +import org.apache.commons.cli.ParseException; +import org.apache.commons.configuration.PropertiesConfiguration; +import org.apache.commons.lang.StringUtils; +import org.apache.hadoop.metadata.web.service.EmbeddedServer; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +/** + * Driver for running Metadata as a standalone server with embedded jetty server. 
+ */ +public final class Main { + private static final Logger LOG = LoggerFactory.getLogger(Main.class); + private static final String APP_PATH = "app"; + private static final String APP_PORT = "port"; + + /** + * Prevent users from constructing this. + */ + private Main() { + } + + private static CommandLine parseArgs(String[] args) throws ParseException { + Options options = new Options(); + Option opt; + + opt = new Option(APP_PATH, true, "Application Path"); + opt.setRequired(false); + options.addOption(opt); + + opt = new Option(APP_PORT, true, "Application Port"); + opt.setRequired(false); + options.addOption(opt); + + return new GnuParser().parse(options, args); + } + + public static void main(String[] args) throws Exception { + CommandLine cmd = parseArgs(args); + // todo: enable version for webapp + // String projectVersion = getProjectVersion(); + // String appPath = "webapp/target/metadata-webapp-" + projectVersion; + String appPath = "webapp/target/metadata-governance"; + + if (cmd.hasOption(APP_PATH)) { + appPath = cmd.getOptionValue(APP_PATH); + } + + PropertiesConfiguration configuration = new PropertiesConfiguration("application.properties"); + final String enableTLSFlag = configuration.getString("metadata.enableTLS"); + final int appPort = getApplicationPort(cmd, enableTLSFlag); + final boolean enableTLS = isTLSEnabled(enableTLSFlag, appPort); + configuration.setProperty("metadata.enableTLS", String.valueOf(enableTLS)); + + LOG.info(">>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>"); + LOG.info("Server starting with TLS ? {} on port {}", enableTLS, appPort); + LOG.info("<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<"); + EmbeddedServer server = EmbeddedServer.newServer(appPort, appPath, enableTLS); + server.start(); + } + +/* + private static String getProjectVersion() throws ConfigurationException { + PropertiesConfiguration configuration = + new PropertiesConfiguration("metadata-buildinfo.properties"); + return configuration.getString("project.version"); + } +*/ + + private static int getApplicationPort(CommandLine cmd, String enableTLSFlag) { + final int appPort; + if (cmd.hasOption(APP_PORT)) { + appPort = Integer.valueOf(cmd.getOptionValue(APP_PORT)); + } else { + // default : metadata.enableTLS is true + appPort = StringUtils.isEmpty(enableTLSFlag) + || enableTLSFlag.equals("true") ? 21443 : 21000; + } + + return appPort; + } + + private static boolean isTLSEnabled(String enableTLSFlag, int appPort) { + return Boolean.valueOf(StringUtils.isEmpty(enableTLSFlag) + ? System.getProperty("metadata.enableTLS", (appPort % 1000) == 443 ? "true" : "false") + : enableTLSFlag); + } +} diff --git a/webapp/src/main/java/org/apache/hadoop/metadata/web/errors/LoggingExceptionMapper.java b/webapp/src/main/java/org/apache/hadoop/metadata/web/errors/LoggingExceptionMapper.java new file mode 100644 index 0000000..b638ab5 --- /dev/null +++ b/webapp/src/main/java/org/apache/hadoop/metadata/web/errors/LoggingExceptionMapper.java @@ -0,0 +1,62 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. 
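The port and TLS decisions in Main interact: an explicit -port option wins; otherwise metadata.enableTLS from application.properties selects 21443 or 21000, and when that flag is absent TLS falls back to the metadata.enableTLS system property or, failing that, to whether the chosen port ends in 443. The following sketch restates that rule with a few worked cases (the helper class and method names are illustrative, not part of this patch):

import org.apache.commons.lang.StringUtils;

public final class PortAndTlsDefaults {

    private PortAndTlsDefaults() {
    }

    /** Mirrors Main.getApplicationPort: an explicit port wins, otherwise the TLS flag picks the default. */
    public static int resolvePort(String enableTLSFlag, Integer cliPort) {
        if (cliPort != null) {
            return cliPort;
        }
        return StringUtils.isEmpty(enableTLSFlag) || enableTLSFlag.equals("true") ? 21443 : 21000;
    }

    /** Mirrors Main.isTLSEnabled: flag, then system property, then "does the port end in 443". */
    public static boolean resolveTls(String enableTLSFlag, int port) {
        return Boolean.valueOf(StringUtils.isEmpty(enableTLSFlag)
                ? System.getProperty("metadata.enableTLS", (port % 1000) == 443 ? "true" : "false")
                : enableTLSFlag);
    }

    public static void main(String[] args) {
        // No flag, no -port, no system property: port 21443 with TLS on.
        System.out.println(resolvePort(null, null) + " tls=" + resolveTls(null, 21443));
        // metadata.enableTLS=false (as in the test application.properties): port 21000 with TLS off.
        System.out.println(resolvePort("false", null) + " tls=" + resolveTls("false", 21000));
        // Explicit -port 21000 with no flag: TLS off because the port does not end in 443.
        System.out.println(resolvePort(null, 21000) + " tls=" + resolveTls(null, 21000));
    }
}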
You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.hadoop.metadata.web.errors; + +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import javax.ws.rs.WebApplicationException; +import javax.ws.rs.core.Response; +import javax.ws.rs.ext.ExceptionMapper; +import java.util.concurrent.ThreadLocalRandom; + +/** + * Exception mapper for Jersey. + * @param <E> + */ +public class LoggingExceptionMapper<E extends Throwable> implements ExceptionMapper<E> { + private static final Logger LOGGER = LoggerFactory.getLogger(LoggingExceptionMapper.class); + + @Override + public Response toResponse(E exception) { + if (exception instanceof WebApplicationException) { + return ((WebApplicationException) exception).getResponse(); + } + + final long id = ThreadLocalRandom.current().nextLong(); + logException(id, exception); + return Response.serverError() + .entity(formatErrorMessage(id, exception)) + .build(); + } + + @SuppressWarnings("UnusedParameters") + protected String formatErrorMessage(long id, E exception) { + return String.format("There was an error processing your request. It has been logged (ID %016x).", id); + } + + protected void logException(long id, E exception) { + LOGGER.error(formatLogMessage(id, exception), exception); + } + + @SuppressWarnings("UnusedParameters") + protected String formatLogMessage(long id, Throwable exception) { + return String.format("Error handling a request: %016x", id); + } +} diff --git a/webapp/src/main/java/org/apache/hadoop/metadata/web/filters/AuditFilter.java b/webapp/src/main/java/org/apache/hadoop/metadata/web/filters/AuditFilter.java new file mode 100644 index 0000000..1248394 --- /dev/null +++ b/webapp/src/main/java/org/apache/hadoop/metadata/web/filters/AuditFilter.java @@ -0,0 +1,106 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.apache.hadoop.metadata.web.filters; + +import org.apache.hadoop.metadata.util.DateTimeHelper; +import org.apache.hadoop.metadata.web.util.Servlets; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import javax.servlet.Filter; +import javax.servlet.FilterChain; +import javax.servlet.FilterConfig; +import javax.servlet.ServletException; +import javax.servlet.ServletRequest; +import javax.servlet.ServletResponse; +import javax.servlet.http.HttpServletRequest; +import javax.servlet.http.HttpServletResponse; +import java.io.IOException; +import java.util.Date; +import java.util.UUID; + +/** + * This records audit information as part of the filter after processing the request + * and also introduces a UUID into request and response for tracing requests in logs. + */ +public class AuditFilter implements Filter { + + private static final Logger AUDIT_LOG = LoggerFactory.getLogger("AUDIT"); + private static final Logger LOG = LoggerFactory.getLogger(AuditFilter.class); + + @Override + public void init(FilterConfig filterConfig) throws ServletException { + LOG.info("AuditFilter initialization started"); + } + + @Override + public void doFilter(ServletRequest request, + ServletResponse response, + FilterChain filterChain) throws IOException, ServletException { + final String requestTimeISO9601 = DateTimeHelper.formatDateUTC(new Date()); + final HttpServletRequest httpRequest = (HttpServletRequest) request; + final String requestId = UUID.randomUUID().toString(); + final Thread currentThread = Thread.currentThread(); + final String oldName = currentThread.getName(); + + try { + currentThread.setName(formatName(oldName, requestId)); + filterChain.doFilter(request, response); + } finally { + recordAudit(httpRequest, requestTimeISO9601); + + // put the request id into the response so users can trace logs for this request + ((HttpServletResponse) response).setHeader(Servlets.REQUEST_ID, requestId); + currentThread.setName(oldName); + } + } + + private String formatName(String oldName, String requestId) { + return oldName + " - " + requestId; + } + + private void recordAudit(HttpServletRequest httpRequest, String whenISO9601) { + final String who = getUserFromRequest(httpRequest); + final String fromHost = httpRequest.getRemoteHost(); + final String fromAddress = httpRequest.getRemoteAddr(); + final String whatURL = Servlets.getRequestURL(httpRequest); + final String whatAddrs = httpRequest.getLocalAddr(); + + LOG.debug("Audit: {}/{} performed request {} ({}) at time {}", + who, fromAddress, whatURL, whatAddrs, whenISO9601); + audit(who, fromAddress, fromHost, whatURL, whatAddrs, whenISO9601); + } + + private String getUserFromRequest(HttpServletRequest httpRequest) { + // look for the user in the request + final String userFromRequest = Servlets.getUserFromRequest(httpRequest); + return userFromRequest == null ? 
"UNKNOWN" : userFromRequest; + } + + private void audit(String who, String fromAddress, String fromHost, String whatURL, + String whatAddrs, String whenISO9601) { + AUDIT_LOG.info("Audit: {}/{}-{} performed request {} ({}) at time {}", + who, fromAddress, fromHost, whatURL, whatAddrs, whenISO9601); + } + + @Override + public void destroy() { + // do nothing + } +} \ No newline at end of file diff --git a/webapp/src/main/java/org/apache/hadoop/metadata/web/filters/AuthenticationFilter.java b/webapp/src/main/java/org/apache/hadoop/metadata/web/filters/AuthenticationFilter.java new file mode 100644 index 0000000..cf8ae70 --- /dev/null +++ b/webapp/src/main/java/org/apache/hadoop/metadata/web/filters/AuthenticationFilter.java @@ -0,0 +1,54 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.hadoop.metadata.web.filters; + +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import javax.servlet.Filter; +import javax.servlet.FilterChain; +import javax.servlet.FilterConfig; +import javax.servlet.ServletException; +import javax.servlet.ServletRequest; +import javax.servlet.ServletResponse; +import java.io.IOException; + +/** + * This enforces authentication as part of the filter before processing the request. + * todo: Subclass of {@link org.apache.hadoop.security.authentication.server.AuthenticationFilter}. + */ +public class AuthenticationFilter implements Filter { + private static final Logger LOG = LoggerFactory.getLogger(AuthenticationFilter.class); + + @Override + public void init(FilterConfig filterConfig) throws ServletException { + LOG.info("AuthenticationFilter initialization started"); + } + + @Override + public void doFilter(ServletRequest request, ServletResponse response, FilterChain chain) + throws IOException, ServletException { + chain.doFilter(request, response); + } + + @Override + public void destroy() { + // do nothing + } +} diff --git a/webapp/src/main/java/org/apache/hadoop/metadata/web/listeners/ApplicationStartupListener.java b/webapp/src/main/java/org/apache/hadoop/metadata/web/listeners/ApplicationStartupListener.java new file mode 100644 index 0000000..ba2f7c0 --- /dev/null +++ b/webapp/src/main/java/org/apache/hadoop/metadata/web/listeners/ApplicationStartupListener.java @@ -0,0 +1,81 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. 
You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.hadoop.metadata.web.listeners; + +import org.apache.commons.configuration.ConfigurationException; +import org.apache.commons.configuration.PropertiesConfiguration; +import org.apache.hadoop.metadata.MetadataException; +import org.apache.hadoop.metadata.service.ServiceInitializer; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import javax.servlet.ServletContextEvent; +import javax.servlet.ServletContextListener; + +/** + * Listener for bootstrapping Services and configuration properties. + */ +public class ApplicationStartupListener implements ServletContextListener { + + private static final Logger LOG = LoggerFactory.getLogger(ApplicationStartupListener.class); + + private final ServiceInitializer startupServices = new ServiceInitializer(); + + @Override + public void contextInitialized(ServletContextEvent sce) { + try { + startupServices.initialize(); + showStartupInfo(); + } catch (MetadataException e) { + throw new RuntimeException("Error starting services", e); + } + } + + private void showStartupInfo() { + StringBuilder buffer = new StringBuilder(); + buffer.append("\n############################################"); + buffer.append("\n Metadata Server (STARTED) "); + buffer.append("\n############################################"); + + try { + PropertiesConfiguration configuration = new PropertiesConfiguration("application.properties"); + buffer.append(configuration.toString()); + + } catch (ConfigurationException e) { + buffer.append("*** Unable to get build info ***").append(e.getMessage()); + } + + LOG.info(buffer.toString()); + } + + @Override + public void contextDestroyed(ServletContextEvent sce) { + try { + startupServices.destroy(); + } catch (MetadataException e) { + LOG.warn("Error destroying services", e); + } + + StringBuilder buffer = new StringBuilder(); + buffer.append("\n############################################"); + buffer.append("\n Metadata Server (SHUTDOWN) "); + buffer.append("\n############################################"); + LOG.info(buffer.toString()); + } +} \ No newline at end of file diff --git a/webapp/src/main/java/org/apache/hadoop/metadata/web/params/AbstractParam.java b/webapp/src/main/java/org/apache/hadoop/metadata/web/params/AbstractParam.java new file mode 100644 index 0000000..a936d25 --- /dev/null +++ b/webapp/src/main/java/org/apache/hadoop/metadata/web/params/AbstractParam.java @@ -0,0 +1,132 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+ * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.hadoop.metadata.web.params; + +import javax.ws.rs.WebApplicationException; +import javax.ws.rs.core.MediaType; +import javax.ws.rs.core.Response; + +/** + * An abstract base class from which to build Jersey parameter classes. + * + * @param <T> the type of value wrapped by the parameter + */ +public abstract class AbstractParam<T> { + private final T value; + + /** + * Given an input value from a client, creates a parameter wrapping its parsed value. + * + * @param input an input value from a client request + */ + @SuppressWarnings({"AbstractMethodCallInConstructor", "OverriddenMethodCallDuringObjectConstruction"}) + protected AbstractParam(String input) { + try { + this.value = parse(input); + } catch (Exception e) { + throw new WebApplicationException(error(input, e)); + } + } + + /** + * Given a string representation which was unable to be parsed and the exception thrown, produce + * a {@link javax.ws.rs.core.Response} to be sent to the client. + * + * By default, generates a {@code 400 Bad Request} with a plain text entity generated by + * {@link #errorMessage(String, Exception)}. + * + * @param input the raw input value + * @param e the exception thrown while parsing {@code input} + * @return the {@link javax.ws.rs.core.Response} to be sent to the client + */ + protected Response error(String input, Exception e) { + return Response.status(getErrorStatus()) + .entity(errorMessage(input, e)) + .type(mediaType()) + .build(); + } + + /** + * Returns the media type of the error message entity. + * + * @return the media type of the error message entity + */ + protected MediaType mediaType() { + return MediaType.TEXT_PLAIN_TYPE; + } + + /** + * Given a string representation which was unable to be parsed and the exception thrown, produce + * an entity to be sent to the client. + * + * @param input the raw input value + * @param e the exception thrown while parsing {@code input} + * @return the error message to be sent the client + */ + protected String errorMessage(String input, Exception e) { + return String.format("Invalid parameter: %s (%s)", input, e.getMessage()); + } + + /** + * Given a string representation which was unable to be parsed, produce a {@link javax.ws.rs.core.Response.Status} for the + * {@link Response} to be sent to the client. + * + * @return the HTTP {@link javax.ws.rs.core.Response.Status} of the error message + */ + @SuppressWarnings("MethodMayBeStatic") + protected Response.Status getErrorStatus() { + return Response.Status.BAD_REQUEST; + } + + /** + * Given a string representation, parse it and return an instance of the parameter type. + * + * @param input the raw input + * @return {@code input}, parsed as an instance of {@code T} + * @throws Exception if there is an error parsing the input + */ + protected abstract T parse(String input) throws Exception; + + /** + * Returns the underlying value. 
+ * + * @return the underlying value + */ + public T get() { + return value; + } + + @Override + public boolean equals(Object obj) { + if (this == obj) { return true; } + if ((obj == null) || (getClass() != obj.getClass())) { return false; } + final AbstractParam<?> that = (AbstractParam<?>) obj; + return value.equals(that.value); + } + + @Override + public int hashCode() { + return value.hashCode(); + } + + @Override + public String toString() { + return value.toString(); + } +} \ No newline at end of file diff --git a/webapp/src/main/java/org/apache/hadoop/metadata/web/params/BooleanParam.java b/webapp/src/main/java/org/apache/hadoop/metadata/web/params/BooleanParam.java new file mode 100644 index 0000000..01f40fc --- /dev/null +++ b/webapp/src/main/java/org/apache/hadoop/metadata/web/params/BooleanParam.java @@ -0,0 +1,48 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.hadoop.metadata.web.params; + +/** + * A parameter encapsulating boolean values. If the query parameter value is {@code "true"}, + * regardless of case, the returned value is {@link Boolean#TRUE}. If the query parameter value is + * {@code "false"}, regardless of case, the returned value is {@link Boolean#FALSE}. All other + * values will return a {@code 400 Bad Request} response. + */ +public class BooleanParam extends AbstractParam<Boolean> { + + public BooleanParam(String input) { + super(input); + } + + @Override + protected String errorMessage(String input, Exception e) { + return '"' + input + "\" must be \"true\" or \"false\"."; + } + + @Override + protected Boolean parse(String input) throws Exception { + if ("true".equalsIgnoreCase(input)) { + return Boolean.TRUE; + } + if ("false".equalsIgnoreCase(input)) { + return Boolean.FALSE; + } + throw new Exception(); + } +} \ No newline at end of file diff --git a/webapp/src/main/java/org/apache/hadoop/metadata/web/params/DateTimeParam.java b/webapp/src/main/java/org/apache/hadoop/metadata/web/params/DateTimeParam.java new file mode 100644 index 0000000..b40ed05 --- /dev/null +++ b/webapp/src/main/java/org/apache/hadoop/metadata/web/params/DateTimeParam.java @@ -0,0 +1,38 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. 
You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.hadoop.metadata.web.params; + +import org.joda.time.DateTime; +import org.joda.time.DateTimeZone; + +/** + * A parameter encapsulating date/time values. All non-parsable values will return a {@code 400 Bad + * Request} response. All values returned are in UTC. + */ +public class DateTimeParam extends AbstractParam<DateTime> { + + public DateTimeParam(String input) { + super(input); + } + + @Override + protected DateTime parse(String input) throws Exception { + return new DateTime(input, DateTimeZone.UTC); + } +} \ No newline at end of file diff --git a/webapp/src/main/java/org/apache/hadoop/metadata/web/resources/AdminResource.java b/webapp/src/main/java/org/apache/hadoop/metadata/web/resources/AdminResource.java new file mode 100644 index 0000000..1c4b5d7 --- /dev/null +++ b/webapp/src/main/java/org/apache/hadoop/metadata/web/resources/AdminResource.java @@ -0,0 +1,82 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.hadoop.metadata.web.resources; + +import org.apache.commons.lang.StringUtils; +import org.apache.hadoop.metadata.web.util.Servlets; +import org.codehaus.jettison.json.JSONException; +import org.codehaus.jettison.json.JSONObject; + +import javax.ws.rs.GET; +import javax.ws.rs.Path; +import javax.ws.rs.Produces; +import javax.ws.rs.WebApplicationException; +import javax.ws.rs.core.MediaType; +import javax.ws.rs.core.Response; + +/** + * Jersey Resource for admin operations. + */ +@Path("admin") +public class AdminResource { + + @GET + @Path("stack") + @Produces(MediaType.TEXT_PLAIN) + public String getThreadDump() { + ThreadGroup topThreadGroup = Thread.currentThread().getThreadGroup(); + + while (topThreadGroup.getParent() != null) { + topThreadGroup = topThreadGroup.getParent(); + } + Thread[] threads = new Thread[topThreadGroup.activeCount()]; + + int nr = topThreadGroup.enumerate(threads); + StringBuilder builder = new StringBuilder(); + for (int i = 0; i < nr; i++) { + builder.append(threads[i].getName()).append("\nState: "). 
+ append(threads[i].getState()).append("\n"); + String stackTrace = StringUtils.join(threads[i].getStackTrace(), "\n"); + builder.append(stackTrace); + } + return builder.toString(); + } + + private Response version; + + @GET + @Path("version") + @Produces(MediaType.APPLICATION_JSON) + public Response getVersion() { + if (version == null) { + try { + JSONObject response = new JSONObject(); + response.put("Version", "v0.1"); // todo: get version + // todo: add hadoop version? + // response.put("Hadoop", VersionInfo.getVersion() + "-r" + VersionInfo.getRevision()); + version = Response.ok(response).build(); + } catch (JSONException e) { + throw new WebApplicationException( + Servlets.getErrorResponse(e, Response.Status.INTERNAL_SERVER_ERROR)); + } + } + + return version; + } +} diff --git a/webapp/src/main/java/org/apache/hadoop/metadata/web/resources/EntityResource.java b/webapp/src/main/java/org/apache/hadoop/metadata/web/resources/EntityResource.java new file mode 100644 index 0000000..d2e8412 --- /dev/null +++ b/webapp/src/main/java/org/apache/hadoop/metadata/web/resources/EntityResource.java @@ -0,0 +1,170 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.hadoop.metadata.web.resources; + +import com.google.common.base.Preconditions; +import org.apache.commons.io.IOUtils; +import org.apache.hadoop.metadata.service.Services; +import org.apache.hadoop.metadata.services.GraphBackedMetadataRepositoryService; +import org.apache.hadoop.metadata.services.MetadataRepositoryService; +import org.apache.hadoop.metadata.web.util.Servlets; +import org.codehaus.jettison.json.JSONObject; +import org.json.simple.JSONValue; +import org.json.simple.parser.ParseException; + +import javax.servlet.http.HttpServletRequest; +import javax.ws.rs.Consumes; +import javax.ws.rs.DELETE; +import javax.ws.rs.DefaultValue; +import javax.ws.rs.GET; +import javax.ws.rs.POST; +import javax.ws.rs.Path; +import javax.ws.rs.PathParam; +import javax.ws.rs.Produces; +import javax.ws.rs.QueryParam; +import javax.ws.rs.WebApplicationException; +import javax.ws.rs.core.Context; +import javax.ws.rs.core.MediaType; +import javax.ws.rs.core.Response; +import java.io.IOException; +import java.io.StringWriter; + +/** + * Entity management operations as REST API. 
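+ * <p/>
+ * Exposes submit, definition, validate, delete, update, status, dependencies and list
+ * operations; several of these are still stubs that return an empty 200 response.
+ * A successful submit responds with a JSON object carrying the generated id,
+ * e.g. {"GUID": "..."} (shape only; the value is assigned by the repository service).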
+ */ +@Path("entities") +public class EntityResource { + + private MetadataRepositoryService repositoryService; + + public EntityResource() { + repositoryService = Services.get().getService(GraphBackedMetadataRepositoryService.NAME); + if (repositoryService == null) { + throw new RuntimeException("graph service is not initialized"); + } + } + + @POST + @Path("submit/{entityType}") + @Consumes(MediaType.APPLICATION_JSON) + @Produces(MediaType.APPLICATION_JSON) + public Response submit(@Context HttpServletRequest request, + @PathParam("entityType") final String entityType) { + try { + final String entity = getEntity(request, entityType); + System.out.println("entity = " + entity); + validateEntity(entity, entityType); + + final String guid = repositoryService.submitEntity(entity, entityType); + JSONObject response = new JSONObject(); + response.put("GUID", guid); + + return Response.ok(response).build(); + } catch (Exception e) { + throw new WebApplicationException( + Servlets.getErrorResponse(e, Response.Status.BAD_REQUEST)); + } + } + + private String getEntity(HttpServletRequest request, + String entityType) throws IOException { + StringWriter writer = new StringWriter(); + IOUtils.copy(request.getInputStream(), writer); + return writer.toString(); + } + + private void validateEntity(String entity, String entityType) throws ParseException { + Preconditions.checkNotNull(entity, "entity cannot be null"); + Preconditions.checkNotNull(entityType, "entity type cannot be null"); + JSONValue.parseWithException(entity); + } + + @GET + @Path("definition/{guid}") + @Produces(MediaType.APPLICATION_JSON) + public Response getEntityDefinition(@PathParam("guid") String guid) { + + return Response.ok().build(); + } + + @GET + @Path("definition/{entityType}/{entityName}") + @Produces(MediaType.APPLICATION_JSON) + public Response getEntityDefinition(@PathParam("entityType") String entityType, + @PathParam("entityName") String entityName) { + final String entityDefinition = repositoryService.getEntityDefinition(entityName, entityType); + return (entityDefinition == null) + ? 
Response.status(Response.Status.NOT_FOUND).build() + : Response.ok(entityDefinition).build(); + } + + @POST + @Path("validate/{entityType}") + @Consumes(MediaType.APPLICATION_JSON) + @Produces(MediaType.APPLICATION_JSON) + public Response validate(@Context HttpServletRequest request, + @PathParam("entityType") String entityType) { + return Response.ok().build(); + } + + @DELETE + @Path("delete/{entityType}/{entityName}") + @Produces(MediaType.APPLICATION_JSON) + public Response delete( + @Context HttpServletRequest request, + @PathParam("entityType") final String entityType, + @PathParam("entityName") final String entityName) { + return Response.ok().build(); + } + + @POST + @Path("update/{entityType}/{entityName}") + @Consumes(MediaType.APPLICATION_JSON) + @Produces(MediaType.APPLICATION_JSON) + public Response update(@Context HttpServletRequest request, + @PathParam("entityType") final String entityType, + @PathParam("entityName") final String entityName) { + return Response.ok().build(); + } + + @GET + @Path("status/{entityType}/{entityName}") + @Produces(MediaType.APPLICATION_JSON) + public Response getStatus(@PathParam("entityType") String entityType, + @PathParam("entityName") String entityName) { + return Response.ok().build(); + } + + @GET + @Path("dependencies/{entityType}/{entityName}") + @Produces(MediaType.APPLICATION_JSON) + public Response getDependencies(@PathParam("entityType") String entityType, + @PathParam("entityName") String entityName) { + return Response.ok().build(); + } + + @GET + @Path("list/{entityType}") + @Produces(MediaType.APPLICATION_JSON) + public Response getEntityList(@PathParam("entityType") String entityType, + @DefaultValue("0") @QueryParam("offset") Integer offset, + @QueryParam("numResults") Integer resultsPerPage) { + return Response.ok().build(); + } +} diff --git a/webapp/src/main/java/org/apache/hadoop/metadata/web/resources/MetadataDiscoveryResource.java b/webapp/src/main/java/org/apache/hadoop/metadata/web/resources/MetadataDiscoveryResource.java new file mode 100644 index 0000000..ac0ae5b --- /dev/null +++ b/webapp/src/main/java/org/apache/hadoop/metadata/web/resources/MetadataDiscoveryResource.java @@ -0,0 +1,28 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.hadoop.metadata.web.resources; + +import javax.ws.rs.Path; + +/** + * Jersey Resource for metadata operations. 
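+ * <p/>
+ * Currently a placeholder: no discovery or search operations are implemented here yet.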
+ */ +@Path("discovery") +public class MetadataDiscoveryResource { +} diff --git a/webapp/src/main/java/org/apache/hadoop/metadata/web/resources/RexsterGraphResource.java b/webapp/src/main/java/org/apache/hadoop/metadata/web/resources/RexsterGraphResource.java new file mode 100644 index 0000000..4740c74 --- /dev/null +++ b/webapp/src/main/java/org/apache/hadoop/metadata/web/resources/RexsterGraphResource.java @@ -0,0 +1,410 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.hadoop.metadata.web.resources; + +import com.tinkerpop.blueprints.Direction; +import com.tinkerpop.blueprints.Edge; +import com.tinkerpop.blueprints.Element; +import com.tinkerpop.blueprints.Graph; +import com.tinkerpop.blueprints.Vertex; +import com.tinkerpop.blueprints.VertexQuery; +import com.tinkerpop.blueprints.util.io.graphson.GraphSONMode; +import com.tinkerpop.blueprints.util.io.graphson.GraphSONUtility; +import org.apache.commons.lang.StringUtils; +import org.apache.hadoop.metadata.service.Services; +import org.apache.hadoop.metadata.services.GraphService; +import org.apache.hadoop.metadata.services.TitanGraphService; +import org.apache.hadoop.metadata.web.util.Servlets; +import org.codehaus.jettison.json.JSONArray; +import org.codehaus.jettison.json.JSONException; +import org.codehaus.jettison.json.JSONObject; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import javax.ws.rs.DefaultValue; +import javax.ws.rs.GET; +import javax.ws.rs.Path; +import javax.ws.rs.PathParam; +import javax.ws.rs.Produces; +import javax.ws.rs.QueryParam; +import javax.ws.rs.WebApplicationException; +import javax.ws.rs.core.MediaType; +import javax.ws.rs.core.Response; +import java.util.HashMap; +import java.util.Map; +import java.util.Set; + +/** + * Jersey Resource for lineage metadata operations. + * Implements most of the GET operations of Rexster API with out the indexes. 
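+ * Supported operations include fetching a vertex by id, reading its properties, listing
+ * vertices by property key/value, walking adjacent edges or vertices by direction, and
+ * fetching a single edge by id. Reference: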
+ * https://github.com/tinkerpop/rexster/wiki/Basic-REST-API + */ +@Path("graph") +public class RexsterGraphResource { + private static final Logger LOG = LoggerFactory.getLogger(RexsterGraphResource.class); + + public static final String RESULTS = "results"; + public static final String TOTAL_SIZE = "totalSize"; + + private GraphService graphService; + + public RexsterGraphResource() { + graphService = Services.get().getService(TitanGraphService.NAME); + if (graphService == null) { + throw new WebApplicationException(Response + .status(Response.Status.INTERNAL_SERVER_ERROR) + .tag("graph service is not initialized") + .build()); + } + } + + protected Graph getGraph() { + return graphService.getBlueprintsGraph(); + } + + protected Set<String> getVertexIndexedKeys() { + return graphService.getVertexIndexedKeys(); + } + + protected Set<String> getEdgeIndexedKeys() { + return graphService.getEdgeIndexedKeys(); + } + + /** + * Get a single vertex with a unique id. + * + * GET http://host/metadata/lineage/vertices/id + * graph.getVertex(id); + */ + @GET + @Path("/vertices/{id}") + @Produces({MediaType.APPLICATION_JSON}) + public Response getVertex(@PathParam("id") final String vertexId) { + LOG.info("Get vertex for vertexId= {}", vertexId); + validateInputs("Invalid argument: vertex id passed is null or empty.", vertexId); + try { + Vertex vertex = findVertex(vertexId); + + JSONObject response = new JSONObject(); + response.put(RESULTS, GraphSONUtility.jsonFromElement( + vertex, getVertexIndexedKeys(), GraphSONMode.NORMAL)); + return Response.ok(response).build(); + } catch (JSONException e) { + throw new WebApplicationException( + Servlets.getErrorResponse(e, Response.Status.INTERNAL_SERVER_ERROR)); + } + } + + private Vertex findVertex(String vertexId) { + Vertex vertex = getGraph().getVertex(vertexId); + if (vertex == null) { + String message = "Vertex with [" + vertexId + "] cannot be found."; + LOG.info(message); + throw new WebApplicationException( + Servlets.getErrorResponse(message, Response.Status.NOT_FOUND)); + } + + return vertex; + } + + /** + * Get properties for a single vertex with a unique id. + * This is NOT a rexster API. 
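+ * Relationship properties are not included yet (see the todo in the implementation).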
+ * <p/> + * GET http://host/metadata/lineage/vertices/properties/id + */ + @GET + @Path("/vertices/properties/{id}") + @Produces({MediaType.APPLICATION_JSON}) + public Response getVertexProperties(@PathParam("id") final String vertexId, + @DefaultValue("false") @QueryParam("relationships") + final String relationships) { + LOG.info("Get vertex for vertexId= {}", vertexId); + validateInputs("Invalid argument: vertex id passed is null or empty.", vertexId); + try { + Vertex vertex = findVertex(vertexId); + + Map<String, String> vertexProperties = + getVertexProperties(vertex, Boolean.valueOf(relationships)); + + JSONObject response = new JSONObject(); + response.put(RESULTS, new JSONObject(vertexProperties)); + response.put(TOTAL_SIZE, vertexProperties.size()); + return Response.ok(response).build(); + } catch (JSONException e) { + throw new WebApplicationException( + Servlets.getErrorResponse(e, Response.Status.INTERNAL_SERVER_ERROR)); + } + } + + private Map<String, String> getVertexProperties(Vertex vertex, boolean captureRelationships) { + Map<String, String> vertexProperties = new HashMap<>(); + for (String key : vertex.getPropertyKeys()) { + vertexProperties.put(key, vertex.<String>getProperty(key)); + } + + // todo: get the properties from relationships + + return vertexProperties; + } + + /** + * Get a list of vertices matching a property key and a value. + * <p/> + * GET http://host/metadata/lineage/vertices?key=<key>&value=<value> + * graph.getVertices(key, value); + */ + @GET + @Path("/vertices") + @Produces({MediaType.APPLICATION_JSON}) + public Response getVertices(@QueryParam("key") final String key, + @QueryParam("value") final String value) { + LOG.info("Get vertices for property key= {}, value= {}", key, value); + validateInputs("Invalid argument: key or value passed is null or empty.", key, value); + try { + JSONObject response = buildJSONResponse(getGraph().getVertices(key, value)); + return Response.ok(response).build(); + + } catch (JSONException e) { + throw new WebApplicationException( + Servlets.getErrorResponse(e, Response.Status.INTERNAL_SERVER_ERROR)); + } + } + + /** + * Get a list of adjacent edges with a direction. + * + * GET http://host/metadata/lineage/vertices/id/direction + * graph.getVertex(id).get{Direction}Edges(); + * direction: {(?!outE)(?!bothE)(?!inE)(?!out)(?!both)(?!in)(?!query).+} + */ + @GET + @Path("vertices/{id}/{direction}") + @Produces({MediaType.APPLICATION_JSON}) + public Response getVertexEdges(@PathParam("id") String vertexId, + @PathParam("direction") String direction) { + LOG.info("Get vertex edges for vertexId= {}, direction= {}", vertexId, direction); + // Validate vertex id. Direction is validated in VertexQueryArguments. 
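+        // Valid direction segments: out/in/both (adjacent vertices), outE/inE/bothE (edges),
+        // outCount/inCount/bothCount (counts only), outIds/inIds/bothIds (vertex identifiers).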
+ validateInputs("Invalid argument: vertex id or direction passed is null or empty.", vertexId, direction); + try { + Vertex vertex = findVertex(vertexId); + + return getVertexEdges(vertex, direction); + + } catch (JSONException e) { + throw new WebApplicationException( + Servlets.getErrorResponse(e, Response.Status.INTERNAL_SERVER_ERROR)); + } + } + + private Response getVertexEdges(Vertex vertex, String direction) throws JSONException { + // break out the segment into the return and the direction + VertexQueryArguments queryArguments = new VertexQueryArguments(direction); + // if this is a query and the _return is "count" then we don't bother to send back the result array + boolean countOnly = queryArguments.isCountOnly(); + // what kind of data the calling client wants back (vertices, edges, count, vertex identifiers) + ReturnType returnType = queryArguments.getReturnType(); + // the query direction (both, out, in) + Direction queryDirection = queryArguments.getQueryDirection(); + + VertexQuery query = vertex.query().direction(queryDirection); + + JSONArray elementArray = new JSONArray(); + long counter = 0; + if (returnType == ReturnType.VERTICES || returnType == ReturnType.VERTEX_IDS) { + Iterable<Vertex> vertexQueryResults = query.vertices(); + for (Vertex v : vertexQueryResults) { + if (returnType.equals(ReturnType.VERTICES)) { + elementArray.put(GraphSONUtility.jsonFromElement( + v, getVertexIndexedKeys(), GraphSONMode.NORMAL)); + } else { + elementArray.put(v.getId()); + } + counter++; + } + } else if (returnType == ReturnType.EDGES) { + Iterable<Edge> edgeQueryResults = query.edges(); + for (Edge e : edgeQueryResults) { + elementArray.put(GraphSONUtility.jsonFromElement( + e, getEdgeIndexedKeys(), GraphSONMode.NORMAL)); + counter++; + } + } else if (returnType == ReturnType.COUNT) { + counter = query.count(); + } + + JSONObject response = new JSONObject(); + if (!countOnly) { + response.put(RESULTS, elementArray); + } + response.put(TOTAL_SIZE, counter); + return Response.ok(response).build(); + } + + /** + * Get a single edge with a unique id. 
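+ * Responds with 404 Not Found if no edge exists for the given id.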
+ * + * GET http://host/metadata/lineage/edges/id + * graph.getEdge(id); + */ + @GET + @Path("/edges/{id}") + @Produces({MediaType.APPLICATION_JSON}) + public Response getEdge(@PathParam("id") final String edgeId) { + LOG.info("Get vertex for edgeId= {}", edgeId); + validateInputs("Invalid argument: edge id passed is null or empty.", edgeId); + try { + Edge edge = getGraph().getEdge(edgeId); + if (edge == null) { + String message = "Edge with [" + edgeId + "] cannot be found."; + LOG.info(message); + throw new WebApplicationException(Response.status(Response.Status.NOT_FOUND) + .entity(JSONObject.quote(message)).build()); + } + + JSONObject response = new JSONObject(); + response.put(RESULTS, GraphSONUtility.jsonFromElement( + edge, getEdgeIndexedKeys(), GraphSONMode.NORMAL)); + return Response.ok(response).build(); + } catch (JSONException e) { + throw new WebApplicationException( + Servlets.getErrorResponse(e, Response.Status.INTERNAL_SERVER_ERROR)); + } + } + + private <T extends Element> JSONObject buildJSONResponse(Iterable<T> elements) throws JSONException { + JSONArray vertexArray = new JSONArray(); + long counter = 0; + for (Element element : elements) { + counter++; + vertexArray.put(GraphSONUtility.jsonFromElement( + element, getVertexIndexedKeys(), GraphSONMode.NORMAL)); + } + + JSONObject response = new JSONObject(); + response.put(RESULTS, vertexArray); + response.put(TOTAL_SIZE, counter); + + return response; + } + + private static void validateInputs(String errorMsg, String... inputs) { + for (String input : inputs) { + if (StringUtils.isEmpty(input)) { + throw new WebApplicationException(Response.status(Response.Status.BAD_REQUEST) + .entity(errorMsg) + .type("text/plain") + .build()); + } + } + } + + private enum ReturnType {VERTICES, EDGES, COUNT, VERTEX_IDS} + + public static final String OUT_E = "outE"; + public static final String IN_E = "inE"; + public static final String BOTH_E = "bothE"; + public static final String OUT = "out"; + public static final String IN = "in"; + public static final String BOTH = "both"; + public static final String OUT_COUNT = "outCount"; + public static final String IN_COUNT = "inCount"; + public static final String BOTH_COUNT = "bothCount"; + public static final String OUT_IDS = "outIds"; + public static final String IN_IDS = "inIds"; + public static final String BOTH_IDS = "bothIds"; + + /** + * Helper class for query arguments. 
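+ * Parses the direction path segment into a Blueprints Direction, the kind of result to
+ * return (vertices, edges, vertex ids or a count) and a count-only flag; any other
+ * segment results in a 400 Bad Request.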
+ */ + public static final class VertexQueryArguments { + + private final Direction queryDirection; + private final ReturnType returnType; + private final boolean countOnly; + + public VertexQueryArguments(String directionSegment) { + if (OUT_E.equals(directionSegment)) { + returnType = ReturnType.EDGES; + queryDirection = Direction.OUT; + countOnly = false; + } else if (IN_E.equals(directionSegment)) { + returnType = ReturnType.EDGES; + queryDirection = Direction.IN; + countOnly = false; + } else if (BOTH_E.equals(directionSegment)) { + returnType = ReturnType.EDGES; + queryDirection = Direction.BOTH; + countOnly = false; + } else if (OUT.equals(directionSegment)) { + returnType = ReturnType.VERTICES; + queryDirection = Direction.OUT; + countOnly = false; + } else if (IN.equals(directionSegment)) { + returnType = ReturnType.VERTICES; + queryDirection = Direction.IN; + countOnly = false; + } else if (BOTH.equals(directionSegment)) { + returnType = ReturnType.VERTICES; + queryDirection = Direction.BOTH; + countOnly = false; + } else if (BOTH_COUNT.equals(directionSegment)) { + returnType = ReturnType.COUNT; + queryDirection = Direction.BOTH; + countOnly = true; + } else if (IN_COUNT.equals(directionSegment)) { + returnType = ReturnType.COUNT; + queryDirection = Direction.IN; + countOnly = true; + } else if (OUT_COUNT.equals(directionSegment)) { + returnType = ReturnType.COUNT; + queryDirection = Direction.OUT; + countOnly = true; + } else if (BOTH_IDS.equals(directionSegment)) { + returnType = ReturnType.VERTEX_IDS; + queryDirection = Direction.BOTH; + countOnly = false; + } else if (IN_IDS.equals(directionSegment)) { + returnType = ReturnType.VERTEX_IDS; + queryDirection = Direction.IN; + countOnly = false; + } else if (OUT_IDS.equals(directionSegment)) { + returnType = ReturnType.VERTEX_IDS; + queryDirection = Direction.OUT; + countOnly = false; + } else { + throw new WebApplicationException(Response.status(Response.Status.BAD_REQUEST) + .entity(JSONObject.quote(directionSegment + " segment was invalid.")) + .build()); + } + } + + public Direction getQueryDirection() { + return queryDirection; + } + + public ReturnType getReturnType() { + return returnType; + } + + public boolean isCountOnly() { + return countOnly; + } + } +} diff --git a/webapp/src/main/java/org/apache/hadoop/metadata/web/resources/TypesResource.java b/webapp/src/main/java/org/apache/hadoop/metadata/web/resources/TypesResource.java new file mode 100644 index 0000000..f872fcd --- /dev/null +++ b/webapp/src/main/java/org/apache/hadoop/metadata/web/resources/TypesResource.java @@ -0,0 +1,58 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.apache.hadoop.metadata.web.resources; + +import javax.servlet.http.HttpServletRequest; +import javax.ws.rs.*; +import javax.ws.rs.core.Context; +import javax.ws.rs.core.MediaType; +import javax.ws.rs.core.Response; + +/** + * This class provides RESTful API for Types. + */ +@Path("types") +public class TypesResource { + + @POST + @Path("submit/{type}") + @Consumes(MediaType.APPLICATION_JSON) + @Produces(MediaType.APPLICATION_JSON) + public Response submit(@Context HttpServletRequest request, + @PathParam("type") String type) { + return Response.ok().build(); + } + + @DELETE + @Path("delete/{type}") + @Produces(MediaType.APPLICATION_JSON) + public Response delete(@Context HttpServletRequest request, + @PathParam("type") String type) { + // todo - should this be supported? + return Response.status(Response.Status.BAD_REQUEST).build(); + } + + @POST + @Path("update/{type}") + @Produces(MediaType.APPLICATION_JSON) + public Response update(@Context HttpServletRequest request, + @PathParam("type") String type) { + return Response.ok().build(); + } +} diff --git a/webapp/src/main/java/org/apache/hadoop/metadata/web/service/EmbeddedServer.java b/webapp/src/main/java/org/apache/hadoop/metadata/web/service/EmbeddedServer.java new file mode 100644 index 0000000..f516401 --- /dev/null +++ b/webapp/src/main/java/org/apache/hadoop/metadata/web/service/EmbeddedServer.java @@ -0,0 +1,84 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.hadoop.metadata.web.service; + +import org.apache.commons.configuration.ConfigurationException; +import org.apache.commons.configuration.PropertiesConfiguration; +import org.mortbay.jetty.Connector; +import org.mortbay.jetty.Server; +import org.mortbay.jetty.bio.SocketConnector; +import org.mortbay.jetty.webapp.WebAppContext; + +/** + * This class embeds a Jetty server and a connector. 
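+ * <p/>
+ * Typical usage: obtain an instance via EmbeddedServer.newServer(port, pathToWebApp, secure)
+ * and call start(), which blocks until the server is stopped; passing secure=true returns a
+ * SecureEmbeddedServer.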
+ */ +public class EmbeddedServer { + private static final int DEFAULT_BUFFER_SIZE = 16192; + + protected final Server server = new Server(); + + public EmbeddedServer(int port, String path) { + Connector connector = getConnector(port); + server.addConnector(connector); + + WebAppContext application = new WebAppContext(path, "/"); + server.setHandler(application); + } + + protected Connector getConnector(int port) { + Connector connector = new SocketConnector(); + connector.setPort(port); + connector.setHost("0.0.0.0"); + + // this is to enable large header sizes when Kerberos is enabled with AD + final Integer bufferSize = getBufferSize(); + connector.setHeaderBufferSize(bufferSize); + connector.setRequestBufferSize(bufferSize); + + return connector; + } + + private Integer getBufferSize() { + try { + PropertiesConfiguration configuration = new PropertiesConfiguration("application.properties"); + return configuration.getInt("metadata.jetty.request.buffer.size", DEFAULT_BUFFER_SIZE); + } catch (ConfigurationException e) { + // do nothing + } + + return DEFAULT_BUFFER_SIZE; + } + + public void start() throws Exception { + server.start(); + server.join(); + } + + public void stop() throws Exception { + server.stop(); + } + + public static EmbeddedServer newServer(int port, String path, boolean secure) { + if (secure) { + return new SecureEmbeddedServer(port, path); + } else { + return new EmbeddedServer(port, path); + } + } +} diff --git a/webapp/src/main/java/org/apache/hadoop/metadata/web/service/SecureEmbeddedServer.java b/webapp/src/main/java/org/apache/hadoop/metadata/web/service/SecureEmbeddedServer.java new file mode 100644 index 0000000..c26057c --- /dev/null +++ b/webapp/src/main/java/org/apache/hadoop/metadata/web/service/SecureEmbeddedServer.java @@ -0,0 +1,62 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.hadoop.metadata.web.service; + +import org.apache.commons.configuration.ConfigurationException; +import org.apache.commons.configuration.PropertiesConfiguration; +import org.mortbay.jetty.Connector; +import org.mortbay.jetty.security.SslSocketConnector; + +/** + * This is a jetty server which requires client auth via certificates. 
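+ * Keystore and truststore settings are read from application.properties, falling back to
+ * system properties and built-in defaults; client certificates are requested via
+ * want-client-auth rather than strictly required.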
+ */ +public class SecureEmbeddedServer extends EmbeddedServer { + + public SecureEmbeddedServer(int port, String path) { + super(port, path); + } + + protected Connector getConnector(int port) { + PropertiesConfiguration config = getConfiguration(); + + SslSocketConnector connector = new SslSocketConnector(); + connector.setPort(port); + connector.setHost("0.0.0.0"); + connector.setKeystore(config.getString("keystore.file", + System.getProperty("keystore.file", "conf/metadata.keystore"))); + connector.setKeyPassword(config.getString("keystore.password", + System.getProperty("keystore.password", "metadata-passwd"))); + connector.setTruststore(config.getString("truststore.file", + System.getProperty("truststore.file", "conf/metadata.keystore"))); + connector.setTrustPassword(config.getString("truststore.password", + System.getProperty("truststore.password", "metadata-passwd"))); + connector.setPassword(config.getString("password", + System.getProperty("password", "metadata-passwd"))); + connector.setWantClientAuth(true); + return connector; + } + + private PropertiesConfiguration getConfiguration() { + try { + return new PropertiesConfiguration("application.properties"); + } catch (ConfigurationException e) { + throw new RuntimeException("Unable to load configuration: application.properties"); + } + } +} \ No newline at end of file diff --git a/webapp/src/main/java/org/apache/hadoop/metadata/web/util/Servlets.java b/webapp/src/main/java/org/apache/hadoop/metadata/web/util/Servlets.java new file mode 100644 index 0000000..c86068c --- /dev/null +++ b/webapp/src/main/java/org/apache/hadoop/metadata/web/util/Servlets.java @@ -0,0 +1,105 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.hadoop.metadata.web.util; + +import org.apache.commons.lang.StringUtils; +import org.codehaus.jettison.json.JSONObject; + +import javax.servlet.http.HttpServletRequest; +import javax.ws.rs.core.MediaType; +import javax.ws.rs.core.Response; + +/** + * Utility functions for dealing with servlets. + */ +public final class Servlets { + + public static final String REQUEST_ID = "requestId"; + + private Servlets() { + /* singleton */ + } + + /** + * Returns the user of the given request. 
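+ * Resolution order: the container's remote user, then the "user.name" query parameter,
+ * then the "Remote-User" header; null is returned if none is present.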
+ * + * @param httpRequest an HTTP servlet request + * @return the user + */ + public static String getUserFromRequest(HttpServletRequest httpRequest) { + String user = httpRequest.getRemoteUser(); + if (!StringUtils.isEmpty(user)) { + return user; + } + + user = httpRequest.getParameter("user.name"); // available in query-param + if (!StringUtils.isEmpty(user)) { + return user; + } + + user = httpRequest.getHeader("Remote-User"); // backwards-compatibility + if (!StringUtils.isEmpty(user)) { + return user; + } + + return null; + } + + /** + * Returns the URI of the given request. + * + * @param httpRequest an HTTP servlet request + * @return the URI, including the query string + */ + public static String getRequestURI(HttpServletRequest httpRequest) { + final StringBuilder url = new StringBuilder(100).append(httpRequest.getRequestURI()); + if (httpRequest.getQueryString() != null) { + url.append('?').append(httpRequest.getQueryString()); + } + + return url.toString(); + } + + /** + * Returns the full URL of the given request. + * + * @param httpRequest an HTTP servlet request + * @return the full URL, including the query string + */ + public static String getRequestURL(HttpServletRequest httpRequest) { + final StringBuilder url = new StringBuilder(100).append(httpRequest.getRequestURL()); + if (httpRequest.getQueryString() != null) { + url.append('?').append(httpRequest.getQueryString()); + } + + return url.toString(); + } + + public static Response getErrorResponse(Throwable e, Response.Status status) { + return getErrorResponse(e.getMessage(), status); + } + + public static Response getErrorResponse(String message, Response.Status status) { + return Response + .status(status) + .entity(JSONObject.quote(message)) + .type(MediaType.APPLICATION_JSON) + .build(); + } +} diff --git a/webapp/src/main/resources/application.properties b/webapp/src/main/resources/application.properties new file mode 100644 index 0000000..b8186c8 --- /dev/null +++ b/webapp/src/main/resources/application.properties @@ -0,0 +1,54 @@ +# +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# + +application.services=org.apache.hadoop.metadata.services.TitanGraphService,\ + org.apache.hadoop.metadata.services.GraphBackedMetadataRepositoryService + + +######### Implementation classes ######### +## DO NOT MODIFY UNLESS SURE ABOUT CHANGE ## + +metadata.GraphService.impl=org.apache.hadoop.metadata.services.TitanGraphService +metadata.MetadataRepositoryService.impl=org.apache.hadoop.metadata.services.GraphBackedMetadataRepositoryService + +######### Implementation classes ######### + + +######### Graph Database Configs ######### +# Graph implementation +#metadata.graph.blueprints.graph=com.thinkaurelius.titan.core.TitanFactory + +# Graph Storage +metadata.graph.storage.backend=berkeleyje +metadata.graph.storage.directory=target/data/berkeley + +# Graph Search Index +metadata.graph.index.search.backend=elasticsearch +metadata.graph.index.search.directory=target/data/es +metadata.graph.index.search.elasticsearch.client-only=false +metadata.graph.index.search.elasticsearch.local-mode=true + +######### Graph Database Configs ######### + + +######### Security Properties ######### + +# SSL config +metadata.enableTLS=false + +######### Security Properties ######### diff --git a/webapp/src/main/resources/log4j.xml b/webapp/src/main/resources/log4j.xml new file mode 100644 index 0000000..19fd864 --- /dev/null +++ b/webapp/src/main/resources/log4j.xml @@ -0,0 +1,66 @@ +<?xml version="1.0" encoding="UTF-8" ?> +<!-- + ~ Licensed to the Apache Software Foundation (ASF) under one + ~ or more contributor license agreements. See the NOTICE file + ~ distributed with this work for additional information + ~ regarding copyright ownership. The ASF licenses this file + ~ to you under the Apache License, Version 2.0 (the + ~ "License"); you may not use this file except in compliance + ~ with the License. You may obtain a copy of the License at + ~ + ~ http://www.apache.org/licenses/LICENSE-2.0 + ~ + ~ Unless required by applicable law or agreed to in writing, software + ~ distributed under the License is distributed on an "AS IS" BASIS, + ~ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + ~ See the License for the specific language governing permissions and + ~ limitations under the License. 
+ --> + +<!DOCTYPE log4j:configuration SYSTEM "log4j.dtd"> + +<log4j:configuration xmlns:log4j="http://jakarta.apache.org/log4j/"> + <appender name="console" class="org.apache.log4j.ConsoleAppender"> + <param name="Target" value="System.out"/> + <layout class="org.apache.log4j.PatternLayout"> + <param name="ConversionPattern" value="%d %-5p - [%t:%x] ~ %m (%c{1}:%L)%n"/> + </layout> + </appender> + + <appender name="FILE" class="org.apache.log4j.DailyRollingFileAppender"> + <param name="File" value="${user.dir}/target/logs/application.log"/> + <param name="Append" value="true"/> + <param name="Threshold" value="debug"/> + <layout class="org.apache.log4j.PatternLayout"> + <param name="ConversionPattern" value="%d %-5p - [%t:%x] ~ %m (%c{1}:%L)%n"/> + </layout> + </appender> + + <appender name="AUDIT" class="org.apache.log4j.DailyRollingFileAppender"> + <param name="File" value="${user.dir}/target/logs/audit.log"/> + <param name="Append" value="true"/> + <param name="Threshold" value="debug"/> + <layout class="org.apache.log4j.PatternLayout"> + <param name="ConversionPattern" value="%d %x %m%n"/> + </layout> + </appender> + + <logger name="org.apache.hadoop.metadata" additivity="false"> + <level value="debug"/> + <appender-ref ref="console"/> + <appender-ref ref="FILE"/> + </logger> + + <logger name="AUDIT"> + <level value="info"/> + <appender-ref ref="console"/> + <appender-ref ref="AUDIT"/> + </logger> + + <root> + <priority value="info"/> + <appender-ref ref="console"/> + <appender-ref ref="FILE"/> + </root> + +</log4j:configuration> diff --git a/webapp/src/main/resources/metadata-buildinfo.properties b/webapp/src/main/resources/metadata-buildinfo.properties new file mode 100644 index 0000000..5a7cb82 --- /dev/null +++ b/webapp/src/main/resources/metadata-buildinfo.properties @@ -0,0 +1,28 @@ +# +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# + +###################### +*.domain=all + +*.build.user=${user.name} +*.build.epoch=${timestamp} +*.project.version=${pom.version} +*.build.version=${pom.version}-r${buildNumber} +*.vc.revision=${buildNumber} +*.vc.source.url=${scm.connection} +###################### diff --git a/webapp/src/main/webapp/WEB-INF/web.xml b/webapp/src/main/webapp/WEB-INF/web.xml new file mode 100644 index 0000000..fd8f282 --- /dev/null +++ b/webapp/src/main/webapp/WEB-INF/web.xml @@ -0,0 +1,73 @@ +<?xml version="1.0" encoding="UTF-8"?> +<!-- + ~ Licensed to the Apache Software Foundation (ASF) under one + ~ or more contributor license agreements. See the NOTICE file + ~ distributed with this work for additional information + ~ regarding copyright ownership. The ASF licenses this file + ~ to you under the Apache License, Version 2.0 (the + ~ "License"); you may not use this file except in compliance + ~ with the License. 
You may obtain a copy of the License at + ~ + ~ http://www.apache.org/licenses/LICENSE-2.0 + ~ + ~ Unless required by applicable law or agreed to in writing, software + ~ distributed under the License is distributed on an "AS IS" BASIS, + ~ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + ~ See the License for the specific language governing permissions and + ~ limitations under the License. + --> + +<!DOCTYPE web-app PUBLIC "-//Sun Microsystems, Inc.//DTD Web Application 2.3//EN" + "http://java.sun.com/dtd/web-app_2_3.dtd"> + +<web-app> + + <display-name>Apache Metadata</display-name> + <description>Apache Metadata</description> + + <filter> + <filter-name>audit</filter-name> + <filter-class>org.apache.hadoop.metadata.web.filters.AuditFilter</filter-class> + </filter> + + <filter> + <filter-name>authentication</filter-name> + <filter-class>org.apache.hadoop.metadata.web.filters.AuthenticationFilter</filter-class> + </filter> + + <filter-mapping> + <filter-name>audit</filter-name> + <servlet-name>MetadataRESTApi</servlet-name> + </filter-mapping> + + <filter-mapping> + <filter-name>authentication</filter-name> + <servlet-name>MetadataRESTApi</servlet-name> + </filter-mapping> + + <listener> + <listener-class>org.apache.hadoop.metadata.web.listeners.ApplicationStartupListener</listener-class> + </listener> + + <servlet> + <servlet-name>MetadataRESTApi</servlet-name> + <servlet-class>com.sun.jersey.spi.container.servlet.ServletContainer</servlet-class> + <init-param> + <param-name>com.sun.jersey.config.property.resourceConfigClass</param-name> + <param-value>com.sun.jersey.api.core.PackagesResourceConfig</param-value> + </init-param> + <init-param> + <param-name>com.sun.jersey.config.property.packages</param-name> + <param-value> + org.apache.hadoop.metadata.web.resources,org.apache.hadoop.metadata.web.params + </param-value> + </init-param> + <load-on-startup>1</load-on-startup> + </servlet> + + <servlet-mapping> + <servlet-name>MetadataRESTApi</servlet-name> + <url-pattern>/api/metadata/*</url-pattern> + </servlet-mapping> + +</web-app> diff --git a/webapp/src/main/webapp/index.html b/webapp/src/main/webapp/index.html new file mode 100644 index 0000000..7a422db --- /dev/null +++ b/webapp/src/main/webapp/index.html @@ -0,0 +1,31 @@ +<!-- + ~ Licensed to the Apache Software Foundation (ASF) under one + ~ or more contributor license agreements. See the NOTICE file + ~ distributed with this work for additional information + ~ regarding copyright ownership. The ASF licenses this file + ~ to you under the Apache License, Version 2.0 (the + ~ "License"); you may not use this file except in compliance + ~ with the License. You may obtain a copy of the License at + ~ + ~ http://www.apache.org/licenses/LICENSE-2.0 + ~ + ~ Unless required by applicable law or agreed to in writing, software + ~ distributed under the License is distributed on an "AS IS" BASIS, + ~ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + ~ See the License for the specific language governing permissions and + ~ limitations under the License.
+ --> +<!DOCTYPE HTML> +<html lang="en"> +<head> + <meta charset="UTF-8" /> + <meta name="viewport" content="width=device-width, initial-scale=1.0" /> + <meta name="Date-Revision-yyyymmdd" content="20130821" /> + <meta http-equiv="Content-Language" content="en" /> + <title>Apache Metadata</title> +</head> +<body class="topBarEnabled"> +<h1> Apache Metadata</h1> +More information at: <a href="http://dgc.incubator.apache.org/index.html" title="About">Project Website</a> +</body> +</html> diff --git a/webapp/src/test/java/org/apache/hadoop/metadata/GraphRepositoryServiceIT.java b/webapp/src/test/java/org/apache/hadoop/metadata/GraphRepositoryServiceIT.java new file mode 100644 index 0000000..d4be14a --- /dev/null +++ b/webapp/src/test/java/org/apache/hadoop/metadata/GraphRepositoryServiceIT.java @@ -0,0 +1,69 @@ +package org.apache.hadoop.metadata; + +import org.apache.hadoop.metadata.service.Services; +import org.apache.hadoop.metadata.services.GraphBackedMetadataRepositoryService; +import org.apache.hadoop.metadata.services.TitanGraphService; +import org.json.simple.JSONValue; +import org.testng.Assert; +import org.testng.annotations.AfterClass; +import org.testng.annotations.BeforeClass; +import org.testng.annotations.Test; + +import java.util.HashMap; +import java.util.Map; + +public class GraphRepositoryServiceIT { + + private static final String ENTITY_NAME = "clicks-table"; + private static final String ENTITY_TYPE = "hive-table"; + private static final String DATABASE_NAME = "ads"; + private static final String TABLE_NAME = "clicks-table"; + + @BeforeClass + public void setUp() throws Exception { + TitanGraphService titanGraphService = new TitanGraphService(); + titanGraphService.start(); + Services.get().register(titanGraphService); + + GraphBackedMetadataRepositoryService repositoryService + = new GraphBackedMetadataRepositoryService(); + repositoryService.start(); + Services.get().register(repositoryService); + } + + @AfterClass + public void tearDown() throws Exception { + Services.get().getService(GraphBackedMetadataRepositoryService.NAME).close(); + Services.get().getService(TitanGraphService.NAME).close(); + Services.get().reset(); + } + + @Test + public void testRepository() throws Exception { + GraphBackedMetadataRepositoryService repositoryService = + Services.get().getService(GraphBackedMetadataRepositoryService.NAME); + + String entityStream = getTestEntityJSON(); + String guid = repositoryService.submitEntity(entityStream, ENTITY_TYPE); + Assert.assertNotNull(guid); + + String entity = repositoryService.getEntityDefinition(ENTITY_NAME, ENTITY_TYPE); + @SuppressWarnings("unchecked") + Map<String, String> entityProperties = + (Map<String, String>) JSONValue.parseWithException(entity); + Assert.assertEquals(entityProperties.get("guid"), guid); + Assert.assertEquals(entityProperties.get("entityName"), ENTITY_NAME); + Assert.assertEquals(entityProperties.get("entityType"), ENTITY_TYPE); + Assert.assertEquals(entityProperties.get("database"), DATABASE_NAME); + Assert.assertEquals(entityProperties.get("table"), TABLE_NAME); + } + + private String getTestEntityJSON() { + Map<String, String> props = new HashMap<>(); + props.put("entityName", ENTITY_NAME); + props.put("entityType", ENTITY_TYPE); + props.put("database", DATABASE_NAME); + props.put("table", TABLE_NAME); + return JSONValue.toJSONString(props); + } +} diff --git a/webapp/src/test/java/org/apache/hadoop/metadata/TitanGraphServiceIT.java
b/webapp/src/test/java/org/apache/hadoop/metadata/TitanGraphServiceIT.java new file mode 100644 index 0000000..cb4bead --- /dev/null +++ b/webapp/src/test/java/org/apache/hadoop/metadata/TitanGraphServiceIT.java @@ -0,0 +1,61 @@ +package org.apache.hadoop.metadata; + +import com.tinkerpop.blueprints.GraphQuery; +import com.tinkerpop.blueprints.TransactionalGraph; +import com.tinkerpop.blueprints.Vertex; +import org.apache.hadoop.metadata.services.TitanGraphService; +import org.apache.hadoop.metadata.util.GraphUtils; +import org.testng.annotations.Test; + +import java.util.Iterator; +import java.util.UUID; + +/** + * End to end graph put/get test. + */ +public class TitanGraphServiceIT { + + @Test + public void testTitanGraph() throws Exception { + TitanGraphService titanGraphService = new TitanGraphService(); + titanGraphService.start(); + + try { + String guid = UUID.randomUUID().toString(); + + final TransactionalGraph graph = titanGraphService.getTransactionalGraph(); + System.out.println("graph = " + graph); + System.out.println("graph.getVertices() = " + graph.getVertices()); + + + Vertex entityVertex = null; + try { + graph.rollback(); + entityVertex = graph.addVertex(null); + entityVertex.setProperty("guid", guid); + entityVertex.setProperty("entityName", "entityName"); + entityVertex.setProperty("entityType", "entityType"); + } catch (Exception e) { + graph.rollback(); + e.printStackTrace(); + } finally { + graph.commit(); + } + + System.out.println("vertex = " + GraphUtils.vertexString(entityVertex)); + + GraphQuery query = graph.query() + .has("entityName", "entityName") + .has("entityType", "entityType"); + + Iterator<Vertex> results = query.vertices().iterator(); + if (results.hasNext()) { + Vertex vertexFromQuery = results.next(); + System.out.println("vertex = " + GraphUtils.vertexString(vertexFromQuery)); + } + } finally { + Thread.sleep(1000); + titanGraphService.stop(); + } + } +} diff --git a/webapp/src/test/java/org/apache/hadoop/metadata/web/resources/BaseResourceIT.java b/webapp/src/test/java/org/apache/hadoop/metadata/web/resources/BaseResourceIT.java new file mode 100644 index 0000000..ee51bef --- /dev/null +++ b/webapp/src/test/java/org/apache/hadoop/metadata/web/resources/BaseResourceIT.java @@ -0,0 +1,24 @@ +package org.apache.hadoop.metadata.web.resources; + +import com.sun.jersey.api.client.Client; +import com.sun.jersey.api.client.WebResource; +import com.sun.jersey.api.client.config.DefaultClientConfig; +import org.testng.annotations.BeforeClass; + +import javax.ws.rs.core.UriBuilder; + +public class BaseResourceIT { + + protected WebResource service; + + @BeforeClass + public void setUp() throws Exception { + String baseUrl = "http://localhost:21000/"; + + DefaultClientConfig config = new DefaultClientConfig(); + Client client = Client.create(config); + client.resource(UriBuilder.fromUri(baseUrl).build()); + + service = client.resource(UriBuilder.fromUri(baseUrl).build()); + } +} diff --git a/webapp/src/test/java/org/apache/hadoop/metadata/web/resources/EntityJerseyResourceIT.java b/webapp/src/test/java/org/apache/hadoop/metadata/web/resources/EntityJerseyResourceIT.java new file mode 100644 index 0000000..c4d3221 --- /dev/null +++ b/webapp/src/test/java/org/apache/hadoop/metadata/web/resources/EntityJerseyResourceIT.java @@ -0,0 +1,132 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. 
See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.hadoop.metadata.web.resources; + +import com.sun.jersey.api.client.ClientResponse; +import com.sun.jersey.api.client.WebResource; +import org.json.simple.JSONValue; +import org.testng.Assert; +import org.testng.annotations.Test; + +import javax.ws.rs.HttpMethod; +import javax.ws.rs.core.MediaType; +import javax.ws.rs.core.Response; +import java.util.HashMap; +import java.util.Map; +import java.util.UUID; + +/** + * Integration tests for Entity Jersey Resource. + */ +public class EntityJerseyResourceIT extends BaseResourceIT { + + private static final String ENTITY_NAME = "clicks-table"; + private static final String ENTITY_TYPE = "hive-table"; + private static final String DATABASE_NAME = "ads"; + private static final String TABLE_NAME = "clicks-table"; + + @Test + public void testSubmitEntity() { + String entityStream = getTestEntityJSON(); + + WebResource resource = service + .path("api/metadata/entities/submit") + .path(ENTITY_TYPE); + + ClientResponse clientResponse = resource + .accept(MediaType.APPLICATION_JSON) + .type(MediaType.APPLICATION_JSON) + .method(HttpMethod.POST, ClientResponse.class, entityStream); + Assert.assertEquals(clientResponse.getStatus(), Response.Status.OK.getStatusCode()); + String response = clientResponse.getEntity(String.class); + Assert.assertNotNull(response); + + try { + Assert.assertNotNull(UUID.fromString(response)); + } catch (IllegalArgumentException e) { + Assert.fail("Response is not a guid, " + response); + } + } + + @Test (dependsOnMethods = "testSubmitEntity") + public void testGetEntityDefinition() { + WebResource resource = service + .path("api/metadata/entities/definition") + .path(ENTITY_TYPE) + .path(ENTITY_NAME); + + ClientResponse clientResponse = resource + .accept(MediaType.APPLICATION_JSON) + .type(MediaType.APPLICATION_JSON) + .method(HttpMethod.GET, ClientResponse.class); + Assert.assertEquals(clientResponse.getStatus(), Response.Status.OK.getStatusCode()); + String response = clientResponse.getEntity(String.class); + System.out.println("response = " + response); + } + + private static String getTestEntityJSON() { + Map<String, String> props = new HashMap<>(); + props.put("entityName", ENTITY_NAME); + props.put("entityType", ENTITY_TYPE); + props.put("database", DATABASE_NAME); + props.put("table", TABLE_NAME); + return JSONValue.toJSONString(props); + } + + @Test + public void testGetInvalidEntityDefinition() { + WebResource resource = service + .path("api/metadata/entities/definition") + .path(ENTITY_TYPE) + .path("blah"); + + ClientResponse clientResponse = resource + .accept(MediaType.APPLICATION_JSON) + .type(MediaType.APPLICATION_JSON) + .method(HttpMethod.GET, ClientResponse.class); + Assert.assertEquals(clientResponse.getStatus(), Response.Status.NOT_FOUND.getStatusCode()); + String response = 
clientResponse.getEntity(String.class); + System.out.println("response = " + response); + } + + @Test (dependsOnMethods = "testSubmitEntity") + public void testGetEntityList() { + ClientResponse clientResponse = service + .path("api/metadata/entities/list/") + .path(ENTITY_TYPE) + .accept(MediaType.APPLICATION_JSON) + .type(MediaType.APPLICATION_JSON) + .method(HttpMethod.GET, ClientResponse.class); + Assert.assertEquals(clientResponse.getStatus(), Response.Status.OK.getStatusCode()); + String response = clientResponse.getEntity(String.class); + System.out.println("response = " + response); + } + + @Test (enabled = false) // todo: enable this later + public void testGetEntityListForBadEntityType() { + ClientResponse clientResponse = service + .path("api/metadata/entities/list/blah") + .accept(MediaType.APPLICATION_JSON) + .type(MediaType.APPLICATION_JSON) + .method(HttpMethod.GET, ClientResponse.class); + Assert.assertEquals(clientResponse.getStatus(), Response.Status.NOT_FOUND.getStatusCode()); + String response = clientResponse.getEntity(String.class); + System.out.println("response = " + response); + } +} diff --git a/webapp/src/test/java/org/apache/hadoop/metadata/web/resources/RexsterGraphJerseyResourceIT.java b/webapp/src/test/java/org/apache/hadoop/metadata/web/resources/RexsterGraphJerseyResourceIT.java new file mode 100644 index 0000000..ccfd375 --- /dev/null +++ b/webapp/src/test/java/org/apache/hadoop/metadata/web/resources/RexsterGraphJerseyResourceIT.java @@ -0,0 +1,65 @@ +package org.apache.hadoop.metadata.web.resources; + +import com.sun.jersey.api.client.ClientResponse; +import com.sun.jersey.api.client.WebResource; +import org.testng.Assert; +import org.testng.annotations.Test; + +import javax.ws.rs.HttpMethod; +import javax.ws.rs.core.MediaType; +import javax.ws.rs.core.Response; + +/** + * Integration tests for Rexster Graph Jersey Resource. + */ +public class RexsterGraphJerseyResourceIT extends BaseResourceIT { + + @Test (enabled = false) + public void testGetVertex() throws Exception { + // todo: add a vertex before fetching it + + WebResource resource = service + .path("api/metadata/graph/vertices") + .path("0"); + + ClientResponse clientResponse = resource + .accept(MediaType.APPLICATION_JSON) + .type(MediaType.APPLICATION_JSON) + .method(HttpMethod.GET, ClientResponse.class); + Assert.assertEquals(clientResponse.getStatus(), Response.Status.OK.getStatusCode()); + String response = clientResponse.getEntity(String.class); + Assert.assertNotNull(response); + } + + @Test + public void testGetVertexWithInvalidId() throws Exception { + WebResource resource = service + .path("api/metadata/graph/vertices/blah"); + + ClientResponse clientResponse = resource + .accept(MediaType.APPLICATION_JSON) + .type(MediaType.APPLICATION_JSON) + .method(HttpMethod.GET, ClientResponse.class); + Assert.assertEquals(clientResponse.getStatus(), Response.Status.NOT_FOUND.getStatusCode()); + } + + @Test + public void testGetVertexProperties() throws Exception { + + } + + @Test + public void testGetVertices() throws Exception { + + } + + @Test + public void testGetVertexEdges() throws Exception { + + } + + @Test + public void testGetEdge() throws Exception { + + } +}
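
Illustrative note on service bootstrap: the application.services property in application.properties above lists the service implementation classes, and web.xml registers org.apache.hadoop.metadata.web.listeners.ApplicationStartupListener to bring the web application up. The Java sketch below shows one plausible way such a listener could consume that property: load the properties file, walk the comma-separated class list in order, and start each service. The property key, the two service classes, and their no-arg start() methods all appear in this patch; the reflection loop, the ServiceBootstrapSketch class name, and the classpath location of application.properties are assumptions made for illustration, not the actual ApplicationStartupListener implementation.

package org.apache.hadoop.metadata.examples;

import java.io.InputStream;
import java.util.Properties;

/**
 * Minimal sketch of a service bootstrap loop. Assumes application.properties
 * is on the classpath and that every listed class has a public no-arg
 * constructor and a no-arg start() method, as the services in this patch do.
 */
public class ServiceBootstrapSketch {

    public static void main(String[] args) throws Exception {
        Properties props = new Properties();
        try (InputStream in = ServiceBootstrapSketch.class
                .getResourceAsStream("/application.properties")) {
            if (in == null) {
                throw new IllegalStateException("application.properties not found on classpath");
            }
            props.load(in);
        }

        // Ordered, comma-separated list: TitanGraphService is listed before
        // GraphBackedMetadataRepositoryService so the graph is available first.
        String serviceClasses = props.getProperty("application.services", "");
        for (String className : serviceClasses.split(",")) {
            Class<?> clazz = Class.forName(className.trim());
            Object service = clazz.newInstance();
            clazz.getMethod("start").invoke(service);
            // The real listener would presumably also register the instance,
            // e.g. Services.get().register(service), so REST resources and
            // tests can look it up by name later.
            System.out.println("Started service: " + clazz.getSimpleName());
        }
    }
}

A corresponding shutdown path would walk the same list in reverse and call stop() or close(); the integration tests above do this by hand in their tearDown and finally blocks.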