Commit 26048109 by Venkatesh Seetharam

Refactor packages and scripts to Atlas

(cherry picked from commit 414beba5)
parent c71b5b31
@@ -83,21 +83,21 @@ c. Using DGI
 ~~~~~~~~~~~~~~~
 * Verify if the server is up and running
-  curl -v http://localhost:21000/api/metadata/admin/version
+  curl -v http://localhost:21000/api/atlas/admin/version
   {"Version":"v0.1"}
 * List the types in the repository
-  curl -v http://localhost:21000/api/metadata/types
+  curl -v http://localhost:21000/api/atlas/types
   {"list":["biginteger","short","byte","int","string","bigdecimal","boolean","date","double","long","float"],"requestId":"902580786@qtp-1479771328-0"}
 * List the instances for a given type
-  curl -v http://localhost:21000/api/metadata/entities?type=hive_table
+  curl -v http://localhost:21000/api/atlas/entities?type=hive_table
   {"requestId":"788558007@qtp-44808654-5","list":["cb9b5513-c672-42cb-8477-b8f3e537a162","ec985719-a794-4c98-b98f-0509bd23aac0","48998f81-f1d3-45a2-989a-223af5c1ed6e","a54b386e-c759-4651-8779-a099294244c4"]}
-  curl -v http://localhost:21000/api/metadata/entities/list/hive_db
+  curl -v http://localhost:21000/api/atlas/entities/list/hive_db
 * Search for entities (instances) in the repository
-  curl -v http://localhost:21000/api/metadata/discovery/search/dsl?query="from hive_table"
+  curl -v http://localhost:21000/api/atlas/discovery/search/dsl?query="from hive_table"
 d. Stopping DGI Server
 ~~~~~~~~~~~~~~~~~~~~~~~~~
...
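The DSL search call above contains a space in the query string, which the shell will split before curl sees it; in practice the URL needs quoting and the query needs URL-encoding. A minimal sketch of the same call, assuming the server from the steps above is running on localhost:21000:

  curl -v 'http://localhost:21000/api/atlas/discovery/search/dsl?query=from%20hive_table'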
@@ -22,14 +22,14 @@
 xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
 <modelVersion>4.0.0</modelVersion>
 <parent>
-<artifactId>metadata-governance</artifactId>
+<artifactId>apache-atlas</artifactId>
-<groupId>org.apache.hadoop.metadata</groupId>
+<groupId>org.apache.atlas</groupId>
 <version>0.1-incubating-SNAPSHOT</version>
 <relativePath>../../</relativePath>
 </parent>
 <artifactId>falcon-bridge</artifactId>
-<description>Apache Metadata Falcon Bridge Module</description>
+<description>Apache Atlas Falcon Bridge Module</description>
-<name>Apache Metadata Falcon Bridge</name>
+<name>Apache Atlas Falcon Bridge</name>
 <packaging>jar</packaging>
 <properties>
@@ -50,13 +50,13 @@
 </dependency>
 <dependency>
-<groupId>org.apache.hadoop.metadata</groupId>
+<groupId>org.apache.atlas</groupId>
-<artifactId>metadata-typesystem</artifactId>
+<artifactId>atlas-typesystem</artifactId>
 </dependency>
 <dependency>
-<groupId>org.apache.hadoop.metadata</groupId>
+<groupId>org.apache.atlas</groupId>
-<artifactId>metadata-repository</artifactId>
+<artifactId>atlas-repository</artifactId>
 </dependency>
 <dependency>
...
@@ -22,14 +22,14 @@
 xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
 <modelVersion>4.0.0</modelVersion>
 <parent>
-<artifactId>metadata-governance</artifactId>
+<artifactId>apache-atlas</artifactId>
-<groupId>org.apache.hadoop.metadata</groupId>
+<groupId>org.apache.atlas</groupId>
 <version>0.1-incubating-SNAPSHOT</version>
 <relativePath>../../</relativePath>
 </parent>
 <artifactId>hive-bridge</artifactId>
-<description>Apache Metadata Hive Bridge Module</description>
+<description>Apache Atlas Hive Bridge Module</description>
-<name>Apache Metadata Hive Bridge</name>
+<name>Apache Atlas Hive Bridge</name>
 <packaging>jar</packaging>
 <properties>
@@ -76,8 +76,8 @@
 </dependency>
 <dependency>
-<groupId>org.apache.hadoop.metadata</groupId>
+<groupId>org.apache.atlas</groupId>
-<artifactId>metadata-client</artifactId>
+<artifactId>atlas-client</artifactId>
 <version>${version}</version>
 <exclusions>
 <exclusion>
@@ -90,8 +90,8 @@
 </dependency>
 <dependency>
-<groupId>org.apache.hadoop.metadata</groupId>
+<groupId>org.apache.atlas</groupId>
-<artifactId>metadata-typesystem</artifactId>
+<artifactId>atlas-typesystem</artifactId>
 </dependency>
 <dependency>
@@ -110,8 +110,8 @@
 </dependency>
 <dependency>
-<groupId>org.apache.hadoop.metadata</groupId>
+<groupId>org.apache.atlas</groupId>
-<artifactId>metadata-webapp</artifactId>
+<artifactId>atlas-webapp</artifactId>
 <classifier>classes</classifier>
 </dependency>
@@ -194,12 +194,12 @@
 </artifactItem>
 <artifactItem>
 <groupId>${project.groupId}</groupId>
-<artifactId>metadata-client</artifactId>
+<artifactId>atlas-client</artifactId>
 <version>${project.version}</version>
 </artifactItem>
 <artifactItem>
 <groupId>${project.groupId}</groupId>
-<artifactId>metadata-typesystem</artifactId>
+<artifactId>atlas-typesystem</artifactId>
 <version>${project.version}</version>
 </artifactItem>
 <artifactItem>
@@ -241,20 +241,20 @@
 <maxIdleTime>60000</maxIdleTime>
 </connector>
 </connectors>
-<webApp>../../webapp/target/metadata-webapp-${project.version}.war</webApp>
+<webApp>../../webapp/target/atlas-webapp-${project.version}.war</webApp>
 <contextPath>/</contextPath>
 <useTestClasspath>true</useTestClasspath>
 <systemProperties>
 <systemProperty>
-<name>metadata.log.dir</name>
+<name>atlas.log.dir</name>
 <value>${project.build.directory}/logs</value>
 </systemProperty>
 <systemProperty>
-<name>metadata.conf</name>
+<name>atlas.conf</name>
 <value>addons/hive-bridge/src/test/resources</value>
 </systemProperty>
 </systemProperties>
-<stopKey>metadata-stop</stopKey>
+<stopKey>atlas-stop</stopKey>
 <stopPort>41001</stopPort>
 </configuration>
 <executions>
...
@@ -87,7 +87,7 @@ export HIVE_CP
 echo Using Hive configuration directory [$HIVE_CP]
 echo "Logs for import are in $METADATA_LOG_DIR/import-hive.log"
-${JAVA_BIN} ${JAVA_PROPERTIES} -cp ${HIVE_CP}:${METADATACPPATH} org.apache.hadoop.metadata.hive.bridge.HiveMetaStoreBridge
+${JAVA_BIN} ${JAVA_PROPERTIES} -cp ${HIVE_CP}:${METADATACPPATH} org.apache.atlas.hive.bridge.HiveMetaStoreBridge
 RETVAL=$?
 [ $RETVAL -eq 0 ] && echo Hive Data Model imported successfully!!!
...
 ---+ Hive DGI Bridge
-Hive metadata can be modelled in DGI using its Type System. The default modelling is available in org.apache.hadoop.metadata.hive.model.HiveDataModelGenerator. It defines the following types:
+Hive metadata can be modelled in DGI using its Type System. The default modelling is available in org.apache.atlas.hive.model.HiveDataModelGenerator. It defines the following types:
 * hive_resource_type(EnumType) - [JAR, FILE, ARCHIVE]
 * hive_principal_type(EnumType) - [USER, ROLE, GROUP]
 * hive_function_type(EnumType) - [JAVA]
@@ -19,7 +19,7 @@ Hive metadata can be modelled in DGI using its Type System. The default modellin
 ---++ Importing Hive Metadata
-org.apache.hadoop.metadata.hive.bridge.HiveMetaStoreBridge imports the hive metadata into DGI using the typesystem defined in org.apache.hadoop.metadata.hive.model.HiveDataModelGenerator. import-hive.sh command can be used to facilitate this.
+org.apache.atlas.hive.bridge.HiveMetaStoreBridge imports the hive metadata into DGI using the typesystem defined in org.apache.atlas.hive.model.HiveDataModelGenerator. The import-hive.sh command can be used to facilitate this.
 Set up the following configs in hive-site.xml of your hive set-up and set environment variable HIVE_CONFIG to the
 hive conf directory:
 * DGI endpoint - Add the following property with the DGI endpoint for your set-up
@@ -38,13 +38,13 @@ Usage: <dgi package>/bin/import-hive.sh. The logs are in <dgi package>/logs/impo
 ---++ Hive Hook
-Hive supports listeners on hive command execution using hive hooks. This is used to add/update/remove entities in DGI using the model defined in org.apache.hadoop.metadata.hive.model.HiveDataModelGenerator.
+Hive supports listeners on hive command execution using hive hooks. This is used to add/update/remove entities in DGI using the model defined in org.apache.atlas.hive.model.HiveDataModelGenerator.
 The hook submits the request to a thread pool executor to avoid blocking the command execution. Follow these instructions in your hive set-up to add the hive hook for DGI:
-* Add org.apache.hadoop.metadata.hive.hook.HiveHook as post execution hook in hive-site.xml
+* Add org.apache.atlas.hive.hook.HiveHook as post execution hook in hive-site.xml
 <verbatim>
 <property>
   <name>hive.exec.post.hooks</name>
-  <value>org.apache.hadoop.metadata.hive.hook.HiveHook</value>
+  <value>org.apache.atlas.hive.hook.HiveHook</value>
 </property>
 </verbatim>
 * Add the following properties in hive-site.xml with the DGI endpoint for your set-up
...
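Before editing hive-site.xml permanently, the hook can also be tried on a single Hive session by passing the same property on the command line; a minimal sketch (the table name is a placeholder, and note that hive.exec.post.hooks takes a comma-separated list if other post-execution hooks are already configured):

<verbatim>
hive --hiveconf hive.exec.post.hooks=org.apache.atlas.hive.hook.HiveHook -e "select count(*) from some_table"
</verbatim>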
@@ -18,25 +18,25 @@
 ######### Graph Database Configs #########
 # Graph Storage
-metadata.graph.storage.backend=inmemory
+atlas.graph.storage.backend=inmemory
 # Graph Search Index
-metadata.graph.index.search.backend=lucene
+atlas.graph.index.search.backend=lucene
-metadata.graph.index.search.directory=target/data/lucene
+atlas.graph.index.search.directory=target/data/lucene
 ######### Hive Lineage Configs #########
 # This models reflects the base super types for Data and Process
-#metadata.lineage.hive.table.type.name=DataSet
+#atlas.lineage.hive.table.type.name=DataSet
-#metadata.lineage.hive.process.type.name=Process
+#atlas.lineage.hive.process.type.name=Process
-#metadata.lineage.hive.process.inputs.name=inputs
+#atlas.lineage.hive.process.inputs.name=inputs
-#metadata.lineage.hive.process.outputs.name=outputs
+#atlas.lineage.hive.process.outputs.name=outputs
 ## Schema
-#metadata.lineage.hive.table.schema.query=hive_table where name=?, columns
+#atlas.lineage.hive.table.schema.query=hive_table where name=?, columns
 ######### Security Properties #########
 # SSL config
-metadata.enableTLS=false
+atlas.enableTLS=false
@@ -21,11 +21,11 @@
 xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
 <modelVersion>4.0.0</modelVersion>
 <parent>
-<groupId>org.apache.hadoop.metadata</groupId>
+<groupId>org.apache.atlas</groupId>
-<artifactId>metadata-bridge-parent</artifactId>
+<artifactId>atlas-bridge-parent</artifactId>
 <version>0.1-incubating-SNAPSHOT</version>
 </parent>
-<artifactId>metadata-bridge-core</artifactId>
+<artifactId>atlas-bridge-core</artifactId>
 <dependencies>
 <dependency>
 <groupId>org.apache.hive</groupId>
@@ -45,8 +45,8 @@
 <scope>test</scope>
 </dependency>
 <dependency>
-<groupId>org.apache.hadoop.metadata</groupId>
+<groupId>org.apache.atlas</groupId>
-<artifactId>metadata-repository</artifactId>
+<artifactId>atlas-repository</artifactId>
 </dependency>
 <dependency>
 <groupId>com.sun.jersey</groupId>
...
@@ -16,20 +16,20 @@
  * limitations under the License.
  */
-package org.apache.hadoop.metadata.bridge;
+package org.apache.atlas.bridge;
 import com.google.common.collect.ImmutableList;
-import org.apache.hadoop.metadata.MetadataException;
+import org.apache.atlas.MetadataException;
-import org.apache.hadoop.metadata.repository.MetadataRepository;
+import org.apache.atlas.repository.MetadataRepository;
-import org.apache.hadoop.metadata.repository.RepositoryException;
+import org.apache.atlas.repository.RepositoryException;
-import org.apache.hadoop.metadata.typesystem.ITypedReferenceableInstance;
+import org.apache.atlas.typesystem.ITypedReferenceableInstance;
-import org.apache.hadoop.metadata.typesystem.Referenceable;
+import org.apache.atlas.typesystem.Referenceable;
-import org.apache.hadoop.metadata.typesystem.types.AttributeDefinition;
+import org.apache.atlas.typesystem.types.AttributeDefinition;
-import org.apache.hadoop.metadata.typesystem.types.AttributeInfo;
+import org.apache.atlas.typesystem.types.AttributeInfo;
-import org.apache.hadoop.metadata.typesystem.types.ClassType;
+import org.apache.atlas.typesystem.types.ClassType;
-import org.apache.hadoop.metadata.typesystem.types.HierarchicalTypeDefinition;
+import org.apache.atlas.typesystem.types.HierarchicalTypeDefinition;
-import org.apache.hadoop.metadata.typesystem.types.Multiplicity;
+import org.apache.atlas.typesystem.types.Multiplicity;
-import org.apache.hadoop.metadata.typesystem.types.TypeSystem;
+import org.apache.atlas.typesystem.types.TypeSystem;
 import org.slf4j.Logger;
 import java.lang.reflect.Field;
...
@@ -16,7 +16,7 @@
  * limitations under the License.
  */
-package org.apache.hadoop.metadata.bridge;
+package org.apache.atlas.bridge;
 public abstract class AEntityBean {
...
@@ -16,7 +16,7 @@
  * limitations under the License.
  */
-package org.apache.hadoop.metadata.bridge;
+package org.apache.atlas.bridge;
 import org.apache.hadoop.hive.metastore.api.MetaException;
...
@@ -16,21 +16,21 @@
  * limitations under the License.
  */
-package org.apache.hadoop.metadata.bridge;
+package org.apache.atlas.bridge;
 //TODO - Create Index Annotation Framework for BeanConverter
 //TODO - Enhance Bean Conversion to handled nested objects
 //TODO - Enhance Bean COnversion to handle Collections
+import org.apache.atlas.MetadataException;
+import org.apache.atlas.repository.MetadataRepository;
+import org.apache.atlas.typesystem.types.AttributeDefinition;
+import org.apache.atlas.typesystem.types.ClassType;
+import org.apache.atlas.typesystem.types.HierarchicalTypeDefinition;
+import org.apache.atlas.typesystem.types.Multiplicity;
+import org.apache.atlas.typesystem.types.TypeSystem;
 import org.apache.commons.configuration.ConfigurationException;
 import org.apache.commons.configuration.PropertiesConfiguration;
-import org.apache.hadoop.metadata.MetadataException;
-import org.apache.hadoop.metadata.repository.MetadataRepository;
-import org.apache.hadoop.metadata.typesystem.types.AttributeDefinition;
-import org.apache.hadoop.metadata.typesystem.types.ClassType;
-import org.apache.hadoop.metadata.typesystem.types.HierarchicalTypeDefinition;
-import org.apache.hadoop.metadata.typesystem.types.Multiplicity;
-import org.apache.hadoop.metadata.typesystem.types.TypeSystem;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
...
@@ -16,14 +16,14 @@
  * limitations under the License.
  */
-package org.apache.hadoop.metadata.bridge;
+package org.apache.atlas.bridge;
-import org.apache.hadoop.metadata.MetadataException;
+import org.apache.atlas.MetadataException;
-import org.apache.hadoop.metadata.typesystem.types.AttributeDefinition;
+import org.apache.atlas.typesystem.types.AttributeDefinition;
-import org.apache.hadoop.metadata.typesystem.types.ClassType;
+import org.apache.atlas.typesystem.types.ClassType;
-import org.apache.hadoop.metadata.typesystem.types.HierarchicalTypeDefinition;
+import org.apache.atlas.typesystem.types.HierarchicalTypeDefinition;
-import org.apache.hadoop.metadata.typesystem.types.Multiplicity;
+import org.apache.atlas.typesystem.types.Multiplicity;
-import org.apache.hadoop.metadata.typesystem.types.TypeSystem;
+import org.apache.atlas.typesystem.types.TypeSystem;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
...
@@ -16,7 +16,7 @@
  * limitations under the License.
  */
-package org.apache.hadoop.metadata.bridge;
+package org.apache.atlas.bridge;
 import java.util.ArrayList;
...
@@ -16,11 +16,11 @@
  * limitations under the License.
  */
-package org.apache.hadoop.metadata.bridge.hivelineage;
+package org.apache.atlas.bridge.hivelineage;
-import org.apache.hadoop.metadata.bridge.ABridge;
+import org.apache.atlas.bridge.ABridge;
-import org.apache.hadoop.metadata.bridge.hivelineage.hook.HiveLineage;
+import org.apache.atlas.bridge.hivelineage.hook.HiveLineage;
-import org.apache.hadoop.metadata.repository.MetadataRepository;
+import org.apache.atlas.repository.MetadataRepository;
 import javax.inject.Inject;
...
@@ -16,10 +16,10 @@
  * limitations under the License.
  */
-package org.apache.hadoop.metadata.bridge.hivelineage.hook;
+package org.apache.atlas.bridge.hivelineage.hook;
-import org.apache.hadoop.metadata.bridge.AEntityBean;
+import org.apache.atlas.bridge.AEntityBean;
 import java.io.Serializable;
 import java.util.ArrayList;
...
@@ -16,8 +16,14 @@
  * limitations under the License.
  */
-package org.apache.hadoop.metadata.bridge.hivestructure;
+package org.apache.atlas.bridge.hivestructure;
+import org.apache.atlas.MetadataException;
+import org.apache.atlas.repository.IRepository;
+import org.apache.atlas.repository.RepositoryException;
+import org.apache.atlas.typesystem.Referenceable;
+import org.apache.atlas.typesystem.types.ClassType;
+import org.apache.atlas.typesystem.types.TypeSystem;
 import org.apache.hadoop.hive.conf.HiveConf;
 import org.apache.hadoop.hive.metastore.HiveMetaStoreClient;
 import org.apache.hadoop.hive.metastore.api.Database;
@@ -27,12 +33,6 @@ import org.apache.hadoop.hive.metastore.api.NoSuchObjectException;
 import org.apache.hadoop.hive.metastore.api.Table;
 import org.apache.hadoop.hive.metastore.api.UnknownDBException;
 import org.apache.hadoop.hive.metastore.api.UnknownTableException;
-import org.apache.hadoop.metadata.MetadataException;
-import org.apache.hadoop.metadata.repository.IRepository;
-import org.apache.hadoop.metadata.repository.RepositoryException;
-import org.apache.hadoop.metadata.typesystem.Referenceable;
-import org.apache.hadoop.metadata.typesystem.types.ClassType;
-import org.apache.hadoop.metadata.typesystem.types.TypeSystem;
 import org.apache.thrift.TException;
 /*
  * Initial pass at one time importer TODO - needs re-write
...
@@ -16,16 +16,16 @@
  * limitations under the License.
  */
-package org.apache.hadoop.metadata.bridge.hivestructure;
+package org.apache.atlas.bridge.hivestructure;
-import org.apache.hadoop.metadata.MetadataException;
+import org.apache.atlas.MetadataException;
-import org.apache.hadoop.metadata.bridge.ABridge;
+import org.apache.atlas.bridge.ABridge;
-import org.apache.hadoop.metadata.repository.MetadataRepository;
+import org.apache.atlas.repository.MetadataRepository;
-import org.apache.hadoop.metadata.typesystem.types.AttributeDefinition;
+import org.apache.atlas.typesystem.types.AttributeDefinition;
-import org.apache.hadoop.metadata.typesystem.types.ClassType;
+import org.apache.atlas.typesystem.types.ClassType;
-import org.apache.hadoop.metadata.typesystem.types.HierarchicalTypeDefinition;
+import org.apache.atlas.typesystem.types.HierarchicalTypeDefinition;
-import org.apache.hadoop.metadata.typesystem.types.Multiplicity;
+import org.apache.atlas.typesystem.types.Multiplicity;
-import org.apache.hadoop.metadata.typesystem.types.TypeSystem;
+import org.apache.atlas.typesystem.types.TypeSystem;
 import javax.inject.Inject;
 import java.util.ArrayList;
...
@@ -16,16 +16,16 @@
  * limitations under the License.
  */
-package org.apache.hadoop.metadata.bridge.module;
+package org.apache.atlas.bridge.module;
 import com.google.inject.AbstractModule;
 import com.google.inject.Scopes;
 import com.google.inject.multibindings.MapBinder;
+import org.apache.atlas.RepositoryMetadataModule;
+import org.apache.atlas.bridge.BridgeTypeBootstrapper;
+import org.apache.atlas.bridge.IBridge;
 import org.apache.commons.configuration.ConfigurationException;
 import org.apache.commons.configuration.PropertiesConfiguration;
-import org.apache.hadoop.metadata.RepositoryMetadataModule;
-import org.apache.hadoop.metadata.bridge.BridgeTypeBootstrapper;
-import org.apache.hadoop.metadata.bridge.IBridge;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
...
@@ -16,9 +16,9 @@
  * limitations under the License.
  */
-package org.apache.hadoop.metadata.web.resources;
+package org.apache.atlas.web.resources;
-import org.apache.hadoop.metadata.bridge.hivelineage.HiveLineageBridge;
+import org.apache.atlas.bridge.hivelineage.HiveLineageBridge;
 import javax.inject.Singleton;
...
@@ -19,4 +19,4 @@
 #BridgeManager.activebridges denotes which bridge defintions to load from the classpath (Comma seperated list of fully qualified class paths)
 #
-BridgeManager.activeBridges=org.apache.hadoop.metadata.bridge.hivelineage.HiveLineageBridge
+BridgeManager.activeBridges=org.apache.atlas.bridge.hivelineage.HiveLineageBridge
\ No newline at end of file
@@ -16,10 +16,10 @@
  * limitations under the License.
  */
-package org.apache.hadoop.metadata.bridge;
+package org.apache.atlas.bridge;
-import org.apache.hadoop.metadata.RepositoryMetadataModule;
+import org.apache.atlas.RepositoryMetadataModule;
-import org.apache.hadoop.metadata.repository.MetadataRepository;
+import org.apache.atlas.repository.MetadataRepository;
 import org.testng.Assert;
 import org.testng.annotations.Guice;
 import org.testng.annotations.Test;
...
@@ -16,9 +16,9 @@
  * limitations under the License.
  */
-package org.apache.hadoop.metadata.bridge;
+package org.apache.atlas.bridge;
-import org.apache.hadoop.metadata.bridge.module.BridgeModule;
+import org.apache.atlas.bridge.module.BridgeModule;
 import org.testng.Assert;
 import org.testng.annotations.Guice;
 import org.testng.annotations.Test;
...
@@ -16,7 +16,7 @@
  * limitations under the License.
  */
-package org.apache.hadoop.metadata.bridge;
+package org.apache.atlas.bridge;
 public class TestGenericBridges {
...
@@ -16,15 +16,15 @@
  * limitations under the License.
  */
-package org.apache.hadoop.metadata.bridge.hivelineage;
+package org.apache.atlas.bridge.hivelineage;
 import com.google.gson.Gson;
+import org.apache.atlas.MetadataException;
+import org.apache.atlas.bridge.BridgeTypeBootstrapper;
+import org.apache.atlas.bridge.hivelineage.hook.HiveLineage;
+import org.apache.atlas.bridge.module.BridgeModule;
+import org.apache.atlas.repository.RepositoryException;
 import org.apache.commons.collections.IteratorUtils;
-import org.apache.hadoop.metadata.MetadataException;
-import org.apache.hadoop.metadata.bridge.BridgeTypeBootstrapper;
-import org.apache.hadoop.metadata.bridge.hivelineage.hook.HiveLineage;
-import org.apache.hadoop.metadata.bridge.module.BridgeModule;
-import org.apache.hadoop.metadata.repository.RepositoryException;
 import org.testng.Assert;
 import org.testng.annotations.BeforeClass;
 import org.testng.annotations.Guice;
...
@@ -19,4 +19,4 @@
 #BridgeManager.activebridges denotes which bridge defintions to load from the classpath (Comma seperated list of fully qualified class paths)
 #
-BridgeManager.activeBridges=org.apache.hadoop.metadata.bridge.hivelineage.HiveLineageBridge
+BridgeManager.activeBridges=org.apache.atlas.bridge.hivelineage.HiveLineageBridge
\ No newline at end of file
@@ -19,4 +19,4 @@
 #BridgeManager.activebridges denotes which bridge defintions to load from the classpath (Comma seperated list of fully qualified class paths)
 #
-BridgeManager.activeBridges=org.apache.hadoop.metadata.bridge.HiveLineage
+BridgeManager.activeBridges=org.apache.atlas.bridge.HiveLineage
\ No newline at end of file
@@ -21,11 +21,11 @@
 xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
 <modelVersion>4.0.0</modelVersion>
 <parent>
-<groupId>org.apache.hadoop.metadata</groupId>
+<groupId>org.apache.atlas</groupId>
-<artifactId>metadata-bridge-parent</artifactId>
+<artifactId>atlas-bridge-parent</artifactId>
 <version>0.1-incubating-SNAPSHOT</version>
 </parent>
-<artifactId>metadata-bridge-hive</artifactId>
+<artifactId>atlas-bridge-hive</artifactId>
 <packaging>jar</packaging>
 <dependencies>
@@ -93,8 +93,8 @@
 <configuration>
 <artifactItems>
 <artifactItem>
-<groupId>org.apache.hadoop.metadata</groupId>
+<groupId>org.apache.atlas</groupId>
-<artifactId>metadata-common</artifactId>
+<artifactId>atlas-common</artifactId>
 <version>0.1-incubating-SNAPSHOT</version>
 <outputDirectory>${project.build.directory}</outputDirectory>
 </artifactItem>
...
@@ -16,7 +16,7 @@
  * limitations under the License.
  */
-package org.apache.hadoop.metadata.bridge.hivelineage.hook;
+package org.apache.atlas.bridge.hivelineage.hook;
 import java.io.Serializable;
 import java.util.ArrayList;
...
@@ -16,9 +16,14 @@
  * limitations under the License.
  */
-package org.apache.hadoop.metadata.bridge.hivelineage.hook;
+package org.apache.atlas.bridge.hivelineage.hook;
 import com.google.gson.Gson;
+import org.apache.atlas.bridge.hivelineage.hook.HiveLineage.CreateColumns;
+import org.apache.atlas.bridge.hivelineage.hook.HiveLineage.GroupBy;
+import org.apache.atlas.bridge.hivelineage.hook.HiveLineage.QueryColumns;
+import org.apache.atlas.bridge.hivelineage.hook.HiveLineage.SourceTables;
+import org.apache.atlas.bridge.hivelineage.hook.HiveLineage.WhereClause;
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.hive.ql.lib.DefaultGraphWalker;
@@ -34,11 +39,6 @@ import org.apache.hadoop.hive.ql.parse.HiveParser;
 import org.apache.hadoop.hive.ql.parse.ParseDriver;
 import org.apache.hadoop.hive.ql.parse.ParseException;
 import org.apache.hadoop.hive.ql.parse.SemanticException;
-import org.apache.hadoop.metadata.bridge.hivelineage.hook.HiveLineage.CreateColumns;
-import org.apache.hadoop.metadata.bridge.hivelineage.hook.HiveLineage.GroupBy;
-import org.apache.hadoop.metadata.bridge.hivelineage.hook.HiveLineage.QueryColumns;
-import org.apache.hadoop.metadata.bridge.hivelineage.hook.HiveLineage.SourceTables;
-import org.apache.hadoop.metadata.bridge.hivelineage.hook.HiveLineage.WhereClause;
 import org.apache.log4j.ConsoleAppender;
 import org.apache.log4j.Level;
 import org.apache.log4j.LogManager;
...
@@ -16,7 +16,7 @@
  * limitations under the License.
  */
-package org.apache.hadoop.metadata.bridge.hivelineage.hook;
+package org.apache.atlas.bridge.hivelineage.hook;
 import com.google.gson.Gson;
@@ -203,8 +203,8 @@ public class Hook implements ExecuteWithHookContext {
 throws Exception {
 String postUri = String
 .format("http://%s:%s%s", METADATA_HOST, METADATA_PORT, METADATA_PATH);
-if (conf.getTrimmed("hadoop.metadata.hive.hook.uri") != null) {
+if (conf.getTrimmed("atlas.hive.hook.uri") != null) {
-postUri = conf.getTrimmed("hadoop.metadata.hive.hook.uri");
+postUri = conf.getTrimmed("atlas.hive.hook.uri");
 }
 Gson gson = new Gson();
 String gsonString = gson.toJson(hookData);
...
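As the hunk above shows, the hook falls back to a default host/port/path unless atlas.hive.hook.uri is present in the configuration it reads. A minimal sketch of pointing a single Hive session at a specific endpoint (the URI value and table name are placeholders, not defaults taken from the source):

  hive --hiveconf hive.exec.post.hooks=org.apache.atlas.hive.hook.HiveHook \
       --hiveconf atlas.hive.hook.uri=http://atlas-host:21000/api/atlas \
       -e "select count(*) from some_table"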
@@ -16,7 +16,7 @@
  * limitations under the License.
  */
-package org.apache.hadoop.metadata.bridge.hivelineage.hook;
+package org.apache.atlas.bridge.hivelineage.hook;
 import com.google.gson.Gson;
 import org.apache.hadoop.hive.ql.parse.ParseException;
...
@@ -21,15 +21,15 @@
 xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
 <modelVersion>4.0.0</modelVersion>
 <parent>
-<groupId>org.apache.hadoop.metadata</groupId>
+<groupId>org.apache.atlas</groupId>
-<artifactId>metadata-governance</artifactId>
+<artifactId>apache-atlas</artifactId>
 <version>0.1-incubating-SNAPSHOT</version>
 <relativePath>../../</relativePath>
 </parent>
-<artifactId>metadata-bridge-parent</artifactId>
+<artifactId>atlas-bridge-parent</artifactId>
 <packaging>pom</packaging>
 <modules>
-<module>metadata-bridge-core</module>
+<module>atlas-bridge-core</module>
-<module>metadata-bridge-hive</module>
+<module>atlas-bridge-hive</module>
 </modules>
 </project>
\ No newline at end of file
@@ -22,19 +22,19 @@
 xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
 <modelVersion>4.0.0</modelVersion>
 <parent>
-<artifactId>metadata-governance</artifactId>
+<artifactId>apache-atlas</artifactId>
-<groupId>org.apache.hadoop.metadata</groupId>
+<groupId>org.apache.atlas</groupId>
 <version>0.1-incubating-SNAPSHOT</version>
 </parent>
-<artifactId>metadata-client</artifactId>
+<artifactId>atlas-client</artifactId>
-<description>Apache Metadata Client</description>
+<description>Apache Atlas Client</description>
-<name>Apache Metadata Client</name>
+<name>Apache Atlas Client</name>
 <packaging>jar</packaging>
 <dependencies>
 <dependency>
-<groupId>org.apache.hadoop.metadata</groupId>
+<groupId>org.apache.atlas</groupId>
-<artifactId>metadata-typesystem</artifactId>
+<artifactId>atlas-typesystem</artifactId>
 </dependency>
 <dependency>
...
@@ -24,9 +24,9 @@ DgcControllers.controller("ListController", ['$scope','$http', function($scope,
 $scope.executeSearch = function executeSearch() {
 $scope.SearchQuery=$scope.query;
 $scope.iswiki=false;
-//$http.get('http://162.249.6.76:21000/api/metadata/entities/list/'+$scope.query)
+//$http.get('http://162.249.6.76:21000/api/atlas/entities/list/'+$scope.query)
 var searchQry=$scope.query.split(",");
-$http.get('http://162.249.6.76:21000/api/metadata/discovery/search/fulltext?depth=1&'+searchQry[0]+'&'+searchQry[1])
+$http.get('http://162.249.6.76:21000/api/atlas/discovery/search/fulltext?depth=1&'+searchQry[0]+'&'+searchQry[1])
 .success(function (data) {
 $scope.iserror=false;
@@ -59,7 +59,7 @@ DgcControllers.controller("DefinitionController", ['$scope','$http','$routeParam
 $scope.selectedDefination={
 "path":"wiki.html"
 };
-$http.get('http://162.249.6.76:21000/api/metadata/entities/definition/'+$routeParams.Id)
+$http.get('http://162.249.6.76:21000/api/atlas/entities/definition/'+$routeParams.Id)
 .success(function (data) {
 $scope.iserror1=false;
 $scope.details=angular.fromJson(data.definition);
@@ -87,7 +87,7 @@ DgcControllers.controller("LineageController", ['$scope','$http','$routeParams',
-$http.get('http://162.249.6.76:21000/api/metadata/discovery/search/relationships/'+$routeParams.Id+'?depth=3&&edgesToFollow=HiveLineage.sourceTables.0,HiveLineage.sourceTables.1,HiveLineage.tableName')
+$http.get('http://162.249.6.76:21000/api/atlas/discovery/search/relationships/'+$routeParams.Id+'?depth=3&&edgesToFollow=HiveLineage.sourceTables.0,HiveLineage.sourceTables.1,HiveLineage.tableName')
 .success(function (data) {
 $scope.iserror1=false;
 $scope.lineage=angular.fromJson(data);
...
@@ -19,7 +19,7 @@
 'use strict';
 angular.module('dgc.details').factory('DetailsResource', ['$resource', function($resource) {
-return $resource('/api/metadata/entities/definition/:id', {}, {
+return $resource('/api/atlas/entities/definition/:id', {}, {
 get: {
 method: 'GET',
 transformResponse: function(data) {
...
@@ -19,7 +19,7 @@
 'use strict';
 angular.module('dgc.lineage').factory('LineageResource', ['$resource', function($resource) {
-return $resource('/api/metadata/discovery/search/relationships/:id', {
+return $resource('/api/atlas/discovery/search/relationships/:id', {
 depth: 3,
 edgesToFollow: 'HiveLineage.sourceTables.0,HiveLineage.sourceTables.1,HiveLineage.sourceTables.2,HiveLineage.tableName'
 });
...
@@ -19,7 +19,7 @@
 'use strict';
 angular.module('dgc.search').factory('SearchResource', ['$resource', function($resource) {
-return $resource('/api/metadata/discovery/search/fulltext', {}, {
+return $resource('/api/atlas/discovery/search/fulltext', {}, {
 search: {
 'method': 'GET',
 'responseType': 'json',
...
@@ -52,7 +52,7 @@ DgcControllers.controller("headerController", ['$scope', '$window', '$location',
 DgcControllers.controller("footerController", ['$scope','$http', function($scope, $http)
 {
-$http.get('/api/metadata/admin/version')
+$http.get('/api/atlas/admin/version')
 .success(function (data) {
 $scope.iserror1=false;
 $scope.apiVersion=data.Version;
@@ -70,7 +70,7 @@ DgcControllers.controller("footerController", ['$scope','$http', function($scope
 DgcControllers.controller("NavController", ['$scope','$http', '$filter', 'sharedProperties', function($scope, $http, $filter, sharedProperties)
 {
-$http.get('/api/metadata/types/traits/list')
+$http.get('/api/atlas/types/traits/list')
 .success(function (data) {
 $scope.iserror1=false;
 $scope.leftnav=angular.fromJson(data.results);
@@ -150,7 +150,7 @@ DgcControllers.controller("ListController", ['$scope','$http', '$filter','$state
 });
-$http.get('/api/metadata/discovery/search?query='+$scope.SearchQuery)
+$http.get('/api/atlas/discovery/search?query='+$scope.SearchQuery)
 .success(function (data) {
 $scope.iserror=false;
 $scope.entities=angular.fromJson(data.results.rows);
@@ -227,7 +227,7 @@ DgcControllers.controller("ListController", ['$scope','$http', '$filter','$state
 };
 //click value to textbox
 $scope.getGuidName=function getGuidName(val){
-$http.get('/api/metadata/entities/definition/'+val)
+$http.get('/api/atlas/entities/definition/'+val)
 .success(function (data) {
 $scope.iserror1=false;
 if(!$scope.isUndefined(data.results)){
@@ -340,7 +340,7 @@ DgcControllers.controller("DefinitionController", ['$scope','$http', '$statePara
 };
 //onclick to textbox
 $scope.getGuidName=function getGuidName(val){
-$http.get('/api/metadata/entities/definition/'+val)
+$http.get('/api/atlas/entities/definition/'+val)
 .success(function (data) {
 $scope.iserror1=false;
 if(!$scope.isUndefined(data.results)){
@@ -361,7 +361,7 @@ DgcControllers.controller("DefinitionController", ['$scope','$http', '$statePara
 $scope.searchqry=sharedProperties.getQuery();
 $scope.datatype1=sharedProperties.getProperty();
-$http.get('/api/metadata/entities/definition/'+$stateParams.Id)
+$http.get('/api/atlas/entities/definition/'+$stateParams.Id)
 .success(function (data) {
 $scope.iserror1=false;
 $scope.details= angular.fromJson(data.results);
@@ -381,7 +381,7 @@ DgcControllers.controller("DefinitionController", ['$scope','$http', '$statePara
 $scope.getSchema= function (tableName) {
-$http.get('/api/metadata/lineage/hive/schema/'+tableName)
+$http.get('/api/atlas/lineage/hive/schema/'+tableName)
 .success(function (data) {
 $scope.iserror1=false;
 $scope.schema= angular.fromJson(data.results.rows);
@@ -403,7 +403,7 @@ $scope.getLinegae= function (tableName) {
 var arrmyalias=[];
 var datatypes=[];
 var tags=[];
-$http.get('/api/metadata/lineage/hive/outputs/'+tableName)
+$http.get('/api/atlas/lineage/hive/outputs/'+tableName)
 .success(function (data) {
 $scope.iserror1=false;
 $scope.lineage= angular.fromJson(data.results.rows);
@@ -445,7 +445,7 @@ $scope.getLinegae= function (tableName) {
 newarrvts.push(item);
 uniquevts[item.Name] = item;
-var url="/api/metadata/entities/definition/"+item.Name;
+var url="/api/atlas/entities/definition/"+item.Name;
 arr.push($http.get(url));
 }
 });
@@ -795,7 +795,7 @@ $scope.getLinegaeforinput= function (tableName) {
 var arrmyalias=[];
 var datatypes=[];
 var tags=[];
-$http.get('/api/metadata/lineage/hive/inputs/'+tableName)
+$http.get('/api/atlas/lineage/hive/inputs/'+tableName)
 .success(function (data) {
 $scope.iserror1=false;
 $scope.lineage= angular.fromJson(data.results.rows);
@@ -836,7 +836,7 @@ $scope.getLinegaeforinput= function (tableName) {
 newarrvts.push(item);
 uniquevts[item.Name] = item;
-var url="/api/metadata/entities/definition/"+item.Name;
+var url="/api/atlas/entities/definition/"+item.Name;
 arr.push($http.get(url));
 //getLienageGuidName(item.Name);
@@ -1228,7 +1228,7 @@ DgcControllers.controller("GuidController", ['$scope','$http', '$filter','$state
 $scope.getGuidName=function getGuidName(val){
 $scope.gnew=[];
-$http.get('/api/metadata/entities/definition/'+val)
+$http.get('/api/atlas/entities/definition/'+val)
 .success(function (data) {
 $scope.iserror1=false;
 if(!$scope.isUndefined(data.results)){
...
@@ -22,13 +22,13 @@
 xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/maven-v4_0_0.xsd">
 <modelVersion>4.0.0</modelVersion>
 <parent>
-<groupId>org.apache.hadoop.metadata</groupId>
+<groupId>org.apache.atlas</groupId>
-<artifactId>metadata-governance</artifactId>
+<artifactId>apache-atlas</artifactId>
 <version>0.1-incubating-SNAPSHOT</version>
 </parent>
-<artifactId>metadata-docs</artifactId>
+<artifactId>atlas-docs</artifactId>
-<description>Apache Metadata Documentation</description>
+<description>Apache Atlas Documentation</description>
-<name>Apache Metadata Documentation</name>
+<name>Apache Atlas Documentation</name>
 <properties>
 <skipTests>true</skipTests>
...
@@ -28,11 +28,11 @@
 </parent>
 <modelVersion>4.0.0</modelVersion>
-<groupId>org.apache.hadoop.metadata</groupId>
+<groupId>org.apache.atlas</groupId>
-<artifactId>metadata-governance</artifactId>
+<artifactId>apache-atlas</artifactId>
 <version>0.1-incubating-SNAPSHOT</version>
 <description>Metadata Management and Data Governance Platform over Hadoop</description>
-<name>metadata-governance</name>
+<name>apache-atlas</name>
 <packaging>pom</packaging>
 <url>http://www.apache.org/</url>
@@ -489,35 +489,35 @@
 <version>${tinkerpop.version}</version>
 </dependency>
-<!-- metadata modules -->
+<!-- atlas modules -->
 <dependency>
-<groupId>org.apache.hadoop.metadata</groupId>
+<groupId>org.apache.atlas</groupId>
-<artifactId>metadata-typesystem</artifactId>
+<artifactId>atlas-typesystem</artifactId>
 <version>${project.version}</version>
 </dependency>
 <dependency>
-<groupId>org.apache.hadoop.metadata</groupId>
+<groupId>org.apache.atlas</groupId>
-<artifactId>metadata-repository</artifactId>
+<artifactId>atlas-repository</artifactId>
 <version>${project.version}</version>
 </dependency>
 <dependency>
-<groupId>org.apache.hadoop.metadata</groupId>
+<groupId>org.apache.atlas</groupId>
-<artifactId>metadata-webapp</artifactId>
+<artifactId>atlas-webapp</artifactId>
 <version>${project.version}</version>
 <classifier>classes</classifier>
 </dependency>
 <dependency>
-<groupId>org.apache.hadoop.metadata</groupId>
+<groupId>org.apache.atlas</groupId>
-<artifactId>metadata-client</artifactId>
+<artifactId>atlas-client</artifactId>
 <version>${project.version}</version>
 </dependency>
 <dependency>
-<groupId>org.apache.hadoop.metadata</groupId>
+<groupId>org.apache.atlas</groupId>
-<artifactId>metadata-client</artifactId>
+<artifactId>atlas-client</artifactId>
 <version>${project.version}</version>
 <type>test-jar</type>
 <scope>test</scope>
@@ -894,7 +894,7 @@
 <descriptor>src/main/assemblies/standalone-package.xml</descriptor>
 <descriptor>src/main/assemblies/src-package.xml</descriptor>
 </descriptors>
-<finalName>apache-metadata-governance-${project.version}</finalName>
+<finalName>apache-atlas-${project.version}</finalName>
 </configuration>
 </plugin>
...
@@ -24,24 +24,24 @@
 <modelVersion>4.0.0</modelVersion>
 <parent>
-<groupId>org.apache.hadoop.metadata</groupId>
+<groupId>org.apache.atlas</groupId>
-<artifactId>metadata-governance</artifactId>
+<artifactId>apache-atlas</artifactId>
 <version>0.1-incubating-SNAPSHOT</version>
 </parent>
-<artifactId>metadata-repository</artifactId>
+<artifactId>atlas-repository</artifactId>
-<description>Apache Metadata Repository Module</description>
+<description>Apache Atlas Repository Module</description>
-<name>Apache Metadata Repository</name>
+<name>Apache Atlas Repository</name>
 <packaging>jar</packaging>
 <dependencies>
 <dependency>
-<groupId>org.apache.hadoop.metadata</groupId>
+<groupId>org.apache.atlas</groupId>
-<artifactId>metadata-typesystem</artifactId>
+<artifactId>atlas-typesystem</artifactId>
 </dependency>
 <dependency>
-<groupId>org.apache.hadoop.metadata</groupId>
+<groupId>org.apache.atlas</groupId>
-<artifactId>metadata-client</artifactId>
+<artifactId>atlas-client</artifactId>
 </dependency>
 <dependency>
...
@@ -19,27 +19,27 @@
 ######### Graph Database Configs #########
 #Refer http://s3.thinkaurelius.com/docs/titan/0.5.1/titan-config-ref.html
 # Graph Storage
-metadata.graph.storage.backend=inmemory
+atlas.graph.storage.backend=inmemory
 # Graph Search Index
-metadata.graph.index.search.backend=elasticsearch
+atlas.graph.index.search.backend=elasticsearch
-metadata.graph.index.search.directory=./target/data/es
+atlas.graph.index.search.directory=./target/data/es
-metadata.graph.index.search.elasticsearch.client-only=false
+atlas.graph.index.search.elasticsearch.client-only=false
-metadata.graph.index.search.elasticsearch.local-mode=true
+atlas.graph.index.search.elasticsearch.local-mode=true
 ######### Hive Lineage Configs #########
-#metadata.lineage.hive.table.type.name=DataSet
+#atlas.lineage.hive.table.type.name=DataSet
-#metadata.lineage.hive.process.type.name=Process
+#atlas.lineage.hive.process.type.name=Process
-#metadata.lineage.hive.process.inputs.name=inputs
+#atlas.lineage.hive.process.inputs.name=inputs
-#metadata.lineage.hive.process.outputs.name=outputs
+#atlas.lineage.hive.process.outputs.name=outputs
 ## Schema
-#metadata.lineage.hive.table.schema.query=hive_table where name=?, columns
+#atlas.lineage.hive.table.schema.query=hive_table where name=?, columns
 ######### Security Properties #########
 # SSL config
-metadata.enableTLS=false
+atlas.enableTLS=false
@@ -18,7 +18,7 @@
 import os
 import sys
-import metadata_config as mc
+import atlas_config as mc
 DEFAULT_JVM_OPTS="-Xmx1024m"
@@ -45,7 +45,7 @@ def main():
 + os.path.join(web_app_dir, "metadata", "WEB-INF", "lib", "*" ) + p \
 + os.path.join(metadata_home, "libext", "*")
-process = mc.java("org.apache.hadoop.metadata.util.CredentialProviderUtility", sys.argv[1:], metadata_classpath, jvm_opts_list)
+process = mc.java("org.apache.atlas.util.CredentialProviderUtility", sys.argv[1:], metadata_classpath, jvm_opts_list)
 process.wait()
 if __name__ == '__main__':
...
@@ -18,7 +18,7 @@
 import os
 import sys
-import metadata_config as mc
+import atlas_config as mc
 METADATA_LOG_OPTS="-Dmetadata.log.dir=%s"
 METADATA_COMMAND_OPTS="-Dmetadata.home=%s"
@@ -51,10 +51,10 @@ def main():
 + os.path.join(web_app_dir, "metadata", "WEB-INF", "lib", "*" ) + p \
 + os.path.join(metadata_home, "libext", "*")
-process = mc.java("org.apache.hadoop.metadata.examples.QuickStart", sys.argv[1:], metadata_classpath, jvm_opts_list)
+process = mc.java("org.apache.atlas.examples.QuickStart", sys.argv[1:], metadata_classpath, jvm_opts_list)
 process.wait()
-print "Example data added to Metadata Server!!!\n"
+print "Example data added to Apache Atlas Server!!!\n"
 if __name__ == '__main__':
 try:
...
...@@ -18,9 +18,9 @@ ...@@ -18,9 +18,9 @@
<enunciate xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" <enunciate xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
xsi:noNamespaceSchemaLocation="http://enunciate.codehaus.org/schemas/enunciate-1.29.xsd"> xsi:noNamespaceSchemaLocation="http://enunciate.codehaus.org/schemas/enunciate-1.29.xsd">
<api-import pattern="org.apache.hadoop.metadata.web.resources.*"/> <api-import pattern="org.apache.atlas.web.resources.*"/>
<services> <services>
<rest defaultRestSubcontext="/api/metadata/"> <rest defaultRestSubcontext="/api/atlas/">
<custom-resource-parameter-annotation <custom-resource-parameter-annotation
qualifiedName="org.glassfish.jersey.media.multipart.FormDataParam"/> qualifiedName="org.glassfish.jersey.media.multipart.FormDataParam"/>
</rest> </rest>
......
...@@ -18,30 +18,30 @@ ...@@ -18,30 +18,30 @@
######### Graph Database Configs ######### ######### Graph Database Configs #########
# Graph Storage # Graph Storage
metadata.graph.storage.backend=berkeleyje atlas.graph.storage.backend=berkeleyje
metadata.graph.storage.directory=data/berkley atlas.graph.storage.directory=data/berkley
# Graph Search Index # Graph Search Index
metadata.graph.index.search.backend=elasticsearch atlas.graph.index.search.backend=elasticsearch
metadata.graph.index.search.directory=data/es atlas.graph.index.search.directory=data/es
metadata.graph.index.search.elasticsearch.client-only=false atlas.graph.index.search.elasticsearch.client-only=false
metadata.graph.index.search.elasticsearch.local-mode=true atlas.graph.index.search.elasticsearch.local-mode=true
metadata.graph.index.search.elasticsearch.create.sleep=2000 atlas.graph.index.search.elasticsearch.create.sleep=2000
######### Hive Lineage Configs ######### ######### Hive Lineage Configs #########
# This models reflects the base super types for Data and Process # This models reflects the base super types for Data and Process
#metadata.lineage.hive.table.type.name=DataSet #atlas.lineage.hive.table.type.name=DataSet
#metadata.lineage.hive.process.type.name=Process #atlas.lineage.hive.process.type.name=Process
#metadata.lineage.hive.process.inputs.name=inputs #atlas.lineage.hive.process.inputs.name=inputs
#metadata.lineage.hive.process.outputs.name=outputs #atlas.lineage.hive.process.outputs.name=outputs
## Schema ## Schema
#metadata.lineage.hive.table.schema.query=hive_table where name=?, columns #atlas.lineage.hive.table.schema.query=hive_table where name=?, columns
######### Security Properties ######### ######### Security Properties #########
# SSL config # SSL config
metadata.enableTLS=false atlas.enableTLS=false
######### Security Properties ######### ######### Security Properties #########
...@@ -20,7 +20,7 @@ ...@@ -20,7 +20,7 @@
# SSL config # SSL config
metadata.enableTLS=false atlas.enableTLS=false
truststore.file=/path/to/truststore.jks truststore.file=/path/to/truststore.jks
cert.stores.credential.provider.path=jceks://file/path/to/credentialstore.jceks cert.stores.credential.provider.path=jceks://file/path/to/credentialstore.jceks
# following only required for 2-way SSL # following only required for 2-way SSL
...@@ -29,8 +29,8 @@ keystore.file=/path/to/keystore.jks ...@@ -29,8 +29,8 @@ keystore.file=/path/to/keystore.jks
# Authentication config # Authentication config
# enabled: true or false # enabled: true or false
metadata.http.authentication.enabled=false atlas.http.authentication.enabled=false
# type: simple or kerberos # type: simple or kerberos
metadata.http.authentication.type=simple atlas.http.authentication.type=simple
######### Security Properties ######### ######### Security Properties #########
...@@ -28,7 +28,7 @@ ...@@ -28,7 +28,7 @@
</appender> </appender>
<appender name="FILE" class="org.apache.log4j.DailyRollingFileAppender"> <appender name="FILE" class="org.apache.log4j.DailyRollingFileAppender">
<param name="File" value="${metadata.log.dir}/${metadata.log.file}"/> <param name="File" value="${atlas.log.dir}/${atlas.log.file}"/>
<param name="Append" value="true"/> <param name="Append" value="true"/>
<param name="Threshold" value="debug"/> <param name="Threshold" value="debug"/>
<layout class="org.apache.log4j.PatternLayout"> <layout class="org.apache.log4j.PatternLayout">
...@@ -37,7 +37,7 @@ ...@@ -37,7 +37,7 @@
</appender> </appender>
<appender name="AUDIT" class="org.apache.log4j.DailyRollingFileAppender"> <appender name="AUDIT" class="org.apache.log4j.DailyRollingFileAppender">
<param name="File" value="${metadata.log.dir}/audit.log"/> <param name="File" value="${atlas.log.dir}/audit.log"/>
<param name="Append" value="true"/> <param name="Append" value="true"/>
<param name="Threshold" value="debug"/> <param name="Threshold" value="debug"/>
<layout class="org.apache.log4j.PatternLayout"> <layout class="org.apache.log4j.PatternLayout">
...@@ -45,7 +45,7 @@ ...@@ -45,7 +45,7 @@
</layout> </layout>
</appender> </appender>
<logger name="org.apache.hadoop.metadata" additivity="false"> <logger name="org.apache.atlas" additivity="false">
<level value="debug"/> <level value="debug"/>
<appender-ref ref="FILE"/> <appender-ref ref="FILE"/>
</logger> </logger>
......
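Because the logger category above moved from org.apache.hadoop.metadata to org.apache.atlas, debug output reaches the FILE appender (and ${atlas.log.dir}) only for classes under the new package. A hedged log4j 1.x sketch; the package and class below are hypothetical:

    package org.apache.atlas.example; // hypothetical package under the renamed logger category

    import org.apache.log4j.Logger;

    public class LoggingSketch {
        private static final Logger LOG = Logger.getLogger(LoggingSketch.class);

        public static void main(String[] args) {
            // Picked up by the <logger name="org.apache.atlas"> entry and written
            // beneath ${atlas.log.dir} rather than ${metadata.log.dir}.
            LOG.debug("started");
        }
    }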
...@@ -23,7 +23,7 @@ ...@@ -23,7 +23,7 @@
<format>tar.gz</format> <format>tar.gz</format>
</formats> </formats>
<id>sources</id> <id>sources</id>
<baseDirectory>apache-metadata-sources-${project.version}</baseDirectory> <baseDirectory>apache-atlas-sources-${project.version}</baseDirectory>
<fileSets> <fileSets>
<fileSet> <fileSet>
<directory>.</directory> <directory>.</directory>
......
...@@ -25,7 +25,7 @@ ...@@ -25,7 +25,7 @@
<format>dir</format> <format>dir</format>
</formats> </formats>
<id>bin</id> <id>bin</id>
<baseDirectory>metadata-${project.version}</baseDirectory> <baseDirectory>apache-atlas-${project.version}</baseDirectory>
<fileSets> <fileSets>
<fileSet> <fileSet>
<directory>src/conf/</directory> <directory>src/conf/</directory>
...@@ -103,9 +103,9 @@ ...@@ -103,9 +103,9 @@
<files> <files>
<file> <file>
<source>webapp/target/metadata-webapp-${project.version}.war</source> <source>webapp/target/atlas-webapp-${project.version}.war</source>
<outputDirectory>server/webapp</outputDirectory> <outputDirectory>server/webapp</outputDirectory>
<destName>metadata.war</destName> <destName>atlas.war</destName>
</file> </file>
</files> </files>
</assembly> </assembly>
...@@ -157,7 +157,7 @@ ...@@ -157,7 +157,7 @@
"superTypes":[ "superTypes":[
], ],
"hierarchicalMetaTypeName":"org.apache.hadoop.metadata.typesystem.types.ClassType", "hierarchicalMetaTypeName":"org.apache.atlas.typesystem.types.ClassType",
"typeName":"hive_process", "typeName":"hive_process",
"attributeDefinitions":[ "attributeDefinitions":[
{ {
...@@ -256,7 +256,7 @@ ...@@ -256,7 +256,7 @@
"superTypes":[ "superTypes":[
], ],
"hierarchicalMetaTypeName":"org.apache.hadoop.metadata.typesystem.types.ClassType", "hierarchicalMetaTypeName":"org.apache.atlas.typesystem.types.ClassType",
"typeName":"hive_function", "typeName":"hive_function",
"attributeDefinitions":[ "attributeDefinitions":[
{ {
...@@ -337,7 +337,7 @@ ...@@ -337,7 +337,7 @@
"superTypes":[ "superTypes":[
], ],
"hierarchicalMetaTypeName":"org.apache.hadoop.metadata.typesystem.types.ClassType", "hierarchicalMetaTypeName":"org.apache.atlas.typesystem.types.ClassType",
"typeName":"hive_type", "typeName":"hive_type",
"attributeDefinitions":[ "attributeDefinitions":[
{ {
...@@ -382,7 +382,7 @@ ...@@ -382,7 +382,7 @@
"superTypes":[ "superTypes":[
], ],
"hierarchicalMetaTypeName":"org.apache.hadoop.metadata.typesystem.types.ClassType", "hierarchicalMetaTypeName":"org.apache.atlas.typesystem.types.ClassType",
"typeName":"hive_table", "typeName":"hive_table",
"attributeDefinitions":[ "attributeDefinitions":[
{ {
...@@ -508,7 +508,7 @@ ...@@ -508,7 +508,7 @@
"superTypes":[ "superTypes":[
], ],
"hierarchicalMetaTypeName":"org.apache.hadoop.metadata.typesystem.types.ClassType", "hierarchicalMetaTypeName":"org.apache.atlas.typesystem.types.ClassType",
"typeName":"hive_partition", "typeName":"hive_partition",
"attributeDefinitions":[ "attributeDefinitions":[
{ {
...@@ -580,7 +580,7 @@ ...@@ -580,7 +580,7 @@
"superTypes":[ "superTypes":[
], ],
"hierarchicalMetaTypeName":"org.apache.hadoop.metadata.typesystem.types.ClassType", "hierarchicalMetaTypeName":"org.apache.atlas.typesystem.types.ClassType",
"typeName":"hive_storagedesc", "typeName":"hive_storagedesc",
"attributeDefinitions":[ "attributeDefinitions":[
{ {
...@@ -688,7 +688,7 @@ ...@@ -688,7 +688,7 @@
"superTypes":[ "superTypes":[
], ],
"hierarchicalMetaTypeName":"org.apache.hadoop.metadata.typesystem.types.ClassType", "hierarchicalMetaTypeName":"org.apache.atlas.typesystem.types.ClassType",
"typeName":"hive_index", "typeName":"hive_index",
"attributeDefinitions":[ "attributeDefinitions":[
{ {
...@@ -787,7 +787,7 @@ ...@@ -787,7 +787,7 @@
"superTypes":[ "superTypes":[
], ],
"hierarchicalMetaTypeName":"org.apache.hadoop.metadata.typesystem.types.ClassType", "hierarchicalMetaTypeName":"org.apache.atlas.typesystem.types.ClassType",
"typeName":"hive_role", "typeName":"hive_role",
"attributeDefinitions":[ "attributeDefinitions":[
{ {
...@@ -823,7 +823,7 @@ ...@@ -823,7 +823,7 @@
"superTypes":[ "superTypes":[
], ],
"hierarchicalMetaTypeName":"org.apache.hadoop.metadata.typesystem.types.ClassType", "hierarchicalMetaTypeName":"org.apache.atlas.typesystem.types.ClassType",
"typeName":"hive_db", "typeName":"hive_db",
"attributeDefinitions":[ "attributeDefinitions":[
{ {
...@@ -886,7 +886,7 @@ ...@@ -886,7 +886,7 @@
"superTypes":[ "superTypes":[
], ],
"hierarchicalMetaTypeName":"org.apache.hadoop.metadata.typesystem.types.ClassType", "hierarchicalMetaTypeName":"org.apache.atlas.typesystem.types.ClassType",
"typeName":"hive_column", "typeName":"hive_column",
"attributeDefinitions":[ "attributeDefinitions":[
{ {
......
...@@ -22,7 +22,7 @@ import sys ...@@ -22,7 +22,7 @@ import sys
from mock import patch from mock import patch
import unittest import unittest
import logging import logging
import metadata_config as mc import atlas_config as mc
import metadata_start as metadata import metadata_start as metadata
import platform import platform
...@@ -47,13 +47,13 @@ class TestMetadata(unittest.TestCase): ...@@ -47,13 +47,13 @@ class TestMetadata(unittest.TestCase):
self.assertTrue(java_mock.called) self.assertTrue(java_mock.called)
if IS_WINDOWS: if IS_WINDOWS:
java_mock.assert_called_with( java_mock.assert_called_with(
'org.apache.hadoop.metadata.Main', 'org.apache.atlas.Main',
['-app', 'metadata_home/server/webapp/metadata'], ['-app', 'metadata_home/server/webapp/metadata'],
'metadata_home/conf:metadata_home/server/webapp/metadata/WEB-INF/classes:metadata_home/server/webapp/metadata/WEB-INF/lib\\*:metadata_home/libext\\*', 'metadata_home/conf:metadata_home/server/webapp/metadata/WEB-INF/classes:metadata_home/server/webapp/metadata/WEB-INF/lib\\*:metadata_home/libext\\*',
['-Dmetadata.log.dir=metadata_home/logs', '-Dmetadata.log.file=application.log', '-Dmetadata.home=metadata_home', '-Dmetadata.conf=metadata_home/conf', '-Xmx1024m'], 'metadata_home/logs') ['-Dmetadata.log.dir=metadata_home/logs', '-Dmetadata.log.file=application.log', '-Dmetadata.home=metadata_home', '-Dmetadata.conf=metadata_home/conf', '-Xmx1024m'], 'metadata_home/logs')
else: else:
java_mock.assert_called_with( java_mock.assert_called_with(
'org.apache.hadoop.metadata.Main', 'org.apache.atlas.Main',
['-app', 'metadata_home/server/webapp/metadata'], ['-app', 'metadata_home/server/webapp/metadata'],
'metadata_home/conf:metadata_home/server/webapp/metadata/WEB-INF/classes:metadata_home/server/webapp/metadata/WEB-INF/lib/*:metadata_home/libext/*', 'metadata_home/conf:metadata_home/server/webapp/metadata/WEB-INF/classes:metadata_home/server/webapp/metadata/WEB-INF/lib/*:metadata_home/libext/*',
['-Dmetadata.log.dir=metadata_home/logs', '-Dmetadata.log.file=application.log', '-Dmetadata.home=metadata_home', '-Dmetadata.conf=metadata_home/conf', '-Xmx1024m'], 'metadata_home/logs') ['-Dmetadata.log.dir=metadata_home/logs', '-Dmetadata.log.file=application.log', '-Dmetadata.home=metadata_home', '-Dmetadata.conf=metadata_home/conf', '-Xmx1024m'], 'metadata_home/logs')
......
...@@ -24,24 +24,24 @@ ...@@ -24,24 +24,24 @@
<modelVersion>4.0.0</modelVersion> <modelVersion>4.0.0</modelVersion>
<parent> <parent>
<groupId>org.apache.hadoop.metadata</groupId> <groupId>org.apache.atlas</groupId>
<artifactId>metadata-governance</artifactId> <artifactId>apache-atlas</artifactId>
<version>0.1-incubating-SNAPSHOT</version> <version>0.1-incubating-SNAPSHOT</version>
</parent> </parent>
<artifactId>metadata-tools</artifactId> <artifactId>atlas-tools</artifactId>
<description>Apache Metadata Type System Tools Module</description> <description>Apache Atlas Type System Tools Module</description>
<name>Apache Metadata Type System Tools</name> <name>Apache Atlas Type System Tools</name>
<packaging>jar</packaging> <packaging>jar</packaging>
<dependencies> <dependencies>
<dependency> <dependency>
<groupId>org.apache.hadoop.metadata</groupId> <groupId>org.apache.atlas</groupId>
<artifactId>metadata-typesystem</artifactId> <artifactId>atlas-typesystem</artifactId>
</dependency> </dependency>
<dependency> <dependency>
<groupId>org.apache.hadoop.metadata</groupId> <groupId>org.apache.atlas</groupId>
<artifactId>metadata-repository</artifactId> <artifactId>atlas-repository</artifactId>
</dependency> </dependency>
<dependency> <dependency>
......
...@@ -16,10 +16,10 @@ ...@@ -16,10 +16,10 @@
* limitations under the License. * limitations under the License.
*/ */
package org.apache.hadoop.metadata.tools.cli package org.apache.atlas.tools.cli
import org.apache.hadoop.metadata.repository.memory.MemRepository import org.apache.atlas.repository.memory.MemRepository
import org.apache.hadoop.metadata.typesystem.types.TypeSystem import org.apache.atlas.typesystem.types.TypeSystem
import scala.tools.nsc.Settings import scala.tools.nsc.Settings
import scala.tools.nsc.interpreter.{ILoop, IMain} import scala.tools.nsc.interpreter.{ILoop, IMain}
...@@ -28,7 +28,7 @@ object Console extends App { ...@@ -28,7 +28,7 @@ object Console extends App {
val settings = new Settings val settings = new Settings
settings.usejavacp.value = true settings.usejavacp.value = true
settings.deprecation.value = true settings.deprecation.value = true
settings.bootclasspath.value += """/Users/hbutani/.m2/repository/org/apache/metadata/1.0-SNAPSHOT/metadata-1.0-SNAPSHOT.jar:/Users/hbutani/.m2/repository/org/scala-lang/scala-compiler/2.10.4/scala-compiler-2.10.4.jar:/Users/hbutani/.m2/repository/org/scala-lang/scala-reflect/2.10.4/scala-reflect-2.10.4.jar:/Users/hbutani/.m2/repository/org/scala-lang/jline/2.10.4/jline-2.10.4.jar:/Users/hbutani/.m2/repository/org/fusesource/jansi/jansi/1.4/jansi-1.4.jar:/Users/hbutani/.m2/repository/org/scala-lang/scala-library/2.10.4/scala-library-2.10.4.jar:/Users/hbutani/.m2/repository/org/scala-lang/scala-actors/2.10.4/scala-actors-2.10.4.jar:/Users/hbutani/.m2/repository/org/scala-lang/scalap/2.10.4/scalap-2.10.4.jar:/Users/hbutani/.m2/repository/org/scalatest/scalatest_2.10/2.2.0/scalatest_2.10-2.2.0.jar:/Users/hbutani/.m2/repository/org/scalamacros/quasiquotes_2.10/2.0.1/quasiquotes_2.10-2.0.1.jar:/Users/hbutani/.m2/repository/org/json4s/json4s-native_2.10/3.2.11/json4s-native_2.10-3.2.11.jar:/Users/hbutani/.m2/repository/org/json4s/json4s-core_2.10/3.2.11/json4s-core_2.10-3.2.11.jar:/Users/hbutani/.m2/repository/org/json4s/json4s-ast_2.10/3.2.11/json4s-ast_2.10-3.2.11.jar:/Users/hbutani/.m2/repository/com/thoughtworks/paranamer/paranamer/2.6/paranamer-2.6.jar:/Users/hbutani/.m2/repository/com/github/nscala-time/nscala-time_2.10/1.6.0/nscala-time_2.10-1.6.0.jar:/Users/hbutani/.m2/repository/joda-time/joda-time/2.5/joda-time-2.5.jar:/Users/hbutani/.m2/repository/org/joda/joda-convert/1.2/joda-convert-1.2.jar:/Users/hbutani/.m2/repository/com/typesafe/config/1.2.1/config-1.2.1.jar:/Users/hbutani/.m2/repository/com/typesafe/akka/akka-actor_2.10/2.3.7/akka-actor_2.10-2.3.7.jar:/Users/hbutani/.m2/repository/com/typesafe/akka/akka-testkit_2.10/2.3.7/akka-testkit_2.10-2.3.7.jar:/Users/hbutani/.m2/repository/com/typesafe/akka/akka-slf4j_2.10/2.3.7/akka-slf4j_2.10-2.3.7.jar:/Users/hbutani/.m2/repository/org/slf4j/slf4j-api/1.7.5/slf4j-api-1.7.5.jar:/Users/hbutani/.m2/repository/io/spray/spray-routing/1.3.1/spray-routing-1.3.1.jar:/Users/hbutani/.m2/repository/io/spray/spray-http/1.3.1/spray-http-1.3.1.jar:/Users/hbutani/.m2/repository/org/parboiled/parboiled-scala_2.10/1.1.6/parboiled-scala_2.10-1.1.6.jar:/Users/hbutani/.m2/repository/org/parboiled/parboiled-core/1.1.6/parboiled-core-1.1.6.jar:/Users/hbutani/.m2/repository/io/spray/spray-util/1.3.1/spray-util-1.3.1.jar:/Users/hbutani/.m2/repository/com/chuusai/shapeless_2.10/1.2.4/shapeless_2.10-1.2.4.jar:/Users/hbutani/.m2/repository/io/spray/spray-can/1.3.1/spray-can-1.3.1.jar:/Users/hbutani/.m2/repository/io/spray/spray-io/1.3.1/spray-io-1.3.1.jar:/Users/hbutani/.m2/repository/io/spray/spray-httpx/1.3.1/spray-httpx-1.3.1.jar:/Users/hbutani/.m2/repository/org/jvnet/mimepull/mimepull/1.9.4/mimepull-1.9.4.jar:/Users/hbutani/.m2/repository/io/spray/spray-testkit/1.3.1/spray-testkit-1.3.1.jar:/Users/hbutani/.m2/repository/com/google/guava/guava/11.0.2/guava-11.0.2.jar:/Users/hbutani/.m2/repository/com/google/code/findbugs/jsr305/1.3.9/jsr305-1.3.9.jar:/Users/hbutani/.m2/repository/junit/junit/4.10/junit-4.10.jar:/Users/hbutani/.m2/repository/org/hamcrest/hamcrest-core/1.1/hamcrest-core-1.1.jar""" settings.bootclasspath.value += 
"""/Users/hbutani/.m2/repository/org/apache/atlas/1.0-SNAPSHOT/atlas-1.0-SNAPSHOT.jar:/Users/hbutani/.m2/repository/org/scala-lang/scala-compiler/2.10.4/scala-compiler-2.10.4.jar:/Users/hbutani/.m2/repository/org/scala-lang/scala-reflect/2.10.4/scala-reflect-2.10.4.jar:/Users/hbutani/.m2/repository/org/scala-lang/jline/2.10.4/jline-2.10.4.jar:/Users/hbutani/.m2/repository/org/fusesource/jansi/jansi/1.4/jansi-1.4.jar:/Users/hbutani/.m2/repository/org/scala-lang/scala-library/2.10.4/scala-library-2.10.4.jar:/Users/hbutani/.m2/repository/org/scala-lang/scala-actors/2.10.4/scala-actors-2.10.4.jar:/Users/hbutani/.m2/repository/org/scala-lang/scalap/2.10.4/scalap-2.10.4.jar:/Users/hbutani/.m2/repository/org/scalatest/scalatest_2.10/2.2.0/scalatest_2.10-2.2.0.jar:/Users/hbutani/.m2/repository/org/scalamacros/quasiquotes_2.10/2.0.1/quasiquotes_2.10-2.0.1.jar:/Users/hbutani/.m2/repository/org/json4s/json4s-native_2.10/3.2.11/json4s-native_2.10-3.2.11.jar:/Users/hbutani/.m2/repository/org/json4s/json4s-core_2.10/3.2.11/json4s-core_2.10-3.2.11.jar:/Users/hbutani/.m2/repository/org/json4s/json4s-ast_2.10/3.2.11/json4s-ast_2.10-3.2.11.jar:/Users/hbutani/.m2/repository/com/thoughtworks/paranamer/paranamer/2.6/paranamer-2.6.jar:/Users/hbutani/.m2/repository/com/github/nscala-time/nscala-time_2.10/1.6.0/nscala-time_2.10-1.6.0.jar:/Users/hbutani/.m2/repository/joda-time/joda-time/2.5/joda-time-2.5.jar:/Users/hbutani/.m2/repository/org/joda/joda-convert/1.2/joda-convert-1.2.jar:/Users/hbutani/.m2/repository/com/typesafe/config/1.2.1/config-1.2.1.jar:/Users/hbutani/.m2/repository/com/typesafe/akka/akka-actor_2.10/2.3.7/akka-actor_2.10-2.3.7.jar:/Users/hbutani/.m2/repository/com/typesafe/akka/akka-testkit_2.10/2.3.7/akka-testkit_2.10-2.3.7.jar:/Users/hbutani/.m2/repository/com/typesafe/akka/akka-slf4j_2.10/2.3.7/akka-slf4j_2.10-2.3.7.jar:/Users/hbutani/.m2/repository/org/slf4j/slf4j-api/1.7.5/slf4j-api-1.7.5.jar:/Users/hbutani/.m2/repository/io/spray/spray-routing/1.3.1/spray-routing-1.3.1.jar:/Users/hbutani/.m2/repository/io/spray/spray-http/1.3.1/spray-http-1.3.1.jar:/Users/hbutani/.m2/repository/org/parboiled/parboiled-scala_2.10/1.1.6/parboiled-scala_2.10-1.1.6.jar:/Users/hbutani/.m2/repository/org/parboiled/parboiled-core/1.1.6/parboiled-core-1.1.6.jar:/Users/hbutani/.m2/repository/io/spray/spray-util/1.3.1/spray-util-1.3.1.jar:/Users/hbutani/.m2/repository/com/chuusai/shapeless_2.10/1.2.4/shapeless_2.10-1.2.4.jar:/Users/hbutani/.m2/repository/io/spray/spray-can/1.3.1/spray-can-1.3.1.jar:/Users/hbutani/.m2/repository/io/spray/spray-io/1.3.1/spray-io-1.3.1.jar:/Users/hbutani/.m2/repository/io/spray/spray-httpx/1.3.1/spray-httpx-1.3.1.jar:/Users/hbutani/.m2/repository/org/jvnet/mimepull/mimepull/1.9.4/mimepull-1.9.4.jar:/Users/hbutani/.m2/repository/io/spray/spray-testkit/1.3.1/spray-testkit-1.3.1.jar:/Users/hbutani/.m2/repository/com/google/guava/guava/11.0.2/guava-11.0.2.jar:/Users/hbutani/.m2/repository/com/google/code/findbugs/jsr305/1.3.9/jsr305-1.3.9.jar:/Users/hbutani/.m2/repository/junit/junit/4.10/junit-4.10.jar:/Users/hbutani/.m2/repository/org/hamcrest/hamcrest-core/1.1/hamcrest-core-1.1.jar"""
val in = new IMain(settings) { val in = new IMain(settings) {
override protected def parentClassLoader = settings.getClass.getClassLoader() override protected def parentClassLoader = settings.getClass.getClassLoader()
...@@ -51,7 +51,7 @@ class SampleILoop extends ILoop { ...@@ -51,7 +51,7 @@ class SampleILoop extends ILoop {
intp.addImports("org.json4s.native.Serialization.{read, write => swrite}") intp.addImports("org.json4s.native.Serialization.{read, write => swrite}")
intp.addImports("org.json4s._") intp.addImports("org.json4s._")
intp.addImports("org.json4s.native.JsonMethods._") intp.addImports("org.json4s.native.JsonMethods._")
intp.addImports("org.apache.hadoop.metadata.tools.dsl._") intp.addImports("org.apache.atlas.tools.dsl._")
//intp.bindValue("service", ms) //intp.bindValue("service", ms)
//intp.bindValue("cp", intp.compilerClasspath) //intp.bindValue("cp", intp.compilerClasspath)
} }
......
...@@ -24,13 +24,13 @@ ...@@ -24,13 +24,13 @@
<modelVersion>4.0.0</modelVersion> <modelVersion>4.0.0</modelVersion>
<parent> <parent>
<groupId>org.apache.hadoop.metadata</groupId> <groupId>org.apache.atlas</groupId>
<artifactId>metadata-governance</artifactId> <artifactId>apache-atlas</artifactId>
<version>0.1-incubating-SNAPSHOT</version> <version>0.1-incubating-SNAPSHOT</version>
</parent> </parent>
<artifactId>metadata-typesystem</artifactId> <artifactId>atlas-typesystem</artifactId>
<description>Apache Metadata Typesystem Module</description> <description>Apache Atlas Typesystem Module</description>
<name>Apache Metadata Typesystem</name> <name>Apache Atlas Typesystem</name>
<packaging>jar</packaging> <packaging>jar</packaging>
<dependencies> <dependencies>
......
...@@ -16,10 +16,10 @@ ...@@ -16,10 +16,10 @@
* limitations under the License. * limitations under the License.
*/ */
package org.apache.hadoop.metadata.typesystem; package org.apache.atlas.typesystem;
import org.apache.hadoop.metadata.MetadataException; import org.apache.atlas.MetadataException;
import org.apache.hadoop.metadata.typesystem.types.FieldMapping; import org.apache.atlas.typesystem.types.FieldMapping;
import java.math.BigDecimal; import java.math.BigDecimal;
import java.math.BigInteger; import java.math.BigInteger;
...@@ -37,49 +37,49 @@ public interface ITypedInstance extends IInstance { ...@@ -37,49 +37,49 @@ public interface ITypedInstance extends IInstance {
FieldMapping fieldMapping(); FieldMapping fieldMapping();
public void setNull(String attrName) throws MetadataException; void setNull(String attrName) throws MetadataException;
public boolean getBoolean(String attrName) throws MetadataException; boolean getBoolean(String attrName) throws MetadataException;
public byte getByte(String attrName) throws MetadataException; byte getByte(String attrName) throws MetadataException;
public short getShort(String attrName) throws MetadataException; short getShort(String attrName) throws MetadataException;
public int getInt(String attrName) throws MetadataException; int getInt(String attrName) throws MetadataException;
public long getLong(String attrName) throws MetadataException; long getLong(String attrName) throws MetadataException;
public float getFloat(String attrName) throws MetadataException; float getFloat(String attrName) throws MetadataException;
public double getDouble(String attrName) throws MetadataException; double getDouble(String attrName) throws MetadataException;
public BigInteger getBigInt(String attrName) throws MetadataException; BigInteger getBigInt(String attrName) throws MetadataException;
public BigDecimal getBigDecimal(String attrName) throws MetadataException; BigDecimal getBigDecimal(String attrName) throws MetadataException;
public Date getDate(String attrName) throws MetadataException; Date getDate(String attrName) throws MetadataException;
public String getString(String attrName) throws MetadataException; String getString(String attrName) throws MetadataException;
public void setBoolean(String attrName, boolean val) throws MetadataException; void setBoolean(String attrName, boolean val) throws MetadataException;
public void setByte(String attrName, byte val) throws MetadataException; void setByte(String attrName, byte val) throws MetadataException;
public void setShort(String attrName, short val) throws MetadataException; void setShort(String attrName, short val) throws MetadataException;
public void setInt(String attrName, int val) throws MetadataException; void setInt(String attrName, int val) throws MetadataException;
public void setLong(String attrName, long val) throws MetadataException; void setLong(String attrName, long val) throws MetadataException;
public void setFloat(String attrName, float val) throws MetadataException; void setFloat(String attrName, float val) throws MetadataException;
public void setDouble(String attrName, double val) throws MetadataException; void setDouble(String attrName, double val) throws MetadataException;
public void setBigInt(String attrName, BigInteger val) throws MetadataException; void setBigInt(String attrName, BigInteger val) throws MetadataException;
public void setBigDecimal(String attrName, BigDecimal val) throws MetadataException; void setBigDecimal(String attrName, BigDecimal val) throws MetadataException;
public void setDate(String attrName, Date val) throws MetadataException; void setDate(String attrName, Date val) throws MetadataException;
public void setString(String attrName, String val) throws MetadataException; void setString(String attrName, String val) throws MetadataException;
} }
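Dropping the explicit public modifiers above is purely cosmetic: Java interface methods are implicitly public and abstract, so existing callers and implementations compile unchanged. A minimal sketch of a caller under the renamed packages; the helper class itself is hypothetical:

    import org.apache.atlas.MetadataException;
    import org.apache.atlas.typesystem.ITypedInstance;

    class TypedInstanceSketch {
        // Copies a string attribute between two typed instances; "name" is an illustrative attribute.
        static void copyName(ITypedInstance from, ITypedInstance to) throws MetadataException {
            to.setString("name", from.getString("name"));
        }
    }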
...@@ -18,8 +18,8 @@ ...@@ -18,8 +18,8 @@
######### Graph Database Configs ######### ######### Graph Database Configs #########
# Graph Storage # Graph Storage
metadata.graph.storage.backend=inmemory atlas.graph.storage.backend=inmemory
# Graph Search Index # Graph Search Index
metadata.graph.index.search.backend=lucene atlas.graph.index.search.backend=lucene
metadata.graph.index.search.directory=target/data/lucene atlas.graph.index.search.directory=target/data/lucene
...@@ -64,7 +64,7 @@ ...@@ -64,7 +64,7 @@
<appender-ref ref="console"/> <appender-ref ref="console"/>
</logger> </logger>
<logger name="org.apache.hadoop.metadata" additivity="false"> <logger name="org.apache.atlas" additivity="false">
<level value="debug"/> <level value="debug"/>
<appender-ref ref="console"/> <appender-ref ref="console"/>
</logger> </logger>
......
...@@ -324,7 +324,7 @@ ...@@ -324,7 +324,7 @@
"B", "B",
"C" "C"
], ],
"hierarchicalMetaTypeName": "org.apache.hadoop.metadata.types.TraitType", "hierarchicalMetaTypeName": "org.apache.atlas.types.TraitType",
"typeName": "D", "typeName": "D",
"attributeDefinitions": [ "attributeDefinitions": [
{ {
...@@ -339,7 +339,7 @@ ...@@ -339,7 +339,7 @@
{ {
"superTypes": [ "superTypes": [
], ],
"hierarchicalMetaTypeName": "org.apache.hadoop.metadata.types.TraitType", "hierarchicalMetaTypeName": "org.apache.atlas.types.TraitType",
"typeName": "A", "typeName": "A",
"attributeDefinitions": [ "attributeDefinitions": [
{ {
...@@ -376,7 +376,7 @@ ...@@ -376,7 +376,7 @@
"superTypes": [ "superTypes": [
"A" "A"
], ],
"hierarchicalMetaTypeName": "org.apache.hadoop.metadata.types.TraitType", "hierarchicalMetaTypeName": "org.apache.atlas.types.TraitType",
"typeName": "B", "typeName": "B",
"attributeDefinitions": [ "attributeDefinitions": [
{ {
...@@ -392,7 +392,7 @@ ...@@ -392,7 +392,7 @@
"superTypes": [ "superTypes": [
"A" "A"
], ],
"hierarchicalMetaTypeName": "org.apache.hadoop.metadata.types.TraitType", "hierarchicalMetaTypeName": "org.apache.atlas.types.TraitType",
"typeName": "C", "typeName": "C",
"attributeDefinitions": [ "attributeDefinitions": [
{ {
...@@ -407,7 +407,7 @@ ...@@ -407,7 +407,7 @@
{ {
"superTypes": [ "superTypes": [
], ],
"hierarchicalMetaTypeName": "org.apache.hadoop.metadata.types.TraitType", "hierarchicalMetaTypeName": "org.apache.atlas.types.TraitType",
"typeName": "SecurityClearance", "typeName": "SecurityClearance",
"attributeDefinitions": [ "attributeDefinitions": [
{ {
...@@ -425,7 +425,7 @@ ...@@ -425,7 +425,7 @@
"superTypes": [ "superTypes": [
"Person" "Person"
], ],
"hierarchicalMetaTypeName": "org.apache.hadoop.metadata.types.ClassType", "hierarchicalMetaTypeName": "org.apache.atlas.types.ClassType",
"typeName": "Manager", "typeName": "Manager",
"attributeDefinitions": [ "attributeDefinitions": [
{ {
...@@ -440,7 +440,7 @@ ...@@ -440,7 +440,7 @@
{ {
"superTypes": [ "superTypes": [
], ],
"hierarchicalMetaTypeName": "org.apache.hadoop.metadata.types.ClassType", "hierarchicalMetaTypeName": "org.apache.atlas.types.ClassType",
"typeName": "Department", "typeName": "Department",
"attributeDefinitions": [ "attributeDefinitions": [
{ {
...@@ -462,7 +462,7 @@ ...@@ -462,7 +462,7 @@
{ {
"superTypes": [ "superTypes": [
], ],
"hierarchicalMetaTypeName": "org.apache.hadoop.metadata.types.ClassType", "hierarchicalMetaTypeName": "org.apache.atlas.types.ClassType",
"typeName": "t4", "typeName": "t4",
"attributeDefinitions": [ "attributeDefinitions": [
{ {
...@@ -603,7 +603,7 @@ ...@@ -603,7 +603,7 @@
{ {
"superTypes": [ "superTypes": [
], ],
"hierarchicalMetaTypeName": "org.apache.hadoop.metadata.types.ClassType", "hierarchicalMetaTypeName": "org.apache.atlas.types.ClassType",
"typeName": "Person", "typeName": "Person",
"attributeDefinitions": [ "attributeDefinitions": [
{ {
......
...@@ -24,13 +24,13 @@ ...@@ -24,13 +24,13 @@
<modelVersion>4.0.0</modelVersion> <modelVersion>4.0.0</modelVersion>
<parent> <parent>
<groupId>org.apache.hadoop.metadata</groupId> <groupId>org.apache.atlas</groupId>
<artifactId>metadata-governance</artifactId> <artifactId>apache-atlas</artifactId>
<version>0.1-incubating-SNAPSHOT</version> <version>0.1-incubating-SNAPSHOT</version>
</parent> </parent>
<artifactId>metadata-webapp</artifactId> <artifactId>atlas-webapp</artifactId>
<description>Apache Metadata Web Application</description> <description>Apache Atlas Web Application</description>
<name>Apache Metadata Web Application</name> <name>Apache Atlas Web Application</name>
<packaging>war</packaging> <packaging>war</packaging>
<properties> <properties>
...@@ -39,18 +39,18 @@ ...@@ -39,18 +39,18 @@
<dependencies> <dependencies>
<dependency> <dependency>
<groupId>org.apache.hadoop.metadata</groupId> <groupId>org.apache.atlas</groupId>
<artifactId>metadata-typesystem</artifactId> <artifactId>atlas-typesystem</artifactId>
</dependency> </dependency>
<dependency> <dependency>
<groupId>org.apache.hadoop.metadata</groupId> <groupId>org.apache.atlas</groupId>
<artifactId>metadata-repository</artifactId> <artifactId>atlas-repository</artifactId>
</dependency> </dependency>
<dependency> <dependency>
<groupId>org.apache.hadoop.metadata</groupId> <groupId>org.apache.atlas</groupId>
<artifactId>metadata-client</artifactId> <artifactId>atlas-client</artifactId>
<type>test-jar</type> <type>test-jar</type>
</dependency> </dependency>
...@@ -198,7 +198,7 @@ ...@@ -198,7 +198,7 @@
<includeTransitiveDependencySources>false</includeTransitiveDependencySources> <includeTransitiveDependencySources>false</includeTransitiveDependencySources>
<includeDependencySources>true</includeDependencySources> <includeDependencySources>true</includeDependencySources>
<dependencySourceIncludes> <dependencySourceIncludes>
<dependencySourceInclude>org.apache.hadoop.metadata:*</dependencySourceInclude> <dependencySourceInclude>org.apache.atlas:*</dependencySourceInclude>
</dependencySourceIncludes> </dependencySourceIncludes>
</configuration> </configuration>
</execution> </execution>
...@@ -260,11 +260,11 @@ ...@@ -260,11 +260,11 @@
</execution> </execution>
</executions> </executions>
<configuration> <configuration>
<dname>cn=metadata.incubator.apache.org</dname> <dname>cn=atlas.incubator.apache.org</dname>
<keystore>${project.build.directory}/metadata.keystore</keystore> <keystore>${project.build.directory}/atlas.keystore</keystore>
<keypass>keypass</keypass> <keypass>keypass</keypass>
<storepass>keypass</storepass> <storepass>keypass</storepass>
<alias>metadata</alias> <alias>atlas</alias>
<keyalg>RSA</keyalg> <keyalg>RSA</keyalg>
<validity>100000</validity> <validity>100000</validity>
</configuration> </configuration>
...@@ -282,9 +282,9 @@ ...@@ -282,9 +282,9 @@
<connector implementation="org.mortbay.jetty.security.SslSocketConnector"> <connector implementation="org.mortbay.jetty.security.SslSocketConnector">
<port>21443</port> <port>21443</port>
<maxIdleTime>60000</maxIdleTime> <maxIdleTime>60000</maxIdleTime>
<keystore>${project.build.directory}/../../webapp/target/metadata.keystore</keystore> <keystore>${project.build.directory}/../../webapp/target/atlas.keystore</keystore>
<keyPassword>metadata-passwd</keyPassword> <keyPassword>atlas-passwd</keyPassword>
<password>metadata-passwd</password> <password>atlas-passwd</password>
</connector> </connector>
--> -->
<connector implementation="org.mortbay.jetty.nio.SelectChannelConnector"> <connector implementation="org.mortbay.jetty.nio.SelectChannelConnector">
...@@ -292,29 +292,29 @@ ...@@ -292,29 +292,29 @@
<maxIdleTime>60000</maxIdleTime> <maxIdleTime>60000</maxIdleTime>
</connector> </connector>
</connectors> </connectors>
<webApp>${project.build.directory}/metadata-webapp-${project.version}</webApp> <webApp>${project.build.directory}/atlas-webapp-${project.version}</webApp>
<contextPath>/</contextPath> <contextPath>/</contextPath>
<useTestClasspath>true</useTestClasspath> <useTestClasspath>true</useTestClasspath>
<systemProperties> <systemProperties>
<systemProperty> <systemProperty>
<name>metadata.log.dir</name> <name>atlas.log.dir</name>
<value>${project.build.directory}/logs</value> <value>${project.build.directory}/logs</value>
</systemProperty> </systemProperty>
<systemProperty> <systemProperty>
<name>keystore.file</name> <name>keystore.file</name>
<value>${project.build.directory}/../../webapp/target/metadata.keystore <value>${project.build.directory}/../../webapp/target/atlas.keystore
</value> </value>
</systemProperty> </systemProperty>
<systemProperty> <systemProperty>
<name>truststore.file</name> <name>truststore.file</name>
<value>${project.build.directory}/../../webapp/target/metadata.keystore</value> <value>${project.build.directory}/../../webapp/target/atlas.keystore </value>
</systemProperty> </systemProperty>
<systemProperty> <systemProperty>
<name>metadata.home</name> <name>atlas.home</name>
<value>${project.build.directory}</value> <value>${project.build.directory}</value>
</systemProperty> </systemProperty>
</systemProperties> </systemProperties>
<stopKey>metadata-stop</stopKey> <stopKey>atlas-stop</stopKey>
<stopPort>41001</stopPort> <stopPort>41001</stopPort>
<daemon>${debug.jetty.daemon}</daemon> <daemon>${debug.jetty.daemon}</daemon>
</configuration> </configuration>
......
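The jetty-maven-plugin block above now exports atlas.log.dir and atlas.home (and points keystore.file/truststore.file at atlas.keystore) as JVM system properties; a minimal sketch of reading them with plain System.getProperty, offered as an assumption about usage rather than the webapp's actual code:

    public class SystemPropsSketch {
        public static void main(String[] args) {
            String logDir = System.getProperty("atlas.log.dir", "logs");
            String home = System.getProperty("atlas.home", ".");
            String keystore = System.getProperty("keystore.file"); // key unchanged; value now ends in atlas.keystore
            System.out.println("logDir=" + logDir + ", home=" + home + ", keystore=" + keystore);
        }
    }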
...@@ -18,30 +18,30 @@ ...@@ -18,30 +18,30 @@
######### Graph Database Configs ######### ######### Graph Database Configs #########
# Graph Storage # Graph Storage
metadata.graph.storage.backend=berkeleyje atlas.graph.storage.backend=berkeleyje
metadata.graph.storage.directory=target/data/berkley atlas.graph.storage.directory=target/data/berkley
# Graph Search Index # Graph Search Index
metadata.graph.index.search.backend=elasticsearch atlas.graph.index.search.backend=elasticsearch
metadata.graph.index.search.directory=target/data/es atlas.graph.index.search.directory=target/data/es
metadata.graph.index.search.elasticsearch.client-only=false atlas.graph.index.search.elasticsearch.client-only=false
metadata.graph.index.search.elasticsearch.local-mode=true atlas.graph.index.search.elasticsearch.local-mode=true
metadata.graph.index.search.elasticsearch.create.sleep=2000 atlas.graph.index.search.elasticsearch.create.sleep=2000
######### Hive Lineage Configs ######### ######### Hive Lineage Configs #########
# This models reflects the base super types for Data and Process # This models reflects the base super types for Data and Process
#metadata.lineage.hive.table.type.name=DataSet #atlas.lineage.hive.table.type.name=DataSet
#metadata.lineage.hive.process.type.name=Process #atlas.lineage.hive.process.type.name=Process
#metadata.lineage.hive.process.inputs.name=inputs #atlas.lineage.hive.process.inputs.name=inputs
#metadata.lineage.hive.process.outputs.name=outputs #atlas.lineage.hive.process.outputs.name=outputs
## Schema ## Schema
#metadata.lineage.hive.table.schema.query=hive_table where name=?, columns #atlas.lineage.hive.table.schema.query=hive_table where name=?, columns
######### Security Properties ######### ######### Security Properties #########
# SSL config # SSL config
metadata.enableTLS=false atlas.enableTLS=false
######### Security Properties ######### ######### Security Properties #########
...@@ -28,7 +28,7 @@ ...@@ -28,7 +28,7 @@
</appender> </appender>
<appender name="FILE" class="org.apache.log4j.DailyRollingFileAppender"> <appender name="FILE" class="org.apache.log4j.DailyRollingFileAppender">
<param name="File" value="${metadata.log.dir}/application.log"/> <param name="File" value="${atlas.log.dir}/application.log"/>
<param name="Append" value="true"/> <param name="Append" value="true"/>
<param name="Threshold" value="debug"/> <param name="Threshold" value="debug"/>
<layout class="org.apache.log4j.PatternLayout"> <layout class="org.apache.log4j.PatternLayout">
...@@ -37,7 +37,7 @@ ...@@ -37,7 +37,7 @@
</appender> </appender>
<appender name="AUDIT" class="org.apache.log4j.DailyRollingFileAppender"> <appender name="AUDIT" class="org.apache.log4j.DailyRollingFileAppender">
<param name="File" value="${metadata.log.dir}/audit.log"/> <param name="File" value="${atlas.log.dir}/audit.log"/>
<param name="Append" value="true"/> <param name="Append" value="true"/>
<param name="Threshold" value="debug"/> <param name="Threshold" value="debug"/>
<layout class="org.apache.log4j.PatternLayout"> <layout class="org.apache.log4j.PatternLayout">
...@@ -45,7 +45,7 @@ ...@@ -45,7 +45,7 @@
</layout> </layout>
</appender> </appender>
<logger name="org.apache.hadoop.metadata" additivity="false"> <logger name="org.apache.atlas" additivity="false">
<level value="debug"/> <level value="debug"/>
<appender-ref ref="FILE"/> <appender-ref ref="FILE"/>
</logger> </logger>
......
...@@ -21,13 +21,13 @@ ...@@ -21,13 +21,13 @@
"http://java.sun.com/dtd/web-app_2_3.dtd"> "http://java.sun.com/dtd/web-app_2_3.dtd">
<web-app> <web-app>
<display-name>Apache Metadata Placeholder</display-name> <display-name>Apache Atlas</display-name>
<description>Apache Metadata Placeholder</description> <description>Metadata Management and Data Governance Platform over Hadoop</description>
<context-param> <context-param>
<param-name>guice.packages</param-name> <param-name>guice.packages</param-name>
<param-value> <param-value>
org.apache.hadoop.metadata.web.resources,org.apache.hadoop.metadata.web.params org.apache.atlas.web.resources,org.apache.atlas.web.params
</param-value> </param-value>
</context-param> </context-param>
...@@ -48,6 +48,6 @@ ...@@ -48,6 +48,6 @@
</filter-mapping> </filter-mapping>
<listener> <listener>
<listener-class>org.apache.hadoop.metadata.web.listeners.GuiceServletConfig</listener-class> <listener-class>org.apache.atlas.web.listeners.GuiceServletConfig</listener-class>
</listener> </listener>
</web-app> </web-app>