Commit 30711973 by Venkatesh Seetharam

Refactor packages and scripts to Atlas

(cherry picked from commit 414beba5)
parent 26048109
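
The bulk of the change below is a mechanical rename of the Java packages from org.apache.hadoop.metadata (and org.apache.metadata) to org.apache.atlas, together with matching renames of configuration keys (metadata.* to atlas.*), system properties (metadata.conf to atlas.conf), and build artifacts (metadata-webapp to atlas-webapp). Every file follows the same before/after pattern, for example:

    // before
    import org.apache.hadoop.metadata.MetadataException;
    // after
    import org.apache.atlas.MetadataException;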
@@ -16,9 +16,20 @@
  * limitations under the License.
  */
-package org.apache.metadata.falcon;
+package org.apache.atlas.falcon;
 import com.google.inject.Inject;
+import org.apache.atlas.MetadataException;
+import org.apache.atlas.repository.MetadataRepository;
+import org.apache.atlas.typesystem.ITypedInstance;
+import org.apache.atlas.typesystem.Referenceable;
+import org.apache.atlas.typesystem.Struct;
+import org.apache.atlas.typesystem.types.EnumType;
+import org.apache.atlas.typesystem.types.Multiplicity;
+import org.apache.atlas.typesystem.types.StructType;
+import org.apache.atlas.typesystem.types.TraitType;
+import org.apache.atlas.typesystem.types.TypeSystem;
+import org.apache.commons.lang.StringUtils;
 import org.apache.falcon.client.FalconCLIException;
 import org.apache.falcon.client.FalconClient;
 import org.apache.falcon.entity.v0.Entity;
@@ -29,17 +40,6 @@ import org.apache.falcon.entity.v0.cluster.Location;
 import org.apache.falcon.entity.v0.cluster.Properties;
 import org.apache.falcon.entity.v0.cluster.Property;
 import org.apache.falcon.resource.EntityList;
-import org.apache.hadoop.metadata.ITypedInstance;
-import org.apache.hadoop.metadata.MetadataException;
-import org.apache.hadoop.metadata.Referenceable;
-import org.apache.hadoop.metadata.Struct;
-import org.apache.hadoop.metadata.repository.MetadataRepository;
-import org.apache.hadoop.metadata.typesystem.types.EnumType;
-import org.apache.hadoop.metadata.typesystem.types.Multiplicity;
-import org.apache.hadoop.metadata.typesystem.types.StructType;
-import org.apache.hadoop.metadata.typesystem.types.TraitType;
-import org.apache.hadoop.metadata.typesystem.types.TypeSystem;
-import org.parboiled.common.StringUtils;
 import javax.xml.bind.JAXBException;
 import java.io.StringReader;
@@ -127,7 +127,7 @@ public class FalconImporter {
                 }
                 clusterRef.set("interfaces", interfaces);
             }
-            repository.createEntity(clusterRef, clusterRef.getTypeName());
+            repository.createEntity(clusterRef);
         }
     } catch (Exception e) {
         throw new MetadataException(e);
...
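
Beyond the renames, this hunk also narrows the repository call from createEntity(instance, typeName) to createEntity(instance). A minimal sketch of what the interface change presumably looks like; the MetadataRepository declaration is not part of this diff, so the signature below is inferred from the call sites:

    public interface MetadataRepository {
        // before: String createEntity(IReferenceableInstance entity, String typeName) throws MetadataException;
        // after: the repository reads the type name from the instance itself
        String createEntity(IReferenceableInstance entity) throws MetadataException; // returns the new entity's guid
    }

Note that FalconImporterTest further down still stubs a two-argument createEntity, so the older overload may still exist at this revision.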
@@ -16,20 +16,20 @@
  * limitations under the License.
  */
-package org.apache.metadata.falcon;
+package org.apache.atlas.falcon;
 import com.google.common.collect.ImmutableList;
-import org.apache.hadoop.metadata.MetadataException;
-import org.apache.hadoop.metadata.types.AttributeDefinition;
-import org.apache.hadoop.metadata.types.ClassType;
-import org.apache.hadoop.metadata.types.DataTypes;
-import org.apache.hadoop.metadata.types.EnumTypeDefinition;
-import org.apache.hadoop.metadata.types.EnumValue;
-import org.apache.hadoop.metadata.types.HierarchicalTypeDefinition;
-import org.apache.hadoop.metadata.types.Multiplicity;
-import org.apache.hadoop.metadata.types.StructTypeDefinition;
-import org.apache.hadoop.metadata.types.TraitType;
-import org.apache.hadoop.metadata.types.TypeSystem;
+import org.apache.atlas.MetadataException;
+import org.apache.atlas.typesystem.types.AttributeDefinition;
+import org.apache.atlas.typesystem.types.ClassType;
+import org.apache.atlas.typesystem.types.DataTypes;
+import org.apache.atlas.typesystem.types.EnumTypeDefinition;
+import org.apache.atlas.typesystem.types.EnumValue;
+import org.apache.atlas.typesystem.types.HierarchicalTypeDefinition;
+import org.apache.atlas.typesystem.types.Multiplicity;
+import org.apache.atlas.typesystem.types.StructTypeDefinition;
+import org.apache.atlas.typesystem.types.TraitType;
+import org.apache.atlas.typesystem.types.TypeSystem;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
@@ -39,6 +39,7 @@ import java.util.List;
 public class FalconTypeSystem {
     public static final Logger LOG = LoggerFactory.getLogger(FalconTypeSystem.class);
     public static final TypeSystem TYPE_SYSTEM = TypeSystem.getInstance();
+    private static FalconTypeSystem INSTANCE;
     private List<StructTypeDefinition> structTypeDefinitions = new ArrayList<>();
     private List<HierarchicalTypeDefinition<TraitType>> traitTypeDefinitions = new ArrayList<>();
@@ -62,14 +63,6 @@ public class FalconTypeSystem {
         return INSTANCE;
     }
-    private FalconTypeSystem() throws MetadataException {
-        HierarchicalTypeDefinition<ClassType> cluster = defineCluster();
-        //TODO define feed and process
-        TYPE_SYSTEM.defineTypes(ImmutableList.copyOf(structTypeDefinitions), ImmutableList.copyOf(traitTypeDefinitions),
-                ImmutableList.of(cluster));
-    }
     private HierarchicalTypeDefinition<ClassType> defineCluster() throws MetadataException {
         defineACL();
         defineClusterInterface();
...
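
This hunk adds a static INSTANCE field and drops the private constructor from its old position; the visible return INSTANCE; context suggests a lazily initialized singleton accessor roughly like the following (a sketch; only the return statement is inside the hunk):

    public static FalconTypeSystem getInstance() throws MetadataException {
        if (INSTANCE == null) {
            // the constructor registers the Falcon cluster/trait/struct types with TYPE_SYSTEM
            INSTANCE = new FalconTypeSystem();
        }
        return INSTANCE;
    }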
@@ -16,10 +16,11 @@
  * limitations under the License.
  */
-package org.apache.metadata.falcon;
+package org.apache.atlas.falcon;
+import org.apache.atlas.repository.MetadataRepository;
+import org.apache.atlas.typesystem.IReferenceableInstance;
 import org.apache.commons.lang.RandomStringUtils;
-import org.apache.falcon.client.FalconCLIException;
 import org.apache.falcon.client.FalconClient;
 import org.apache.falcon.entity.v0.EntityType;
 import org.apache.falcon.entity.v0.cluster.Cluster;
@@ -29,9 +30,6 @@ import org.apache.falcon.entity.v0.cluster.Interfacetype;
 import org.apache.falcon.entity.v0.cluster.Location;
 import org.apache.falcon.entity.v0.cluster.Locations;
 import org.apache.falcon.resource.EntityList;
-import org.apache.hadoop.metadata.IReferenceableInstance;
-import org.apache.hadoop.metadata.MetadataException;
-import org.apache.hadoop.metadata.repository.MetadataRepository;
 import org.testng.annotations.Test;
 import java.io.StringWriter;
@@ -50,7 +48,8 @@ public class FalconImporterTest {
         FalconTypeSystem.getInstance();
         FalconImporter importer = new FalconImporter(client, repo);
-        when(client.getEntityList(EntityType.CLUSTER.name(), null, null, null, null, null, null, null)).thenReturn(getEntityList());
+        when(client.getEntityList(EntityType.CLUSTER.name(), null, null, null, null, null, null,
+                null)).thenReturn(getEntityList());
         //TODO Set other fields in cluster
         when(client.getDefinition(anyString(), anyString())).thenReturn(getCluster());
         when(repo.createEntity(any(IReferenceableInstance.class), anyString())).thenReturn(UUID.randomUUID().toString());
...
@@ -16,12 +16,12 @@
  * limitations under the License.
  */
-package org.apache.metadata.falcon;
+package org.apache.atlas.falcon;
-import org.apache.hadoop.metadata.MetadataException;
-import org.apache.hadoop.metadata.types.ClassType;
-import org.apache.hadoop.metadata.types.TraitType;
-import org.apache.hadoop.metadata.types.TypeSystem;
+import org.apache.atlas.MetadataException;
+import org.apache.atlas.typesystem.types.ClassType;
+import org.apache.atlas.typesystem.types.TraitType;
+import org.apache.atlas.typesystem.types.TypeSystem;
 import org.junit.Assert;
 import org.testng.annotations.Test;
...
@@ -16,8 +16,16 @@
  * limitations under the License.
  */
-package org.apache.hadoop.metadata.hive.bridge;
+package org.apache.atlas.hive.bridge;
+import org.apache.atlas.MetadataServiceClient;
+import org.apache.atlas.MetadataServiceException;
+import org.apache.atlas.hive.model.HiveDataModelGenerator;
+import org.apache.atlas.hive.model.HiveDataTypes;
+import org.apache.atlas.typesystem.Referenceable;
+import org.apache.atlas.typesystem.Struct;
+import org.apache.atlas.typesystem.json.InstanceSerialization;
+import org.apache.atlas.typesystem.persistence.Id;
 import org.apache.commons.lang.StringEscapeUtils;
 import org.apache.commons.lang.StringUtils;
 import org.apache.hadoop.hive.conf.HiveConf;
@@ -31,14 +39,6 @@ import org.apache.hadoop.hive.metastore.api.hive_metastoreConstants;
 import org.apache.hadoop.hive.ql.metadata.Hive;
 import org.apache.hadoop.hive.ql.metadata.Partition;
 import org.apache.hadoop.hive.ql.metadata.Table;
-import org.apache.hadoop.metadata.MetadataServiceClient;
-import org.apache.hadoop.metadata.MetadataServiceException;
-import org.apache.hadoop.metadata.hive.model.HiveDataModelGenerator;
-import org.apache.hadoop.metadata.hive.model.HiveDataTypes;
-import org.apache.hadoop.metadata.typesystem.Referenceable;
-import org.apache.hadoop.metadata.typesystem.Struct;
-import org.apache.hadoop.metadata.typesystem.json.InstanceSerialization;
-import org.apache.hadoop.metadata.typesystem.persistence.Id;
 import org.codehaus.jettison.json.JSONArray;
 import org.codehaus.jettison.json.JSONException;
 import org.codehaus.jettison.json.JSONObject;
...
@@ -16,10 +16,13 @@
  * limitations under the License.
  */
-package org.apache.hadoop.metadata.hive.hook;
+package org.apache.atlas.hive.hook;
 import com.google.common.util.concurrent.ThreadFactoryBuilder;
+import org.apache.atlas.hive.bridge.HiveMetaStoreBridge;
+import org.apache.atlas.hive.model.HiveDataTypes;
+import org.apache.atlas.typesystem.Referenceable;
 import org.apache.commons.lang.StringUtils;
 import org.apache.hadoop.hive.conf.HiveConf;
 import org.apache.hadoop.hive.ql.QueryPlan;
@@ -32,9 +35,6 @@ import org.apache.hadoop.hive.ql.hooks.ReadEntity;
 import org.apache.hadoop.hive.ql.hooks.WriteEntity;
 import org.apache.hadoop.hive.ql.metadata.Table;
 import org.apache.hadoop.hive.ql.plan.HiveOperation;
-import org.apache.hadoop.metadata.hive.bridge.HiveMetaStoreBridge;
-import org.apache.hadoop.metadata.hive.model.HiveDataTypes;
-import org.apache.hadoop.metadata.typesystem.Referenceable;
 import org.json.JSONObject;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
...
@@ -16,25 +16,25 @@
  * limitations under the License.
  */
-package org.apache.hadoop.metadata.hive.model;
+package org.apache.atlas.hive.model;
 import com.google.common.collect.ImmutableList;
-import org.apache.hadoop.metadata.MetadataException;
-import org.apache.hadoop.metadata.MetadataServiceClient;
-import org.apache.hadoop.metadata.typesystem.TypesDef;
-import org.apache.hadoop.metadata.typesystem.json.TypesSerialization;
-import org.apache.hadoop.metadata.typesystem.types.AttributeDefinition;
-import org.apache.hadoop.metadata.typesystem.types.ClassType;
-import org.apache.hadoop.metadata.typesystem.types.DataTypes;
-import org.apache.hadoop.metadata.typesystem.types.EnumType;
-import org.apache.hadoop.metadata.typesystem.types.EnumTypeDefinition;
-import org.apache.hadoop.metadata.typesystem.types.EnumValue;
-import org.apache.hadoop.metadata.typesystem.types.HierarchicalTypeDefinition;
-import org.apache.hadoop.metadata.typesystem.types.Multiplicity;
-import org.apache.hadoop.metadata.typesystem.types.StructType;
-import org.apache.hadoop.metadata.typesystem.types.StructTypeDefinition;
-import org.apache.hadoop.metadata.typesystem.types.TraitType;
-import org.apache.hadoop.metadata.typesystem.types.TypeUtils;
+import org.apache.atlas.MetadataException;
+import org.apache.atlas.MetadataServiceClient;
+import org.apache.atlas.typesystem.TypesDef;
+import org.apache.atlas.typesystem.json.TypesSerialization;
+import org.apache.atlas.typesystem.types.AttributeDefinition;
+import org.apache.atlas.typesystem.types.ClassType;
+import org.apache.atlas.typesystem.types.DataTypes;
+import org.apache.atlas.typesystem.types.EnumType;
+import org.apache.atlas.typesystem.types.EnumTypeDefinition;
+import org.apache.atlas.typesystem.types.EnumValue;
+import org.apache.atlas.typesystem.types.HierarchicalTypeDefinition;
+import org.apache.atlas.typesystem.types.Multiplicity;
+import org.apache.atlas.typesystem.types.StructType;
+import org.apache.atlas.typesystem.types.StructTypeDefinition;
+import org.apache.atlas.typesystem.types.TraitType;
+import org.apache.atlas.typesystem.types.TypeUtils;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
...
@@ -16,7 +16,7 @@
  * limitations under the License.
  */
-package org.apache.hadoop.metadata.hive.model;
+package org.apache.atlas.hive.model;
 /**
  * Hive Data Types for model and bridge.
...
@@ -14,16 +14,16 @@
  * See the License for the specific language governing permissions and
  * limitations under the License.
  */
-package org.apache.hadoop.metadata.hive.hook;
+package org.apache.atlas.hive.hook;
+import org.apache.atlas.hive.bridge.HiveMetaStoreBridge;
+import org.apache.atlas.security.BaseSecurityTest;
+import org.apache.atlas.web.service.SecureEmbeddedServer;
 import org.apache.commons.configuration.PropertiesConfiguration;
 import org.apache.commons.io.FileUtils;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.hive.conf.HiveConf;
-import org.apache.hadoop.metadata.hive.bridge.HiveMetaStoreBridge;
-import org.apache.hadoop.metadata.security.BaseSecurityTest;
-import org.apache.hadoop.metadata.web.service.SecureEmbeddedServer;
 import org.apache.hadoop.security.alias.CredentialProvider;
 import org.apache.hadoop.security.alias.CredentialProviderFactory;
 import org.mortbay.jetty.Server;
@@ -31,9 +31,9 @@ import org.mortbay.jetty.Server;
 import java.io.File;
 import java.io.IOException;
-import static org.apache.hadoop.metadata.security.SecurityProperties.KEYSTORE_PASSWORD_KEY;
-import static org.apache.hadoop.metadata.security.SecurityProperties.SERVER_CERT_PASSWORD_KEY;
-import static org.apache.hadoop.metadata.security.SecurityProperties.TRUSTSTORE_PASSWORD_KEY;
+import static org.apache.atlas.security.SecurityProperties.KEYSTORE_PASSWORD_KEY;
+import static org.apache.atlas.security.SecurityProperties.SERVER_CERT_PASSWORD_KEY;
+import static org.apache.atlas.security.SecurityProperties.TRUSTSTORE_PASSWORD_KEY;
 /**
  *
@@ -123,7 +123,7 @@ public class BaseSSLAndKerberosTest extends BaseSecurityTest {
     }
     protected String getWarPath() {
-        return String.format("/../../webapp/target/metadata-webapp-%s",
+        return String.format("/../../webapp/target/atlas-webapp-%s",
                 System.getProperty("project.version", "0.1-incubating-SNAPSHOT"));
     }
@@ -132,7 +132,8 @@ public class BaseSSLAndKerberosTest extends BaseSecurityTest {
         hiveConf.setVar(HiveConf.ConfVars.PREEXECHOOKS, "");
         hiveConf.setVar(HiveConf.ConfVars.POSTEXECHOOKS, HiveHook.class.getName());
         hiveConf.setBoolVar(HiveConf.ConfVars.HIVE_SUPPORT_CONCURRENCY, false);
-        hiveConf.setVar(HiveConf.ConfVars.METASTOREWAREHOUSE, System.getProperty("user.dir") + "/target/metastore");
+        hiveConf.setVar(HiveConf.ConfVars.METASTOREWAREHOUSE,
+                System.getProperty("user.dir") + "/target/atlas");
         hiveConf.set(HiveMetaStoreBridge.DGI_URL_PROPERTY, DGI_URL);
         hiveConf.set("javax.jdo.option.ConnectionURL", "jdbc:derby:./target/metastore_db;create=true");
         hiveConf.set("hive.hook.dgi.synchronous", "true");
...
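
Stripped of the test scaffolding, the HiveConf wiring in this hunk is what any client needs to route Hive operations through the hook. A condensed sketch using only keys that appear above (the endpoint value is a placeholder):

    HiveConf hiveConf = new HiveConf();
    // run the Atlas (DGI) hook after every Hive query
    hiveConf.setVar(HiveConf.ConfVars.POSTEXECHOOKS, HiveHook.class.getName());
    // where the metadata service listens (placeholder URL)
    hiveConf.set(HiveMetaStoreBridge.DGI_URL_PROPERTY, "https://localhost:21443/");
    // post metadata synchronously so callers can observe it immediately
    hiveConf.set("hive.hook.dgi.synchronous", "true");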
@@ -16,8 +16,14 @@
  * limitations under the License.
  */
-package org.apache.hadoop.metadata.hive.hook;
+package org.apache.atlas.hive.hook;
+import org.apache.atlas.MetadataServiceClient;
+import org.apache.atlas.hive.bridge.HiveMetaStoreBridge;
+import org.apache.atlas.hive.model.HiveDataModelGenerator;
+import org.apache.atlas.hive.model.HiveDataTypes;
+import org.apache.atlas.typesystem.Referenceable;
+import org.apache.atlas.typesystem.persistence.Id;
 import org.apache.commons.lang.RandomStringUtils;
 import org.apache.commons.lang.StringEscapeUtils;
 import org.apache.commons.lang.StringUtils;
@@ -25,12 +31,6 @@ import org.apache.hadoop.hive.conf.HiveConf;
 import org.apache.hadoop.hive.metastore.TableType;
 import org.apache.hadoop.hive.ql.Driver;
 import org.apache.hadoop.hive.ql.session.SessionState;
-import org.apache.hadoop.metadata.MetadataServiceClient;
-import org.apache.hadoop.metadata.hive.bridge.HiveMetaStoreBridge;
-import org.apache.hadoop.metadata.hive.model.HiveDataModelGenerator;
-import org.apache.hadoop.metadata.hive.model.HiveDataTypes;
-import org.apache.hadoop.metadata.typesystem.Referenceable;
-import org.apache.hadoop.metadata.typesystem.persistence.Id;
 import org.codehaus.jettison.json.JSONArray;
 import org.codehaus.jettison.json.JSONObject;
 import org.slf4j.Logger;
...
@@ -16,16 +16,16 @@
  * limitations under the License.
  */
-package org.apache.hadoop.metadata.hive.hook;
+package org.apache.atlas.hive.hook;
+import org.apache.atlas.PropertiesUtil;
+import org.apache.atlas.security.SecurityProperties;
 import org.apache.commons.configuration.PropertiesConfiguration;
 import org.apache.commons.lang.RandomStringUtils;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.hive.conf.HiveConf;
 import org.apache.hadoop.hive.ql.Driver;
 import org.apache.hadoop.hive.ql.session.SessionState;
-import org.apache.hadoop.metadata.PropertiesUtil;
-import org.apache.hadoop.metadata.security.SecurityProperties;
 import org.apache.hadoop.security.alias.JavaKeyStoreProvider;
 import org.apache.hadoop.security.ssl.SSLFactory;
 import org.apache.hadoop.security.ssl.SSLHostnameVerifier;
@@ -40,7 +40,10 @@ import java.io.FileWriter;
 import java.net.URL;
 import java.nio.file.Files;
-import static org.apache.hadoop.metadata.security.SecurityProperties.*;
+import static org.apache.atlas.security.SecurityProperties.CERT_STORES_CREDENTIAL_PROVIDER_PATH;
+import static org.apache.atlas.security.SecurityProperties.KEYSTORE_FILE_KEY;
+import static org.apache.atlas.security.SecurityProperties.TLS_ENABLED;
+import static org.apache.atlas.security.SecurityProperties.TRUSTSTORE_FILE_KEY;
 /**
  * Perform all the necessary setup steps for client and server comm over SSL/Kerberos, but then don't estalish a
@@ -85,15 +88,15 @@ public class NegativeSSLAndKerberosHiveHookIT extends BaseSSLAndKerberosTest {
         // client will actually only leverage subset of these properties
         final PropertiesConfiguration configuration = new PropertiesConfiguration();
         configuration.setProperty(TLS_ENABLED, true);
-        configuration.setProperty(TRUSTSTORE_FILE_KEY, "../../webapp/target/metadata.keystore");
-        configuration.setProperty(KEYSTORE_FILE_KEY, "../../webapp/target/metadata.keystore");
+        configuration.setProperty(TRUSTSTORE_FILE_KEY, "../../webapp/target/atlas.keystore");
+        configuration.setProperty(KEYSTORE_FILE_KEY, "../../webapp/target/atlas.keystore");
         configuration.setProperty(CERT_STORES_CREDENTIAL_PROVIDER_PATH, providerUrl);
-        configuration.setProperty("metadata.http.authentication.type", "kerberos");
+        configuration.setProperty("atlas.http.authentication.type", "kerberos");
         configuration.setProperty(SSLFactory.SSL_HOSTNAME_VERIFIER_KEY, SSLHostnameVerifier.DEFAULT_AND_LOCALHOST.toString());
         configuration.save(new FileWriter(persistDir + File.separator + "client.properties"));
-        String confLocation = System.getProperty("metadata.conf");
+        String confLocation = System.getProperty("atlas.conf");
         URL url;
         if (confLocation == null) {
             url = PropertiesUtil.class.getResource("/application.properties");
@@ -102,15 +105,15 @@ public class NegativeSSLAndKerberosHiveHookIT extends BaseSSLAndKerberosTest {
         }
         configuration.load(url);
         configuration.setProperty(TLS_ENABLED, true);
-        configuration.setProperty("metadata.http.authentication.enabled", "true");
-        configuration.setProperty("metadata.http.authentication.kerberos.principal", "HTTP/localhost@" + kdc.getRealm());
-        configuration.setProperty("metadata.http.authentication.kerberos.keytab", httpKeytabFile.getAbsolutePath());
-        configuration.setProperty("metadata.http.authentication.kerberos.name.rules",
+        configuration.setProperty("atlas.http.authentication.enabled", "true");
+        configuration.setProperty("atlas.http.authentication.kerberos.principal", "HTTP/localhost@" + kdc.getRealm());
+        configuration.setProperty("atlas.http.authentication.kerberos.keytab", httpKeytabFile.getAbsolutePath());
+        configuration.setProperty("atlas.http.authentication.kerberos.name.rules",
                 "RULE:[1:$1@$0](.*@EXAMPLE.COM)s/@.*//\nDEFAULT");
         configuration.save(new FileWriter(persistDir + File.separator + "application.properties"));
-        secureEmbeddedServer = new TestSecureEmbeddedServer(21443, "webapp/target/metadata-governance") {
+        secureEmbeddedServer = new TestSecureEmbeddedServer(21443, "webapp/target/apache-atlas") {
             @Override
             public PropertiesConfiguration getConfiguration() {
                 return configuration;
@@ -122,8 +125,8 @@ public class NegativeSSLAndKerberosHiveHookIT extends BaseSSLAndKerberosTest {
         secureEmbeddedServer.getServer().setHandler(webapp);
         // save original setting
-        originalConf = System.getProperty("metadata.conf");
-        System.setProperty("metadata.conf", persistDir);
+        originalConf = System.getProperty("atlas.conf");
+        System.setProperty("atlas.conf", persistDir);
         secureEmbeddedServer.getServer().start();
     }
@@ -139,7 +142,7 @@ public class NegativeSSLAndKerberosHiveHookIT extends BaseSSLAndKerberosTest {
         }
         if (originalConf != null) {
-            System.setProperty("metadata.conf", originalConf);
+            System.setProperty("atlas.conf", originalConf);
         }
     }
...
@@ -16,24 +16,23 @@
  * limitations under the License.
  */
-package org.apache.hadoop.metadata.hive.hook;
+package org.apache.atlas.hive.hook;
+import org.apache.atlas.MetadataException;
+import org.apache.atlas.MetadataServiceClient;
+import org.apache.atlas.PropertiesUtil;
+import org.apache.atlas.hive.model.HiveDataTypes;
+import org.apache.atlas.security.SecurityProperties;
 import org.apache.commons.configuration.PropertiesConfiguration;
 import org.apache.commons.lang.RandomStringUtils;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.hive.conf.HiveConf;
 import org.apache.hadoop.hive.ql.Driver;
 import org.apache.hadoop.hive.ql.session.SessionState;
-import org.apache.hadoop.metadata.MetadataException;
-import org.apache.hadoop.metadata.MetadataServiceClient;
-import org.apache.hadoop.metadata.PropertiesUtil;
-import org.apache.hadoop.metadata.hive.model.HiveDataTypes;
-import org.apache.hadoop.metadata.security.SecurityProperties;
 import org.apache.hadoop.security.alias.JavaKeyStoreProvider;
 import org.apache.hadoop.security.ssl.SSLFactory;
 import org.apache.hadoop.security.ssl.SSLHostnameVerifier;
 import org.codehaus.jettison.json.JSONArray;
-import org.codehaus.jettison.json.JSONObject;
 import org.mortbay.jetty.webapp.WebAppContext;
 import org.testng.Assert;
 import org.testng.annotations.AfterClass;
@@ -41,7 +40,11 @@ import org.testng.annotations.BeforeClass;
 import org.testng.annotations.Test;
 import javax.security.auth.Subject;
-import javax.security.auth.callback.*;
+import javax.security.auth.callback.Callback;
+import javax.security.auth.callback.CallbackHandler;
+import javax.security.auth.callback.NameCallback;
+import javax.security.auth.callback.PasswordCallback;
+import javax.security.auth.callback.UnsupportedCallbackException;
 import javax.security.auth.login.LoginContext;
 import javax.security.auth.login.LoginException;
 import java.io.File;
@@ -51,7 +54,10 @@ import java.net.URL;
 import java.nio.file.Files;
 import java.security.PrivilegedExceptionAction;
-import static org.apache.hadoop.metadata.security.SecurityProperties.*;
+import static org.apache.atlas.security.SecurityProperties.CERT_STORES_CREDENTIAL_PROVIDER_PATH;
+import static org.apache.atlas.security.SecurityProperties.KEYSTORE_FILE_KEY;
+import static org.apache.atlas.security.SecurityProperties.TLS_ENABLED;
+import static org.apache.atlas.security.SecurityProperties.TRUSTSTORE_FILE_KEY;
 public class SSLAndKerberosHiveHookIT extends BaseSSLAndKerberosTest {
     public static final String TEST_USER_JAAS_SECTION = "TestUser";
@@ -97,15 +103,15 @@ public class SSLAndKerberosHiveHookIT extends BaseSSLAndKerberosTest {
         // client will actually only leverage subset of these properties
         final PropertiesConfiguration configuration = new PropertiesConfiguration();
         configuration.setProperty(TLS_ENABLED, true);
-        configuration.setProperty(TRUSTSTORE_FILE_KEY, "../../webapp/target/metadata.keystore");
-        configuration.setProperty(KEYSTORE_FILE_KEY, "../../webapp/target/metadata.keystore");
+        configuration.setProperty(TRUSTSTORE_FILE_KEY, "../../webapp/target/atlas.keystore");
+        configuration.setProperty(KEYSTORE_FILE_KEY, "../../webapp/target/atlas.keystore");
         configuration.setProperty(CERT_STORES_CREDENTIAL_PROVIDER_PATH, providerUrl);
-        configuration.setProperty("metadata.http.authentication.type", "kerberos");
+        configuration.setProperty("atlas.http.authentication.type", "kerberos");
         configuration.setProperty(SSLFactory.SSL_HOSTNAME_VERIFIER_KEY, SSLHostnameVerifier.DEFAULT_AND_LOCALHOST.toString());
         configuration.save(new FileWriter(persistDir + File.separator + "client.properties"));
-        String confLocation = System.getProperty("metadata.conf");
+        String confLocation = System.getProperty("atlas.conf");
         URL url;
         if (confLocation == null) {
             url = PropertiesUtil.class.getResource("/application.properties");
@@ -114,10 +120,10 @@ public class SSLAndKerberosHiveHookIT extends BaseSSLAndKerberosTest {
         }
         configuration.load(url);
         configuration.setProperty(TLS_ENABLED, true);
-        configuration.setProperty("metadata.http.authentication.enabled", "true");
-        configuration.setProperty("metadata.http.authentication.kerberos.principal", "HTTP/localhost@" + kdc.getRealm());
-        configuration.setProperty("metadata.http.authentication.kerberos.keytab", httpKeytabFile.getAbsolutePath());
-        configuration.setProperty("metadata.http.authentication.kerberos.name.rules",
+        configuration.setProperty("atlas.http.authentication.enabled", "true");
+        configuration.setProperty("atlas.http.authentication.kerberos.principal", "HTTP/localhost@" + kdc.getRealm());
+        configuration.setProperty("atlas.http.authentication.kerberos.keytab", httpKeytabFile.getAbsolutePath());
+        configuration.setProperty("atlas.http.authentication.kerberos.name.rules",
                 "RULE:[1:$1@$0](.*@EXAMPLE.COM)s/@.*//\nDEFAULT");
         configuration.save(new FileWriter(persistDir + File.separator + "application.properties"));
@@ -129,7 +135,7 @@ public class SSLAndKerberosHiveHookIT extends BaseSSLAndKerberosTest {
         }
         };
-        secureEmbeddedServer = new TestSecureEmbeddedServer(21443, "webapp/target/metadata-governance") {
+        secureEmbeddedServer = new TestSecureEmbeddedServer(21443, "webapp/target/apache-atlas") {
             @Override
             public PropertiesConfiguration getConfiguration() {
                 return configuration;
@@ -141,8 +147,8 @@ public class SSLAndKerberosHiveHookIT extends BaseSSLAndKerberosTest {
         secureEmbeddedServer.getServer().setHandler(webapp);
         // save original setting
-        originalConf = System.getProperty("metadata.conf");
-        System.setProperty("metadata.conf", persistDir);
+        originalConf = System.getProperty("atlas.conf");
+        System.setProperty("atlas.conf", persistDir);
         secureEmbeddedServer.getServer().start();
         subject = loginTestUser();
@@ -159,7 +165,7 @@ public class SSLAndKerberosHiveHookIT extends BaseSSLAndKerberosTest {
         }
         if (originalConf != null) {
-            System.setProperty("metadata.conf", originalConf);
+            System.setProperty("atlas.conf", originalConf);
         }
     }
...
@@ -16,8 +16,14 @@
  * limitations under the License.
  */
-package org.apache.hadoop.metadata.hive.hook;
+package org.apache.atlas.hive.hook;
+import org.apache.atlas.MetadataException;
+import org.apache.atlas.MetadataServiceClient;
+import org.apache.atlas.hive.bridge.HiveMetaStoreBridge;
+import org.apache.atlas.hive.model.HiveDataTypes;
+import org.apache.atlas.security.SecurityProperties;
+import org.apache.atlas.web.service.SecureEmbeddedServer;
 import org.apache.commons.configuration.PropertiesConfiguration;
 import org.apache.commons.lang.RandomStringUtils;
 import org.apache.hadoop.conf.Configuration;
@@ -25,19 +31,12 @@ import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.hive.conf.HiveConf;
 import org.apache.hadoop.hive.ql.Driver;
 import org.apache.hadoop.hive.ql.session.SessionState;
-import org.apache.hadoop.metadata.MetadataException;
-import org.apache.hadoop.metadata.MetadataServiceClient;
-import org.apache.hadoop.metadata.hive.bridge.HiveMetaStoreBridge;
-import org.apache.hadoop.metadata.hive.model.HiveDataTypes;
-import org.apache.hadoop.metadata.security.SecurityProperties;
-import org.apache.hadoop.metadata.web.service.SecureEmbeddedServer;
 import org.apache.hadoop.security.alias.CredentialProvider;
 import org.apache.hadoop.security.alias.CredentialProviderFactory;
 import org.apache.hadoop.security.alias.JavaKeyStoreProvider;
 import org.apache.hadoop.security.ssl.SSLFactory;
 import org.apache.hadoop.security.ssl.SSLHostnameVerifier;
 import org.codehaus.jettison.json.JSONArray;
-import org.codehaus.jettison.json.JSONObject;
 import org.mortbay.jetty.Server;
 import org.mortbay.jetty.webapp.WebAppContext;
 import org.testng.Assert;
@@ -51,7 +50,13 @@ import java.io.IOException;
 import java.net.URL;
 import java.nio.file.Files;
-import static org.apache.hadoop.metadata.security.SecurityProperties.*;
+import static org.apache.atlas.security.SecurityProperties.CERT_STORES_CREDENTIAL_PROVIDER_PATH;
+import static org.apache.atlas.security.SecurityProperties.KEYSTORE_FILE_KEY;
+import static org.apache.atlas.security.SecurityProperties.KEYSTORE_PASSWORD_KEY;
+import static org.apache.atlas.security.SecurityProperties.SERVER_CERT_PASSWORD_KEY;
+import static org.apache.atlas.security.SecurityProperties.TLS_ENABLED;
+import static org.apache.atlas.security.SecurityProperties.TRUSTSTORE_FILE_KEY;
+import static org.apache.atlas.security.SecurityProperties.TRUSTSTORE_PASSWORD_KEY;
 public class SSLHiveHookIT {
     private static final String DGI_URL = "https://localhost:21443/";
@@ -105,8 +110,8 @@ public class SSLHiveHookIT {
         final PropertiesConfiguration configuration = new PropertiesConfiguration();
         configuration.setProperty(TLS_ENABLED, true);
-        configuration.setProperty(TRUSTSTORE_FILE_KEY, "../../webapp/target/metadata.keystore");
-        configuration.setProperty(KEYSTORE_FILE_KEY, "../../webapp/target/metadata.keystore");
+        configuration.setProperty(TRUSTSTORE_FILE_KEY, "../../webapp/target/atlas.keystore");
+        configuration.setProperty(KEYSTORE_FILE_KEY, "../../webapp/target/atlas.keystore");
         configuration.setProperty(CERT_STORES_CREDENTIAL_PROVIDER_PATH, providerUrl);
         configuration.setProperty(SSLFactory.SSL_HOSTNAME_VERIFIER_KEY, SSLHostnameVerifier.DEFAULT_AND_LOCALHOST.toString());
@@ -119,7 +124,7 @@ public class SSLHiveHookIT {
         }
         };
-        secureEmbeddedServer = new TestSecureEmbeddedServer(21443, "webapp/target/metadata-governance") {
+        secureEmbeddedServer = new TestSecureEmbeddedServer(21443, "webapp/target/apache-atlas") {
             @Override
             public PropertiesConfiguration getConfiguration() {
                 return configuration;
@@ -179,7 +184,7 @@ public class SSLHiveHookIT {
     }
     protected String getWarPath() {
-        return String.format("/../../webapp/target/metadata-webapp-%s",
+        return String.format("/../../webapp/target/atlas-webapp-%s",
                 System.getProperty("project.version", "0.1-incubating-SNAPSHOT"));
     }
@@ -188,7 +193,7 @@ public class SSLHiveHookIT {
         hiveConf.setVar(HiveConf.ConfVars.PREEXECHOOKS, "");
         hiveConf.setVar(HiveConf.ConfVars.POSTEXECHOOKS, HiveHook.class.getName());
         hiveConf.setBoolVar(HiveConf.ConfVars.HIVE_SUPPORT_CONCURRENCY, false);
-        hiveConf.setVar(HiveConf.ConfVars.METASTOREWAREHOUSE, System.getProperty("user.dir") + "/target/metastore");
+        hiveConf.setVar(HiveConf.ConfVars.METASTOREWAREHOUSE, System.getProperty("user.dir") + "/target/atlas");
        hiveConf.set(HiveMetaStoreBridge.DGI_URL_PROPERTY, DGI_URL);
         hiveConf.set("javax.jdo.option.ConnectionURL", "jdbc:derby:./target/metastore_db;create=true");
         hiveConf.set("hive.hook.dgi.synchronous", "true");
...
@@ -16,17 +16,17 @@
  * limitations under the License.
  */
-package org.apache.hadoop.metadata;
+package org.apache.atlas;
 import com.sun.jersey.api.client.Client;
 import com.sun.jersey.api.client.ClientResponse;
 import com.sun.jersey.api.client.WebResource;
 import com.sun.jersey.api.client.config.DefaultClientConfig;
 import com.sun.jersey.client.urlconnection.URLConnectionClientHandler;
+import org.apache.atlas.security.SecureClientUtils;
+import org.apache.atlas.typesystem.Referenceable;
+import org.apache.atlas.typesystem.json.InstanceSerialization;
 import org.apache.commons.configuration.PropertiesConfiguration;
-import org.apache.hadoop.metadata.security.SecureClientUtils;
-import org.apache.hadoop.metadata.typesystem.Referenceable;
-import org.apache.hadoop.metadata.typesystem.json.InstanceSerialization;
 import org.codehaus.jettison.json.JSONArray;
 import org.codehaus.jettison.json.JSONException;
 import org.codehaus.jettison.json.JSONObject;
@@ -40,7 +40,7 @@ import javax.ws.rs.core.UriBuilder;
 import java.util.ArrayList;
 import java.util.List;
-import static org.apache.hadoop.metadata.security.SecurityProperties.TLS_ENABLED;
+import static org.apache.atlas.security.SecurityProperties.TLS_ENABLED;
 /**
  * Client for metadata.
@@ -59,7 +59,7 @@ public class MetadataServiceClient {
     public static final String COUNT = "count";
     public static final String ROWS = "rows";
-    public static final String BASE_URI = "api/metadata/";
+    public static final String BASE_URI = "api/atlas/";
     public static final String TYPES = "types";
     public static final String URI_ENTITIES = "entities";
     public static final String URI_TRAITS = "traits";
...
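
The client-visible effect of the BASE_URI change is that every REST path moves from /api/metadata/... to /api/atlas/.... A sketch of how a resource URI can be composed from the constants above, using the JAX-RS UriBuilder the class already imports (the host is a placeholder):

    java.net.URI typesUri = javax.ws.rs.core.UriBuilder
            .fromUri("http://localhost:21000")          // placeholder service endpoint
            .path(MetadataServiceClient.BASE_URI)       // now "api/atlas/", was "api/metadata/"
            .path(MetadataServiceClient.TYPES)          // "types"
            .build();                                   // -> http://localhost:21000/api/atlas/types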
@@ -16,7 +16,7 @@
  * limitations under the License.
  */
-package org.apache.hadoop.metadata;
+package org.apache.atlas;
 import com.sun.jersey.api.client.ClientResponse;
...
@@ -16,7 +16,7 @@
  * limitations under the License.
  */
-package org.apache.hadoop.metadata;
+package org.apache.atlas;
 import org.apache.commons.configuration.PropertiesConfiguration;
 import org.slf4j.Logger;
@@ -40,7 +40,7 @@ public class PropertiesUtil {
     }
     private static PropertiesConfiguration getPropertiesConfiguration(String name) throws MetadataException {
-        String confLocation = System.getProperty("metadata.conf");
+        String confLocation = System.getProperty("atlas.conf");
         URL url;
         try {
             if (confLocation == null) {
...
@@ -14,18 +14,17 @@
  * See the License for the specific language governing permissions and
  * limitations under the License.
  */
-package org.apache.hadoop.metadata.security;
+package org.apache.atlas.security;
 import com.sun.jersey.api.client.config.DefaultClientConfig;
 import com.sun.jersey.client.urlconnection.HttpURLConnectionFactory;
 import com.sun.jersey.client.urlconnection.URLConnectionClientHandler;
+import org.apache.atlas.MetadataException;
+import org.apache.atlas.PropertiesUtil;
 import org.apache.commons.configuration.PropertiesConfiguration;
 import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.metadata.MetadataException;
-import org.apache.hadoop.metadata.PropertiesUtil;
 import org.apache.hadoop.security.UserGroupInformation;
 import org.apache.hadoop.security.alias.CredentialProviderFactory;
-import org.apache.hadoop.security.authentication.client.AuthenticationException;
 import org.apache.hadoop.security.authentication.client.Authenticator;
 import org.apache.hadoop.security.authentication.client.ConnectionConfigurator;
 import org.apache.hadoop.security.ssl.SSLFactory;
@@ -42,14 +41,15 @@ import javax.net.ssl.SSLSocketFactory;
 import java.io.File;
 import java.io.FileWriter;
 import java.io.IOException;
-import java.lang.reflect.UndeclaredThrowableException;
 import java.net.HttpURLConnection;
 import java.net.URL;
 import java.net.URLConnection;
 import java.security.GeneralSecurityException;
-import java.security.PrivilegedExceptionAction;
-import static org.apache.hadoop.metadata.security.SecurityProperties.*;
+import static org.apache.atlas.security.SecurityProperties.CERT_STORES_CREDENTIAL_PROVIDER_PATH;
+import static org.apache.atlas.security.SecurityProperties.CLIENT_AUTH_KEY;
+import static org.apache.atlas.security.SecurityProperties.KEYSTORE_FILE_KEY;
+import static org.apache.atlas.security.SecurityProperties.TRUSTSTORE_FILE_KEY;
 /**
  *
@@ -69,7 +69,7 @@ public class SecureClientUtils {
         conf.addResource(conf.get(SSLFactory.SSL_CLIENT_CONF_KEY, "ssl-client.xml"));
         String authType = "simple";
         if (clientConfig != null) {
-            authType = clientConfig.getString("metadata.http.authentication.type", "simple");
+            authType = clientConfig.getString("atlas.http.authentication.type", "simple");
         }
         UserGroupInformation.setConfiguration(conf);
         final ConnectionConfigurator connConfigurator = newConnConfigurator(conf);
@@ -147,7 +147,7 @@ public class SecureClientUtils {
     }
     private static File getSSLClientFile() throws MetadataException {
-        String confLocation = System.getProperty("metadata.conf");
+        String confLocation = System.getProperty("atlas.conf");
         File sslDir;
         try {
             if (confLocation == null) {
...
@@ -15,22 +15,22 @@
  * See the License for the specific language governing permissions and
  * limitations under the License.
  */
-package org.apache.hadoop.metadata.security;
+package org.apache.atlas.security;
 /**
  *
  */
 public interface SecurityProperties {
-    public static final String TLS_ENABLED = "metadata.enableTLS";
+    public static final String TLS_ENABLED = "atlas.enableTLS";
     public static final String KEYSTORE_FILE_KEY = "keystore.file";
-    public static final String DEFAULT_KEYSTORE_FILE_LOCATION = "target/metadata.keystore";
+    public static final String DEFAULT_KEYSTORE_FILE_LOCATION = "target/atlas.keystore";
     public static final String KEYSTORE_PASSWORD_KEY = "keystore.password";
     public static final String TRUSTSTORE_FILE_KEY = "truststore.file";
-    public static final String DEFATULT_TRUSTORE_FILE_LOCATION = "target/metadata.keystore";
+    public static final String DEFATULT_TRUSTORE_FILE_LOCATION = "target/atlas.keystore";
     public static final String TRUSTSTORE_PASSWORD_KEY = "truststore.password";
     public static final String SERVER_CERT_PASSWORD_KEY = "password";
     public static final String CLIENT_AUTH_KEY = "client.auth.enabled";
     public static final String CERT_STORES_CREDENTIAL_PROVIDER_PATH = "cert.stores.credential.provider.path";
     public static final String SSL_CLIENT_PROPERTIES = "ssl-client.xml";
-    public static final String BIND_ADDRESS = "metadata.server.bind.address";
+    public static final String BIND_ADDRESS = "atlas.server.bind.address";
 }
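
Only the property values change here; the constant names stay the same, so code that reads them through this interface needs no edits. A hedged sketch of the consuming side, using the commons-configuration API these tests already use (the file name is the one the tests write out):

    PropertiesConfiguration conf = new PropertiesConfiguration("client.properties");
    // "atlas.enableTLS" after this commit, "metadata.enableTLS" before
    boolean tlsEnabled = conf.getBoolean(SecurityProperties.TLS_ENABLED, false);
    // default keystore location is now target/atlas.keystore
    String keystore = conf.getString(SecurityProperties.KEYSTORE_FILE_KEY,
            SecurityProperties.DEFAULT_KEYSTORE_FILE_LOCATION);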
@@ -14,7 +14,7 @@
  * See the License for the specific language governing permissions and
  * limitations under the License.
  */
-package org.apache.hadoop.metadata.security;
+package org.apache.atlas.security;
 import org.apache.commons.configuration.ConfigurationException;
 import org.apache.commons.configuration.PropertiesConfiguration;
@@ -51,7 +51,7 @@ public class BaseSecurityTest {
     protected MiniKdc kdc;
     protected String getWarPath() {
-        return String.format("/target/metadata-webapp-%s.war",
+        return String.format("/target/atlas-webapp-%s.war",
                 System.getProperty("release.version", "0.1-incubating-SNAPSHOT"));
     }
...
@@ -15,7 +15,7 @@
  * limitations under the License.
  */
-package org.apache.hadoop.metadata;
+package org.apache.atlas;
 import java.lang.annotation.ElementType;
 import java.lang.annotation.Retention;
...
@@ -15,13 +15,13 @@
  * limitations under the License.
  */
-package org.apache.hadoop.metadata;
+package org.apache.atlas;
 import com.google.inject.Inject;
 import com.thinkaurelius.titan.core.TitanGraph;
 import org.aopalliance.intercept.MethodInterceptor;
 import org.aopalliance.intercept.MethodInvocation;
-import org.apache.hadoop.metadata.repository.graph.GraphProvider;
+import org.apache.atlas.repository.graph.GraphProvider;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
...
...@@ -16,27 +16,26 @@ ...@@ -16,27 +16,26 @@
* limitations under the License. * limitations under the License.
*/ */
package org.apache.hadoop.metadata; package org.apache.atlas;
import com.google.inject.Scopes;
import com.google.inject.matcher.Matchers; import com.google.inject.matcher.Matchers;
import com.google.inject.throwingproviders.ThrowingProviderBinder; import com.google.inject.throwingproviders.ThrowingProviderBinder;
import com.thinkaurelius.titan.core.TitanGraph; import com.thinkaurelius.titan.core.TitanGraph;
import org.aopalliance.intercept.MethodInterceptor; import org.aopalliance.intercept.MethodInterceptor;
import org.apache.hadoop.metadata.discovery.DiscoveryService; import org.apache.atlas.discovery.DiscoveryService;
import org.apache.hadoop.metadata.discovery.HiveLineageService; import org.apache.atlas.discovery.HiveLineageService;
import org.apache.hadoop.metadata.discovery.LineageService; import org.apache.atlas.discovery.LineageService;
import org.apache.hadoop.metadata.discovery.SearchIndexer; import org.apache.atlas.discovery.SearchIndexer;
import org.apache.hadoop.metadata.discovery.graph.GraphBackedDiscoveryService; import org.apache.atlas.discovery.graph.GraphBackedDiscoveryService;
import org.apache.hadoop.metadata.repository.MetadataRepository; import org.apache.atlas.repository.MetadataRepository;
import org.apache.hadoop.metadata.repository.graph.GraphBackedMetadataRepository; import org.apache.atlas.repository.graph.GraphBackedMetadataRepository;
import org.apache.hadoop.metadata.repository.graph.GraphBackedSearchIndexer; import org.apache.atlas.repository.graph.GraphBackedSearchIndexer;
import org.apache.hadoop.metadata.repository.graph.GraphProvider; import org.apache.atlas.repository.graph.GraphProvider;
import org.apache.hadoop.metadata.repository.graph.TitanGraphProvider; import org.apache.atlas.repository.graph.TitanGraphProvider;
import org.apache.hadoop.metadata.repository.typestore.GraphBackedTypeStore; import org.apache.atlas.repository.typestore.GraphBackedTypeStore;
import org.apache.hadoop.metadata.repository.typestore.ITypeStore; import org.apache.atlas.repository.typestore.ITypeStore;
import org.apache.hadoop.metadata.services.DefaultMetadataService; import org.apache.atlas.services.DefaultMetadataService;
import org.apache.hadoop.metadata.services.MetadataService; import org.apache.atlas.services.MetadataService;
/** /**
 * Guice module for the repository layer. * Guice module for the repository layer.
......
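The hunk above shows only the module's imports, not its body, so the following Guice bindings are an illustrative assumption based on the imported types: a sketch of how such a repository module typically pairs each service interface with its graph-backed implementation.

    import com.google.inject.AbstractModule;
    import com.google.inject.Singleton;
    import org.apache.atlas.discovery.DiscoveryService;
    import org.apache.atlas.discovery.SearchIndexer;
    import org.apache.atlas.discovery.graph.GraphBackedDiscoveryService;
    import org.apache.atlas.repository.MetadataRepository;
    import org.apache.atlas.repository.graph.GraphBackedMetadataRepository;
    import org.apache.atlas.repository.graph.GraphBackedSearchIndexer;
    import org.apache.atlas.repository.typestore.GraphBackedTypeStore;
    import org.apache.atlas.repository.typestore.ITypeStore;
    import org.apache.atlas.services.DefaultMetadataService;
    import org.apache.atlas.services.MetadataService;

    public class ExampleRepositoryModule extends AbstractModule {
        @Override
        protected void configure() {
            // Each interface resolves to its graph-backed implementation,
            // one instance per injector.
            bind(MetadataRepository.class).to(GraphBackedMetadataRepository.class).in(Singleton.class);
            bind(DiscoveryService.class).to(GraphBackedDiscoveryService.class).in(Singleton.class);
            bind(SearchIndexer.class).to(GraphBackedSearchIndexer.class).in(Singleton.class);
            bind(ITypeStore.class).to(GraphBackedTypeStore.class).in(Singleton.class);
            bind(MetadataService.class).to(DefaultMetadataService.class).in(Singleton.class);
        }
    }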
...@@ -16,9 +16,9 @@ ...@@ -16,9 +16,9 @@
* limitations under the License. * limitations under the License.
*/ */
package org.apache.hadoop.metadata.discovery; package org.apache.atlas.discovery;
import org.apache.hadoop.metadata.MetadataException; import org.apache.atlas.MetadataException;
import java.security.PrivilegedActionException; import java.security.PrivilegedActionException;
......
...@@ -16,9 +16,7 @@ ...@@ -16,9 +16,7 @@
* limitations under the License. * limitations under the License.
*/ */
package org.apache.hadoop.metadata.discovery; package org.apache.atlas.discovery;
import org.apache.hadoop.metadata.MetadataException;
import java.util.List; import java.util.List;
import java.util.Map; import java.util.Map;
...@@ -48,7 +46,7 @@ public interface DiscoveryService { ...@@ -48,7 +46,7 @@ public interface DiscoveryService {
* *
* @param gremlinQuery query in gremlin dsl format * @param gremlinQuery query in gremlin dsl format
* @return List of Maps * @return List of Maps
* @throws org.apache.hadoop.metadata.discovery.DiscoveryException * @throws org.apache.atlas.discovery.DiscoveryException
*/ */
List<Map<String, String>> searchByGremlin(String gremlinQuery) throws DiscoveryException; List<Map<String, String>> searchByGremlin(String gremlinQuery) throws DiscoveryException;
} }
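A hedged usage sketch for the interface above; how the service instance is obtained and the Gremlin query string are assumptions for illustration.

    import java.util.List;
    import java.util.Map;
    import org.apache.atlas.discovery.DiscoveryException;
    import org.apache.atlas.discovery.DiscoveryService;

    public class GremlinSearchExample {
        private final DiscoveryService discoveryService;

        public GremlinSearchExample(DiscoveryService discoveryService) {
            this.discoveryService = discoveryService;
        }

        public void printResults(String gremlinQuery) throws DiscoveryException {
            // Each result row comes back as a map of column name to string value.
            List<Map<String, String>> rows = discoveryService.searchByGremlin(gremlinQuery);
            for (Map<String, String> row : rows) {
                System.out.println(row);
            }
        }
    }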
...@@ -16,23 +16,23 @@ ...@@ -16,23 +16,23 @@
* limitations under the License. * limitations under the License.
*/ */
package org.apache.hadoop.metadata.discovery; package org.apache.atlas.discovery;
import com.thinkaurelius.titan.core.TitanGraph; import com.thinkaurelius.titan.core.TitanGraph;
import org.apache.atlas.GraphTransaction;
import org.apache.atlas.MetadataException;
import org.apache.atlas.ParamChecker;
import org.apache.atlas.PropertiesUtil;
import org.apache.atlas.discovery.graph.DefaultGraphPersistenceStrategy;
import org.apache.atlas.discovery.graph.GraphBackedDiscoveryService;
import org.apache.atlas.query.Expressions;
import org.apache.atlas.query.GremlinQueryResult;
import org.apache.atlas.query.HiveLineageQuery;
import org.apache.atlas.query.HiveWhereUsedQuery;
import org.apache.atlas.repository.EntityNotFoundException;
import org.apache.atlas.repository.MetadataRepository;
import org.apache.atlas.repository.graph.GraphProvider;
import org.apache.commons.configuration.PropertiesConfiguration; import org.apache.commons.configuration.PropertiesConfiguration;
import org.apache.hadoop.metadata.GraphTransaction;
import org.apache.hadoop.metadata.MetadataException;
import org.apache.hadoop.metadata.ParamChecker;
import org.apache.hadoop.metadata.PropertiesUtil;
import org.apache.hadoop.metadata.discovery.graph.DefaultGraphPersistenceStrategy;
import org.apache.hadoop.metadata.discovery.graph.GraphBackedDiscoveryService;
import org.apache.hadoop.metadata.query.Expressions;
import org.apache.hadoop.metadata.query.GremlinQueryResult;
import org.apache.hadoop.metadata.query.HiveLineageQuery;
import org.apache.hadoop.metadata.query.HiveWhereUsedQuery;
import org.apache.hadoop.metadata.repository.EntityNotFoundException;
import org.apache.hadoop.metadata.repository.MetadataRepository;
import org.apache.hadoop.metadata.repository.graph.GraphProvider;
import org.slf4j.Logger; import org.slf4j.Logger;
import org.slf4j.LoggerFactory; import org.slf4j.LoggerFactory;
import scala.Option; import scala.Option;
...@@ -66,19 +66,19 @@ public class HiveLineageService implements LineageService { ...@@ -66,19 +66,19 @@ public class HiveLineageService implements LineageService {
try { try {
PropertiesConfiguration conf = PropertiesUtil.getApplicationProperties(); PropertiesConfiguration conf = PropertiesUtil.getApplicationProperties();
HIVE_TABLE_TYPE_NAME = HIVE_TABLE_TYPE_NAME =
conf.getString("metadata.lineage.hive.table.type.name", "DataSet"); conf.getString("atlas.lineage.hive.table.type.name", "DataSet");
HIVE_PROCESS_TYPE_NAME = HIVE_PROCESS_TYPE_NAME =
conf.getString("metadata.lineage.hive.process.type.name", "Process"); conf.getString("atlas.lineage.hive.process.type.name", "Process");
HIVE_PROCESS_INPUT_ATTRIBUTE_NAME = HIVE_PROCESS_INPUT_ATTRIBUTE_NAME =
conf.getString("metadata.lineage.hive.process.inputs.name", "inputs"); conf.getString("atlas.lineage.hive.process.inputs.name", "inputs");
HIVE_PROCESS_OUTPUT_ATTRIBUTE_NAME = HIVE_PROCESS_OUTPUT_ATTRIBUTE_NAME =
conf.getString("metadata.lineage.hive.process.outputs.name", "outputs"); conf.getString("atlas.lineage.hive.process.outputs.name", "outputs");
HIVE_TABLE_SCHEMA_QUERY = conf.getString( HIVE_TABLE_SCHEMA_QUERY = conf.getString(
"metadata.lineage.hive.table.schema.query", "atlas.lineage.hive.table.schema.query",
"hive_table where name=\"%s\", columns"); "hive_table where name=\"%s\", columns");
HIVE_TABLE_EXISTS_QUERY = conf.getString( HIVE_TABLE_EXISTS_QUERY = conf.getString(
"metadata.lineage.hive.table.exists.query", "atlas.lineage.hive.table.exists.query",
"from hive_table where name=\"%s\""); "from hive_table where name=\"%s\"");
} catch (MetadataException e) { } catch (MetadataException e) {
throw new RuntimeException(e); throw new RuntimeException(e);
......
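Each renamed atlas.lineage.* key above is resolved with a built-in default, and the two query keys are format-string templates (note the %s placeholder). A small sketch of how the configured schema-query template is expanded; the properties file name and the table name are illustrative assumptions.

    import org.apache.commons.configuration.ConfigurationException;
    import org.apache.commons.configuration.PropertiesConfiguration;

    public class LineageQueryTemplateExample {
        public static void main(String[] args) throws ConfigurationException {
            PropertiesConfiguration conf = new PropertiesConfiguration("application.properties");
            // Falls back to the shipped default when the key is not overridden.
            String template = conf.getString("atlas.lineage.hive.table.schema.query",
                    "hive_table where name=\"%s\", columns");
            // The "%s" placeholder takes the table name at query time.
            String query = String.format(template, "sales_fact");
            System.out.println(query); // hive_table where name="sales_fact", columns
        }
    }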
...@@ -16,9 +16,9 @@ ...@@ -16,9 +16,9 @@
* limitations under the License. * limitations under the License.
*/ */
package org.apache.hadoop.metadata.discovery; package org.apache.atlas.discovery;
import org.apache.hadoop.metadata.MetadataException; import org.apache.atlas.MetadataException;
/** /**
* Lineage service interface. * Lineage service interface.
......
...@@ -16,9 +16,9 @@ ...@@ -16,9 +16,9 @@
* limitations under the License. * limitations under the License.
*/ */
package org.apache.hadoop.metadata.discovery; package org.apache.atlas.discovery;
import org.apache.hadoop.metadata.listener.TypesChangeListener; import org.apache.atlas.listener.TypesChangeListener;
/** /**
......
...@@ -16,19 +16,26 @@ ...@@ -16,19 +16,26 @@
* limitations under the License. * limitations under the License.
*/ */
package org.apache.hadoop.metadata.discovery.graph; package org.apache.atlas.discovery.graph;
import com.thinkaurelius.titan.core.TitanVertex; import com.thinkaurelius.titan.core.TitanVertex;
import org.apache.hadoop.metadata.MetadataException; import org.apache.atlas.MetadataException;
import org.apache.hadoop.metadata.query.*; import org.apache.atlas.query.Expressions;
import org.apache.hadoop.metadata.query.TypeUtils; import org.apache.atlas.query.GraphPersistenceStrategies;
import org.apache.hadoop.metadata.repository.MetadataRepository; import org.apache.atlas.query.GraphPersistenceStrategies$class;
import org.apache.hadoop.metadata.repository.Constants; import org.apache.atlas.query.TypeUtils;
import org.apache.hadoop.metadata.repository.graph.GraphBackedMetadataRepository; import org.apache.atlas.repository.Constants;
import org.apache.hadoop.metadata.typesystem.ITypedReferenceableInstance; import org.apache.atlas.repository.MetadataRepository;
import org.apache.hadoop.metadata.typesystem.ITypedStruct; import org.apache.atlas.repository.graph.GraphBackedMetadataRepository;
import org.apache.hadoop.metadata.typesystem.persistence.Id; import org.apache.atlas.typesystem.ITypedReferenceableInstance;
import org.apache.hadoop.metadata.typesystem.types.*; import org.apache.atlas.typesystem.ITypedStruct;
import org.apache.atlas.typesystem.persistence.Id;
import org.apache.atlas.typesystem.types.AttributeInfo;
import org.apache.atlas.typesystem.types.IDataType;
import org.apache.atlas.typesystem.types.Multiplicity;
import org.apache.atlas.typesystem.types.StructType;
import org.apache.atlas.typesystem.types.TraitType;
import org.apache.atlas.typesystem.types.TypeSystem;
import org.slf4j.Logger; import org.slf4j.Logger;
import org.slf4j.LoggerFactory; import org.slf4j.LoggerFactory;
import scala.collection.Traversable; import scala.collection.Traversable;
......
...@@ -16,27 +16,27 @@ ...@@ -16,27 +16,27 @@
* limitations under the License. * limitations under the License.
*/ */
package org.apache.hadoop.metadata.discovery.graph; package org.apache.atlas.discovery.graph;
import com.thinkaurelius.titan.core.TitanGraph; import com.thinkaurelius.titan.core.TitanGraph;
import com.thinkaurelius.titan.core.TitanIndexQuery; import com.thinkaurelius.titan.core.TitanIndexQuery;
import com.thinkaurelius.titan.core.TitanProperty; import com.thinkaurelius.titan.core.TitanProperty;
import com.thinkaurelius.titan.core.TitanVertex; import com.thinkaurelius.titan.core.TitanVertex;
import com.tinkerpop.blueprints.Vertex; import com.tinkerpop.blueprints.Vertex;
import org.apache.hadoop.metadata.GraphTransaction; import org.apache.atlas.GraphTransaction;
import org.apache.hadoop.metadata.MetadataServiceClient; import org.apache.atlas.MetadataServiceClient;
import org.apache.hadoop.metadata.discovery.DiscoveryException; import org.apache.atlas.discovery.DiscoveryException;
import org.apache.hadoop.metadata.discovery.DiscoveryService; import org.apache.atlas.discovery.DiscoveryService;
import org.apache.hadoop.metadata.query.Expressions; import org.apache.atlas.query.Expressions;
import org.apache.hadoop.metadata.query.GremlinEvaluator; import org.apache.atlas.query.GremlinEvaluator;
import org.apache.hadoop.metadata.query.GremlinQuery; import org.apache.atlas.query.GremlinQuery;
import org.apache.hadoop.metadata.query.GremlinQueryResult; import org.apache.atlas.query.GremlinQueryResult;
import org.apache.hadoop.metadata.query.GremlinTranslator; import org.apache.atlas.query.GremlinTranslator;
import org.apache.hadoop.metadata.query.QueryParser; import org.apache.atlas.query.QueryParser;
import org.apache.hadoop.metadata.query.QueryProcessor; import org.apache.atlas.query.QueryProcessor;
import org.apache.hadoop.metadata.repository.Constants; import org.apache.atlas.repository.Constants;
import org.apache.hadoop.metadata.repository.MetadataRepository; import org.apache.atlas.repository.MetadataRepository;
import org.apache.hadoop.metadata.repository.graph.GraphProvider; import org.apache.atlas.repository.graph.GraphProvider;
import org.codehaus.jettison.json.JSONArray; import org.codehaus.jettison.json.JSONArray;
import org.codehaus.jettison.json.JSONException; import org.codehaus.jettison.json.JSONException;
import org.codehaus.jettison.json.JSONObject; import org.codehaus.jettison.json.JSONObject;
...@@ -158,7 +158,7 @@ public class GraphBackedDiscoveryService implements DiscoveryService { ...@@ -158,7 +158,7 @@ public class GraphBackedDiscoveryService implements DiscoveryService {
* *
* @param gremlinQuery query in gremlin dsl format * @param gremlinQuery query in gremlin dsl format
* @return List of Maps * @return List of Maps
* @throws org.apache.hadoop.metadata.discovery.DiscoveryException * @throws org.apache.atlas.discovery.DiscoveryException
*/ */
@Override @Override
@GraphTransaction @GraphTransaction
......
...@@ -16,10 +16,10 @@ ...@@ -16,10 +16,10 @@
* limitations under the License. * limitations under the License.
*/ */
package org.apache.hadoop.metadata.listener; package org.apache.atlas.listener;
import org.apache.hadoop.metadata.MetadataException; import org.apache.atlas.MetadataException;
import org.apache.hadoop.metadata.typesystem.ITypedReferenceableInstance; import org.apache.atlas.typesystem.ITypedReferenceableInstance;
/** /**
* Entity (a Typed instance) change notification listener. * Entity (a Typed instance) change notification listener.
...@@ -30,7 +30,7 @@ public interface EntityChangeListener { ...@@ -30,7 +30,7 @@ public interface EntityChangeListener {
 * Invoked upon adding a new typed instance to the repository. * Invoked upon adding a new typed instance to the repository.
* *
* @param typedInstance a typed instance * @param typedInstance a typed instance
* @throws org.apache.hadoop.metadata.MetadataException * @throws org.apache.atlas.MetadataException
*/ */
void onEntityAdded(ITypedReferenceableInstance typedInstance) throws MetadataException; void onEntityAdded(ITypedReferenceableInstance typedInstance) throws MetadataException;
...@@ -39,7 +39,7 @@ public interface EntityChangeListener { ...@@ -39,7 +39,7 @@ public interface EntityChangeListener {
* *
* @param guid globally unique identifier for the entity * @param guid globally unique identifier for the entity
* @param traitName trait name for the instance that needs to be added to entity * @param traitName trait name for the instance that needs to be added to entity
* @throws org.apache.hadoop.metadata.MetadataException * @throws org.apache.atlas.MetadataException
*/ */
void onTraitAdded(String guid, String traitName) throws MetadataException; void onTraitAdded(String guid, String traitName) throws MetadataException;
...@@ -48,7 +48,7 @@ public interface EntityChangeListener { ...@@ -48,7 +48,7 @@ public interface EntityChangeListener {
* *
* @param guid globally unique identifier for the entity * @param guid globally unique identifier for the entity
* @param traitName trait name for the instance that needs to be deleted from entity * @param traitName trait name for the instance that needs to be deleted from entity
* @throws org.apache.hadoop.metadata.MetadataException * @throws org.apache.atlas.MetadataException
*/ */
void onTraitDeleted(String guid, String traitName) throws MetadataException; void onTraitDeleted(String guid, String traitName) throws MetadataException;
} }
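A minimal sketch of implementing the listener interface above; logging each callback is an assumption for illustration, not behaviour from this commit.

    import org.apache.atlas.MetadataException;
    import org.apache.atlas.listener.EntityChangeListener;
    import org.apache.atlas.typesystem.ITypedReferenceableInstance;
    import org.slf4j.Logger;
    import org.slf4j.LoggerFactory;

    public class LoggingEntityChangeListener implements EntityChangeListener {
        private static final Logger LOG =
                LoggerFactory.getLogger(LoggingEntityChangeListener.class);

        @Override
        public void onEntityAdded(ITypedReferenceableInstance typedInstance)
                throws MetadataException {
            LOG.info("Entity added: {}", typedInstance.getTypeName());
        }

        @Override
        public void onTraitAdded(String guid, String traitName) throws MetadataException {
            LOG.info("Trait {} added to entity {}", traitName, guid);
        }

        @Override
        public void onTraitDeleted(String guid, String traitName) throws MetadataException {
            LOG.info("Trait {} deleted from entity {}", traitName, guid);
        }
    }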
...@@ -16,10 +16,10 @@ ...@@ -16,10 +16,10 @@
* limitations under the License. * limitations under the License.
*/ */
package org.apache.hadoop.metadata.listener; package org.apache.atlas.listener;
import org.apache.hadoop.metadata.MetadataException; import org.apache.atlas.MetadataException;
import org.apache.hadoop.metadata.typesystem.types.IDataType; import org.apache.atlas.typesystem.types.IDataType;
/** /**
* Types change notification listener. * Types change notification listener.
......
...@@ -16,7 +16,7 @@ ...@@ -16,7 +16,7 @@
* limitations under the License. * limitations under the License.
*/ */
package org.apache.hadoop.metadata.repository; package org.apache.atlas.repository;
public final class Constants { public final class Constants {
......
...@@ -16,13 +16,13 @@ ...@@ -16,13 +16,13 @@
* limitations under the License. * limitations under the License.
*/ */
package org.apache.hadoop.metadata.repository; package org.apache.atlas.repository;
import org.apache.hadoop.metadata.MetadataException; import org.apache.atlas.MetadataException;
import org.apache.hadoop.metadata.typesystem.IReferenceableInstance; import org.apache.atlas.typesystem.IReferenceableInstance;
import org.apache.hadoop.metadata.typesystem.persistence.Id; import org.apache.atlas.typesystem.persistence.Id;
import org.apache.hadoop.metadata.typesystem.types.DataTypes; import org.apache.atlas.typesystem.types.DataTypes;
import org.apache.hadoop.metadata.typesystem.types.ObjectGraphWalker; import org.apache.atlas.typesystem.types.ObjectGraphWalker;
import java.util.HashMap; import java.util.HashMap;
import java.util.Map; import java.util.Map;
......
...@@ -16,9 +16,7 @@ ...@@ -16,9 +16,7 @@
* limitations under the License. * limitations under the License.
*/ */
package org.apache.hadoop.metadata.repository; package org.apache.atlas.repository;
import org.apache.hadoop.metadata.MetadataException;
/** /**
* A simple wrapper for 404. * A simple wrapper for 404.
......
...@@ -16,14 +16,14 @@ ...@@ -16,14 +16,14 @@
* limitations under the License. * limitations under the License.
*/ */
package org.apache.hadoop.metadata.repository; package org.apache.atlas.repository;
import org.apache.hadoop.metadata.typesystem.IReferenceableInstance; import org.apache.atlas.typesystem.IReferenceableInstance;
import org.apache.hadoop.metadata.typesystem.ITypedReferenceableInstance; import org.apache.atlas.typesystem.ITypedReferenceableInstance;
import org.apache.hadoop.metadata.typesystem.persistence.Id; import org.apache.atlas.typesystem.persistence.Id;
import org.apache.hadoop.metadata.typesystem.types.ClassType; import org.apache.atlas.typesystem.types.ClassType;
import org.apache.hadoop.metadata.typesystem.types.HierarchicalType; import org.apache.atlas.typesystem.types.HierarchicalType;
import org.apache.hadoop.metadata.typesystem.types.TraitType; import org.apache.atlas.typesystem.types.TraitType;
import java.util.List; import java.util.List;
......
...@@ -16,14 +16,14 @@ ...@@ -16,14 +16,14 @@
* limitations under the License. * limitations under the License.
*/ */
package org.apache.hadoop.metadata.repository; package org.apache.atlas.repository;
import org.apache.hadoop.metadata.MetadataException; import org.apache.atlas.MetadataException;
import org.apache.hadoop.metadata.typesystem.IReferenceableInstance; import org.apache.atlas.typesystem.IReferenceableInstance;
import org.apache.hadoop.metadata.typesystem.ITypedReferenceableInstance; import org.apache.atlas.typesystem.ITypedReferenceableInstance;
import org.apache.hadoop.metadata.typesystem.ITypedStruct; import org.apache.atlas.typesystem.ITypedStruct;
import org.apache.hadoop.metadata.typesystem.types.AttributeInfo; import org.apache.atlas.typesystem.types.AttributeInfo;
import org.apache.hadoop.metadata.typesystem.types.IDataType; import org.apache.atlas.typesystem.types.IDataType;
import java.util.List; import java.util.List;
......
...@@ -16,9 +16,9 @@ ...@@ -16,9 +16,9 @@
* limitations under the License. * limitations under the License.
*/ */
package org.apache.hadoop.metadata.repository; package org.apache.atlas.repository;
import org.apache.hadoop.metadata.MetadataException; import org.apache.atlas.MetadataException;
/** /**
* Base Exception class for Repository API. * Base Exception class for Repository API.
......
...@@ -16,7 +16,7 @@ ...@@ -16,7 +16,7 @@
* limitations under the License. * limitations under the License.
*/ */
package org.apache.hadoop.metadata.repository.graph; package org.apache.atlas.repository.graph;
import com.google.common.base.Preconditions; import com.google.common.base.Preconditions;
import com.thinkaurelius.titan.core.TitanGraph; import com.thinkaurelius.titan.core.TitanGraph;
...@@ -26,30 +26,30 @@ import com.tinkerpop.blueprints.Direction; ...@@ -26,30 +26,30 @@ import com.tinkerpop.blueprints.Direction;
import com.tinkerpop.blueprints.Edge; import com.tinkerpop.blueprints.Edge;
import com.tinkerpop.blueprints.GraphQuery; import com.tinkerpop.blueprints.GraphQuery;
import com.tinkerpop.blueprints.Vertex; import com.tinkerpop.blueprints.Vertex;
import org.apache.atlas.GraphTransaction;
import org.apache.atlas.MetadataException;
import org.apache.atlas.repository.Constants;
import org.apache.atlas.repository.EntityNotFoundException;
import org.apache.atlas.repository.MetadataRepository;
import org.apache.atlas.repository.RepositoryException;
import org.apache.atlas.typesystem.IReferenceableInstance;
import org.apache.atlas.typesystem.ITypedInstance;
import org.apache.atlas.typesystem.ITypedReferenceableInstance;
import org.apache.atlas.typesystem.ITypedStruct;
import org.apache.atlas.typesystem.persistence.Id;
import org.apache.atlas.typesystem.persistence.MapIds;
import org.apache.atlas.typesystem.types.AttributeInfo;
import org.apache.atlas.typesystem.types.ClassType;
import org.apache.atlas.typesystem.types.DataTypes;
import org.apache.atlas.typesystem.types.EnumValue;
import org.apache.atlas.typesystem.types.HierarchicalType;
import org.apache.atlas.typesystem.types.IDataType;
import org.apache.atlas.typesystem.types.Multiplicity;
import org.apache.atlas.typesystem.types.ObjectGraphWalker;
import org.apache.atlas.typesystem.types.StructType;
import org.apache.atlas.typesystem.types.TraitType;
import org.apache.atlas.typesystem.types.TypeSystem;
import org.apache.commons.lang.StringUtils; import org.apache.commons.lang.StringUtils;
import org.apache.hadoop.metadata.GraphTransaction;
import org.apache.hadoop.metadata.MetadataException;
import org.apache.hadoop.metadata.repository.Constants;
import org.apache.hadoop.metadata.repository.EntityNotFoundException;
import org.apache.hadoop.metadata.repository.MetadataRepository;
import org.apache.hadoop.metadata.repository.RepositoryException;
import org.apache.hadoop.metadata.typesystem.IReferenceableInstance;
import org.apache.hadoop.metadata.typesystem.ITypedInstance;
import org.apache.hadoop.metadata.typesystem.ITypedReferenceableInstance;
import org.apache.hadoop.metadata.typesystem.ITypedStruct;
import org.apache.hadoop.metadata.typesystem.persistence.Id;
import org.apache.hadoop.metadata.typesystem.persistence.MapIds;
import org.apache.hadoop.metadata.typesystem.types.AttributeInfo;
import org.apache.hadoop.metadata.typesystem.types.ClassType;
import org.apache.hadoop.metadata.typesystem.types.DataTypes;
import org.apache.hadoop.metadata.typesystem.types.EnumValue;
import org.apache.hadoop.metadata.typesystem.types.HierarchicalType;
import org.apache.hadoop.metadata.typesystem.types.IDataType;
import org.apache.hadoop.metadata.typesystem.types.Multiplicity;
import org.apache.hadoop.metadata.typesystem.types.ObjectGraphWalker;
import org.apache.hadoop.metadata.typesystem.types.StructType;
import org.apache.hadoop.metadata.typesystem.types.TraitType;
import org.apache.hadoop.metadata.typesystem.types.TypeSystem;
import org.slf4j.Logger; import org.slf4j.Logger;
import org.slf4j.LoggerFactory; import org.slf4j.LoggerFactory;
......
...@@ -16,30 +16,27 @@ ...@@ -16,30 +16,27 @@
* limitations under the License. * limitations under the License.
*/ */
package org.apache.hadoop.metadata.repository.graph; package org.apache.atlas.repository.graph;
import com.google.inject.Singleton; import com.google.inject.Singleton;
import com.thinkaurelius.titan.core.Cardinality; import com.thinkaurelius.titan.core.Cardinality;
import com.thinkaurelius.titan.core.EdgeLabel;
import com.thinkaurelius.titan.core.Order;
import com.thinkaurelius.titan.core.PropertyKey; import com.thinkaurelius.titan.core.PropertyKey;
import com.thinkaurelius.titan.core.TitanGraph; import com.thinkaurelius.titan.core.TitanGraph;
import com.thinkaurelius.titan.core.schema.Mapping; import com.thinkaurelius.titan.core.schema.Mapping;
import com.thinkaurelius.titan.core.schema.TitanGraphIndex; import com.thinkaurelius.titan.core.schema.TitanGraphIndex;
import com.thinkaurelius.titan.core.schema.TitanManagement; import com.thinkaurelius.titan.core.schema.TitanManagement;
import com.tinkerpop.blueprints.Direction;
import com.tinkerpop.blueprints.Edge; import com.tinkerpop.blueprints.Edge;
import com.tinkerpop.blueprints.Vertex; import com.tinkerpop.blueprints.Vertex;
import org.apache.hadoop.metadata.MetadataException; import org.apache.atlas.MetadataException;
import org.apache.hadoop.metadata.discovery.SearchIndexer; import org.apache.atlas.discovery.SearchIndexer;
import org.apache.hadoop.metadata.repository.Constants; import org.apache.atlas.repository.Constants;
import org.apache.hadoop.metadata.repository.RepositoryException; import org.apache.atlas.repository.RepositoryException;
import org.apache.hadoop.metadata.typesystem.types.AttributeInfo; import org.apache.atlas.typesystem.types.AttributeInfo;
import org.apache.hadoop.metadata.typesystem.types.ClassType; import org.apache.atlas.typesystem.types.ClassType;
import org.apache.hadoop.metadata.typesystem.types.DataTypes; import org.apache.atlas.typesystem.types.DataTypes;
import org.apache.hadoop.metadata.typesystem.types.IDataType; import org.apache.atlas.typesystem.types.IDataType;
import org.apache.hadoop.metadata.typesystem.types.StructType; import org.apache.atlas.typesystem.types.StructType;
import org.apache.hadoop.metadata.typesystem.types.TraitType; import org.apache.atlas.typesystem.types.TraitType;
import org.slf4j.Logger; import org.slf4j.Logger;
import org.slf4j.LoggerFactory; import org.slf4j.LoggerFactory;
...@@ -139,7 +136,7 @@ public class GraphBackedSearchIndexer implements SearchIndexer { ...@@ -139,7 +136,7 @@ public class GraphBackedSearchIndexer implements SearchIndexer {
* *
* @param typeName type name * @param typeName type name
* @param dataType data type * @param dataType data type
* @throws org.apache.hadoop.metadata.MetadataException * @throws org.apache.atlas.MetadataException
*/ */
@Override @Override
public void onAdd(String typeName, IDataType dataType) throws MetadataException { public void onAdd(String typeName, IDataType dataType) throws MetadataException {
......
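onAdd(typeName, dataType) gives the indexer a chance to create graph indexes for a newly defined type. A hedged sketch of the kind of index bootstrapping this involves, using the Titan 0.5 management API imported above; the property-key and index names are illustrative assumptions, not the indexer's actual schema.

    import com.thinkaurelius.titan.core.Cardinality;
    import com.thinkaurelius.titan.core.PropertyKey;
    import com.thinkaurelius.titan.core.TitanGraph;
    import com.thinkaurelius.titan.core.schema.TitanManagement;
    import com.tinkerpop.blueprints.Vertex;

    public class IndexBootstrapExample {
        public static void createTypeNameIndex(TitanGraph graph) {
            TitanManagement management = graph.getManagementSystem();
            try {
                // One property key per attribute, then a composite index for exact lookups.
                PropertyKey typeName = management.makePropertyKey("typeName")
                        .dataType(String.class)
                        .cardinality(Cardinality.SINGLE)
                        .make();
                management.buildIndex("byTypeName", Vertex.class)
                        .addKey(typeName)
                        .buildCompositeIndex();
                management.commit();
            } catch (RuntimeException e) {
                management.rollback();
                throw e;
            }
        }
    }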
...@@ -16,7 +16,7 @@ ...@@ -16,7 +16,7 @@
* limitations under the License. * limitations under the License.
*/ */
package org.apache.hadoop.metadata.repository.graph; package org.apache.atlas.repository.graph;
import com.thinkaurelius.titan.core.TitanGraph; import com.thinkaurelius.titan.core.TitanGraph;
import com.thinkaurelius.titan.core.TitanVertex; import com.thinkaurelius.titan.core.TitanVertex;
...@@ -25,9 +25,9 @@ import com.tinkerpop.blueprints.Edge; ...@@ -25,9 +25,9 @@ import com.tinkerpop.blueprints.Edge;
import com.tinkerpop.blueprints.Graph; import com.tinkerpop.blueprints.Graph;
import com.tinkerpop.blueprints.GraphQuery; import com.tinkerpop.blueprints.GraphQuery;
import com.tinkerpop.blueprints.Vertex; import com.tinkerpop.blueprints.Vertex;
import org.apache.hadoop.metadata.repository.Constants; import org.apache.atlas.repository.Constants;
import org.apache.hadoop.metadata.typesystem.ITypedReferenceableInstance; import org.apache.atlas.typesystem.ITypedReferenceableInstance;
import org.apache.hadoop.metadata.typesystem.persistence.Id; import org.apache.atlas.typesystem.persistence.Id;
import org.slf4j.Logger; import org.slf4j.Logger;
import org.slf4j.LoggerFactory; import org.slf4j.LoggerFactory;
......
...@@ -16,7 +16,7 @@ ...@@ -16,7 +16,7 @@
* limitations under the License. * limitations under the License.
*/ */
package org.apache.hadoop.metadata.repository.graph; package org.apache.atlas.repository.graph;
import com.google.inject.throwingproviders.CheckedProvider; import com.google.inject.throwingproviders.CheckedProvider;
import com.tinkerpop.blueprints.Graph; import com.tinkerpop.blueprints.Graph;
......
...@@ -16,16 +16,15 @@ ...@@ -16,16 +16,15 @@
* limitations under the License. * limitations under the License.
*/ */
package org.apache.hadoop.metadata.repository.graph; package org.apache.atlas.repository.graph;
import com.google.inject.Provides; import com.google.inject.Provides;
import com.thinkaurelius.titan.core.TitanFactory; import com.thinkaurelius.titan.core.TitanFactory;
import com.thinkaurelius.titan.core.TitanGraph; import com.thinkaurelius.titan.core.TitanGraph;
import org.apache.atlas.MetadataException;
import org.apache.atlas.PropertiesUtil;
import org.apache.commons.configuration.Configuration; import org.apache.commons.configuration.Configuration;
import org.apache.commons.configuration.ConfigurationException;
import org.apache.commons.configuration.PropertiesConfiguration; import org.apache.commons.configuration.PropertiesConfiguration;
import org.apache.hadoop.metadata.MetadataException;
import org.apache.hadoop.metadata.PropertiesUtil;
import org.slf4j.Logger; import org.slf4j.Logger;
import org.slf4j.LoggerFactory; import org.slf4j.LoggerFactory;
...@@ -42,7 +41,7 @@ public class TitanGraphProvider implements GraphProvider<TitanGraph> { ...@@ -42,7 +41,7 @@ public class TitanGraphProvider implements GraphProvider<TitanGraph> {
/** /**
 * Constant for the prefix that marks graph configuration properties. * Constant for the prefix that marks graph configuration properties.
*/ */
private static final String METADATA_PREFIX = "metadata.graph."; private static final String ATLAS_PREFIX = "atlas.graph.";
private static Configuration getConfiguration() throws MetadataException { private static Configuration getConfiguration() throws MetadataException {
PropertiesConfiguration configProperties = PropertiesUtil.getApplicationProperties(); PropertiesConfiguration configProperties = PropertiesUtil.getApplicationProperties();
...@@ -52,9 +51,9 @@ public class TitanGraphProvider implements GraphProvider<TitanGraph> { ...@@ -52,9 +51,9 @@ public class TitanGraphProvider implements GraphProvider<TitanGraph> {
final Iterator<String> iterator = configProperties.getKeys(); final Iterator<String> iterator = configProperties.getKeys();
while (iterator.hasNext()) { while (iterator.hasNext()) {
String key = iterator.next(); String key = iterator.next();
if (key.startsWith(METADATA_PREFIX)) { if (key.startsWith(ATLAS_PREFIX)) {
String value = (String) configProperties.getProperty(key); String value = (String) configProperties.getProperty(key);
key = key.substring(METADATA_PREFIX.length()); key = key.substring(ATLAS_PREFIX.length());
graphConfig.setProperty(key, value); graphConfig.setProperty(key, value);
LOG.info("Using graph property {}={}", key, value); LOG.info("Using graph property {}={}", key, value);
} }
......
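The loop above copies every key under atlas.graph.* into the Titan configuration with the prefix removed, so atlas.graph.storage.backend becomes storage.backend. A standalone sketch of the same transformation over plain java.util.Properties, so it runs without the Atlas classes:

    import java.util.Properties;

    public class PrefixStripExample {
        private static final String ATLAS_PREFIX = "atlas.graph.";

        public static Properties stripPrefix(Properties source) {
            Properties graphConfig = new Properties();
            for (String key : source.stringPropertyNames()) {
                if (key.startsWith(ATLAS_PREFIX)) {
                    // "atlas.graph.storage.backend" becomes "storage.backend".
                    graphConfig.setProperty(key.substring(ATLAS_PREFIX.length()),
                            source.getProperty(key));
                }
            }
            return graphConfig;
        }

        public static void main(String[] args) {
            Properties props = new Properties();
            props.setProperty("atlas.graph.storage.backend", "berkeleyje");
            props.setProperty("atlas.lineage.hive.table.type.name", "DataSet"); // ignored
            System.out.println(stripPrefix(props)); // {storage.backend=berkeleyje}
        }
    }

commons-configuration's subset("atlas.graph") would give the same prefix-stripped view; the explicit loop in the provider keeps the per-key log line.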
...@@ -16,7 +16,7 @@ ...@@ -16,7 +16,7 @@
* limitations under the License. * limitations under the License.
*/ */
package org.apache.hadoop.metadata.repository.memory; package org.apache.atlas.repository.memory;
import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableMap; import com.google.common.collect.ImmutableMap;
...@@ -28,12 +28,12 @@ import it.unimi.dsi.fastutil.floats.FloatArrayList; ...@@ -28,12 +28,12 @@ import it.unimi.dsi.fastutil.floats.FloatArrayList;
import it.unimi.dsi.fastutil.ints.IntArrayList; import it.unimi.dsi.fastutil.ints.IntArrayList;
import it.unimi.dsi.fastutil.longs.LongArrayList; import it.unimi.dsi.fastutil.longs.LongArrayList;
import it.unimi.dsi.fastutil.shorts.ShortArrayList; import it.unimi.dsi.fastutil.shorts.ShortArrayList;
import org.apache.hadoop.metadata.repository.RepositoryException; import org.apache.atlas.repository.RepositoryException;
import org.apache.hadoop.metadata.typesystem.persistence.Id; import org.apache.atlas.typesystem.persistence.Id;
import org.apache.hadoop.metadata.typesystem.persistence.StructInstance; import org.apache.atlas.typesystem.persistence.StructInstance;
import org.apache.hadoop.metadata.typesystem.types.AttributeInfo; import org.apache.atlas.typesystem.types.AttributeInfo;
import org.apache.hadoop.metadata.typesystem.types.DataTypes; import org.apache.atlas.typesystem.types.DataTypes;
import org.apache.hadoop.metadata.typesystem.types.IConstructableType; import org.apache.atlas.typesystem.types.IConstructableType;
import java.math.BigDecimal; import java.math.BigDecimal;
import java.math.BigInteger; import java.math.BigInteger;
......
...@@ -16,14 +16,14 @@ ...@@ -16,14 +16,14 @@
* limitations under the License. * limitations under the License.
*/ */
package org.apache.hadoop.metadata.repository.memory; package org.apache.atlas.repository.memory;
import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableList;
import org.apache.hadoop.metadata.MetadataException; import org.apache.atlas.MetadataException;
import org.apache.hadoop.metadata.repository.RepositoryException; import org.apache.atlas.repository.RepositoryException;
import org.apache.hadoop.metadata.typesystem.persistence.Id; import org.apache.atlas.typesystem.persistence.Id;
import org.apache.hadoop.metadata.typesystem.persistence.ReferenceableInstance; import org.apache.atlas.typesystem.persistence.ReferenceableInstance;
import org.apache.hadoop.metadata.typesystem.types.ClassType; import org.apache.atlas.typesystem.types.ClassType;
import java.util.ArrayList; import java.util.ArrayList;
......
...@@ -16,19 +16,19 @@ ...@@ -16,19 +16,19 @@
* limitations under the License. * limitations under the License.
*/ */
package org.apache.hadoop.metadata.repository.memory; package org.apache.atlas.repository.memory;
import com.google.common.collect.ImmutableBiMap; import com.google.common.collect.ImmutableBiMap;
import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableMap; import com.google.common.collect.ImmutableMap;
import com.google.common.collect.Lists; import com.google.common.collect.Lists;
import org.apache.hadoop.metadata.repository.RepositoryException; import org.apache.atlas.repository.RepositoryException;
import org.apache.hadoop.metadata.typesystem.persistence.Id; import org.apache.atlas.typesystem.persistence.Id;
import org.apache.hadoop.metadata.typesystem.persistence.ReferenceableInstance; import org.apache.atlas.typesystem.persistence.ReferenceableInstance;
import org.apache.hadoop.metadata.typesystem.persistence.StructInstance; import org.apache.atlas.typesystem.persistence.StructInstance;
import org.apache.hadoop.metadata.typesystem.types.AttributeInfo; import org.apache.atlas.typesystem.types.AttributeInfo;
import org.apache.hadoop.metadata.typesystem.types.HierarchicalType; import org.apache.atlas.typesystem.types.HierarchicalType;
import org.apache.hadoop.metadata.typesystem.types.IConstructableType; import org.apache.atlas.typesystem.types.IConstructableType;
import java.util.ArrayList; import java.util.ArrayList;
import java.util.HashMap; import java.util.HashMap;
......
...@@ -16,11 +16,11 @@ ...@@ -16,11 +16,11 @@
* limitations under the License. * limitations under the License.
*/ */
package org.apache.hadoop.metadata.repository.memory; package org.apache.atlas.repository.memory;
import org.apache.hadoop.metadata.repository.RepositoryException; import org.apache.atlas.repository.RepositoryException;
import org.apache.hadoop.metadata.typesystem.persistence.StructInstance; import org.apache.atlas.typesystem.persistence.StructInstance;
import org.apache.hadoop.metadata.typesystem.types.IConstructableType; import org.apache.atlas.typesystem.types.IConstructableType;
public interface IAttributeStore { public interface IAttributeStore {
/** /**
......
...@@ -16,24 +16,24 @@ ...@@ -16,24 +16,24 @@
* limitations under the License. * limitations under the License.
*/ */
package org.apache.hadoop.metadata.repository.memory; package org.apache.atlas.repository.memory;
import org.apache.hadoop.metadata.MetadataException; import org.apache.atlas.MetadataException;
import org.apache.hadoop.metadata.repository.DiscoverInstances; import org.apache.atlas.repository.DiscoverInstances;
import org.apache.hadoop.metadata.repository.IRepository; import org.apache.atlas.repository.IRepository;
import org.apache.hadoop.metadata.repository.RepositoryException; import org.apache.atlas.repository.RepositoryException;
import org.apache.hadoop.metadata.typesystem.IReferenceableInstance; import org.apache.atlas.typesystem.IReferenceableInstance;
import org.apache.hadoop.metadata.typesystem.ITypedReferenceableInstance; import org.apache.atlas.typesystem.ITypedReferenceableInstance;
import org.apache.hadoop.metadata.typesystem.persistence.Id; import org.apache.atlas.typesystem.persistence.Id;
import org.apache.hadoop.metadata.typesystem.persistence.MapIds; import org.apache.atlas.typesystem.persistence.MapIds;
import org.apache.hadoop.metadata.typesystem.persistence.ReferenceableInstance; import org.apache.atlas.typesystem.persistence.ReferenceableInstance;
import org.apache.hadoop.metadata.typesystem.types.ClassType; import org.apache.atlas.typesystem.types.ClassType;
import org.apache.hadoop.metadata.typesystem.types.DataTypes; import org.apache.atlas.typesystem.types.DataTypes;
import org.apache.hadoop.metadata.typesystem.types.HierarchicalType; import org.apache.atlas.typesystem.types.HierarchicalType;
import org.apache.hadoop.metadata.typesystem.types.Multiplicity; import org.apache.atlas.typesystem.types.Multiplicity;
import org.apache.hadoop.metadata.typesystem.types.ObjectGraphWalker; import org.apache.atlas.typesystem.types.ObjectGraphWalker;
import org.apache.hadoop.metadata.typesystem.types.TraitType; import org.apache.atlas.typesystem.types.TraitType;
import org.apache.hadoop.metadata.typesystem.types.TypeSystem; import org.apache.atlas.typesystem.types.TypeSystem;
import java.util.ArrayList; import java.util.ArrayList;
import java.util.Collections; import java.util.Collections;
...@@ -105,7 +105,7 @@ public class MemRepository implements IRepository { ...@@ -105,7 +105,7 @@ public class MemRepository implements IRepository {
* *
* @param i * @param i
* @return * @return
* @throws org.apache.hadoop.metadata.repository.RepositoryException * @throws org.apache.atlas.repository.RepositoryException
*/ */
public ITypedReferenceableInstance create(IReferenceableInstance i) throws RepositoryException { public ITypedReferenceableInstance create(IReferenceableInstance i) throws RepositoryException {
......
...@@ -16,18 +16,18 @@ ...@@ -16,18 +16,18 @@
* limitations under the License. * limitations under the License.
*/ */
package org.apache.hadoop.metadata.repository.memory; package org.apache.atlas.repository.memory;
import com.google.common.collect.ImmutableCollection; import com.google.common.collect.ImmutableCollection;
import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableMap; import com.google.common.collect.ImmutableMap;
import com.google.common.collect.ImmutableSet; import com.google.common.collect.ImmutableSet;
import org.apache.hadoop.metadata.MetadataException; import org.apache.atlas.MetadataException;
import org.apache.hadoop.metadata.typesystem.ITypedReferenceableInstance; import org.apache.atlas.typesystem.ITypedReferenceableInstance;
import org.apache.hadoop.metadata.typesystem.persistence.Id; import org.apache.atlas.typesystem.persistence.Id;
import org.apache.hadoop.metadata.typesystem.types.DataTypes; import org.apache.atlas.typesystem.types.DataTypes;
import org.apache.hadoop.metadata.typesystem.types.Multiplicity; import org.apache.atlas.typesystem.types.Multiplicity;
import org.apache.hadoop.metadata.typesystem.types.ObjectGraphWalker; import org.apache.atlas.typesystem.types.ObjectGraphWalker;
import java.util.HashMap; import java.util.HashMap;
import java.util.Iterator; import java.util.Iterator;
......
...@@ -16,14 +16,14 @@ ...@@ -16,14 +16,14 @@
* limitations under the License. * limitations under the License.
*/ */
package org.apache.hadoop.metadata.repository.memory; package org.apache.atlas.repository.memory;
import com.google.common.collect.ImmutableBiMap; import com.google.common.collect.ImmutableBiMap;
import com.google.common.collect.ImmutableMap; import com.google.common.collect.ImmutableMap;
import org.apache.hadoop.metadata.repository.RepositoryException; import org.apache.atlas.repository.RepositoryException;
import org.apache.hadoop.metadata.typesystem.persistence.StructInstance; import org.apache.atlas.typesystem.persistence.StructInstance;
import org.apache.hadoop.metadata.typesystem.types.AttributeInfo; import org.apache.atlas.typesystem.types.AttributeInfo;
import org.apache.hadoop.metadata.typesystem.types.StructType; import org.apache.atlas.typesystem.types.StructType;
import java.util.Collection; import java.util.Collection;
import java.util.Map; import java.util.Map;
......
...@@ -16,12 +16,12 @@ ...@@ -16,12 +16,12 @@
* limitations under the License. * limitations under the License.
*/ */
package org.apache.hadoop.metadata.repository.memory; package org.apache.atlas.repository.memory;
import org.apache.hadoop.metadata.repository.RepositoryException; import org.apache.atlas.repository.RepositoryException;
import org.apache.hadoop.metadata.typesystem.persistence.ReferenceableInstance; import org.apache.atlas.typesystem.persistence.ReferenceableInstance;
import org.apache.hadoop.metadata.typesystem.persistence.StructInstance; import org.apache.atlas.typesystem.persistence.StructInstance;
import org.apache.hadoop.metadata.typesystem.types.TraitType; import org.apache.atlas.typesystem.types.TraitType;
import java.util.ArrayList; import java.util.ArrayList;
......
...@@ -16,7 +16,7 @@ ...@@ -16,7 +16,7 @@
* limitations under the License. * limitations under the License.
*/ */
package org.apache.hadoop.metadata.repository.typestore; package org.apache.atlas.repository.typestore;
import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableList;
import com.google.inject.Inject; import com.google.inject.Inject;
...@@ -25,27 +25,26 @@ import com.thinkaurelius.titan.core.TitanGraph; ...@@ -25,27 +25,26 @@ import com.thinkaurelius.titan.core.TitanGraph;
import com.tinkerpop.blueprints.Direction; import com.tinkerpop.blueprints.Direction;
import com.tinkerpop.blueprints.Edge; import com.tinkerpop.blueprints.Edge;
import com.tinkerpop.blueprints.Vertex; import com.tinkerpop.blueprints.Vertex;
import org.apache.commons.lang.StringUtils; import org.apache.atlas.GraphTransaction;
import org.apache.hadoop.metadata.GraphTransaction; import org.apache.atlas.MetadataException;
import org.apache.hadoop.metadata.MetadataException; import org.apache.atlas.repository.Constants;
import org.apache.hadoop.metadata.repository.Constants; import org.apache.atlas.repository.graph.GraphProvider;
import org.apache.hadoop.metadata.repository.graph.GraphProvider; import org.apache.atlas.typesystem.TypesDef;
import org.apache.hadoop.metadata.typesystem.TypesDef; import org.apache.atlas.typesystem.types.AttributeDefinition;
import org.apache.hadoop.metadata.typesystem.types.AttributeDefinition; import org.apache.atlas.typesystem.types.AttributeInfo;
import org.apache.hadoop.metadata.typesystem.types.AttributeInfo; import org.apache.atlas.typesystem.types.ClassType;
import org.apache.hadoop.metadata.typesystem.types.ClassType; import org.apache.atlas.typesystem.types.DataTypes;
import org.apache.hadoop.metadata.typesystem.types.DataTypes; import org.apache.atlas.typesystem.types.EnumType;
import org.apache.hadoop.metadata.typesystem.types.EnumType; import org.apache.atlas.typesystem.types.EnumTypeDefinition;
import org.apache.hadoop.metadata.typesystem.types.EnumTypeDefinition; import org.apache.atlas.typesystem.types.EnumValue;
import org.apache.hadoop.metadata.typesystem.types.EnumValue; import org.apache.atlas.typesystem.types.HierarchicalType;
import org.apache.hadoop.metadata.typesystem.types.HierarchicalType; import org.apache.atlas.typesystem.types.HierarchicalTypeDefinition;
import org.apache.hadoop.metadata.typesystem.types.HierarchicalTypeDefinition; import org.apache.atlas.typesystem.types.IDataType;
import org.apache.hadoop.metadata.typesystem.types.IDataType; import org.apache.atlas.typesystem.types.StructType;
import org.apache.hadoop.metadata.typesystem.types.StructType; import org.apache.atlas.typesystem.types.StructTypeDefinition;
import org.apache.hadoop.metadata.typesystem.types.StructTypeDefinition; import org.apache.atlas.typesystem.types.TraitType;
import org.apache.hadoop.metadata.typesystem.types.TraitType; import org.apache.atlas.typesystem.types.TypeSystem;
import org.apache.hadoop.metadata.typesystem.types.TypeSystem; import org.apache.atlas.typesystem.types.TypeUtils;
import org.apache.hadoop.metadata.typesystem.types.TypeUtils;
import org.codehaus.jettison.json.JSONException; import org.codehaus.jettison.json.JSONException;
import org.slf4j.Logger; import org.slf4j.Logger;
import org.slf4j.LoggerFactory; import org.slf4j.LoggerFactory;
......
...@@ -16,12 +16,12 @@ ...@@ -16,12 +16,12 @@
* limitations under the License. * limitations under the License.
*/ */
package org.apache.hadoop.metadata.repository.typestore; package org.apache.atlas.repository.typestore;
import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableList;
import org.apache.hadoop.metadata.MetadataException; import org.apache.atlas.MetadataException;
import org.apache.hadoop.metadata.typesystem.TypesDef; import org.apache.atlas.typesystem.TypesDef;
import org.apache.hadoop.metadata.typesystem.types.TypeSystem; import org.apache.atlas.typesystem.types.TypeSystem;
public interface ITypeStore { public interface ITypeStore {
/** /**
...@@ -42,7 +42,7 @@ public interface ITypeStore { ...@@ -42,7 +42,7 @@ public interface ITypeStore {
/** /**
* Restore all type definitions * Restore all type definitions
* @return List of persisted type definitions * @return List of persisted type definitions
* @throws org.apache.hadoop.metadata.MetadataException * @throws org.apache.atlas.MetadataException
*/ */
TypesDef restore() throws MetadataException; TypesDef restore() throws MetadataException;
} }
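A hedged sketch of restoring persisted definitions at startup; TypeSystem.defineTypes(TypesDef) is assumed from the type-system API rather than shown in this hunk.

    import org.apache.atlas.MetadataException;
    import org.apache.atlas.repository.typestore.ITypeStore;
    import org.apache.atlas.typesystem.TypesDef;
    import org.apache.atlas.typesystem.types.TypeSystem;

    public class TypeStoreRestoreExample {
        public static void restoreTypes(ITypeStore typeStore) throws MetadataException {
            // Pull all persisted type definitions out of the store...
            TypesDef persisted = typeStore.restore();
            // ...and re-register them with the in-memory type system (assumed API).
            TypeSystem.getInstance().defineTypes(persisted);
        }
    }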
...@@ -16,9 +16,9 @@ ...@@ -16,9 +16,9 @@
* limitations under the License. * limitations under the License.
*/ */
package org.apache.hadoop.metadata.repository.typestore; package org.apache.atlas.repository.typestore;
import org.apache.hadoop.metadata.MetadataException; import org.apache.atlas.MetadataException;
public class StorageException extends MetadataException { public class StorageException extends MetadataException {
public StorageException(String type) { public StorageException(String type) {
......
...@@ -16,39 +16,39 @@ ...@@ -16,39 +16,39 @@
* limitations under the License. * limitations under the License.
*/ */
package org.apache.hadoop.metadata.services; package org.apache.atlas.services;
import com.google.common.base.Preconditions; import com.google.common.base.Preconditions;
import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableList;
import org.apache.hadoop.metadata.GraphTransaction; import org.apache.atlas.GraphTransaction;
import org.apache.hadoop.metadata.MetadataException; import org.apache.atlas.MetadataException;
import org.apache.hadoop.metadata.MetadataServiceClient; import org.apache.atlas.MetadataServiceClient;
import org.apache.hadoop.metadata.ParamChecker; import org.apache.atlas.ParamChecker;
import org.apache.hadoop.metadata.classification.InterfaceAudience; import org.apache.atlas.classification.InterfaceAudience;
import org.apache.hadoop.metadata.discovery.SearchIndexer; import org.apache.atlas.discovery.SearchIndexer;
import org.apache.hadoop.metadata.listener.EntityChangeListener; import org.apache.atlas.listener.EntityChangeListener;
import org.apache.hadoop.metadata.listener.TypesChangeListener; import org.apache.atlas.listener.TypesChangeListener;
import org.apache.hadoop.metadata.repository.MetadataRepository; import org.apache.atlas.repository.MetadataRepository;
import org.apache.hadoop.metadata.repository.typestore.ITypeStore; import org.apache.atlas.repository.typestore.ITypeStore;
import org.apache.hadoop.metadata.typesystem.ITypedReferenceableInstance; import org.apache.atlas.typesystem.ITypedReferenceableInstance;
import org.apache.hadoop.metadata.typesystem.ITypedStruct; import org.apache.atlas.typesystem.ITypedStruct;
import org.apache.hadoop.metadata.typesystem.Referenceable; import org.apache.atlas.typesystem.Referenceable;
import org.apache.hadoop.metadata.typesystem.Struct; import org.apache.atlas.typesystem.Struct;
import org.apache.hadoop.metadata.typesystem.TypesDef; import org.apache.atlas.typesystem.TypesDef;
import org.apache.hadoop.metadata.typesystem.json.InstanceSerialization; import org.apache.atlas.typesystem.json.InstanceSerialization;
import org.apache.hadoop.metadata.typesystem.json.TypesSerialization; import org.apache.atlas.typesystem.json.TypesSerialization;
import org.apache.hadoop.metadata.typesystem.types.AttributeDefinition; import org.apache.atlas.typesystem.types.AttributeDefinition;
import org.apache.hadoop.metadata.typesystem.types.ClassType; import org.apache.atlas.typesystem.types.ClassType;
import org.apache.hadoop.metadata.typesystem.types.DataTypes; import org.apache.atlas.typesystem.types.DataTypes;
import org.apache.hadoop.metadata.typesystem.types.EnumTypeDefinition; import org.apache.atlas.typesystem.types.EnumTypeDefinition;
import org.apache.hadoop.metadata.typesystem.types.HierarchicalTypeDefinition; import org.apache.atlas.typesystem.types.HierarchicalTypeDefinition;
import org.apache.hadoop.metadata.typesystem.types.IDataType; import org.apache.atlas.typesystem.types.IDataType;
import org.apache.hadoop.metadata.typesystem.types.Multiplicity; import org.apache.atlas.typesystem.types.Multiplicity;
import org.apache.hadoop.metadata.typesystem.types.StructTypeDefinition; import org.apache.atlas.typesystem.types.StructTypeDefinition;
import org.apache.hadoop.metadata.typesystem.types.TraitType; import org.apache.atlas.typesystem.types.TraitType;
import org.apache.hadoop.metadata.typesystem.types.TypeSystem; import org.apache.atlas.typesystem.types.TypeSystem;
import org.apache.hadoop.metadata.typesystem.types.TypeUtils; import org.apache.atlas.typesystem.types.TypeUtils;
import org.apache.hadoop.metadata.typesystem.types.utils.TypesUtil; import org.apache.atlas.typesystem.types.utils.TypesUtil;
import org.codehaus.jettison.json.JSONException; import org.codehaus.jettison.json.JSONException;
import org.codehaus.jettison.json.JSONObject; import org.codehaus.jettison.json.JSONObject;
import org.slf4j.Logger; import org.slf4j.Logger;
...@@ -56,7 +56,6 @@ import org.slf4j.LoggerFactory; ...@@ -56,7 +56,6 @@ import org.slf4j.LoggerFactory;
import javax.inject.Inject; import javax.inject.Inject;
import javax.inject.Singleton; import javax.inject.Singleton;
import java.util.HashMap;
import java.util.LinkedHashSet; import java.util.LinkedHashSet;
import java.util.List; import java.util.List;
import java.util.Map; import java.util.Map;
...@@ -122,7 +121,8 @@ public class DefaultMetadataService implements MetadataService { ...@@ -122,7 +121,8 @@ public class DefaultMetadataService implements MetadataService {
ImmutableList.<String>of(), NAME_ATTRIBUTE, DESCRIPTION_ATTRIBUTE); ImmutableList.<String>of(), NAME_ATTRIBUTE, DESCRIPTION_ATTRIBUTE);
HierarchicalTypeDefinition<ClassType> datasetType = TypesUtil HierarchicalTypeDefinition<ClassType> datasetType = TypesUtil
.createClassTypeDef(MetadataServiceClient.DATA_SET_SUPER_TYPE, ImmutableList.<String>of(), .createClassTypeDef(MetadataServiceClient.DATA_SET_SUPER_TYPE,
ImmutableList.<String>of(),
NAME_ATTRIBUTE, DESCRIPTION_ATTRIBUTE); NAME_ATTRIBUTE, DESCRIPTION_ATTRIBUTE);
HierarchicalTypeDefinition<ClassType> processType = TypesUtil HierarchicalTypeDefinition<ClassType> processType = TypesUtil
......
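The same TypesUtil helper shown above can define custom class types against the built-in super types. A hedged sketch; the type name, super type, and attribute are illustrative assumptions, and createRequiredAttrDef is assumed from the TypesUtil API rather than shown in this hunk.

    import com.google.common.collect.ImmutableList;
    import org.apache.atlas.typesystem.types.ClassType;
    import org.apache.atlas.typesystem.types.DataTypes;
    import org.apache.atlas.typesystem.types.HierarchicalTypeDefinition;
    import org.apache.atlas.typesystem.types.utils.TypesUtil;

    public class TypeDefinitionExample {
        public static HierarchicalTypeDefinition<ClassType> reportType() {
            // A hypothetical "Report" class type extending the DataSet super type,
            // with one required string attribute.
            return TypesUtil.createClassTypeDef("Report",
                    ImmutableList.of("DataSet"),
                    TypesUtil.createRequiredAttrDef("owner", DataTypes.STRING_TYPE));
        }
    }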
...@@ -16,10 +16,10 @@ ...@@ -16,10 +16,10 @@
* limitations under the License. * limitations under the License.
*/ */
package org.apache.hadoop.metadata.services; package org.apache.atlas.services;
import org.apache.hadoop.metadata.MetadataException; import org.apache.atlas.MetadataException;
import org.apache.hadoop.metadata.typesystem.types.DataTypes; import org.apache.atlas.typesystem.types.DataTypes;
import org.codehaus.jettison.json.JSONObject; import org.codehaus.jettison.json.JSONObject;
import java.util.List; import java.util.List;
......
...@@ -16,18 +16,17 @@ ...@@ -16,18 +16,17 @@
* limitations under the License. * limitations under the License.
*/ */
package org.apache.hadoop.metadata.query package org.apache.atlas.query
import java.util import java.util
import Expressions._
import com.thinkaurelius.titan.core.TitanGraph import com.thinkaurelius.titan.core.TitanGraph
import org.apache.hadoop.metadata.MetadataException import org.apache.atlas.query.Expressions._
import org.apache.hadoop.metadata.typesystem.ITypedStruct import org.apache.atlas.typesystem.ITypedStruct
import org.apache.hadoop.metadata.typesystem.json.{InstanceSerialization, Serialization} import org.apache.atlas.typesystem.json.{InstanceSerialization, Serialization}
import org.apache.hadoop.metadata.typesystem.persistence.{Id, StructInstance} import org.apache.atlas.typesystem.persistence.StructInstance
import org.apache.hadoop.metadata.typesystem.types.{TypeSystem, StructType, DataTypes} import org.apache.atlas.typesystem.types.DataTypes.{MapType, PrimitiveType}
import org.apache.hadoop.metadata.typesystem.types.DataTypes.{MapType, PrimitiveType} import org.apache.atlas.typesystem.types.{DataTypes, StructType, TypeSystem}
/** /**
* Represents a Query to compute the closure based on a relationship between entities of a particular type. * Represents a Query to compute the closure based on a relationship between entities of a particular type.
......
...@@ -16,11 +16,11 @@ ...@@ -16,11 +16,11 @@
* limitations under the License. * limitations under the License.
*/ */
package org.apache.hadoop.metadata.query package org.apache.atlas.query
import org.apache.hadoop.metadata.MetadataException import org.apache.atlas.MetadataException
import org.apache.hadoop.metadata.typesystem.types.DataTypes.{ArrayType, PrimitiveType, TypeCategory} import org.apache.atlas.typesystem.types.DataTypes.{ArrayType, PrimitiveType, TypeCategory}
import org.apache.hadoop.metadata.typesystem.types._ import org.apache.atlas.typesystem.types._
object Expressions { object Expressions {
......
...@@ -16,16 +16,16 @@ ...@@ -16,16 +16,16 @@
* limitations under the License. * limitations under the License.
*/ */
package org.apache.hadoop.metadata.query package org.apache.atlas.query
import com.thinkaurelius.titan.core.TitanVertex import com.thinkaurelius.titan.core.TitanVertex
import com.tinkerpop.blueprints.Direction import com.tinkerpop.blueprints.Direction
import org.apache.hadoop.metadata.query.Expressions.{ComparisonExpression, ExpressionException} import org.apache.atlas.query.Expressions.{ComparisonExpression, ExpressionException}
import org.apache.hadoop.metadata.query.TypeUtils.FieldInfo import org.apache.atlas.query.TypeUtils.FieldInfo
import org.apache.hadoop.metadata.typesystem.persistence.Id import org.apache.atlas.typesystem.persistence.Id
import org.apache.hadoop.metadata.typesystem.types.DataTypes._ import org.apache.atlas.typesystem.types.DataTypes._
import org.apache.hadoop.metadata.typesystem.types._ import org.apache.atlas.typesystem.types._
import org.apache.hadoop.metadata.typesystem.{ITypedInstance, ITypedReferenceableInstance} import org.apache.atlas.typesystem.{ITypedInstance, ITypedReferenceableInstance}
import scala.collection.JavaConversions._ import scala.collection.JavaConversions._
......
...@@ -16,15 +16,15 @@ ...@@ -16,15 +16,15 @@
* limitations under the License. * limitations under the License.
*/ */
package org.apache.hadoop.metadata.query package org.apache.atlas.query
import javax.script.{Bindings, ScriptEngine, ScriptEngineManager} import javax.script.{Bindings, ScriptEngine, ScriptEngineManager}
import com.thinkaurelius.titan.core.TitanGraph import com.thinkaurelius.titan.core.TitanGraph
import com.tinkerpop.pipes.util.structures.Row import com.tinkerpop.pipes.util.structures.Row
import org.apache.hadoop.metadata.query.TypeUtils.ResultWithPathStruct import org.apache.atlas.query.TypeUtils.ResultWithPathStruct
import org.apache.hadoop.metadata.typesystem.json._ import org.apache.atlas.typesystem.json._
import org.apache.hadoop.metadata.typesystem.types._ import org.apache.atlas.typesystem.types._
import org.json4s._ import org.json4s._
import org.json4s.native.Serialization._ import org.json4s.native.Serialization._
......
...@@ -16,11 +16,10 @@ ...@@ -16,11 +16,10 @@
* limitations under the License. * limitations under the License.
*/ */
package org.apache.hadoop.metadata.query package org.apache.atlas.query
import org.apache.hadoop.metadata.query.Expressions._ import org.apache.atlas.query.Expressions._
import org.apache.hadoop.metadata.typesystem.types.DataTypes.TypeCategory import org.apache.atlas.typesystem.types.DataTypes.TypeCategory
import org.apache.hadoop.metadata.typesystem.types.TypeSystem
import scala.collection.mutable import scala.collection.mutable
import scala.collection.mutable.ArrayBuffer import scala.collection.mutable.ArrayBuffer
......
...@@ -16,9 +16,9 @@ ...@@ -16,9 +16,9 @@
* limitations under the License. * limitations under the License.
*/ */
package org.apache.hadoop.metadata.query package org.apache.atlas.query
import org.apache.hadoop.metadata.query.Expressions._ import org.apache.atlas.query.Expressions._
import scala.util.parsing.combinator.lexical.StdLexical import scala.util.parsing.combinator.lexical.StdLexical
import scala.util.parsing.combinator.syntactical.StandardTokenParsers import scala.util.parsing.combinator.syntactical.StandardTokenParsers
......
...@@ -16,14 +16,14 @@ ...@@ -16,14 +16,14 @@
* limitations under the License. * limitations under the License.
*/ */
package org.apache.hadoop.metadata.query package org.apache.atlas.query
import com.thinkaurelius.titan.core.TitanGraph import com.thinkaurelius.titan.core.TitanGraph
import org.apache.hadoop.metadata.query.Expressions._ import org.apache.atlas.query.Expressions._
import org.slf4j.{LoggerFactory, Logger} import org.slf4j.{Logger, LoggerFactory}
object QueryProcessor { object QueryProcessor {
val LOG : Logger = LoggerFactory.getLogger("org.apache.hadoop.metadata.query.QueryProcessor") val LOG : Logger = LoggerFactory.getLogger("org.apache.atlas.query.QueryProcessor")
def evaluate(e: Expression, g: TitanGraph, gP : GraphPersistenceStrategies = GraphPersistenceStrategy1): def evaluate(e: Expression, g: TitanGraph, gP : GraphPersistenceStrategies = GraphPersistenceStrategy1):
GremlinQueryResult = { GremlinQueryResult = {
......
...@@ -16,16 +16,16 @@ ...@@ -16,16 +16,16 @@
* limitations under the License. * limitations under the License.
*/ */
package org.apache.hadoop.metadata.query package org.apache.atlas.query
import org.apache.hadoop.metadata.query.Expressions._ import org.apache.atlas.query.Expressions._
import org.apache.hadoop.metadata.typesystem.types.IDataType import org.apache.atlas.typesystem.types.IDataType
class Resolver(srcExpr: Option[Expression] = None, aliases: Map[String, Expression] = Map(), class Resolver(srcExpr: Option[Expression] = None, aliases: Map[String, Expression] = Map(),
connectClassExprToSrc: Boolean = false) connectClassExprToSrc: Boolean = false)
extends PartialFunction[Expression, Expression] { extends PartialFunction[Expression, Expression] {
import org.apache.hadoop.metadata.query.TypeUtils._ import org.apache.atlas.query.TypeUtils._
def isDefinedAt(x: Expression) = true def isDefinedAt(x: Expression) = true
......
...@@ -16,15 +16,15 @@ ...@@ -16,15 +16,15 @@
* limitations under the License. * limitations under the License.
*/ */
package org.apache.hadoop.metadata.query package org.apache.atlas.query
import java.util import java.util
import java.util.concurrent.atomic.AtomicInteger import java.util.concurrent.atomic.AtomicInteger
import org.apache.hadoop.metadata.MetadataException import org.apache.atlas.MetadataException
import org.apache.hadoop.metadata.query.Expressions.{SelectExpression, PathExpression} import org.apache.atlas.query.Expressions.{PathExpression, SelectExpression}
import org.apache.hadoop.metadata.typesystem.types.DataTypes.{ArrayType, PrimitiveType, TypeCategory} import org.apache.atlas.typesystem.types.DataTypes.{ArrayType, PrimitiveType, TypeCategory}
import org.apache.hadoop.metadata.typesystem.types._ import org.apache.atlas.typesystem.types._
object TypeUtils { object TypeUtils {
val typSystem = TypeSystem.getInstance() val typSystem = TypeSystem.getInstance()
......
...@@ -16,10 +16,10 @@ ...@@ -16,10 +16,10 @@
* limitations under the License. * limitations under the License.
*/ */
package org.apache.hadoop.metadata; package org.apache.atlas;
import com.thinkaurelius.titan.core.TitanGraph; import com.thinkaurelius.titan.core.TitanGraph;
import org.apache.hadoop.metadata.repository.graph.GraphProvider; import org.apache.atlas.repository.graph.GraphProvider;
import org.testng.Assert; import org.testng.Assert;
import org.testng.annotations.Guice; import org.testng.annotations.Guice;
import org.testng.annotations.Test; import org.testng.annotations.Test;
......
...@@ -16,36 +16,36 @@ ...@@ -16,36 +16,36 @@
* limitations under the License. * limitations under the License.
*/ */
package org.apache.hadoop.metadata; package org.apache.atlas;
import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableList;
import com.thinkaurelius.titan.core.TitanGraph; import com.thinkaurelius.titan.core.TitanGraph;
import com.tinkerpop.blueprints.Edge; import com.tinkerpop.blueprints.Edge;
import com.tinkerpop.blueprints.Vertex; import com.tinkerpop.blueprints.Vertex;
import com.tinkerpop.blueprints.util.io.graphson.GraphSONWriter; import com.tinkerpop.blueprints.util.io.graphson.GraphSONWriter;
import org.apache.hadoop.metadata.repository.graph.GraphHelper; import org.apache.atlas.repository.graph.GraphHelper;
import org.apache.hadoop.metadata.typesystem.ITypedReferenceableInstance; import org.apache.atlas.typesystem.ITypedReferenceableInstance;
import org.apache.hadoop.metadata.typesystem.Referenceable; import org.apache.atlas.typesystem.Referenceable;
import org.apache.hadoop.metadata.typesystem.types.AttributeDefinition; import org.apache.atlas.typesystem.types.AttributeDefinition;
import org.apache.hadoop.metadata.typesystem.types.ClassType; import org.apache.atlas.typesystem.types.ClassType;
import org.apache.hadoop.metadata.typesystem.types.DataTypes; import org.apache.atlas.typesystem.types.DataTypes;
import org.apache.hadoop.metadata.typesystem.types.EnumType; import org.apache.atlas.typesystem.types.EnumType;
import org.apache.hadoop.metadata.typesystem.types.EnumTypeDefinition; import org.apache.atlas.typesystem.types.EnumTypeDefinition;
import org.apache.hadoop.metadata.typesystem.types.EnumValue; import org.apache.atlas.typesystem.types.EnumValue;
import org.apache.hadoop.metadata.typesystem.types.HierarchicalTypeDefinition; import org.apache.atlas.typesystem.types.HierarchicalTypeDefinition;
import org.apache.hadoop.metadata.typesystem.types.Multiplicity; import org.apache.atlas.typesystem.types.Multiplicity;
import org.apache.hadoop.metadata.typesystem.types.StructTypeDefinition; import org.apache.atlas.typesystem.types.StructTypeDefinition;
import org.apache.hadoop.metadata.typesystem.types.TraitType; import org.apache.atlas.typesystem.types.TraitType;
import org.apache.hadoop.metadata.typesystem.types.TypeSystem; import org.apache.atlas.typesystem.types.TypeSystem;
import org.testng.Assert; import org.testng.Assert;
import java.io.File; import java.io.File;
import static org.apache.hadoop.metadata.typesystem.types.utils.TypesUtil.createClassTypeDef; import static org.apache.atlas.typesystem.types.utils.TypesUtil.createClassTypeDef;
import static org.apache.hadoop.metadata.typesystem.types.utils.TypesUtil.createOptionalAttrDef; import static org.apache.atlas.typesystem.types.utils.TypesUtil.createOptionalAttrDef;
import static org.apache.hadoop.metadata.typesystem.types.utils.TypesUtil.createRequiredAttrDef; import static org.apache.atlas.typesystem.types.utils.TypesUtil.createRequiredAttrDef;
import static org.apache.hadoop.metadata.typesystem.types.utils.TypesUtil.createStructTypeDef; import static org.apache.atlas.typesystem.types.utils.TypesUtil.createStructTypeDef;
import static org.apache.hadoop.metadata.typesystem.types.utils.TypesUtil.createTraitTypeDef; import static org.apache.atlas.typesystem.types.utils.TypesUtil.createTraitTypeDef;
/** /**
* Test utility class. * Test utility class.
......
...@@ -16,28 +16,28 @@ ...@@ -16,28 +16,28 @@
* limitations under the License. * limitations under the License.
*/ */
package org.apache.hadoop.metadata.discovery; package org.apache.atlas.discovery;
import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableList;
import com.thinkaurelius.titan.core.TitanGraph; import com.thinkaurelius.titan.core.TitanGraph;
import com.tinkerpop.blueprints.Edge; import com.tinkerpop.blueprints.Edge;
import com.tinkerpop.blueprints.Vertex; import com.tinkerpop.blueprints.Vertex;
import org.apache.atlas.RepositoryMetadataModule;
import org.apache.atlas.TestUtils;
import org.apache.atlas.discovery.graph.GraphBackedDiscoveryService;
import org.apache.atlas.query.HiveTitanSample;
import org.apache.atlas.query.QueryTestsUtils;
import org.apache.atlas.repository.graph.GraphBackedMetadataRepository;
import org.apache.atlas.repository.graph.GraphHelper;
import org.apache.atlas.repository.graph.GraphProvider;
import org.apache.atlas.typesystem.ITypedReferenceableInstance;
import org.apache.atlas.typesystem.Referenceable;
import org.apache.atlas.typesystem.types.ClassType;
import org.apache.atlas.typesystem.types.DataTypes;
import org.apache.atlas.typesystem.types.HierarchicalTypeDefinition;
import org.apache.atlas.typesystem.types.Multiplicity;
import org.apache.atlas.typesystem.types.TypeSystem;
import org.apache.commons.io.FileUtils; import org.apache.commons.io.FileUtils;
import org.apache.hadoop.metadata.RepositoryMetadataModule;
import org.apache.hadoop.metadata.TestUtils;
import org.apache.hadoop.metadata.discovery.graph.GraphBackedDiscoveryService;
import org.apache.hadoop.metadata.query.HiveTitanSample;
import org.apache.hadoop.metadata.query.QueryTestsUtils;
import org.apache.hadoop.metadata.repository.graph.GraphBackedMetadataRepository;
import org.apache.hadoop.metadata.repository.graph.GraphHelper;
import org.apache.hadoop.metadata.repository.graph.GraphProvider;
import org.apache.hadoop.metadata.typesystem.ITypedReferenceableInstance;
import org.apache.hadoop.metadata.typesystem.Referenceable;
import org.apache.hadoop.metadata.typesystem.types.ClassType;
import org.apache.hadoop.metadata.typesystem.types.DataTypes;
import org.apache.hadoop.metadata.typesystem.types.HierarchicalTypeDefinition;
import org.apache.hadoop.metadata.typesystem.types.Multiplicity;
import org.apache.hadoop.metadata.typesystem.types.TypeSystem;
import org.codehaus.jettison.json.JSONArray; import org.codehaus.jettison.json.JSONArray;
import org.codehaus.jettison.json.JSONObject; import org.codehaus.jettison.json.JSONObject;
import org.testng.Assert; import org.testng.Assert;
...@@ -54,9 +54,9 @@ import javax.script.ScriptEngineManager; ...@@ -54,9 +54,9 @@ import javax.script.ScriptEngineManager;
import javax.script.ScriptException; import javax.script.ScriptException;
import java.io.File; import java.io.File;
import static org.apache.hadoop.metadata.typesystem.types.utils.TypesUtil.createClassTypeDef; import static org.apache.atlas.typesystem.types.utils.TypesUtil.createClassTypeDef;
import static org.apache.hadoop.metadata.typesystem.types.utils.TypesUtil.createOptionalAttrDef; import static org.apache.atlas.typesystem.types.utils.TypesUtil.createOptionalAttrDef;
import static org.apache.hadoop.metadata.typesystem.types.utils.TypesUtil.createRequiredAttrDef; import static org.apache.atlas.typesystem.types.utils.TypesUtil.createRequiredAttrDef;
@Guice(modules = RepositoryMetadataModule.class) @Guice(modules = RepositoryMetadataModule.class)
public class GraphBackedDiscoveryServiceTest { public class GraphBackedDiscoveryServiceTest {
......
...@@ -16,30 +16,30 @@ ...@@ -16,30 +16,30 @@
* limitations under the License. * limitations under the License.
*/ */
package org.apache.hadoop.metadata.discovery; package org.apache.atlas.discovery;
import com.google.common.base.Preconditions; import com.google.common.base.Preconditions;
import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableList;
import org.apache.hadoop.metadata.RepositoryMetadataModule; import org.apache.atlas.RepositoryMetadataModule;
import org.apache.hadoop.metadata.discovery.graph.GraphBackedDiscoveryService; import org.apache.atlas.discovery.graph.GraphBackedDiscoveryService;
import org.apache.hadoop.metadata.repository.EntityNotFoundException; import org.apache.atlas.repository.EntityNotFoundException;
import org.apache.hadoop.metadata.services.DefaultMetadataService; import org.apache.atlas.services.DefaultMetadataService;
import org.apache.hadoop.metadata.typesystem.Referenceable; import org.apache.atlas.typesystem.Referenceable;
import org.apache.hadoop.metadata.typesystem.TypesDef; import org.apache.atlas.typesystem.TypesDef;
import org.apache.hadoop.metadata.typesystem.json.InstanceSerialization; import org.apache.atlas.typesystem.json.InstanceSerialization;
import org.apache.hadoop.metadata.typesystem.json.TypesSerialization; import org.apache.atlas.typesystem.json.TypesSerialization;
import org.apache.hadoop.metadata.typesystem.persistence.Id; import org.apache.atlas.typesystem.persistence.Id;
import org.apache.hadoop.metadata.typesystem.types.AttributeDefinition; import org.apache.atlas.typesystem.types.AttributeDefinition;
import org.apache.hadoop.metadata.typesystem.types.ClassType; import org.apache.atlas.typesystem.types.ClassType;
import org.apache.hadoop.metadata.typesystem.types.DataTypes; import org.apache.atlas.typesystem.types.DataTypes;
import org.apache.hadoop.metadata.typesystem.types.EnumTypeDefinition; import org.apache.atlas.typesystem.types.EnumTypeDefinition;
import org.apache.hadoop.metadata.typesystem.types.HierarchicalTypeDefinition; import org.apache.atlas.typesystem.types.HierarchicalTypeDefinition;
import org.apache.hadoop.metadata.typesystem.types.IDataType; import org.apache.atlas.typesystem.types.IDataType;
import org.apache.hadoop.metadata.typesystem.types.Multiplicity; import org.apache.atlas.typesystem.types.Multiplicity;
import org.apache.hadoop.metadata.typesystem.types.StructTypeDefinition; import org.apache.atlas.typesystem.types.StructTypeDefinition;
import org.apache.hadoop.metadata.typesystem.types.TraitType; import org.apache.atlas.typesystem.types.TraitType;
import org.apache.hadoop.metadata.typesystem.types.TypeUtils; import org.apache.atlas.typesystem.types.TypeUtils;
import org.apache.hadoop.metadata.typesystem.types.utils.TypesUtil; import org.apache.atlas.typesystem.types.utils.TypesUtil;
import org.codehaus.jettison.json.JSONArray; import org.codehaus.jettison.json.JSONArray;
import org.codehaus.jettison.json.JSONObject; import org.codehaus.jettison.json.JSONObject;
import org.testng.Assert; import org.testng.Assert;
...@@ -312,7 +312,7 @@ public class HiveLineageServiceTest { ...@@ -312,7 +312,7 @@ public class HiveLineageServiceTest {
attrDef("description", DataTypes.STRING_TYPE), attrDef("description", DataTypes.STRING_TYPE),
attrDef("locationUri", DataTypes.STRING_TYPE), attrDef("locationUri", DataTypes.STRING_TYPE),
attrDef("owner", DataTypes.STRING_TYPE), attrDef("owner", DataTypes.STRING_TYPE),
attrDef("createTime", DataTypes.INT_TYPE) attrDef("createTime", DataTypes.LONG_TYPE)
); );
HierarchicalTypeDefinition<ClassType> storageDescClsDef = HierarchicalTypeDefinition<ClassType> storageDescClsDef =
...@@ -334,14 +334,15 @@ public class HiveLineageServiceTest { ...@@ -334,14 +334,15 @@ public class HiveLineageServiceTest {
HierarchicalTypeDefinition<ClassType> tblClsDef = HierarchicalTypeDefinition<ClassType> tblClsDef =
TypesUtil.createClassTypeDef(HIVE_TABLE_TYPE, ImmutableList.of("DataSet"), TypesUtil.createClassTypeDef(HIVE_TABLE_TYPE, ImmutableList.of("DataSet"),
attrDef("owner", DataTypes.STRING_TYPE), attrDef("owner", DataTypes.STRING_TYPE),
attrDef("createTime", DataTypes.INT_TYPE), attrDef("createTime", DataTypes.LONG_TYPE),
attrDef("lastAccessTime", DataTypes.INT_TYPE), attrDef("lastAccessTime", DataTypes.LONG_TYPE),
attrDef("tableType", DataTypes.STRING_TYPE), attrDef("tableType", DataTypes.STRING_TYPE),
attrDef("temporary", DataTypes.BOOLEAN_TYPE), attrDef("temporary", DataTypes.BOOLEAN_TYPE),
new AttributeDefinition("db", DATABASE_TYPE, new AttributeDefinition("db", DATABASE_TYPE,
Multiplicity.REQUIRED, false, null), Multiplicity.REQUIRED, false, null),
new AttributeDefinition("sd", STORAGE_DESC_TYPE, // todo - uncomment this, something is broken
Multiplicity.REQUIRED, true, null), // new AttributeDefinition("sd", STORAGE_DESC_TYPE,
// Multiplicity.REQUIRED, true, null),
new AttributeDefinition("columns", new AttributeDefinition("columns",
DataTypes.arrayTypeName(COLUMN_TYPE), DataTypes.arrayTypeName(COLUMN_TYPE),
Multiplicity.COLLECTION, true, null) Multiplicity.COLLECTION, true, null)
...@@ -350,8 +351,8 @@ public class HiveLineageServiceTest { ...@@ -350,8 +351,8 @@ public class HiveLineageServiceTest {
HierarchicalTypeDefinition<ClassType> loadProcessClsDef = HierarchicalTypeDefinition<ClassType> loadProcessClsDef =
TypesUtil.createClassTypeDef(HIVE_PROCESS_TYPE, ImmutableList.of("Process"), TypesUtil.createClassTypeDef(HIVE_PROCESS_TYPE, ImmutableList.of("Process"),
attrDef("userName", DataTypes.STRING_TYPE), attrDef("userName", DataTypes.STRING_TYPE),
attrDef("startTime", DataTypes.INT_TYPE), attrDef("startTime", DataTypes.LONG_TYPE),
attrDef("endTime", DataTypes.INT_TYPE), attrDef("endTime", DataTypes.LONG_TYPE),
attrDef("queryText", DataTypes.STRING_TYPE, Multiplicity.REQUIRED), attrDef("queryText", DataTypes.STRING_TYPE, Multiplicity.REQUIRED),
attrDef("queryPlan", DataTypes.STRING_TYPE, Multiplicity.REQUIRED), attrDef("queryPlan", DataTypes.STRING_TYPE, Multiplicity.REQUIRED),
attrDef("queryId", DataTypes.STRING_TYPE, Multiplicity.REQUIRED), attrDef("queryId", DataTypes.STRING_TYPE, Multiplicity.REQUIRED),
...@@ -528,12 +529,14 @@ public class HiveLineageServiceTest { ...@@ -528,12 +529,14 @@ public class HiveLineageServiceTest {
referenceable.set("description", description); referenceable.set("description", description);
referenceable.set("owner", owner); referenceable.set("owner", owner);
referenceable.set("tableType", tableType); referenceable.set("tableType", tableType);
referenceable.set("temporary", false);
referenceable.set("createTime", System.currentTimeMillis()); referenceable.set("createTime", System.currentTimeMillis());
referenceable.set("lastAccessTime", System.currentTimeMillis()); referenceable.set("lastAccessTime", System.currentTimeMillis());
referenceable.set("retention", System.currentTimeMillis()); referenceable.set("retention", System.currentTimeMillis());
referenceable.set("db", dbId); referenceable.set("db", dbId);
referenceable.set("sd", sd); // todo - uncomment this, something is broken
// referenceable.set("sd", sd);
referenceable.set("columns", columns); referenceable.set("columns", columns);
return createInstance(referenceable); return createInstance(referenceable);
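One substantive change in this file, beyond the package rename: createTime, lastAccessTime, startTime and endTime move from DataTypes.INT_TYPE to DataTypes.LONG_TYPE. The test fills these attributes with System.currentTimeMillis(), which returns a 64-bit long, so an int-width attribute would silently truncate the value. A minimal standalone sketch of the failure mode (the object name is illustrative):

    object TimestampWidthSketch {
      def main(args: Array[String]): Unit = {
        val now: Long = System.currentTimeMillis() // ~1.4e12 in 2015, far above Int.MaxValue (2147483647)
        val truncated: Int = now.toInt             // narrowing conversion wraps silently
        println(s"as long: $now")
        println(s"as int:  $truncated")            // typically a meaningless negative number
      }
    }

Run as-is, the int rendition prints a wrapped, usually negative number, which is why the wider type is required for epoch-millisecond timestamps.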
......
...@@ -16,28 +16,28 @@ ...@@ -16,28 +16,28 @@
* limitations under the License. * limitations under the License.
*/ */
package org.apache.hadoop.metadata.repository; package org.apache.atlas.repository;
import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableList;
import com.google.common.collect.Lists; import com.google.common.collect.Lists;
import com.google.common.collect.Maps; import com.google.common.collect.Maps;
import org.apache.hadoop.metadata.MetadataException; import org.apache.atlas.MetadataException;
import org.apache.hadoop.metadata.repository.memory.MemRepository; import org.apache.atlas.repository.memory.MemRepository;
import org.apache.hadoop.metadata.typesystem.ITypedReferenceableInstance; import org.apache.atlas.typesystem.ITypedReferenceableInstance;
import org.apache.hadoop.metadata.typesystem.Referenceable; import org.apache.atlas.typesystem.Referenceable;
import org.apache.hadoop.metadata.typesystem.Struct; import org.apache.atlas.typesystem.Struct;
import org.apache.hadoop.metadata.typesystem.types.AttributeDefinition; import org.apache.atlas.typesystem.types.AttributeDefinition;
import org.apache.hadoop.metadata.typesystem.types.ClassType; import org.apache.atlas.typesystem.types.ClassType;
import org.apache.hadoop.metadata.typesystem.types.DataTypes; import org.apache.atlas.typesystem.types.DataTypes;
import org.apache.hadoop.metadata.typesystem.types.HierarchicalType; import org.apache.atlas.typesystem.types.HierarchicalType;
import org.apache.hadoop.metadata.typesystem.types.HierarchicalTypeDefinition; import org.apache.atlas.typesystem.types.HierarchicalTypeDefinition;
import org.apache.hadoop.metadata.typesystem.types.IDataType; import org.apache.atlas.typesystem.types.IDataType;
import org.apache.hadoop.metadata.typesystem.types.Multiplicity; import org.apache.atlas.typesystem.types.Multiplicity;
import org.apache.hadoop.metadata.typesystem.types.StructType; import org.apache.atlas.typesystem.types.StructType;
import org.apache.hadoop.metadata.typesystem.types.StructTypeDefinition; import org.apache.atlas.typesystem.types.StructTypeDefinition;
import org.apache.hadoop.metadata.typesystem.types.TraitType; import org.apache.atlas.typesystem.types.TraitType;
import org.apache.hadoop.metadata.typesystem.types.TypeSystem; import org.apache.atlas.typesystem.types.TypeSystem;
import org.apache.hadoop.metadata.typesystem.types.utils.TypesUtil; import org.apache.atlas.typesystem.types.utils.TypesUtil;
import org.junit.Before; import org.junit.Before;
import java.math.BigDecimal; import java.math.BigDecimal;
......
...@@ -16,38 +16,38 @@ ...@@ -16,38 +16,38 @@
* limitations under the License. * limitations under the License.
*/ */
package org.apache.hadoop.metadata.repository.graph; package org.apache.atlas.repository.graph;
import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableList;
import com.thinkaurelius.titan.core.TitanGraph; import com.thinkaurelius.titan.core.TitanGraph;
import com.tinkerpop.blueprints.Compare; import com.tinkerpop.blueprints.Compare;
import com.tinkerpop.blueprints.GraphQuery; import com.tinkerpop.blueprints.GraphQuery;
import com.tinkerpop.blueprints.Vertex; import com.tinkerpop.blueprints.Vertex;
import org.apache.atlas.RepositoryMetadataModule;
import org.apache.atlas.TestUtils;
import org.apache.atlas.discovery.graph.GraphBackedDiscoveryService;
import org.apache.atlas.repository.BaseTest;
import org.apache.atlas.repository.Constants;
import org.apache.atlas.repository.EntityNotFoundException;
import org.apache.atlas.repository.RepositoryException;
import org.apache.atlas.typesystem.IStruct;
import org.apache.atlas.typesystem.ITypedReferenceableInstance;
import org.apache.atlas.typesystem.ITypedStruct;
import org.apache.atlas.typesystem.Referenceable;
import org.apache.atlas.typesystem.Struct;
import org.apache.atlas.typesystem.persistence.Id;
import org.apache.atlas.typesystem.types.AttributeDefinition;
import org.apache.atlas.typesystem.types.ClassType;
import org.apache.atlas.typesystem.types.DataTypes;
import org.apache.atlas.typesystem.types.EnumTypeDefinition;
import org.apache.atlas.typesystem.types.EnumValue;
import org.apache.atlas.typesystem.types.HierarchicalTypeDefinition;
import org.apache.atlas.typesystem.types.Multiplicity;
import org.apache.atlas.typesystem.types.StructTypeDefinition;
import org.apache.atlas.typesystem.types.TraitType;
import org.apache.atlas.typesystem.types.TypeSystem;
import org.apache.atlas.typesystem.types.utils.TypesUtil;
import org.apache.commons.lang.RandomStringUtils; import org.apache.commons.lang.RandomStringUtils;
import org.apache.hadoop.metadata.RepositoryMetadataModule;
import org.apache.hadoop.metadata.TestUtils;
import org.apache.hadoop.metadata.discovery.graph.GraphBackedDiscoveryService;
import org.apache.hadoop.metadata.repository.BaseTest;
import org.apache.hadoop.metadata.repository.Constants;
import org.apache.hadoop.metadata.repository.EntityNotFoundException;
import org.apache.hadoop.metadata.repository.RepositoryException;
import org.apache.hadoop.metadata.typesystem.IStruct;
import org.apache.hadoop.metadata.typesystem.ITypedReferenceableInstance;
import org.apache.hadoop.metadata.typesystem.ITypedStruct;
import org.apache.hadoop.metadata.typesystem.Referenceable;
import org.apache.hadoop.metadata.typesystem.Struct;
import org.apache.hadoop.metadata.typesystem.persistence.Id;
import org.apache.hadoop.metadata.typesystem.types.AttributeDefinition;
import org.apache.hadoop.metadata.typesystem.types.ClassType;
import org.apache.hadoop.metadata.typesystem.types.DataTypes;
import org.apache.hadoop.metadata.typesystem.types.EnumTypeDefinition;
import org.apache.hadoop.metadata.typesystem.types.EnumValue;
import org.apache.hadoop.metadata.typesystem.types.HierarchicalTypeDefinition;
import org.apache.hadoop.metadata.typesystem.types.Multiplicity;
import org.apache.hadoop.metadata.typesystem.types.StructTypeDefinition;
import org.apache.hadoop.metadata.typesystem.types.TraitType;
import org.apache.hadoop.metadata.typesystem.types.TypeSystem;
import org.apache.hadoop.metadata.typesystem.types.utils.TypesUtil;
import org.codehaus.jettison.json.JSONArray; import org.codehaus.jettison.json.JSONArray;
import org.codehaus.jettison.json.JSONObject; import org.codehaus.jettison.json.JSONObject;
import org.testng.Assert; import org.testng.Assert;
......
...@@ -16,7 +16,7 @@ ...@@ -16,7 +16,7 @@
* limitations under the License. * limitations under the License.
*/ */
package org.apache.hadoop.metadata.repository.graph; package org.apache.atlas.repository.graph;
import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableList;
import com.thinkaurelius.titan.core.TitanGraph; import com.thinkaurelius.titan.core.TitanGraph;
...@@ -24,24 +24,24 @@ import com.thinkaurelius.titan.core.TitanIndexQuery; ...@@ -24,24 +24,24 @@ import com.thinkaurelius.titan.core.TitanIndexQuery;
import com.tinkerpop.blueprints.Compare; import com.tinkerpop.blueprints.Compare;
import com.tinkerpop.blueprints.GraphQuery; import com.tinkerpop.blueprints.GraphQuery;
import com.tinkerpop.blueprints.Vertex; import com.tinkerpop.blueprints.Vertex;
import org.apache.hadoop.metadata.RepositoryMetadataModule; import org.apache.atlas.RepositoryMetadataModule;
import org.apache.hadoop.metadata.repository.Constants; import org.apache.atlas.repository.Constants;
import org.apache.hadoop.metadata.typesystem.ITypedReferenceableInstance; import org.apache.atlas.typesystem.ITypedReferenceableInstance;
import org.apache.hadoop.metadata.typesystem.Referenceable; import org.apache.atlas.typesystem.Referenceable;
import org.apache.hadoop.metadata.typesystem.Struct; import org.apache.atlas.typesystem.Struct;
import org.apache.hadoop.metadata.typesystem.types.AttributeDefinition; import org.apache.atlas.typesystem.types.AttributeDefinition;
import org.apache.hadoop.metadata.typesystem.types.ClassType; import org.apache.atlas.typesystem.types.ClassType;
import org.apache.hadoop.metadata.typesystem.types.DataTypes; import org.apache.atlas.typesystem.types.DataTypes;
import org.apache.hadoop.metadata.typesystem.types.EnumType; import org.apache.atlas.typesystem.types.EnumType;
import org.apache.hadoop.metadata.typesystem.types.EnumTypeDefinition; import org.apache.atlas.typesystem.types.EnumTypeDefinition;
import org.apache.hadoop.metadata.typesystem.types.EnumValue; import org.apache.atlas.typesystem.types.EnumValue;
import org.apache.hadoop.metadata.typesystem.types.HierarchicalTypeDefinition; import org.apache.atlas.typesystem.types.HierarchicalTypeDefinition;
import org.apache.hadoop.metadata.typesystem.types.IDataType; import org.apache.atlas.typesystem.types.IDataType;
import org.apache.hadoop.metadata.typesystem.types.Multiplicity; import org.apache.atlas.typesystem.types.Multiplicity;
import org.apache.hadoop.metadata.typesystem.types.StructTypeDefinition; import org.apache.atlas.typesystem.types.StructTypeDefinition;
import org.apache.hadoop.metadata.typesystem.types.TraitType; import org.apache.atlas.typesystem.types.TraitType;
import org.apache.hadoop.metadata.typesystem.types.TypeSystem; import org.apache.atlas.typesystem.types.TypeSystem;
import org.apache.hadoop.metadata.typesystem.types.utils.TypesUtil; import org.apache.atlas.typesystem.types.utils.TypesUtil;
import org.testng.annotations.BeforeClass; import org.testng.annotations.BeforeClass;
import org.testng.annotations.Guice; import org.testng.annotations.Guice;
import org.testng.annotations.Test; import org.testng.annotations.Test;
......
...@@ -16,15 +16,15 @@ ...@@ -16,15 +16,15 @@
* limitations under the License. * limitations under the License.
*/ */
package org.apache.hadoop.metadata.repository.memory; package org.apache.atlas.repository.memory;
import org.apache.hadoop.metadata.MetadataException; import org.apache.atlas.MetadataException;
import org.apache.hadoop.metadata.repository.BaseTest; import org.apache.atlas.repository.BaseTest;
import org.apache.hadoop.metadata.typesystem.ITypedReferenceableInstance; import org.apache.atlas.typesystem.ITypedReferenceableInstance;
import org.apache.hadoop.metadata.typesystem.Referenceable; import org.apache.atlas.typesystem.Referenceable;
import org.apache.hadoop.metadata.typesystem.types.ClassType; import org.apache.atlas.typesystem.types.ClassType;
import org.apache.hadoop.metadata.typesystem.types.Multiplicity; import org.apache.atlas.typesystem.types.Multiplicity;
import org.apache.hadoop.metadata.typesystem.types.TypeSystem; import org.apache.atlas.typesystem.types.TypeSystem;
import org.junit.Assert; import org.junit.Assert;
import org.junit.Before; import org.junit.Before;
import org.junit.Test; import org.junit.Test;
......
...@@ -16,28 +16,28 @@ ...@@ -16,28 +16,28 @@
* limitations under the License. * limitations under the License.
*/ */
package org.apache.hadoop.metadata.repository.memory; package org.apache.atlas.repository.memory;
import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableList;
import com.google.common.collect.Lists; import com.google.common.collect.Lists;
import com.google.common.collect.Maps; import com.google.common.collect.Maps;
import org.apache.hadoop.metadata.MetadataException; import org.apache.atlas.MetadataException;
import org.apache.hadoop.metadata.typesystem.json.Serialization$; import org.apache.atlas.repository.BaseTest;
import org.apache.hadoop.metadata.repository.BaseTest; import org.apache.atlas.typesystem.IReferenceableInstance;
import org.apache.hadoop.metadata.typesystem.IReferenceableInstance; import org.apache.atlas.typesystem.ITypedReferenceableInstance;
import org.apache.hadoop.metadata.typesystem.ITypedReferenceableInstance; import org.apache.atlas.typesystem.ITypedStruct;
import org.apache.hadoop.metadata.typesystem.ITypedStruct; import org.apache.atlas.typesystem.Referenceable;
import org.apache.hadoop.metadata.typesystem.Referenceable; import org.apache.atlas.typesystem.Struct;
import org.apache.hadoop.metadata.typesystem.Struct; import org.apache.atlas.typesystem.json.Serialization$;
import org.apache.hadoop.metadata.typesystem.types.ClassType; import org.apache.atlas.typesystem.types.ClassType;
import org.apache.hadoop.metadata.typesystem.types.DataTypes; import org.apache.atlas.typesystem.types.DataTypes;
import org.apache.hadoop.metadata.typesystem.types.EnumType; import org.apache.atlas.typesystem.types.EnumType;
import org.apache.hadoop.metadata.typesystem.types.EnumValue; import org.apache.atlas.typesystem.types.EnumValue;
import org.apache.hadoop.metadata.typesystem.types.HierarchicalType; import org.apache.atlas.typesystem.types.HierarchicalType;
import org.apache.hadoop.metadata.typesystem.types.Multiplicity; import org.apache.atlas.typesystem.types.Multiplicity;
import org.apache.hadoop.metadata.typesystem.types.StructType; import org.apache.atlas.typesystem.types.StructType;
import org.apache.hadoop.metadata.typesystem.types.TypeSystem; import org.apache.atlas.typesystem.types.TypeSystem;
import org.apache.hadoop.metadata.typesystem.types.utils.TypesUtil; import org.apache.atlas.typesystem.types.utils.TypesUtil;
import org.junit.Assert; import org.junit.Assert;
import org.junit.Before; import org.junit.Before;
import org.junit.Test; import org.junit.Test;
......
...@@ -16,24 +16,27 @@ ...@@ -16,24 +16,27 @@
* limitations under the License. * limitations under the License.
*/ */
package org.apache.hadoop.metadata.repository.memory; package org.apache.atlas.repository.memory;
import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableList;
import org.apache.hadoop.metadata.MetadataException; import org.apache.atlas.MetadataException;
import org.apache.hadoop.metadata.typesystem.*; import org.apache.atlas.repository.BaseTest;
import org.apache.hadoop.metadata.typesystem.json.InstanceSerialization$; import org.apache.atlas.typesystem.ITypedReferenceableInstance;
import org.apache.hadoop.metadata.typesystem.json.Serialization$; import org.apache.atlas.typesystem.Referenceable;
import org.apache.hadoop.metadata.typesystem.json.TypesSerialization$; import org.apache.atlas.typesystem.Struct;
import org.apache.hadoop.metadata.repository.BaseTest; import org.apache.atlas.typesystem.TypesDef;
import org.apache.hadoop.metadata.typesystem.types.AttributeDefinition; import org.apache.atlas.typesystem.json.InstanceSerialization$;
import org.apache.hadoop.metadata.typesystem.types.ClassType; import org.apache.atlas.typesystem.json.Serialization$;
import org.apache.hadoop.metadata.typesystem.types.DataTypes; import org.apache.atlas.typesystem.json.TypesSerialization$;
import org.apache.hadoop.metadata.typesystem.types.HierarchicalTypeDefinition; import org.apache.atlas.typesystem.types.AttributeDefinition;
import org.apache.hadoop.metadata.typesystem.types.Multiplicity; import org.apache.atlas.typesystem.types.ClassType;
import org.apache.hadoop.metadata.typesystem.types.StructTypeDefinition; import org.apache.atlas.typesystem.types.DataTypes;
import org.apache.hadoop.metadata.typesystem.types.TraitType; import org.apache.atlas.typesystem.types.HierarchicalTypeDefinition;
import org.apache.hadoop.metadata.typesystem.types.TypeSystem; import org.apache.atlas.typesystem.types.Multiplicity;
import org.apache.hadoop.metadata.typesystem.types.utils.TypesUtil; import org.apache.atlas.typesystem.types.StructTypeDefinition;
import org.apache.atlas.typesystem.types.TraitType;
import org.apache.atlas.typesystem.types.TypeSystem;
import org.apache.atlas.typesystem.types.utils.TypesUtil;
import org.junit.Test; import org.junit.Test;
import java.util.ArrayList; import java.util.ArrayList;
......
...@@ -16,15 +16,15 @@ ...@@ -16,15 +16,15 @@
* limitations under the License. * limitations under the License.
*/ */
package org.apache.hadoop.metadata.repository.memory; package org.apache.atlas.repository.memory;
import org.apache.hadoop.metadata.MetadataException; import org.apache.atlas.MetadataException;
import org.apache.hadoop.metadata.repository.BaseTest; import org.apache.atlas.repository.BaseTest;
import org.apache.hadoop.metadata.repository.RepositoryException; import org.apache.atlas.repository.RepositoryException;
import org.apache.hadoop.metadata.typesystem.ITypedReferenceableInstance; import org.apache.atlas.typesystem.ITypedReferenceableInstance;
import org.apache.hadoop.metadata.typesystem.Referenceable; import org.apache.atlas.typesystem.Referenceable;
import org.apache.hadoop.metadata.typesystem.persistence.Id; import org.apache.atlas.typesystem.persistence.Id;
import org.apache.hadoop.metadata.typesystem.types.TypeSystem; import org.apache.atlas.typesystem.types.TypeSystem;
import org.junit.Assert; import org.junit.Assert;
import org.junit.Before; import org.junit.Before;
import org.junit.Test; import org.junit.Test;
......
...@@ -16,15 +16,15 @@ ...@@ -16,15 +16,15 @@
* limitations under the License. * limitations under the License.
*/ */
package org.apache.hadoop.metadata.repository.memory; package org.apache.atlas.repository.memory;
import org.apache.hadoop.metadata.MetadataException; import org.apache.atlas.MetadataException;
import org.apache.hadoop.metadata.repository.BaseTest; import org.apache.atlas.repository.BaseTest;
import org.apache.hadoop.metadata.typesystem.ITypedStruct; import org.apache.atlas.typesystem.ITypedStruct;
import org.apache.hadoop.metadata.typesystem.Struct; import org.apache.atlas.typesystem.Struct;
import org.apache.hadoop.metadata.typesystem.json.InstanceSerialization$; import org.apache.atlas.typesystem.json.InstanceSerialization$;
import org.apache.hadoop.metadata.typesystem.types.Multiplicity; import org.apache.atlas.typesystem.types.Multiplicity;
import org.apache.hadoop.metadata.typesystem.types.StructType; import org.apache.atlas.typesystem.types.StructType;
import org.junit.Assert; import org.junit.Assert;
import org.junit.Before; import org.junit.Before;
import org.junit.Test; import org.junit.Test;
......
...@@ -16,25 +16,25 @@ ...@@ -16,25 +16,25 @@
* limitations under the License. * limitations under the License.
*/ */
package org.apache.hadoop.metadata.repository.memory; package org.apache.atlas.repository.memory;
import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableList;
import org.apache.hadoop.metadata.MetadataException; import org.apache.atlas.MetadataException;
import org.apache.hadoop.metadata.repository.BaseTest; import org.apache.atlas.repository.BaseTest;
import org.apache.hadoop.metadata.typesystem.IStruct; import org.apache.atlas.typesystem.IStruct;
import org.apache.hadoop.metadata.typesystem.ITypedStruct; import org.apache.atlas.typesystem.ITypedStruct;
import org.apache.hadoop.metadata.typesystem.Struct; import org.apache.atlas.typesystem.Struct;
import org.apache.hadoop.metadata.typesystem.types.DataTypes; import org.apache.atlas.typesystem.types.DataTypes;
import org.apache.hadoop.metadata.typesystem.types.HierarchicalTypeDefinition; import org.apache.atlas.typesystem.types.HierarchicalTypeDefinition;
import org.apache.hadoop.metadata.typesystem.types.Multiplicity; import org.apache.atlas.typesystem.types.Multiplicity;
import org.apache.hadoop.metadata.typesystem.types.TraitType; import org.apache.atlas.typesystem.types.TraitType;
import org.junit.Assert; import org.junit.Assert;
import org.junit.Before; import org.junit.Before;
import org.junit.Test; import org.junit.Test;
import static org.apache.hadoop.metadata.typesystem.types.utils.TypesUtil.createOptionalAttrDef; import static org.apache.atlas.typesystem.types.utils.TypesUtil.createOptionalAttrDef;
import static org.apache.hadoop.metadata.typesystem.types.utils.TypesUtil.createRequiredAttrDef; import static org.apache.atlas.typesystem.types.utils.TypesUtil.createRequiredAttrDef;
import static org.apache.hadoop.metadata.typesystem.types.utils.TypesUtil.createTraitTypeDef; import static org.apache.atlas.typesystem.types.utils.TypesUtil.createTraitTypeDef;
public class TraitTest extends BaseTest { public class TraitTest extends BaseTest {
......
...@@ -16,29 +16,29 @@ ...@@ -16,29 +16,29 @@
* limitations under the License. * limitations under the License.
*/ */
package org.apache.hadoop.metadata.repository.typestore; package org.apache.atlas.repository.typestore;
import com.thinkaurelius.titan.core.TitanGraph; import com.thinkaurelius.titan.core.TitanGraph;
import com.tinkerpop.blueprints.Direction; import com.tinkerpop.blueprints.Direction;
import com.tinkerpop.blueprints.Edge; import com.tinkerpop.blueprints.Edge;
import com.tinkerpop.blueprints.Vertex; import com.tinkerpop.blueprints.Vertex;
import junit.framework.Assert; import junit.framework.Assert;
import org.apache.hadoop.metadata.GraphTransaction; import org.apache.atlas.GraphTransaction;
import org.apache.hadoop.metadata.MetadataException; import org.apache.atlas.MetadataException;
import org.apache.hadoop.metadata.RepositoryMetadataModule; import org.apache.atlas.RepositoryMetadataModule;
import org.apache.hadoop.metadata.TestUtils; import org.apache.atlas.TestUtils;
import org.apache.hadoop.metadata.repository.graph.GraphHelper; import org.apache.atlas.repository.graph.GraphHelper;
import org.apache.hadoop.metadata.repository.graph.GraphProvider; import org.apache.atlas.repository.graph.GraphProvider;
import org.apache.hadoop.metadata.typesystem.TypesDef; import org.apache.atlas.typesystem.TypesDef;
import org.apache.hadoop.metadata.typesystem.types.AttributeDefinition; import org.apache.atlas.typesystem.types.AttributeDefinition;
import org.apache.hadoop.metadata.typesystem.types.ClassType; import org.apache.atlas.typesystem.types.ClassType;
import org.apache.hadoop.metadata.typesystem.types.DataTypes; import org.apache.atlas.typesystem.types.DataTypes;
import org.apache.hadoop.metadata.typesystem.types.EnumTypeDefinition; import org.apache.atlas.typesystem.types.EnumTypeDefinition;
import org.apache.hadoop.metadata.typesystem.types.EnumValue; import org.apache.atlas.typesystem.types.EnumValue;
import org.apache.hadoop.metadata.typesystem.types.HierarchicalTypeDefinition; import org.apache.atlas.typesystem.types.HierarchicalTypeDefinition;
import org.apache.hadoop.metadata.typesystem.types.StructTypeDefinition; import org.apache.atlas.typesystem.types.StructTypeDefinition;
import org.apache.hadoop.metadata.typesystem.types.TraitType; import org.apache.atlas.typesystem.types.TraitType;
import org.apache.hadoop.metadata.typesystem.types.TypeSystem; import org.apache.atlas.typesystem.types.TypeSystem;
import org.testng.annotations.BeforeClass; import org.testng.annotations.BeforeClass;
import org.testng.annotations.Guice; import org.testng.annotations.Guice;
import org.testng.annotations.Test; import org.testng.annotations.Test;
......
...@@ -16,10 +16,10 @@ ...@@ -16,10 +16,10 @@
* limitations under the License. * limitations under the License.
*/ */
package org.apache.hadoop.metadata.query package org.apache.atlas.query
import org.apache.hadoop.metadata.query.Expressions._ import org.apache.atlas.query.Expressions._
import org.apache.hadoop.metadata.repository.BaseTest import org.apache.atlas.repository.BaseTest
import org.junit.{Before, Test} import org.junit.{Before, Test}
class ExpressionTest extends BaseTest { class ExpressionTest extends BaseTest {
......
...@@ -16,14 +16,13 @@ ...@@ -16,14 +16,13 @@
* limitations under the License. * limitations under the License.
*/ */
package org.apache.hadoop.metadata.query package org.apache.atlas.query
import com.thinkaurelius.titan.core.TitanGraph import com.thinkaurelius.titan.core.TitanGraph
import org.apache.hadoop.metadata.query.Expressions._ import org.apache.atlas.query.Expressions._
import org.apache.hadoop.metadata.typesystem.types.TypeSystem import org.apache.atlas.typesystem.types.TypeSystem
import org.junit.runner.RunWith import org.junit.runner.RunWith
import org.scalatest._ import org.scalatest._
import Matchers._
import org.scalatest.junit.JUnitRunner import org.scalatest.junit.JUnitRunner
@RunWith(classOf[JUnitRunner]) @RunWith(classOf[JUnitRunner])
......
...@@ -16,7 +16,7 @@ ...@@ -16,7 +16,7 @@
* limitations under the License. * limitations under the License.
*/ */
package org.apache.hadoop.metadata.query package org.apache.atlas.query
import java.io.File import java.io.File
import java.util.UUID import java.util.UUID
......
...@@ -16,7 +16,7 @@ ...@@ -16,7 +16,7 @@
* limitations under the License. * limitations under the License.
*/ */
package org.apache.hadoop.metadata.query package org.apache.atlas.query
import org.junit.{Assert, Test} import org.junit.{Assert, Test}
......
...@@ -16,11 +16,11 @@ ...@@ -16,11 +16,11 @@
* limitations under the License. * limitations under the License.
*/ */
package org.apache.hadoop.metadata.query package org.apache.atlas.query
import com.thinkaurelius.titan.core.TitanGraph import com.thinkaurelius.titan.core.TitanGraph
import org.apache.hadoop.metadata.query.Expressions._ import org.apache.atlas.query.Expressions._
import org.apache.hadoop.metadata.typesystem.types.TypeSystem import org.apache.atlas.typesystem.types.TypeSystem
import org.junit.runner.RunWith import org.junit.runner.RunWith
import org.scalatest.junit.JUnitRunner import org.scalatest.junit.JUnitRunner
import org.scalatest.{Assertions, BeforeAndAfterAll, FunSuite} import org.scalatest.{Assertions, BeforeAndAfterAll, FunSuite}
...@@ -65,7 +65,7 @@ class LineageQueryTest extends FunSuite with BeforeAndAfterAll { ...@@ -65,7 +65,7 @@ class LineageQueryTest extends FunSuite with BeforeAndAfterAll {
"superTypes":[ "superTypes":[
], ],
"hierarchicalMetaTypeName":"org.apache.hadoop.metadata.typesystem.types.ClassType", "hierarchicalMetaTypeName":"org.apache.atlas.typesystem.types.ClassType",
"typeName":"Table", "typeName":"Table",
"attributeDefinitions":[ "attributeDefinitions":[
{ {
...@@ -189,7 +189,7 @@ class LineageQueryTest extends FunSuite with BeforeAndAfterAll { ...@@ -189,7 +189,7 @@ class LineageQueryTest extends FunSuite with BeforeAndAfterAll {
"superTypes":[ "superTypes":[
], ],
"hierarchicalMetaTypeName":"org.apache.hadoop.metadata.typesystem.types.ClassType", "hierarchicalMetaTypeName":"org.apache.atlas.typesystem.types.ClassType",
"typeName":"Table", "typeName":"Table",
"attributeDefinitions":[ "attributeDefinitions":[
{ {
...@@ -407,7 +407,7 @@ class LineageQueryTest extends FunSuite with BeforeAndAfterAll { ...@@ -407,7 +407,7 @@ class LineageQueryTest extends FunSuite with BeforeAndAfterAll {
"superTypes":[ "superTypes":[
], ],
"hierarchicalMetaTypeName":"org.apache.hadoop.metadata.typesystem.types.ClassType", "hierarchicalMetaTypeName":"org.apache.atlas.typesystem.types.ClassType",
"typeName":"Table", "typeName":"Table",
"attributeDefinitions":[ "attributeDefinitions":[
{ {
......
...@@ -16,9 +16,9 @@ ...@@ -16,9 +16,9 @@
* limitations under the License. * limitations under the License.
*/ */
package org.apache.hadoop.metadata.query package org.apache.atlas.query
import org.apache.hadoop.metadata.repository.BaseTest import org.apache.atlas.repository.BaseTest
import org.junit.{Before, Test} import org.junit.{Before, Test}
......
...@@ -16,7 +16,7 @@ ...@@ -16,7 +16,7 @@
* limitations under the License. * limitations under the License.
*/ */
package org.apache.hadoop.metadata.query package org.apache.atlas.query
import java.io.File import java.io.File
import javax.script.{Bindings, ScriptEngine, ScriptEngineManager} import javax.script.{Bindings, ScriptEngine, ScriptEngineManager}
...@@ -25,10 +25,10 @@ import com.google.common.collect.ImmutableList ...@@ -25,10 +25,10 @@ import com.google.common.collect.ImmutableList
import com.thinkaurelius.titan.core.{TitanFactory, TitanGraph} import com.thinkaurelius.titan.core.{TitanFactory, TitanGraph}
import com.tinkerpop.blueprints.Vertex import com.tinkerpop.blueprints.Vertex
import com.typesafe.config.{Config, ConfigFactory} import com.typesafe.config.{Config, ConfigFactory}
import org.apache.atlas.typesystem.types._
import org.apache.commons.configuration.{Configuration, ConfigurationException, MapConfiguration} import org.apache.commons.configuration.{Configuration, ConfigurationException, MapConfiguration}
import org.apache.commons.io.FileUtils import org.apache.commons.io.FileUtils
import org.apache.hadoop.metadata.typesystem.types._ import org.scalatest.{Assertions, BeforeAndAfterAll, FunSuite}
import org.scalatest.{BeforeAndAfterAll, FunSuite, Assertions}
trait GraphUtils { trait GraphUtils {
......
...@@ -87,7 +87,7 @@ def expandWebApp(dir): ...@@ -87,7 +87,7 @@ def expandWebApp(dir):
raise e raise e
pass pass
os.chdir(webAppMetadataDir) os.chdir(webAppMetadataDir)
jar(os.path.join(metadataDir(), "server", "webapp", "metadata.war")) jar(os.path.join(metadataDir(), "server", "webapp", "atlas.war"))
def dirMustExist(dirname): def dirMustExist(dirname):
if not os.path.exists(dirname): if not os.path.exists(dirname):
......
...@@ -19,7 +19,7 @@ import os ...@@ -19,7 +19,7 @@ import os
import sys import sys
import traceback import traceback
import metadata_config as mc import atlas_config as mc
METADATA_LOG_OPTS="-Dmetadata.log.dir=%s -Dmetadata.log.file=application.log" METADATA_LOG_OPTS="-Dmetadata.log.dir=%s -Dmetadata.log.file=application.log"
METADATA_COMMAND_OPTS="-Dmetadata.home=%s" METADATA_COMMAND_OPTS="-Dmetadata.home=%s"
...@@ -65,10 +65,10 @@ def main(): ...@@ -65,10 +65,10 @@ def main():
args = ["-app", os.path.join(web_app_dir, "metadata")] args = ["-app", os.path.join(web_app_dir, "metadata")]
args.extend(sys.argv[1:]) args.extend(sys.argv[1:])
process = mc.java("org.apache.hadoop.metadata.Main", args, metadata_classpath, jvm_opts_list, logdir) process = mc.java("org.apache.atlas.Main", args, metadata_classpath, jvm_opts_list, logdir)
mc.writePid(metadata_pid_file, process) mc.writePid(metadata_pid_file, process)
print "Metadata Server started!!!\n" print "Apache Atlas Server started!!!\n"
if __name__ == '__main__': if __name__ == '__main__':
try: try:
......
...@@ -20,7 +20,7 @@ from signal import SIGTERM ...@@ -20,7 +20,7 @@ from signal import SIGTERM
import sys import sys
import traceback import traceback
import metadata_config as mc import atlas_config as mc
def main(): def main():
......
...@@ -16,10 +16,10 @@ ...@@ -16,10 +16,10 @@
* limitations under the License. * limitations under the License.
*/ */
package org.apache.hadoop.metadata.tools.dsl package org.apache.atlas.tools.dsl
import org.apache.hadoop.metadata.typesystem.ITypedStruct import org.apache.atlas.typesystem.ITypedStruct
import org.apache.hadoop.metadata.typesystem.types.{StructType, TypeSystem} import org.apache.atlas.typesystem.types.{StructType, TypeSystem}
import scala.language.dynamics import scala.language.dynamics
......
...@@ -16,22 +16,19 @@ ...@@ -16,22 +16,19 @@
* limitations under the License. * limitations under the License.
*/ */
package org.apache.hadoop.metadata package org.apache.atlas.tools
import java.text.SimpleDateFormat import org.apache.atlas.repository.memory.MemRepository
import org.apache.atlas.typesystem.json.{BigDecimalSerializer, BigIntegerSerializer, Serialization, TypedStructSerializer}
import org.apache.hadoop.metadata.typesystem.json.{BigDecimalSerializer, BigIntegerSerializer, Serialization, TypedStructSerializer} import org.apache.atlas.typesystem.persistence.StructInstance
import org.apache.hadoop.metadata.repository.memory.MemRepository import org.apache.atlas.typesystem.types._
import org.apache.hadoop.metadata.tools.dsl.DynamicTypedStruct import org.apache.atlas.typesystem.{IStruct, ITypedStruct}
import org.apache.hadoop.metadata.typesystem.persistence.StructInstance
import org.apache.hadoop.metadata.typesystem.types._
import org.apache.hadoop.metadata.typesystem.{IStruct, ITypedStruct}
import org.json4s._ import org.json4s._
import org.json4s.native.JsonMethods._ import org.json4s.native.JsonMethods._
import org.json4s.native.Serialization.{write => swrite} import org.json4s.native.Serialization.{write => swrite}
import scala.language.implicitConversions
import scala.collection.JavaConversions._ import scala.collection.JavaConversions._
import scala.language.implicitConversions
package object dsl { package object dsl {
......
...@@ -16,13 +16,13 @@ ...@@ -16,13 +16,13 @@
* limitations under the License. * limitations under the License.
*/ */
package org.apache.hadoop.metadata.tools.simpleserver package org.apache.atlas.tools.simpleserver
import akka.actor.{ActorSystem, Props} import akka.actor.{ActorSystem, Props}
import akka.io.IO import akka.io.IO
import com.typesafe.config.ConfigFactory import com.typesafe.config.ConfigFactory
import org.apache.hadoop.metadata.repository.memory.MemRepository import org.apache.atlas.repository.memory.MemRepository
import org.apache.hadoop.metadata.typesystem.types.TypeSystem import org.apache.atlas.typesystem.types.TypeSystem
import spray.can.Http import spray.can.Http
/** /**
...@@ -49,7 +49,7 @@ object Main extends App { ...@@ -49,7 +49,7 @@ object Main extends App {
val host = config.getString("http.host") val host = config.getString("http.host")
val port = config.getInt("http.port") val port = config.getInt("http.port")
implicit val system = ActorSystem("metadataservice") implicit val system = ActorSystem("atlasservice")
val typSys = TypeSystem.getInstance() val typSys = TypeSystem.getInstance()
val memRepo = new MemRepository(typSys) val memRepo = new MemRepository(typSys)
......
...@@ -16,16 +16,16 @@ ...@@ -16,16 +16,16 @@
* limitations under the License. * limitations under the License.
*/ */
package org.apache.hadoop.metadata.tools.simpleserver package org.apache.atlas.tools.simpleserver
import akka.actor._ import akka.actor._
import akka.util.Timeout import akka.util.Timeout
import com.google.common.collect.ImmutableList import com.google.common.collect.ImmutableList
import org.apache.hadoop.metadata.repository.memory.MemRepository import org.apache.atlas.repository.memory.MemRepository
import org.apache.hadoop.metadata.typesystem.json._ import org.apache.atlas.typesystem.json._
import org.apache.hadoop.metadata.typesystem.persistence.Id import org.apache.atlas.typesystem.persistence.Id
import org.apache.hadoop.metadata.typesystem.types._ import org.apache.atlas.typesystem.types._
import org.apache.hadoop.metadata.typesystem.{ITypedReferenceableInstance, TypesDef} import org.apache.atlas.typesystem.{ITypedReferenceableInstance, TypesDef}
import org.json4s.{Formats, NoTypeHints} import org.json4s.{Formats, NoTypeHints}
import spray.httpx.Json4sSupport import spray.httpx.Json4sSupport
...@@ -34,7 +34,7 @@ import scala.concurrent.duration._ ...@@ -34,7 +34,7 @@ import scala.concurrent.duration._
class MetadataActor(val typeSystem: TypeSystem, val memRepository: MemRepository) extends Actor with ActorLogging { class MetadataActor(val typeSystem: TypeSystem, val memRepository: MemRepository) extends Actor with ActorLogging {
import org.apache.hadoop.metadata.tools.simpleserver.MetadataProtocol._ import org.apache.atlas.tools.simpleserver.MetadataProtocol._
import scala.collection.JavaConversions._ import scala.collection.JavaConversions._
import scala.language.postfixOps import scala.language.postfixOps
......
...@@ -16,21 +16,22 @@ ...@@ -16,21 +16,22 @@
* limitations under the License. * limitations under the License.
*/ */
package org.apache.hadoop.metadata.tools.simpleserver package org.apache.atlas.tools.simpleserver
import akka.actor._ import akka.actor._
import akka.util.Timeout import akka.util.Timeout
import org.apache.hadoop.metadata.repository.memory.MemRepository; import org.apache.atlas.repository.memory.MemRepository
import org.apache.hadoop.metadata.typesystem.{TypesDef, ITypedReferenceableInstance} import org.apache.atlas.typesystem.persistence.Id
import org.apache.hadoop.metadata.typesystem.persistence.Id import org.apache.atlas.typesystem.types.TypeSystem
import org.apache.hadoop.metadata.typesystem.types.TypeSystem import org.apache.atlas.typesystem.{ITypedReferenceableInstance, TypesDef}
import spray.http.StatusCodes import spray.http.StatusCodes
import spray.routing._ import spray.routing._
import scala.concurrent.duration._ import scala.concurrent.duration._
class Responder(val typeSystem: TypeSystem, val memRepository : MemRepository, class Responder(val typeSystem: TypeSystem, val memRepository : MemRepository,
requestContext:RequestContext, mdSvc:ActorRef) extends Actor with Json4sProtocol with ActorLogging { requestContext:RequestContext, mdSvc:ActorRef) extends Actor with Json4sProtocol with ActorLogging {
import org.apache.hadoop.metadata.tools.simpleserver.MetadataProtocol._ import org.apache.atlas.tools.simpleserver.MetadataProtocol._
def receive = { def receive = {
...@@ -62,8 +63,9 @@ with RestApi { ...@@ -62,8 +63,9 @@ with RestApi {
trait RestApi extends HttpService with Json4sProtocol with ActorLogging { actor: Actor => trait RestApi extends HttpService with Json4sProtocol with ActorLogging { actor: Actor =>
import MetadataProtocol._ import MetadataProtocol._
import scala.language.postfixOps
import scala.concurrent.ExecutionContext.Implicits.global import scala.concurrent.ExecutionContext.Implicits.global
import scala.language.postfixOps
val typeSystem : TypeSystem val typeSystem : TypeSystem
val memRepository : MemRepository val memRepository : MemRepository
......
...@@ -16,15 +16,14 @@ ...@@ -16,15 +16,14 @@
* limitations under the License. * limitations under the License.
*/ */
package org.apache.hadoop.metadata.tools.thrift package org.apache.atlas.tools.thrift
import org.apache.hadoop.metadata.MetadataException import org.apache.atlas.MetadataException
import org.apache.hadoop.metadata.typesystem.types.DataTypes import org.apache.atlas.typesystem.types.DataTypes
import scala.util.parsing.combinator.{ImplicitConversions, PackratParsers}
import scala.util.parsing.combinator.lexical.StdLexical import scala.util.parsing.combinator.lexical.StdLexical
import scala.util.parsing.combinator.syntactical.StandardTokenParsers import scala.util.parsing.combinator.syntactical.StandardTokenParsers
import scala.util.parsing.combinator.{ImplicitConversions, PackratParsers}
import scala.util.parsing.input.CharArrayReader._ import scala.util.parsing.input.CharArrayReader._
object BASE_TYPES extends Enumeration { object BASE_TYPES extends Enumeration {
...@@ -386,7 +385,7 @@ trait ThriftConstantRules extends ThriftKeywords { ...@@ -386,7 +385,7 @@ trait ThriftConstantRules extends ThriftKeywords {
/** /**
* A Parser for Thrift definition scripts. * A Parser for Thrift definition scripts.
* Based on [[https://github.com/twitter/commons/blob/master/src/antlr/twitter/thrift/descriptors/AntlrThrift.g]]. * Based on [[https://github.com/twitter/commons/blob/master/src/antlr/twitter/thrift/descriptors/AntlrThrift.g]].
* Definition is parsed into a [[org.apache.hadoop.metadata.tools.thrift.ThriftDef ThriftDef]] structure. * Definition is parsed into a [[org.apache.atlas.tools.thrift.ThriftDef ThriftDef]] structure.
* *
* @example {{{ * @example {{{
* var p = new ThriftParser * var p = new ThriftParser
...@@ -398,7 +397,7 @@ trait ThriftConstantRules extends ThriftKeywords { ...@@ -398,7 +397,7 @@ trait ThriftConstantRules extends ThriftKeywords {
* }}} * }}}
* *
* @todo doesn't traverse includes directives. Includes are parsed into * @todo doesn't traverse includes directives. Includes are parsed into
* [[org.apache.hadoop.metadata.tools.thrift.IncludeDef IncludeDef]] structures * [[org.apache.atlas.tools.thrift.IncludeDef IncludeDef]] structures
* but are not traversed. * but are not traversed.
* @todo mixing in [[scala.util.parsing.combinator.PackratParsers PackratParsers]] is a placeholder. Need to * @todo mixing in [[scala.util.parsing.combinator.PackratParsers PackratParsers]] is a placeholder. Need to
* change specific grammar rules to `lazy val` and `Parser[Elem]` to `PackratParser[Elem]`. Will do based on * change specific grammar rules to `lazy val` and `Parser[Elem]` to `PackratParser[Elem]`. Will do based on
......
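Since the @example in the Scaladoc above is truncated in this view, here is a minimal sketch of the documented usage, assuming ThriftParser's apply takes the Thrift script as a String and yields Option[ThriftDef], as the surrounding docs suggest; the sample IDL is illustrative only.

import org.apache.atlas.tools.thrift.{ThriftDef, ThriftParser}

// Illustrative IDL; any valid Thrift definition script should do.
val src = """
  namespace java org.example.api
  struct Version {
    1: string version,
    2: string comments
  }
"""

val p = new ThriftParser
val td: Option[ThriftDef] = p(src)

td match {
  // Includes are parsed into IncludeDef but, per the @todo above, not traversed.
  case Some(d) => println("parsed: " + d)
  case None    => println("parse failed")
}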
...@@ -16,12 +16,12 @@ ...@@ -16,12 +16,12 @@
* limitations under the License. * limitations under the License.
*/ */
package org.apache.hadoop.metadata.tools.thrift package org.apache.atlas.tools.thrift
import com.google.common.collect.ImmutableList import com.google.common.collect.ImmutableList
import org.apache.hadoop.metadata.typesystem.TypesDef import org.apache.atlas.MetadataException
import org.apache.hadoop.metadata.typesystem.types._ import org.apache.atlas.typesystem.TypesDef
import org.apache.hadoop.metadata.MetadataException import org.apache.atlas.typesystem.types.{DataTypes, HierarchicalTypeDefinition, Multiplicity, TraitType, _}
import org.slf4j.{Logger, LoggerFactory} import org.slf4j.{Logger, LoggerFactory}
import scala.io.Source import scala.io.Source
...@@ -31,7 +31,7 @@ import scala.util.{Failure, Success, Try} ...@@ -31,7 +31,7 @@ import scala.util.{Failure, Success, Try}
case class CompositeRelation(typeName: String, fieldName: String, reverseFieldName: Option[String]) case class CompositeRelation(typeName: String, fieldName: String, reverseFieldName: Option[String])
/** /**
* Convert a [[org.apache.hadoop.metadata.tools.thrift.ThriftDef ThriftDef]] to * Convert a [[ThriftDef ThriftDef]] to
* [[TypesDef TypesDef]]. Currently there are several restrictions: * [[TypesDef TypesDef]]. Currently there are several restrictions:
* *
* - CppIncludes, SEnums are not allowed * - CppIncludes, SEnums are not allowed
......
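CompositeRelation's full shape is visible in the hunk above: the owning type, the composite field on it, and an optional reverse field. A small illustration follows; the Table/Partition names are hypothetical and not part of this commit.

import org.apache.atlas.tools.thrift.CompositeRelation

// Hypothetical: Table.partitions is composite, with Partition.table as the
// reverse field linking each partition back to its table.
val tablePartitions = CompositeRelation("Table", "partitions", Some("table"))

// A composite relation with no reverse field on the contained type.
val tableColumns = CompositeRelation("Table", "columns", None)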
...@@ -16,12 +16,11 @@ ...@@ -16,12 +16,11 @@
* limitations under the License. * limitations under the License.
*/ */
package org.apache.hadoop.metadata.tools.dsl package org.apache.atlas.tools.dsl
import org.apache.hadoop.metadata.dsl._ import org.apache.atlas.tools.hive.HiveMockMetadataService
import org.apache.hadoop.metadata.tools.hive.HiveMockMetadataService import org.apache.atlas.typesystem.types.utils.TypesUtil
import org.apache.hadoop.metadata.typesystem.types.utils.TypesUtil import org.apache.atlas.typesystem.types.{DataTypes, StructType, TypeSystem}
import org.apache.hadoop.metadata.typesystem.types.{DataTypes, StructType, TypeSystem}
import org.json4s.native.JsonMethods._ import org.json4s.native.JsonMethods._
import org.junit.{Assert, Before, Test} import org.junit.{Assert, Before, Test}
......
...@@ -16,7 +16,7 @@ ...@@ -16,7 +16,7 @@
* limitations under the License. * limitations under the License.
*/ */
package org.apache.hadoop.metadata.tools.hive package org.apache.atlas.tools.hive
object HiveMockMetadataService { object HiveMockMetadataService {
......
...@@ -16,7 +16,7 @@ ...@@ -16,7 +16,7 @@
* limitations under the License. * limitations under the License.
*/ */
package org.apache.hadoop.metadata.tools.thrift package org.apache.atlas.tools.thrift
import org.junit.{Assert, Test} import org.junit.{Assert, Test}
......
...@@ -16,7 +16,7 @@ ...@@ -16,7 +16,7 @@
* limitations under the License. * limitations under the License.
*/ */
package org.apache.hadoop.metadata.tools.thrift package org.apache.atlas.tools.thrift
import com.google.gson.JsonParser import com.google.gson.JsonParser
import org.json4s.native.JsonMethods._ import org.json4s.native.JsonMethods._
......