Commit d75aeb5d by Suma Shivaprasad

Fixed UTs for listener improvements

parents 720957e8 3e1637a4
Apache Metadata Governance is an effort undergoing incubation at the Apache
Apache Atlas is an effort undergoing incubation at the Apache
Software Foundation (ASF), sponsored by the Apache Incubator PMC.
Incubation is required of all newly accepted projects until a further
......@@ -10,7 +10,7 @@ While incubation status is not necessarily a reflection of the
completeness or stability of the code, it does indicate that the
project has yet to be fully endorsed by the ASF.
For more information about the incubation status of the Metadata Governance
For more information about the incubation status of the Apache Atlas
project you can go to the following page:
http://incubator.apache.org/projects/Metadata-Governance.html
\ No newline at end of file
http://incubator.apache.org/projects/atlas.html
\ No newline at end of file
......@@ -14,7 +14,7 @@
# See the License for the specific language governing permissions and
# limitations under the License.
A. Building & Installing Metadata
A. Building & Installing Atlas
=================================
0. Prerequisites
......@@ -26,14 +26,14 @@ You would need the following installed:
* Maven 3.x
1. Building Metadata
1. Building Atlas
--------------------
Building DGI from the source repository
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
* git clone git@github.com:hortonworks/metadata.git metadata
* cd metadata
* git clone git@github.com:hortonworks/atlas.git atlas
* cd atlas
* export MAVEN_OPTS="-Xmx1024m -XX:MaxPermSize=256m" && mvn clean install
......@@ -44,13 +44,13 @@ Once the build successfully completes, artifacts can be packaged for deployment.
* mvn clean assembly:assembly -DskipTests -DskipITs
Tar can be found in {project dir}/target/apache-metadata-${project.version}-bin.tar.gz
Tar can be found in {project dir}/target/apache-atlas-${project.version}-bin.tar.gz
Tar is structured as follows
|- bin
|- metadata-start.sh
|- metadata-stop.sh
|- atlas-start.sh
|- atlas-stop.sh
|- conf
|- application.properties
|- graph.properties
......@@ -58,7 +58,7 @@ Tar is structured as follows
|- docs
|- server
|- webapp
|- metadata.war
|- atlas.war
|- README
|- NOTICE.txt
|- LICENSE.txt
......@@ -71,13 +71,13 @@ Tar is structured as follows
a. Installing DGI
~~~~~~~~~~~~~~~~~~~~~~
* tar -xzvf apache-metadata-${project.version}-bin.tar.gz
* cd metadata-${project.version}
* tar -xzvf apache-atlas-${project.version}-bin.tar.gz
* cd atlas-${project.version}
b. Starting DGI Server
~~~~~~~~~~~~~~~~~~~~~~~~~
* bin/metadata-start.sh
* bin/atlas-start.sh
c. Using DGI
~~~~~~~~~~~~~~~
......@@ -102,4 +102,4 @@ c. Using DGI
d. Stopping DGI Server
~~~~~~~~~~~~~~~~~~~~~~~~~
* bin/metadata-stop.sh
* bin/atlas-stop.sh
......@@ -18,8 +18,8 @@
package org.apache.atlas.hive.bridge;
import org.apache.atlas.MetadataServiceClient;
import org.apache.atlas.MetadataServiceException;
import org.apache.atlas.AtlasClient;
import org.apache.atlas.AtlasServiceException;
import org.apache.atlas.hive.model.HiveDataModelGenerator;
import org.apache.atlas.hive.model.HiveDataTypes;
import org.apache.atlas.typesystem.Referenceable;
......@@ -64,7 +64,7 @@ public class HiveMetaStoreBridge {
private static final Logger LOG = LoggerFactory.getLogger(HiveMetaStoreBridge.class);
private final Hive hiveClient;
private final MetadataServiceClient metadataServiceClient;
private final AtlasClient atlasClient;
/**
* Construct a HiveMetaStoreBridge.
......@@ -73,11 +73,11 @@ public class HiveMetaStoreBridge {
public HiveMetaStoreBridge(HiveConf hiveConf) throws Exception {
clusterName = hiveConf.get(HIVE_CLUSTER_NAME, DEFAULT_CLUSTER_NAME);
hiveClient = Hive.get(hiveConf);
metadataServiceClient = new MetadataServiceClient(hiveConf.get(DGI_URL_PROPERTY, DEFAULT_DGI_URL));
atlasClient = new AtlasClient(hiveConf.get(DGI_URL_PROPERTY, DEFAULT_DGI_URL));
}
public MetadataServiceClient getMetadataServiceClient() {
return metadataServiceClient;
public AtlasClient getAtlasClient() {
return atlasClient;
}
public void importHiveMetadata() throws Exception {
......@@ -124,8 +124,8 @@ public class HiveMetaStoreBridge {
String entityJSON = InstanceSerialization.toJson(referenceable, true);
LOG.debug("Submitting new entity {} = {}", referenceable.getTypeName(), entityJSON);
JSONObject jsonObject = metadataServiceClient.createEntity(entityJSON);
String guid = jsonObject.getString(MetadataServiceClient.GUID);
JSONObject jsonObject = atlasClient.createEntity(entityJSON);
String guid = jsonObject.getString(AtlasClient.GUID);
LOG.debug("created instance for type " + typeName + ", guid: " + guid);
return new Referenceable(guid, referenceable.getTypeName(), null);
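For illustration, a minimal sketch of the create-and-read-back flow above through AtlasClient; the default server URL, the jettison JSONObject package, and the hive_db attribute values are assumptions:

import org.apache.atlas.AtlasClient;
import org.apache.atlas.hive.model.HiveDataTypes;
import org.apache.atlas.typesystem.Referenceable;
import org.apache.atlas.typesystem.json.InstanceSerialization;
import org.codehaus.jettison.json.JSONObject;

public class CreateEntitySketch {
    public static void main(String[] args) throws Exception {
        // Assumed Atlas server endpoint (the bridge's default DGI URL).
        AtlasClient client = new AtlasClient("http://localhost:21000/");
        Referenceable db = new Referenceable(HiveDataTypes.HIVE_DB.getName());
        db.set("name", "default");          // illustrative attribute values
        db.set("clusterName", "primary");
        String entityJSON = InstanceSerialization.toJson(db, true);
        JSONObject result = client.createEntity(entityJSON);
        // The service returns the new entity's guid under AtlasClient.GUID.
        System.out.println("created guid: " + result.getString(AtlasClient.GUID));
    }
}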
......@@ -179,7 +179,7 @@ public class HiveMetaStoreBridge {
}
private Referenceable getEntityReferenceFromDSL(String typeName, String dslQuery) throws Exception {
MetadataServiceClient dgiClient = getMetadataServiceClient();
AtlasClient dgiClient = getAtlasClient();
JSONArray results = dgiClient.searchByDSL(dslQuery);
if (results.length() == 0) {
return null;
......@@ -216,11 +216,12 @@ public class HiveMetaStoreBridge {
return getEntityReferenceFromDSL(typeName, dslQuery);
}
private Referenceable getEntityReferenceFromGremlin(String typeName, String gremlinQuery) throws MetadataServiceException,
private Referenceable getEntityReferenceFromGremlin(String typeName, String gremlinQuery) throws
AtlasServiceException,
JSONException {
MetadataServiceClient client = getMetadataServiceClient();
AtlasClient client = getAtlasClient();
JSONObject response = client.searchByGremlin(gremlinQuery);
JSONArray results = response.getJSONArray(MetadataServiceClient.RESULTS);
JSONArray results = response.getJSONArray(AtlasClient.RESULTS);
if (results.length() == 0) {
return null;
}
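A quick illustration of the two lookup styles used above (the queries themselves are illustrative):

AtlasClient client = getAtlasClient();

// DSL lookup: searchByDSL returns a JSONArray of matches (empty when none).
JSONArray dslResults = client.searchByDSL("hive_db where name = 'default'");

// Gremlin lookup: the raw response wraps matches under AtlasClient.RESULTS.
JSONObject response = client.searchByGremlin("g.V.has('__typeName', 'hive_db').toList()");
JSONArray gremlinResults = response.getJSONArray(AtlasClient.RESULTS);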
......@@ -238,7 +239,7 @@ public class HiveMetaStoreBridge {
// + "dbName where name = '%s' and clusterName = '%s' select p", typeName, valuesStr, tableName,
// dbName, clusterName);
String datasetType = MetadataServiceClient.DATA_SET_SUPER_TYPE;
String datasetType = AtlasClient.DATA_SET_SUPER_TYPE;
String tableEntityName = getTableName(clusterName, dbName, tableName);
String gremlinQuery = String.format("g.V.has('__typeName', '%s').has('%s.values', %s).as('p')."
......@@ -254,7 +255,7 @@ public class HiveMetaStoreBridge {
throw new IllegalArgumentException("Table " + dbName + "." + tableName + " doesn't exist");
}
MetadataServiceClient dgiClient = getMetadataServiceClient();
AtlasClient dgiClient = getAtlasClient();
Referenceable tableInstance = dgiClient.getEntity(tableRef.getId().id);
Id sdId = (Id) tableInstance.get("sd");
return new Referenceable(sdId.id, sdId.getTypeName(), null);
......@@ -485,7 +486,7 @@ public class HiveMetaStoreBridge {
public synchronized void registerHiveDataModel() throws Exception {
HiveDataModelGenerator dataModelGenerator = new HiveDataModelGenerator();
MetadataServiceClient dgiClient = getMetadataServiceClient();
AtlasClient dgiClient = getAtlasClient();
//Register hive data model if its not already registered
if (dgiClient.getType(HiveDataTypes.HIVE_PROCESS.getName()) == null ) {
......@@ -502,8 +503,8 @@ public class HiveMetaStoreBridge {
hiveMetaStoreBridge.importHiveMetadata();
}
public void updateTable(Referenceable tableReferenceable, Table newTable) throws MetadataServiceException {
MetadataServiceClient client = getMetadataServiceClient();
public void updateTable(Referenceable tableReferenceable, Table newTable) throws AtlasServiceException {
AtlasClient client = getAtlasClient();
client.updateEntity(tableReferenceable.getId()._getId(), HiveDataModelGenerator.TABLE_NAME,
newTable.getTableName().toLowerCase());
client.updateEntity(tableReferenceable.getId()._getId(), HiveDataModelGenerator.NAME,
......
......@@ -19,8 +19,8 @@
package org.apache.atlas.hive.model;
import com.google.common.collect.ImmutableList;
import org.apache.atlas.MetadataException;
import org.apache.atlas.MetadataServiceClient;
import org.apache.atlas.AtlasException;
import org.apache.atlas.AtlasClient;
import org.apache.atlas.typesystem.TypesDef;
import org.apache.atlas.typesystem.json.TypesSerialization;
import org.apache.atlas.typesystem.types.AttributeDefinition;
......@@ -73,7 +73,7 @@ public class HiveDataModelGenerator {
structTypeDefinitionMap = new HashMap<>();
}
public void createDataModel() throws MetadataException {
public void createDataModel() throws AtlasException {
LOG.info("Generating the Hive Data Model....");
// enums
......@@ -130,7 +130,7 @@ public class HiveDataModelGenerator {
return ImmutableList.of();
}
private void createHiveObjectTypeEnum() throws MetadataException {
private void createHiveObjectTypeEnum() throws AtlasException {
EnumValue values[] = {
new EnumValue("GLOBAL", 1),
new EnumValue("DATABASE", 2),
......@@ -145,7 +145,7 @@ public class HiveDataModelGenerator {
LOG.debug("Created definition for " + HiveDataTypes.HIVE_OBJECT_TYPE.getName());
}
private void createHivePrincipalTypeEnum() throws MetadataException {
private void createHivePrincipalTypeEnum() throws AtlasException {
EnumValue values[] = {
new EnumValue("USER", 1),
new EnumValue("ROLE", 2),
......@@ -159,7 +159,7 @@ public class HiveDataModelGenerator {
LOG.debug("Created definition for " + HiveDataTypes.HIVE_PRINCIPAL_TYPE.getName());
}
private void createResourceTypeEnum() throws MetadataException {
private void createResourceTypeEnum() throws AtlasException {
EnumValue values[] = {
new EnumValue("JAR", 1),
new EnumValue("FILE", 2),
......@@ -171,7 +171,7 @@ public class HiveDataModelGenerator {
LOG.debug("Created definition for " + HiveDataTypes.HIVE_RESOURCE_TYPE.getName());
}
private void createSerDeStruct() throws MetadataException {
private void createSerDeStruct() throws AtlasException {
AttributeDefinition[] attributeDefinitions = new AttributeDefinition[]{
new AttributeDefinition(NAME, DataTypes.STRING_TYPE.getName(),
Multiplicity.OPTIONAL, false, null),
......@@ -186,7 +186,7 @@ public class HiveDataModelGenerator {
LOG.debug("Created definition for " + HiveDataTypes.HIVE_SERDE.getName());
}
private void createOrderStruct() throws MetadataException {
private void createOrderStruct() throws AtlasException {
AttributeDefinition[] attributeDefinitions = new AttributeDefinition[]{
new AttributeDefinition("col", DataTypes.STRING_TYPE.getName(),
Multiplicity.REQUIRED, false, null),
......@@ -200,7 +200,7 @@ public class HiveDataModelGenerator {
LOG.debug("Created definition for " + HiveDataTypes.HIVE_ORDER.getName());
}
private void createStorageDescClass() throws MetadataException {
private void createStorageDescClass() throws AtlasException {
AttributeDefinition[] attributeDefinitions = new AttributeDefinition[]{
new AttributeDefinition("cols",
String.format("array<%s>", HiveDataTypes.HIVE_COLUMN.getName()),
......@@ -239,7 +239,7 @@ public class HiveDataModelGenerator {
/** Revisit later after nested array types are handled by the typesystem **/
private void createResourceUriStruct() throws MetadataException {
private void createResourceUriStruct() throws AtlasException {
AttributeDefinition[] attributeDefinitions = new AttributeDefinition[]{
new AttributeDefinition("resourceType", HiveDataTypes.HIVE_RESOURCE_TYPE.getName(),
Multiplicity.REQUIRED, false, null),
......@@ -252,7 +252,7 @@ public class HiveDataModelGenerator {
LOG.debug("Created definition for " + HiveDataTypes.HIVE_RESOURCEURI.getName());
}
private void createDBClass() throws MetadataException {
private void createDBClass() throws AtlasException {
AttributeDefinition[] attributeDefinitions = new AttributeDefinition[]{
new AttributeDefinition(NAME, DataTypes.STRING_TYPE.getName(),
Multiplicity.REQUIRED, false, null),
......@@ -277,7 +277,7 @@ public class HiveDataModelGenerator {
LOG.debug("Created definition for " + HiveDataTypes.HIVE_DB.getName());
}
private void createTypeClass() throws MetadataException {
private void createTypeClass() throws AtlasException {
AttributeDefinition[] attributeDefinitions = new AttributeDefinition[]{
new AttributeDefinition(NAME, DataTypes.STRING_TYPE.getName(),
Multiplicity.REQUIRED, false, null),
......@@ -296,7 +296,7 @@ public class HiveDataModelGenerator {
LOG.debug("Created definition for " + HiveDataTypes.HIVE_TYPE.getName());
}
private void createColumnClass() throws MetadataException {
private void createColumnClass() throws AtlasException {
AttributeDefinition[] attributeDefinitions = new AttributeDefinition[]{
new AttributeDefinition(NAME, DataTypes.STRING_TYPE.getName(),
Multiplicity.REQUIRED, false, null),
......@@ -313,7 +313,7 @@ public class HiveDataModelGenerator {
LOG.debug("Created definition for " + HiveDataTypes.HIVE_COLUMN.getName());
}
private void createPartitionClass() throws MetadataException {
private void createPartitionClass() throws AtlasException {
AttributeDefinition[] attributeDefinitions = new AttributeDefinition[]{
new AttributeDefinition("values", DataTypes.arrayTypeName(DataTypes.STRING_TYPE.getName()),
Multiplicity.OPTIONAL, false, null),
......@@ -338,7 +338,7 @@ public class HiveDataModelGenerator {
LOG.debug("Created definition for " + HiveDataTypes.HIVE_PARTITION.getName());
}
private void createTableClass() throws MetadataException {
private void createTableClass() throws AtlasException {
AttributeDefinition[] attributeDefinitions = new AttributeDefinition[]{
new AttributeDefinition(TABLE_NAME, DataTypes.STRING_TYPE.getName(),
Multiplicity.REQUIRED, false, null),
......@@ -380,7 +380,7 @@ public class HiveDataModelGenerator {
LOG.debug("Created definition for " + HiveDataTypes.HIVE_TABLE.getName());
}
private void createIndexClass() throws MetadataException {
private void createIndexClass() throws AtlasException {
AttributeDefinition[] attributeDefinitions = new AttributeDefinition[]{
new AttributeDefinition(NAME, DataTypes.STRING_TYPE.getName(),
Multiplicity.REQUIRED, false, null),
......@@ -406,12 +406,12 @@ public class HiveDataModelGenerator {
HierarchicalTypeDefinition<ClassType> definition =
new HierarchicalTypeDefinition<>(ClassType.class, HiveDataTypes.HIVE_INDEX.getName(),
ImmutableList.of(MetadataServiceClient.DATA_SET_SUPER_TYPE), attributeDefinitions);
ImmutableList.of(AtlasClient.DATA_SET_SUPER_TYPE), attributeDefinitions);
classTypeDefinitions.put(HiveDataTypes.HIVE_INDEX.getName(), definition);
LOG.debug("Created definition for " + HiveDataTypes.HIVE_INDEX.getName());
}
private void createRoleClass() throws MetadataException {
private void createRoleClass() throws AtlasException {
AttributeDefinition[] attributeDefinitions = new AttributeDefinition[]{
new AttributeDefinition("roleName", DataTypes.STRING_TYPE.getName(),
Multiplicity.REQUIRED, false, null),
......@@ -427,7 +427,7 @@ public class HiveDataModelGenerator {
LOG.debug("Created definition for " + HiveDataTypes.HIVE_ROLE.getName());
}
private void createProcessClass() throws MetadataException {
private void createProcessClass() throws AtlasException {
AttributeDefinition[] attributeDefinitions = new AttributeDefinition[]{
new AttributeDefinition("startTime", DataTypes.LONG_TYPE.getName(),
Multiplicity.REQUIRED, false, null),
......@@ -447,12 +447,12 @@ public class HiveDataModelGenerator {
HierarchicalTypeDefinition<ClassType> definition = new HierarchicalTypeDefinition<>(
ClassType.class, HiveDataTypes.HIVE_PROCESS.getName(),
ImmutableList.of(MetadataServiceClient.PROCESS_SUPER_TYPE), attributeDefinitions);
ImmutableList.of(AtlasClient.PROCESS_SUPER_TYPE), attributeDefinitions);
classTypeDefinitions.put(HiveDataTypes.HIVE_PROCESS.getName(), definition);
LOG.debug("Created definition for " + HiveDataTypes.HIVE_PROCESS.getName());
}
public String getModelAsJson() throws MetadataException {
public String getModelAsJson() throws AtlasException {
createDataModel();
return getDataModelAsJSON();
}
......
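Roughly how registerHiveDataModel() above consumes this, as a hedged sketch; createType accepting the generated TypesDef JSON is an assumption:

HiveDataModelGenerator dataModelGenerator = new HiveDataModelGenerator();
AtlasClient dgiClient = getAtlasClient();
// Register the model only if the hive types are not yet known to the server.
if (dgiClient.getType(HiveDataTypes.HIVE_PROCESS.getName()) == null) {
    dgiClient.createType(dataModelGenerator.getModelAsJson()); // assumed AtlasClient API
}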
......@@ -18,7 +18,7 @@
package org.apache.atlas.hive.hook;
import org.apache.atlas.MetadataServiceClient;
import org.apache.atlas.AtlasClient;
import org.apache.atlas.hive.bridge.HiveMetaStoreBridge;
import org.apache.atlas.hive.model.HiveDataModelGenerator;
import org.apache.atlas.hive.model.HiveDataTypes;
......@@ -48,7 +48,7 @@ public class HiveHookIT {
private static final String CLUSTER_NAME = "test";
public static final String DEFAULT_DB = "default";
private Driver driver;
private MetadataServiceClient dgiCLient;
private AtlasClient dgiCLient;
private SessionState ss;
@BeforeClass
......@@ -60,7 +60,7 @@ public class HiveHookIT {
ss = SessionState.start(ss);
SessionState.setCurrentSessionState(ss);
dgiCLient = new MetadataServiceClient(DGI_URL);
dgiCLient = new AtlasClient(DGI_URL);
}
private HiveConf getHiveConf() {
......@@ -286,7 +286,7 @@ public class HiveHookIT {
String gremlinQuery = String.format("g.V.has('__typeName', '%s').has('%s.queryText', \"%s\").toList()",
typeName, typeName, normalize(queryStr));
JSONObject response = dgiCLient.searchByGremlin(gremlinQuery);
JSONArray results = response.getJSONArray(MetadataServiceClient.RESULTS);
JSONArray results = response.getJSONArray(AtlasClient.RESULTS);
Assert.assertEquals(results.length(), 1);
}
......@@ -332,7 +332,7 @@ public class HiveHookIT {
+ ".has('%s.clusterName', '%s').back('p').toList()", typeName, typeName, value, typeName,
tableType, tableName.toLowerCase(), tableType, dbType, dbName.toLowerCase(), dbType, CLUSTER_NAME);
JSONObject response = dgiCLient.searchByGremlin(gremlinQuery);
JSONArray results = response.getJSONArray(MetadataServiceClient.RESULTS);
JSONArray results = response.getJSONArray(AtlasClient.RESULTS);
Assert.assertEquals(results.length(), 1);
}
......
......@@ -18,8 +18,8 @@
package org.apache.atlas.hive.hook;
import org.apache.atlas.MetadataException;
import org.apache.atlas.MetadataServiceClient;
import org.apache.atlas.AtlasException;
import org.apache.atlas.AtlasClient;
import org.apache.atlas.PropertiesUtil;
import org.apache.atlas.hive.model.HiveDataTypes;
import org.apache.atlas.security.SecurityProperties;
......@@ -66,7 +66,7 @@ public class SSLAndKerberosHiveHookIT extends BaseSSLAndKerberosTest {
private static final String DGI_URL = "https://localhost:21443/";
private Driver driver;
private MetadataServiceClient dgiCLient;
private AtlasClient dgiCLient;
private SessionState ss;
private TestSecureEmbeddedServer secureEmbeddedServer;
private Subject subject;
......@@ -128,9 +128,9 @@ public class SSLAndKerberosHiveHookIT extends BaseSSLAndKerberosTest {
configuration.save(new FileWriter(persistDir + File.separator + "application.properties"));
dgiCLient = new MetadataServiceClient(DGI_URL) {
dgiCLient = new AtlasClient(DGI_URL) {
@Override
protected PropertiesConfiguration getClientProperties() throws MetadataException {
protected PropertiesConfiguration getClientProperties() throws AtlasException {
return configuration;
}
};
......
......@@ -18,8 +18,8 @@
package org.apache.atlas.hive.hook;
import org.apache.atlas.MetadataException;
import org.apache.atlas.MetadataServiceClient;
import org.apache.atlas.AtlasException;
import org.apache.atlas.AtlasClient;
import org.apache.atlas.hive.bridge.HiveMetaStoreBridge;
import org.apache.atlas.hive.model.HiveDataTypes;
import org.apache.atlas.security.SecurityProperties;
......@@ -61,7 +61,7 @@ import static org.apache.atlas.security.SecurityProperties.TRUSTSTORE_PASSWORD_K
public class SSLHiveHookIT {
private static final String DGI_URL = "https://localhost:21443/";
private Driver driver;
private MetadataServiceClient dgiCLient;
private AtlasClient dgiCLient;
private SessionState ss;
private Path jksPath;
private String providerUrl;
......@@ -117,9 +117,9 @@ public class SSLHiveHookIT {
configuration.save(new FileWriter(persistDir + File.separator + "client.properties"));
dgiCLient = new MetadataServiceClient(DGI_URL) {
dgiCLient = new AtlasClient(DGI_URL) {
@Override
protected PropertiesConfiguration getClientProperties() throws MetadataException {
protected PropertiesConfiguration getClientProperties() throws AtlasException {
return configuration;
}
};
......
......@@ -20,14 +20,14 @@ package org.apache.atlas;
import com.sun.jersey.api.client.ClientResponse;
public class MetadataServiceException extends Exception {
public class AtlasServiceException extends Exception {
private ClientResponse.Status status;
public MetadataServiceException(MetadataServiceClient.API api, Exception e) {
public AtlasServiceException(AtlasClient.API api, Exception e) {
super("Metadata service API " + api + " failed", e);
}
public MetadataServiceException(MetadataServiceClient.API api, ClientResponse response) {
public AtlasServiceException(AtlasClient.API api, ClientResponse response) {
super("Metadata service API " + api + " failed with status " +
response.getClientResponseStatus().getStatusCode() + "(" +
response.getClientResponseStatus().getReasonPhrase() + ") Response Body (" +
......@@ -35,7 +35,7 @@ public class MetadataServiceException extends Exception {
this.status = response.getClientResponseStatus();
}
public MetadataServiceException(Exception e) {
public AtlasServiceException(Exception e) {
super(e);
}
......
......@@ -31,15 +31,15 @@ public class PropertiesUtil {
private static final String APPLICATION_PROPERTIES = "application.properties";
public static final String CLIENT_PROPERTIES = "client.properties";
public static PropertiesConfiguration getApplicationProperties() throws MetadataException {
public static PropertiesConfiguration getApplicationProperties() throws AtlasException {
return getPropertiesConfiguration(APPLICATION_PROPERTIES);
}
public static PropertiesConfiguration getClientProperties() throws MetadataException {
public static PropertiesConfiguration getClientProperties() throws AtlasException {
return getPropertiesConfiguration(CLIENT_PROPERTIES);
}
private static PropertiesConfiguration getPropertiesConfiguration(String name) throws MetadataException {
private static PropertiesConfiguration getPropertiesConfiguration(String name) throws AtlasException {
String confLocation = System.getProperty("atlas.conf");
URL url;
try {
......@@ -51,7 +51,7 @@ public class PropertiesUtil {
LOG.info("Loading {} from {}", name, url);
return new PropertiesConfiguration(url);
} catch (Exception e) {
throw new MetadataException("Failed to load application properties", e);
throw new AtlasException("Failed to load application properties", e);
}
}
......
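A short usage sketch for the helpers above; the "/etc/atlas/conf" path is hypothetical, and the lookup honours the "atlas.conf" system property before falling back to the classpath:

System.setProperty("atlas.conf", "/etc/atlas/conf"); // hypothetical conf directory
PropertiesConfiguration appConf = PropertiesUtil.getApplicationProperties();
// commons-configuration lookup with a default when the key is absent.
boolean tlsEnabled = appConf.getBoolean("atlas.enableTLS", false);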
......@@ -19,7 +19,7 @@ package org.apache.atlas.security;
import com.sun.jersey.api.client.config.DefaultClientConfig;
import com.sun.jersey.client.urlconnection.HttpURLConnectionFactory;
import com.sun.jersey.client.urlconnection.URLConnectionClientHandler;
import org.apache.atlas.MetadataException;
import org.apache.atlas.AtlasException;
import org.apache.atlas.PropertiesUtil;
import org.apache.commons.configuration.PropertiesConfiguration;
import org.apache.hadoop.conf.Configuration;
......@@ -146,7 +146,7 @@ public class SecureClientUtils {
connection.setReadTimeout(socketTimeout);
}
private static File getSSLClientFile() throws MetadataException {
private static File getSSLClientFile() throws AtlasException {
String confLocation = System.getProperty("atlas.conf");
File sslDir;
try {
......@@ -163,12 +163,12 @@ public class SecureClientUtils {
}
LOG.info("ssl-client.xml will be created in {}", sslDir);
} catch (Exception e) {
throw new MetadataException("Failed to find client configuration directory", e);
throw new AtlasException("Failed to find client configuration directory", e);
}
return new File(sslDir, SecurityProperties.SSL_CLIENT_PROPERTIES);
}
public static void persistSSLClientConfiguration(PropertiesConfiguration clientConfig) throws MetadataException, IOException {
public static void persistSSLClientConfiguration(PropertiesConfiguration clientConfig) throws AtlasException, IOException {
//trust settings
Configuration configuration = new Configuration(false);
File sslClientFile = getSSLClientFile();
......
......@@ -18,8 +18,8 @@ This section sets up the graph db - titan - to use a persistence engine. Please
details. The example below uses BerkeleyDBJE.
<verbatim>
metadata.graph.storage.backend=berkeleyje
metadata.graph.storage.directory=data/berkley
atlas.graph.storage.backend=berkeleyje
atlas.graph.storage.directory=data/berkley
</verbatim>
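The same settings can be exercised programmatically; a minimal sketch using the Titan API, noting that the atlas.graph. prefix is stripped before the keys reach Titan:

<verbatim>
// Sketch: open a Titan graph with the BerkeleyDB JE settings shown above.
org.apache.commons.configuration.BaseConfiguration conf =
        new org.apache.commons.configuration.BaseConfiguration();
conf.setProperty("storage.backend", "berkeleyje");
conf.setProperty("storage.directory", "data/berkley");
com.thinkaurelius.titan.core.TitanGraph graph =
        com.thinkaurelius.titan.core.TitanFactory.open(conf);
</verbatim>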
---++++ Graph Search Index
......@@ -27,11 +27,11 @@ This section sets up the graph db - titan - to use a search indexing system. The
configuration below sets up an embedded Elasticsearch indexing system.
<verbatim>
metadata.graph.index.search.backend=elasticsearch
metadata.graph.index.search.directory=data/es
metadata.graph.index.search.elasticsearch.client-only=false
metadata.graph.index.search.elasticsearch.local-mode=true
metadata.graph.index.search.elasticsearch.create.sleep=2000
atlas.graph.index.search.backend=elasticsearch
atlas.graph.index.search.directory=data/es
atlas.graph.index.search.elasticsearch.client-only=false
atlas.graph.index.search.elasticsearch.local-mode=true
atlas.graph.index.search.elasticsearch.create.sleep=2000
</verbatim>
---+++ Hive Lineage Configs
......@@ -40,13 +40,13 @@ section encodes the specific types for the hive data model.
# This model reflects the base super types for Data and Process
<verbatim>
metadata.lineage.hive.table.type.name=DataSet
metadata.lineage.hive.process.type.name=Process
metadata.lineage.hive.process.inputs.name=inputs
metadata.lineage.hive.process.outputs.name=outputs
atlas.lineage.hive.table.type.name=DataSet
atlas.lineage.hive.process.type.name=Process
atlas.lineage.hive.process.inputs.name=inputs
atlas.lineage.hive.process.outputs.name=outputs
## Schema
metadata.lineage.hive.table.schema.query=hive_table where name=?, columns
atlas.lineage.hive.table.schema.query=hive_table where name=?, columns
</verbatim>
---+++ Security Properties
......@@ -55,6 +55,6 @@ metadata.lineage.hive.table.schema.query=hive_table where name=?, columns
The following property is used to toggle the SSL feature.
<verbatim>
metadata.enableTLS=false
atlas.enableTLS=false
</verbatim>
---+ Security Features of the Data Governance and Metadata platform for Hadoop
---+ Security Features of Apache Atlas
---++ Overview
......@@ -12,7 +12,7 @@ The following features are available for enhancing the security of the platform:
Both SSL one-way (server authentication) and two-way (server and client authentication) are supported. The following application properties (properties configured in the application.properties file) are available for configuring SSL:
* <code>metadata.enableTLS</code> (false|true) [default: false] - enable/disable the SSL listener
* <code>atlas.enableTLS</code> (false|true) [default: false] - enable/disable the SSL listener
* <code>keystore.file</code> - the path to the keystore file leveraged by the server. This file contains the server certificate.
* <code>truststore.file</code> - the path to the truststore file. This file contains the certificates of other trusted entities (e.g. the certificates for client processes if two-way SSL is enabled). In most instances this can be set to the same value as the keystore.file property (especially if one-way SSL is enabled).
* <code>client.auth.enabled</code> (false|true) [default: false] - enable/disable client authentication. If enabled, the client will have to authenticate to the server during the transport session key creation process (i.e. two-way SSL is in effect).
......@@ -38,9 +38,9 @@ The DGI platform, upon startup, is associated with an authenticated identity. By
The properties for configuring service authentication are:
* <code>metadata.authentication.method</code> (simple|kerberos) [default: simple] - the authentication method to utilize. Simple will leverage the OS authenticated identity and is the default mechanism. 'kerberos' indicates that the service is required to authenticate to the KDC leveraging the configured keytab and principal.
* <code>metadata.authentication.keytab</code> - the path to the keytab file.
* <code>metadata.authentication.principal</code> - the principal to use for authenticating to the KDC. The principal is generally of the form "user/host@realm". You may use the '_HOST' token for the hostname and the local hostname will be substituted in by the runtime (e.g. "dgi/_HOST@EXAMPLE.COM").
* <code>atlas.authentication.method</code> (simple|kerberos) [default: simple] - the authentication method to utilize. Simple will leverage the OS authenticated identity and is the default mechanism. 'kerberos' indicates that the service is required to authenticate to the KDC leveraging the configured keytab and principal.
* <code>atlas.authentication.keytab</code> - the path to the keytab file.
* <code>atlas.authentication.principal</code> - the principal to use for authenticating to the KDC. The principal is generally of the form "user/host@realm". You may use the '_HOST' token for the hostname and the local hostname will be substituted in by the runtime (e.g. "dgi/_HOST@EXAMPLE.COM").
---+++ SPNEGO-based HTTP Authentication
......@@ -53,28 +53,28 @@ The kerberos support requires the client accessing the server to first authentic
The properties for configuring the SPNEGO support are:
* <code>metadata.http.authentication.enabled</code> (true|false) [default: false] - a property indicating whether to enable HTTP authentication
* <code>metadata.http.authentication.type</code> (simple|kerberos) [default: simple] - the authentication type
* <code>metadata.http.authentication.kerberos.principal</code> - the web-application Kerberos principal name. The Kerberos principal name must start with "HTTP/...". For example: "HTTP/localhost@LOCALHOST". There is no default value.
* <code>metadata.http.authentication.kerberos.keytab</code> - the path to the keytab file containing the credentials for the kerberos principal.
* <code>atlas.http.authentication.enabled</code> (true|false) [default: false] - a property indicating whether to enable HTTP authentication
* <code>atlas.http.authentication.type</code> (simple|kerberos) [default: simple] - the authentication type
* <code>atlas.http.authentication.kerberos.principal</code> - the web-application Kerberos principal name. The Kerberos principal name must start with "HTTP/...". For example: "HTTP/localhost@LOCALHOST". There is no default value.
* <code>atlas.http.authentication.kerberos.keytab</code> - the path to the keytab file containing the credentials for the kerberos principal.
For a more detailed discussion of the HTTP authentication mechanism refer to [[http://hadoop.apache.org/docs/stable/hadoop-auth/Configuration.html][Hadoop Auth, Java HTTP SPNEGO 2.6.0 - Server Side Configuration]]. The prefix that document references is "metadata.http.authentication" in the case of the DGI authentication implementation.
For a more detailed discussion of the HTTP authentication mechanism refer to [[http://hadoop.apache.org/docs/stable/hadoop-auth/Configuration.html][Hadoop Auth, Java HTTP SPNEGO 2.6.0 - Server Side Configuration]]. The prefix that document references is "atlas.http.authentication" in the case of the DGI authentication implementation.
---+++ Client security configuration
When leveraging Atlas client code to communicate with an Atlas server configured for SSL transport and/or Kerberos authentication, you must supply a client configuration file containing the security properties that allow the client to communicate with, or authenticate to, the server.
Create a client.properties file with the appropriate settings (see below) and place it on the client's classpath or in the directory specified by the "metadata.conf" system property.
Create a client.properties file with the appropriate settings (see below) and place it on the client's classpath or in the directory specified by the "atlas.conf" system property.
The client properties for SSL communication are:
* <code>metadata.enableTLS</code> (false|true) [default: false] - enable/disable the SSL client communication infrastructure.
* <code>atlas.enableTLS</code> (false|true) [default: false] - enable/disable the SSL client communication infrastructure.
* <code>keystore.file</code> - the path to the keystore file leveraged by the client. This file is only required if 2-Way SSL is enabled at the server and contains the client certificate.
* <code>truststore.file</code> - the path to the truststore file. This file contains the certificates of trusted entities (e.g. the certificates for the server or a shared certification authority). This file is required for both one-way or two-way SSL.
* <code>cert.stores.credential.provider.path</code> - the path to the Credential Provider store file. The passwords for the keystore, truststore, and client certificate are maintained in this secure file.
The property required for authenticating to the server (if authentication is enabled):
* <code>metadata.http.authentication.type</code> (simple|kerberos) [default: simple] - the authentication type
* <code>atlas.http.authentication.type</code> (simple|kerberos) [default: simple] - the authentication type
If the authentication type specified is 'kerberos', then the kerberos ticket cache will be accessed for authenticating to the server (Therefore the client is required to authenticate to the KDC prior to communication with the server using 'kinit' or a similar mechanism).
......
......@@ -65,9 +65,9 @@
<inceptionYear>2015</inceptionYear>
<scm>
<connection>scm:git:https://github.com/hortonworks/metadata</connection>
<developerConnection>scm:git:https://github.com/hortonworks/metadata</developerConnection>
<url>https://github.com/hortonworks/metadata</url>
<connection>scm:git:https://github.com/apache/incubator-atlas</connection>
<developerConnection>scm:git:https://github.com/apache/incubator-atlas</developerConnection>
<url>https://github.com/apache/incubator-atlas</url>
</scm>
<properties>
......
......@@ -63,7 +63,7 @@ public class RepositoryMetadataModule extends com.google.inject.AbstractModule {
bind(ITypeStore.class).to(GraphBackedTypeStore.class).asEagerSingleton();
Multibinder<TypesChangeListener> typesChangeListenerBinder = Multibinder.newSetBinder(binder(), TypesChangeListener.class);
typesChangeListenerBinder.addBinding().toProvider(GraphBackedSearchIndexer.class);
typesChangeListenerBinder.addBinding().to(GraphBackedSearchIndexer.class);
// bind the MetadataService interface to an implementation
bind(MetadataService.class).to(DefaultMetadataService.class).asEagerSingleton();
......
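With .to(...) in place of .toProvider(...), Guice itself constructs GraphBackedSearchIndexer and hands every bound listener to consumers as a set. A hedged sketch of such a consumer (TypeChangeNotifier is hypothetical):

import java.util.Collection;
import java.util.Set;
import javax.inject.Inject;
import org.apache.atlas.AtlasException;
import org.apache.atlas.listener.TypesChangeListener;
import org.apache.atlas.typesystem.types.IDataType;

public class TypeChangeNotifier {
    private final Set<TypesChangeListener> listeners;

    @Inject
    public TypeChangeNotifier(Set<TypesChangeListener> listeners) {
        this.listeners = listeners; // populated by the Multibinder above
    }

    public void notifyAdd(Collection<? extends IDataType> dataTypes) throws AtlasException {
        for (TypesChangeListener listener : listeners) {
            listener.onAdd(dataTypes);
        }
    }
}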
......@@ -18,11 +18,11 @@
package org.apache.atlas.discovery;
import org.apache.atlas.MetadataException;
import org.apache.atlas.AtlasException;
import java.security.PrivilegedActionException;
public class DiscoveryException extends MetadataException {
public class DiscoveryException extends AtlasException {
/**
* Constructs a new exception with the specified detail message. The
......
......@@ -20,7 +20,7 @@ package org.apache.atlas.discovery;
import com.thinkaurelius.titan.core.TitanGraph;
import org.apache.atlas.GraphTransaction;
import org.apache.atlas.MetadataException;
import org.apache.atlas.AtlasException;
import org.apache.atlas.ParamChecker;
import org.apache.atlas.PropertiesUtil;
import org.apache.atlas.discovery.graph.DefaultGraphPersistenceStrategy;
......@@ -79,8 +79,8 @@ public class HiveLineageService implements LineageService {
"hive_table where name=\"%s\", columns");
HIVE_TABLE_EXISTS_QUERY = conf.getString(
"atlas.lineage.hive.table.exists.query",
"from hive_table where name=\"%s\"");
} catch (MetadataException e) {
"from " + HIVE_TABLE_TYPE_NAME + " where name=\"%s\"");
} catch (AtlasException e) {
throw new RuntimeException(e);
}
}
......@@ -107,7 +107,7 @@ public class HiveLineageService implements LineageService {
*/
@Override
@GraphTransaction
public String getOutputs(String tableName) throws MetadataException {
public String getOutputs(String tableName) throws AtlasException {
LOG.info("Fetching lineage outputs for tableName={}", tableName);
ParamChecker.notEmpty(tableName, "table name cannot be null");
validateTableExists(tableName);
......@@ -135,7 +135,7 @@ public class HiveLineageService implements LineageService {
*/
@Override
@GraphTransaction
public String getOutputsGraph(String tableName) throws MetadataException {
public String getOutputsGraph(String tableName) throws AtlasException {
LOG.info("Fetching lineage outputs graph for tableName={}", tableName);
ParamChecker.notEmpty(tableName, "table name cannot be null");
validateTableExists(tableName);
......@@ -156,7 +156,7 @@ public class HiveLineageService implements LineageService {
*/
@Override
@GraphTransaction
public String getInputs(String tableName) throws MetadataException {
public String getInputs(String tableName) throws AtlasException {
LOG.info("Fetching lineage inputs for tableName={}", tableName);
ParamChecker.notEmpty(tableName, "table name cannot be null");
validateTableExists(tableName);
......@@ -184,7 +184,7 @@ public class HiveLineageService implements LineageService {
*/
@Override
@GraphTransaction
public String getInputsGraph(String tableName) throws MetadataException {
public String getInputsGraph(String tableName) throws AtlasException {
LOG.info("Fetching lineage inputs graph for tableName={}", tableName);
ParamChecker.notEmpty(tableName, "table name cannot be null");
validateTableExists(tableName);
......@@ -205,7 +205,7 @@ public class HiveLineageService implements LineageService {
*/
@Override
@GraphTransaction
public String getSchema(String tableName) throws MetadataException {
public String getSchema(String tableName) throws AtlasException {
LOG.info("Fetching schema for tableName={}", tableName);
ParamChecker.notEmpty(tableName, "table name cannot be null");
validateTableExists(tableName);
......@@ -219,7 +219,7 @@ public class HiveLineageService implements LineageService {
*
* @param tableName table name
*/
private void validateTableExists(String tableName) throws MetadataException {
private void validateTableExists(String tableName) throws AtlasException {
final String tableExistsQuery = String.format(HIVE_TABLE_EXISTS_QUERY, tableName);
GremlinQueryResult queryResult = discoveryService.evaluate(tableExistsQuery);
if (!(queryResult.rows().length() > 0)) {
......
......@@ -18,7 +18,7 @@
package org.apache.atlas.discovery;
import org.apache.atlas.MetadataException;
import org.apache.atlas.AtlasException;
/**
* Lineage service interface.
......@@ -31,7 +31,7 @@ public interface LineageService {
* @param tableName tableName
* @return Outputs as JSON
*/
String getOutputs(String tableName) throws MetadataException;
String getOutputs(String tableName) throws AtlasException;
/**
* Return the lineage outputs graph for the given tableName.
......@@ -39,7 +39,7 @@ public interface LineageService {
* @param tableName tableName
* @return Outputs Graph as JSON
*/
String getOutputsGraph(String tableName) throws MetadataException;
String getOutputsGraph(String tableName) throws AtlasException;
/**
* Return the lineage inputs for the given tableName.
......@@ -47,7 +47,7 @@ public interface LineageService {
* @param tableName tableName
* @return Inputs as JSON
*/
String getInputs(String tableName) throws MetadataException;
String getInputs(String tableName) throws AtlasException;
/**
* Return the lineage inputs graph for the given tableName.
......@@ -55,7 +55,7 @@ public interface LineageService {
* @param tableName tableName
* @return Inputs Graph as JSON
*/
String getInputsGraph(String tableName) throws MetadataException;
String getInputsGraph(String tableName) throws AtlasException;
/**
* Return the schema for the given tableName.
......@@ -63,5 +63,5 @@ public interface LineageService {
* @param tableName tableName
* @return Schema as JSON
*/
String getSchema(String tableName) throws MetadataException;
String getSchema(String tableName) throws AtlasException;
}
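A hedged usage sketch for the interface above; LineagePrinter is hypothetical, and the concrete service (e.g. HiveLineageService) would normally be injected:

import org.apache.atlas.AtlasException;
import org.apache.atlas.discovery.LineageService;

public class LineagePrinter {
    private final LineageService lineage;

    public LineagePrinter(LineageService lineage) {
        this.lineage = lineage;
    }

    public void print(String tableName) throws AtlasException {
        System.out.println(lineage.getInputs(tableName));        // inputs as JSON
        System.out.println(lineage.getOutputsGraph(tableName));  // outputs graph as JSON
        System.out.println(lineage.getSchema(tableName));        // schema as JSON
    }
}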
......@@ -26,12 +26,4 @@ import org.apache.atlas.repository.IndexException;
* Interface for indexing types.
*/
public interface SearchIndexer extends TypesChangeListener {
/* Commit the indexes */
void commit() throws IndexException;
/* RollBack the index */
void rollback() throws IndexException;
}
......@@ -19,8 +19,7 @@
package org.apache.atlas.discovery.graph;
import com.thinkaurelius.titan.core.TitanVertex;
import org.apache.atlas.GraphTransaction;
import org.apache.atlas.MetadataException;
import org.apache.atlas.AtlasException;
import org.apache.atlas.query.Expressions;
import org.apache.atlas.query.GraphPersistenceStrategies;
import org.apache.atlas.query.GraphPersistenceStrategies$class;
......@@ -81,7 +80,7 @@ public class DefaultGraphPersistenceStrategy implements GraphPersistenceStrategi
public String fieldNameInVertex(IDataType<?> dataType, AttributeInfo aInfo) {
try {
return metadataRepository.getFieldNameInVertex(dataType, aInfo);
} catch (MetadataException e) {
} catch (AtlasException e) {
throw new RuntimeException(e);
}
}
......@@ -160,7 +159,7 @@ public class DefaultGraphPersistenceStrategy implements GraphPersistenceStrategi
throw new UnsupportedOperationException(
"Load for type " + dataType + "is not supported");
}
} catch (MetadataException e) {
} catch (AtlasException e) {
LOG.error("error while constructing an instance", e);
}
......
......@@ -24,7 +24,7 @@ import com.thinkaurelius.titan.core.TitanProperty;
import com.thinkaurelius.titan.core.TitanVertex;
import com.tinkerpop.blueprints.Vertex;
import org.apache.atlas.GraphTransaction;
import org.apache.atlas.MetadataServiceClient;
import org.apache.atlas.AtlasClient;
import org.apache.atlas.discovery.DiscoveryException;
import org.apache.atlas.discovery.DiscoveryService;
import org.apache.atlas.query.Expressions;
......@@ -98,7 +98,7 @@ public class GraphBackedDiscoveryService implements DiscoveryService {
if (guid != null) { //Filter non-class entities
try {
row.put("guid", guid);
row.put(MetadataServiceClient.TYPENAME, vertex.<String>getProperty(Constants.ENTITY_TYPE_PROPERTY_KEY));
row.put(AtlasClient.TYPENAME, vertex.<String>getProperty(Constants.ENTITY_TYPE_PROPERTY_KEY));
row.put(SCORE, result.getScore());
} catch (JSONException e) {
LOG.error("Unable to create response", e);
......
......@@ -18,7 +18,7 @@
package org.apache.atlas.listener;
import org.apache.atlas.MetadataException;
import org.apache.atlas.AtlasException;
import org.apache.atlas.typesystem.ITypedReferenceableInstance;
/**
......@@ -30,25 +30,25 @@ public interface EntityChangeListener {
* This is upon adding a new typed instance to the repository.
*
* @param typedInstance a typed instance
* @throws org.apache.atlas.MetadataException
* @throws AtlasException
*/
void onEntityAdded(ITypedReferenceableInstance typedInstance) throws MetadataException;
void onEntityAdded(ITypedReferenceableInstance typedInstance) throws AtlasException;
/**
* This is upon adding a new trait to a typed instance.
*
* @param guid globally unique identifier for the entity
* @param traitName trait name for the instance that needs to be added to entity
* @throws org.apache.atlas.MetadataException
* @throws AtlasException
*/
void onTraitAdded(String guid, String traitName) throws MetadataException;
void onTraitAdded(String guid, String traitName) throws AtlasException;
/**
* This is upon deleting a trait from a typed instance.
*
* @param guid globally unique identifier for the entity
* @param traitName trait name for the instance that needs to be deleted from entity
* @throws org.apache.atlas.MetadataException
* @throws AtlasException
*/
void onTraitDeleted(String guid, String traitName) throws MetadataException;
void onTraitDeleted(String guid, String traitName) throws AtlasException;
}
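A minimal implementation of the contract above, as a sketch; LoggingEntityListener is hypothetical:

import org.apache.atlas.AtlasException;
import org.apache.atlas.listener.EntityChangeListener;
import org.apache.atlas.typesystem.ITypedReferenceableInstance;

public class LoggingEntityListener implements EntityChangeListener {
    @Override
    public void onEntityAdded(ITypedReferenceableInstance typedInstance) throws AtlasException {
        System.out.println("entity added: " + typedInstance.getTypeName());
    }

    @Override
    public void onTraitAdded(String guid, String traitName) throws AtlasException {
        System.out.println("trait " + traitName + " added to entity " + guid);
    }

    @Override
    public void onTraitDeleted(String guid, String traitName) throws AtlasException {
        System.out.println("trait " + traitName + " deleted from entity " + guid);
    }
}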
......@@ -18,7 +18,7 @@
package org.apache.atlas.listener;
import org.apache.atlas.MetadataException;
import org.apache.atlas.AtlasException;
import org.apache.atlas.typesystem.types.IDataType;
import java.util.Collection;
......@@ -32,15 +32,15 @@ public interface TypesChangeListener {
* This is upon adding new type(s) to Store.
*
* @param dataTypes data type
* @throws MetadataException
* @throws AtlasException
*/
void onAdd(Collection<? extends IDataType> dataTypes) throws MetadataException;
void onAdd(Collection<? extends IDataType> dataTypes) throws AtlasException;
/**
* This is upon removing an existing type from the Store.
*
* @param typeName type name
* @throws MetadataException
* @throws AtlasException
*/
// void onRemove(String typeName) throws MetadataException;
......
......@@ -18,7 +18,7 @@
package org.apache.atlas.repository;
import org.apache.atlas.MetadataException;
import org.apache.atlas.AtlasException;
import org.apache.atlas.typesystem.IReferenceableInstance;
import org.apache.atlas.typesystem.persistence.Id;
import org.apache.atlas.typesystem.types.DataTypes;
......@@ -43,7 +43,7 @@ public class DiscoverInstances implements ObjectGraphWalker.NodeProcessor {
}
@Override
public void processNode(ObjectGraphWalker.Node nd) throws MetadataException {
public void processNode(ObjectGraphWalker.Node nd) throws AtlasException {
IReferenceableInstance ref = null;
Id id = null;
......
......@@ -18,9 +18,9 @@
package org.apache.atlas.repository;
import org.apache.atlas.MetadataException;
import org.apache.atlas.AtlasException;
public class IndexException extends MetadataException {
public class IndexException extends AtlasException {
public IndexException() {
}
......
......@@ -18,7 +18,7 @@
package org.apache.atlas.repository;
import org.apache.atlas.MetadataException;
import org.apache.atlas.AtlasException;
import org.apache.atlas.typesystem.IReferenceableInstance;
import org.apache.atlas.typesystem.ITypedReferenceableInstance;
import org.apache.atlas.typesystem.ITypedStruct;
......@@ -62,7 +62,7 @@ public interface MetadataRepository {
* @param aInfo attribute info
* @return property key used to store a given attribute
*/
String getFieldNameInVertex(IDataType<?> dataType, AttributeInfo aInfo) throws MetadataException;
String getFieldNameInVertex(IDataType<?> dataType, AttributeInfo aInfo) throws AtlasException;
/**
* Return the edge label for a given attribute in the repository.
......@@ -130,7 +130,7 @@ public interface MetadataRepository {
* @return a list of trait names for the given entity guid
* @throws RepositoryException
*/
List<String> getTraitNames(String guid) throws MetadataException;
List<String> getTraitNames(String guid) throws AtlasException;
/**
* Adds a new trait to an existing entity represented by a guid.
......
......@@ -18,12 +18,12 @@
package org.apache.atlas.repository;
import org.apache.atlas.MetadataException;
import org.apache.atlas.AtlasException;
/**
* Base Exception class for Repository API.
*/
public class RepositoryException extends MetadataException {
public class RepositoryException extends AtlasException {
public RepositoryException() {
}
......
......@@ -27,7 +27,7 @@ import com.thinkaurelius.titan.core.schema.TitanGraphIndex;
import com.thinkaurelius.titan.core.schema.TitanManagement;
import com.tinkerpop.blueprints.Edge;
import com.tinkerpop.blueprints.Vertex;
import org.apache.atlas.MetadataException;
import org.apache.atlas.AtlasException;
import org.apache.atlas.discovery.SearchIndexer;
import org.apache.atlas.listener.TypesChangeListener;
import org.apache.atlas.repository.Constants;
......@@ -53,7 +53,7 @@ import java.util.Map;
/**
* Adds index for properties of a given type when its added before any instances are added.
*/
public class GraphBackedSearchIndexer implements SearchIndexer, Provider<TypesChangeListener> {
public class GraphBackedSearchIndexer implements SearchIndexer {
private static final Logger LOG = LoggerFactory.getLogger(GraphBackedSearchIndexer.class);
......@@ -145,10 +145,10 @@ public class GraphBackedSearchIndexer implements SearchIndexer, Provider<TypesCh
* This is upon adding a new type to Store.
*
* @param dataTypes data type
* @throws org.apache.atlas.MetadataException
* @throws org.apache.atlas.AtlasException
*/
@Override
public void onAdd(Collection<? extends IDataType> dataTypes) throws MetadataException {
public void onAdd(Collection<? extends IDataType> dataTypes) throws AtlasException {
for(IDataType dataType : dataTypes) {
LOG.info("Creating indexes for type name={}, definition={}", dataType.getName(), dataType.getClass());
......@@ -157,9 +157,13 @@ public class GraphBackedSearchIndexer implements SearchIndexer, Provider<TypesCh
LOG.info("Index creation for type {} complete", dataType.getName());
} catch (Throwable throwable) {
LOG.error("Error creating index for type {}", dataType, throwable);
//Rollback indexes if any failure
rollback();
throw new IndexCreationException("Error while creating index for type " + dataType, throwable);
}
}
//Commit indexes
commit();
}
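The control flow onAdd now owns, condensed as a sketch (the per-type rollback above is folded into one catch for brevity):

try {
    for (IDataType dataType : dataTypes) {
        addIndexForType(dataType);  // stage index mutations on the management txn
    }
    commit();                       // management.commit() once all types succeed
} catch (Throwable t) {
    rollback();                     // management.rollback() leaves the txn clean
    throw new IndexCreationException("Error while creating indexes", t);
}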
private void addIndexForType(IDataType dataType) {
......@@ -350,7 +354,6 @@ public class GraphBackedSearchIndexer implements SearchIndexer, Provider<TypesCh
return true;
}
@Override
public void commit() throws IndexException {
try {
management.commit();
......@@ -360,7 +363,6 @@ public class GraphBackedSearchIndexer implements SearchIndexer, Provider<TypesCh
}
}
@Override
public void rollback() throws IndexException {
try {
management.rollback();
......@@ -370,11 +372,6 @@ public class GraphBackedSearchIndexer implements SearchIndexer, Provider<TypesCh
}
}
@Override
public TypesChangeListener get() {
return this;
}
/* Commenting this out since we do not need an index for edge label here
private void createEdgeMixedIndex(String propertyName) {
EdgeLabel edgeLabel = management.getEdgeLabel(propertyName);
......
......@@ -21,7 +21,7 @@ package org.apache.atlas.repository.graph;
import com.google.inject.Provides;
import com.thinkaurelius.titan.core.TitanFactory;
import com.thinkaurelius.titan.core.TitanGraph;
import org.apache.atlas.MetadataException;
import org.apache.atlas.AtlasException;
import org.apache.atlas.PropertiesUtil;
import org.apache.commons.configuration.Configuration;
import org.apache.commons.configuration.PropertiesConfiguration;
......@@ -43,7 +43,7 @@ public class TitanGraphProvider implements GraphProvider<TitanGraph> {
*/
private static final String ATLAS_PREFIX = "atlas.graph.";
private static Configuration getConfiguration() throws MetadataException {
private static Configuration getConfiguration() throws AtlasException {
PropertiesConfiguration configProperties = PropertiesUtil.getApplicationProperties();
Configuration graphConfig = new PropertiesConfiguration();
......@@ -69,7 +69,7 @@ public class TitanGraphProvider implements GraphProvider<TitanGraph> {
Configuration config;
try {
config = getConfiguration();
} catch (MetadataException e) {
} catch (AtlasException e) {
throw new RuntimeException(e);
}
......
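Condensed sketch of the prefix translation getConfiguration() performs, assuming standard commons-configuration iteration: every atlas.graph.* key from application.properties is copied into the Titan config with the prefix removed (e.g. atlas.graph.storage.backend becomes storage.backend):

Configuration graphConfig = new PropertiesConfiguration();
Iterator<String> keys = configProperties.getKeys();
while (keys.hasNext()) {
    String key = keys.next();
    if (key.startsWith(ATLAS_PREFIX)) {
        // strip "atlas.graph." so Titan sees its native property names
        graphConfig.setProperty(key.substring(ATLAS_PREFIX.length()),
                configProperties.getProperty(key));
    }
}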
......@@ -19,7 +19,7 @@
package org.apache.atlas.repository.memory;
import com.google.common.collect.ImmutableList;
import org.apache.atlas.MetadataException;
import org.apache.atlas.AtlasException;
import org.apache.atlas.repository.RepositoryException;
import org.apache.atlas.typesystem.persistence.Id;
import org.apache.atlas.typesystem.persistence.ReferenceableInstance;
......@@ -87,7 +87,7 @@ public class ClassStore extends HierarchicalTypeStore {
try {
ReferenceableInstance r = (ReferenceableInstance) classType.createInstance(id, tNs);
return r;
} catch (MetadataException me) {
} catch (AtlasException me) {
throw new RepositoryException(me);
}
}
......
......@@ -18,7 +18,7 @@
package org.apache.atlas.repository.memory;
import org.apache.atlas.MetadataException;
import org.apache.atlas.AtlasException;
import org.apache.atlas.repository.DiscoverInstances;
import org.apache.atlas.repository.IRepository;
import org.apache.atlas.repository.RepositoryException;
......@@ -120,7 +120,7 @@ public class MemRepository implements IRepository {
*/
try {
new ObjectGraphWalker(typeSystem, discoverInstances, i).walk();
} catch (MetadataException me) {
} catch (AtlasException me) {
throw new RepositoryException("TypeSystem error when walking the ObjectGraph", me);
}
......@@ -170,7 +170,7 @@ public class MemRepository implements IRepository {
MapIds mapIds = new MapIds(discoverInstances.idToNewIdMap);
new ObjectGraphWalker(typeSystem, mapIds, newInstances).walk();
} catch (MetadataException me) {
} catch (AtlasException me) {
throw new RepositoryException(
String.format("Failed to create Instance(id = %s",
transientInstance.getId()), me);
......@@ -255,7 +255,7 @@ public class MemRepository implements IRepository {
ITypedReferenceableInstance r = getDuringWalk(id, walker);
walker.walk();
return r;
} catch (MetadataException me) {
} catch (AtlasException me) {
throw new RepositoryException("TypeSystem error when walking the ObjectGraph", me);
}
}
......
......@@ -22,7 +22,7 @@ import com.google.common.collect.ImmutableCollection;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableMap;
import com.google.common.collect.ImmutableSet;
import org.apache.atlas.MetadataException;
import org.apache.atlas.AtlasException;
import org.apache.atlas.typesystem.ITypedReferenceableInstance;
import org.apache.atlas.typesystem.persistence.Id;
import org.apache.atlas.typesystem.types.DataTypes;
......@@ -49,7 +49,7 @@ public class ReplaceIdWithInstance implements ObjectGraphWalker.NodeProcessor {
}
@Override
public void processNode(ObjectGraphWalker.Node nd) throws MetadataException {
public void processNode(ObjectGraphWalker.Node nd) throws AtlasException {
if (nd.attributeName == null) {
// do nothing
} else if (!nd.aInfo.isComposite || nd.value == null) {
......@@ -74,7 +74,7 @@ public class ReplaceIdWithInstance implements ObjectGraphWalker.NodeProcessor {
ImmutableCollection<?> convertToInstances(ImmutableCollection<?> val,
Multiplicity m, DataTypes.ArrayType arrType)
throws MetadataException {
throws AtlasException {
if (val == null ||
arrType.getElemType().getTypeCategory() != DataTypes.TypeCategory.CLASS) {
......@@ -99,7 +99,7 @@ public class ReplaceIdWithInstance implements ObjectGraphWalker.NodeProcessor {
ImmutableMap<?, ?> convertToInstances(ImmutableMap val, Multiplicity m,
DataTypes.MapType mapType)
throws MetadataException {
throws AtlasException {
if (val == null ||
(mapType.getKeyType().getTypeCategory() != DataTypes.TypeCategory.CLASS &&
......@@ -130,7 +130,7 @@ public class ReplaceIdWithInstance implements ObjectGraphWalker.NodeProcessor {
return b.build();
}
ITypedReferenceableInstance getInstance(Id id) throws MetadataException {
ITypedReferenceableInstance getInstance(Id id) throws AtlasException {
ITypedReferenceableInstance r = idToInstanceMap.get(id);
if (r == null) {
......
......@@ -26,7 +26,7 @@ import com.tinkerpop.blueprints.Direction;
import com.tinkerpop.blueprints.Edge;
import com.tinkerpop.blueprints.Vertex;
import org.apache.atlas.GraphTransaction;
import org.apache.atlas.MetadataException;
import org.apache.atlas.AtlasException;
import org.apache.atlas.repository.Constants;
import org.apache.atlas.repository.graph.GraphProvider;
import org.apache.atlas.typesystem.TypesDef;
......@@ -69,39 +69,36 @@ public class GraphBackedTypeStore implements ITypeStore {
}
@Override
public void store(TypeSystem typeSystem) throws MetadataException {
public void store(TypeSystem typeSystem) throws AtlasException {
store(typeSystem, ImmutableList.copyOf(typeSystem.getTypeNames()));
}
@Override
@GraphTransaction
public void store(TypeSystem typeSystem, ImmutableList<String> typeNames) throws MetadataException {
ImmutableList<String> coreTypes = typeSystem.getCoreTypes();
public void store(TypeSystem typeSystem, ImmutableList<String> typeNames) throws AtlasException {
for (String typeName : typeNames) {
if (!coreTypes.contains(typeName)) {
IDataType dataType = typeSystem.getDataType(IDataType.class, typeName);
LOG.debug("Processing {}.{} in type store", dataType.getTypeCategory(), dataType.getName());
switch (dataType.getTypeCategory()) {
case ENUM:
storeInGraph((EnumType)dataType);
break;
case STRUCT:
StructType structType = (StructType) dataType;
storeInGraph(typeSystem, dataType.getTypeCategory(), dataType.getName(),
ImmutableList.copyOf(structType.infoToNameMap.keySet()), ImmutableList.<String>of());
break;
case TRAIT:
case CLASS:
HierarchicalType type = (HierarchicalType) dataType;
storeInGraph(typeSystem, dataType.getTypeCategory(), dataType.getName(),
type.immediateAttrs, type.superTypes);
break;
default: //Ignore primitive/collection types as they are covered under references
break;
}
IDataType dataType = typeSystem.getDataType(IDataType.class, typeName);
LOG.debug("Processing {}.{} in type store", dataType.getTypeCategory(), dataType.getName());
switch (dataType.getTypeCategory()) {
case ENUM:
storeInGraph((EnumType)dataType);
break;
case STRUCT:
StructType structType = (StructType) dataType;
storeInGraph(typeSystem, dataType.getTypeCategory(), dataType.getName(),
ImmutableList.copyOf(structType.infoToNameMap.keySet()), ImmutableList.<String>of());
break;
case TRAIT:
case CLASS:
HierarchicalType type = (HierarchicalType) dataType;
storeInGraph(typeSystem, dataType.getTypeCategory(), dataType.getName(),
type.immediateAttrs, type.superTypes);
break;
default: //Ignore primitive/collection types as they are covered under references
break;
}
}
}
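Note on the hunk above: the core-type filter (coreTypes.contains(typeName)) is gone from the store loop, so whatever list of names reaches store() is persisted as-is. A minimal caller-side sketch, assuming a caller still wants to exclude core types (imports as in the hunks above):

    // Persist only user-defined types (sketch; the need to filter at the call site is
    // an assumption, not something this commit documents).
    ImmutableList<String> coreTypes = typeSystem.getCoreTypes();
    List<String> userTypes = new ArrayList<>();
    for (String typeName : typeSystem.getTypeNames()) {
        if (!coreTypes.contains(typeName)) {
            userTypes.add(typeName);
        }
    }
    typeStore.store(typeSystem, ImmutableList.copyOf(userTypes));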
......@@ -135,7 +132,8 @@ public class GraphBackedTypeStore implements ITypeStore {
}
private void storeInGraph(TypeSystem typeSystem, DataTypes.TypeCategory category, String typeName,
ImmutableList<AttributeInfo> attributes, ImmutableList<String> superTypes) throws MetadataException {
ImmutableList<AttributeInfo> attributes, ImmutableList<String> superTypes) throws
AtlasException {
Vertex vertex = createVertex(category, typeName);
List<String> attrNames = new ArrayList<>();
if (attributes != null) {
......@@ -163,7 +161,8 @@ public class GraphBackedTypeStore implements ITypeStore {
}
//Add edges for complex attributes
private void addReferencesForAttribute(TypeSystem typeSystem, Vertex vertex, AttributeInfo attribute) throws MetadataException {
private void addReferencesForAttribute(TypeSystem typeSystem, Vertex vertex, AttributeInfo attribute) throws
AtlasException {
ImmutableList<String> coreTypes = typeSystem.getCoreTypes();
List<IDataType> attrDataTypes = new ArrayList<>();
IDataType attrDataType = attribute.dataType();
......@@ -207,13 +206,13 @@ public class GraphBackedTypeStore implements ITypeStore {
}
private void addEdge(Vertex fromVertex, Vertex toVertex, String label) {
LOG.debug("Adding edge from {} to {} with label {}" + toString(fromVertex), toString(toVertex), label);
LOG.debug("Adding edge from {} to {} with label {}", toString(fromVertex), toString(toVertex), label);
titanGraph.addEdge(null, fromVertex, toVertex, label);
}
@Override
@GraphTransaction
public TypesDef restore() throws MetadataException {
public TypesDef restore() throws AtlasException {
//Get all vertices for type system
Iterator vertices =
titanGraph.query().has(Constants.VERTEX_TYPE_PROPERTY_KEY, VERTEX_TYPE).vertices().iterator();
......@@ -278,7 +277,7 @@ public class GraphBackedTypeStore implements ITypeStore {
return ImmutableList.copyOf(superTypes);
}
private AttributeDefinition[] getAttributes(Vertex vertex, String typeName) throws MetadataException {
private AttributeDefinition[] getAttributes(Vertex vertex, String typeName) throws AtlasException {
List<AttributeDefinition> attributes = new ArrayList<>();
List<String> attrNames = vertex.getProperty(getPropertyKey(typeName));
if (attrNames != null) {
......@@ -287,7 +286,7 @@ public class GraphBackedTypeStore implements ITypeStore {
String propertyKey = getPropertyKey(typeName, attrName);
attributes.add(AttributeInfo.fromJson((String) vertex.getProperty(propertyKey)));
} catch (JSONException e) {
throw new MetadataException(e);
throw new AtlasException(e);
}
}
}
......@@ -295,7 +294,7 @@ public class GraphBackedTypeStore implements ITypeStore {
}
private String toString(Vertex vertex) {
return PROPERTY_PREFIX + "." + vertex.getProperty(Constants.TYPENAME_PROPERTY_KEY);
return PROPERTY_PREFIX + vertex.getProperty(Constants.TYPENAME_PROPERTY_KEY);
}
/**
......
......@@ -19,7 +19,7 @@
package org.apache.atlas.repository.typestore;
import com.google.common.collect.ImmutableList;
import org.apache.atlas.MetadataException;
import org.apache.atlas.AtlasException;
import org.apache.atlas.typesystem.TypesDef;
import org.apache.atlas.typesystem.types.TypeSystem;
......@@ -29,7 +29,7 @@ public interface ITypeStore {
* @param typeSystem type system to persist
* @throws StorageException
*/
void store(TypeSystem typeSystem) throws MetadataException;
void store(TypeSystem typeSystem) throws AtlasException;
/**
* Persist the given type in the type system - insert or update
......@@ -37,12 +37,12 @@ public interface ITypeStore {
* @param types types to persist
* @throws StorageException
*/
void store(TypeSystem typeSystem, ImmutableList<String> types) throws MetadataException;
void store(TypeSystem typeSystem, ImmutableList<String> types) throws AtlasException;
/**
* Restore all type definitions
* @return List of persisted type definitions
* @throws org.apache.atlas.MetadataException
* @throws AtlasException
*/
TypesDef restore() throws MetadataException;
TypesDef restore() throws AtlasException;
}
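A round-trip sketch for the interface above; the concrete store (e.g. the graph-backed one earlier in this diff) is assumed to be supplied by the caller, since the Guice wiring is out of scope here:

    // Persist the full type system, then read every definition back (sketch).
    static TypesDef persistAndReload(ITypeStore typeStore) throws AtlasException {
        typeStore.store(TypeSystem.getInstance());   // insert-or-update all known types
        return typeStore.restore();                  // all persisted type definitions
    }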
......@@ -18,9 +18,9 @@
package org.apache.atlas.repository.typestore;
import org.apache.atlas.MetadataException;
import org.apache.atlas.AtlasException;
public class StorageException extends MetadataException {
public class StorageException extends AtlasException {
public StorageException(String type) {
super("Failure in typesystem storage for type " + type);
}
......
......@@ -18,7 +18,7 @@
package org.apache.atlas.services;
import org.apache.atlas.MetadataException;
import org.apache.atlas.AtlasException;
import org.apache.atlas.typesystem.types.DataTypes;
import org.codehaus.jettison.json.JSONObject;
......@@ -36,7 +36,7 @@ public interface MetadataService {
* @param typeDefinition definition as json
* @return a unique id for this type
*/
JSONObject createType(String typeDefinition) throws MetadataException;
JSONObject createType(String typeDefinition) throws AtlasException;
/**
* Return the definition for the given type.
......@@ -44,21 +44,21 @@ public interface MetadataService {
* @param typeName name for this type, must be unique
* @return type definition as JSON
*/
String getTypeDefinition(String typeName) throws MetadataException;
String getTypeDefinition(String typeName) throws AtlasException;
/**
* Return the list of types in the type system.
*
* @return list of type names in the type system
*/
List<String> getTypeNamesList() throws MetadataException;
List<String> getTypeNamesList() throws AtlasException;
/**
* Return the list of trait type names in the type system.
*
* @return list of trait type names in the type system
*/
List<String> getTypeNamesByCategory(DataTypes.TypeCategory typeCategory) throws MetadataException;
List<String> getTypeNamesByCategory(DataTypes.TypeCategory typeCategory) throws AtlasException;
/**
* Creates an entity, instance of the type.
......@@ -66,7 +66,7 @@ public interface MetadataService {
* @param entityDefinition definition
* @return guid
*/
String createEntity(String entityDefinition) throws MetadataException;
String createEntity(String entityDefinition) throws AtlasException;
/**
* Return the definition for the given guid.
......@@ -74,7 +74,7 @@ public interface MetadataService {
* @param guid guid
* @return entity definition as JSON
*/
String getEntityDefinition(String guid) throws MetadataException;
String getEntityDefinition(String guid) throws AtlasException;
/**
* Return the list of entity names for the given type in the repository.
......@@ -82,7 +82,7 @@ public interface MetadataService {
* @param entityType type
* @return list of entity names for the given type in the repository
*/
List<String> getEntityList(String entityType) throws MetadataException;
List<String> getEntityList(String entityType) throws AtlasException;
/**
* Adds the property to the given entity id(guid).
......@@ -91,7 +91,7 @@ public interface MetadataService {
* @param property property name
* @param value property value
*/
void updateEntity(String guid, String property, String value) throws MetadataException;
void updateEntity(String guid, String property, String value) throws AtlasException;
// Trait management functions
/**
......@@ -99,27 +99,27 @@ public interface MetadataService {
*
* @param guid globally unique identifier for the entity
* @return a list of trait names for the given entity guid
* @throws MetadataException
* @throws AtlasException
*/
List<String> getTraitNames(String guid) throws MetadataException;
List<String> getTraitNames(String guid) throws AtlasException;
/**
* Adds a new trait to an existing entity represented by a guid.
*
* @param guid globally unique identifier for the entity
* @param traitInstanceDefinition trait instance that needs to be added to entity
* @throws MetadataException
* @throws AtlasException
*/
void addTrait(String guid,
String traitInstanceDefinition) throws MetadataException;
String traitInstanceDefinition) throws AtlasException;
/**
* Deletes a given trait from an existing entity represented by a guid.
*
* @param guid globally unique identifier for the entity
* @param traitNameToBeDeleted name of the trait
* @throws MetadataException
* @throws AtlasException
*/
void deleteTrait(String guid,
String traitNameToBeDeleted) throws MetadataException;
String traitNameToBeDeleted) throws AtlasException;
}
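A minimal sketch of the trait-management calls declared above. The trait name and JSON payload are hypothetical, and obtaining the MetadataService instance (it is injected in the webapp) is out of scope:

    // Add a trait to an entity only if it is not already attached (sketch; "PII" and
    // traitJson are illustrative values, not part of this commit).
    static void tagOnce(MetadataService service, String guid, String traitJson)
            throws AtlasException {
        List<String> existing = service.getTraitNames(guid);
        if (!existing.contains("PII")) {
            service.addTrait(guid, traitJson);
        }
    }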
......@@ -18,7 +18,7 @@
package org.apache.atlas.query
import org.apache.atlas.MetadataException
import org.apache.atlas.AtlasException
import org.apache.atlas.typesystem.types.DataTypes.{ArrayType, PrimitiveType, TypeCategory}
import org.apache.atlas.typesystem.types._
......@@ -28,7 +28,7 @@ object Expressions {
class ExpressionException(val e: Expression, message: String, cause: Throwable, enableSuppression: Boolean,
writableStackTrace: Boolean)
extends MetadataException(message, cause, enableSuppression, writableStackTrace) {
extends AtlasException(message, cause, enableSuppression, writableStackTrace) {
def this(e: Expression, message: String) {
this(e, message, null, false, false)
......@@ -510,7 +510,7 @@ object Expressions {
try {
typSystem.getDataType(classOf[TraitType], traitName)
} catch {
case me: MetadataException => throw new ExpressionException(this, "not a TraitType", me)
case me: AtlasException => throw new ExpressionException(this, "not a TraitType", me)
}
override lazy val resolved = classExpression.isDefined
......@@ -580,7 +580,7 @@ object Expressions {
s"datatype. Can not resolve due to unresolved child")
}
if (!TypeUtils.fieldMapping(child.dataType).isDefined) {
throw new MetadataException(s"Cannot apply hasField on ${child.dataType.getName}")
throw new AtlasException(s"Cannot apply hasField on ${child.dataType.getName}")
}
DataTypes.BOOLEAN_TYPE
}
......@@ -615,7 +615,7 @@ object Expressions {
}
children.foreach { childExpr =>
if (childExpr.dataType != DataTypes.BOOLEAN_TYPE) {
throw new MetadataException(
throw new AtlasException(
s"Cannot apply logical operator '$symbol' on input of type '${childExpr.dataType}")
}
}
......
......@@ -21,7 +21,7 @@ package org.apache.atlas.query
import java.util
import java.util.concurrent.atomic.AtomicInteger
import org.apache.atlas.MetadataException
import org.apache.atlas.AtlasException
import org.apache.atlas.query.Expressions.{PathExpression, SelectExpression}
import org.apache.atlas.typesystem.types.DataTypes.{ArrayType, PrimitiveType, TypeCategory}
import org.apache.atlas.typesystem.types._
......@@ -52,7 +52,7 @@ object TypeUtils {
return numericTypes(rIdx)
}
throw new MetadataException(s"Cannot combine types: ${typ1.getName} and ${typ2.getName}")
throw new AtlasException(s"Cannot combine types: ${typ1.getName} and ${typ2.getName}")
}
var tempStructCounter : AtomicInteger = new AtomicInteger(0)
......@@ -239,7 +239,7 @@ object TypeUtils {
}
}
} catch {
case _ : MetadataException => None
case _ : AtlasException => None
}
}
None
......@@ -249,7 +249,7 @@ object TypeUtils {
try {
Some(typSystem.getDataType(classOf[ClassType], id))
} catch {
case _ : MetadataException => None
case _ : AtlasException => None
}
}
......@@ -257,7 +257,7 @@ object TypeUtils {
try {
Some(typSystem.getDataType(classOf[TraitType], id))
} catch {
case _ : MetadataException => None
case _ : AtlasException => None
}
}
}
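The Scala hunks above all apply the same idiom: a failed type lookup surfaces as an AtlasException and is mapped to None. A Java analogue of that lookup-or-absent pattern, for comparison (sketch):

    // Treat an unknown type name as absent instead of propagating the exception.
    static ClassType classTypeOrNull(TypeSystem ts, String name) {
        try {
            return ts.getDataType(ClassType.class, name);
        } catch (AtlasException e) {
            return null;   // name unknown, or not a class type
        }
    }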
......@@ -88,7 +88,7 @@ public final class TestUtils {
* <p/>
* Persons can have SecurityClearance(level : Int) clearance.
*/
public static void defineDeptEmployeeTypes(TypeSystem ts) throws MetadataException {
public static void defineDeptEmployeeTypes(TypeSystem ts) throws AtlasException {
EnumTypeDefinition orgLevelEnum =
new EnumTypeDefinition("OrgLevel", new EnumValue("L1", 1), new EnumValue("L2", 2));
......@@ -135,7 +135,7 @@ public final class TestUtils {
ImmutableList.of(deptTypeDef, personTypeDef, managerTypeDef));
}
public static Referenceable createDeptEg1(TypeSystem ts) throws MetadataException {
public static Referenceable createDeptEg1(TypeSystem ts) throws AtlasException {
Referenceable hrDept = new Referenceable("Department");
Referenceable john = new Referenceable("Person");
Referenceable jane = new Referenceable("Manager", "SecurityClearance");
......
......@@ -21,7 +21,7 @@ package org.apache.atlas.repository;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.Lists;
import com.google.common.collect.Maps;
import org.apache.atlas.MetadataException;
import org.apache.atlas.AtlasException;
import org.apache.atlas.repository.memory.MemRepository;
import org.apache.atlas.typesystem.ITypedReferenceableInstance;
import org.apache.atlas.typesystem.Referenceable;
......@@ -53,7 +53,7 @@ public abstract class BaseTest {
public static final long TEST_DATE_IN_LONG=1418265358440L;
protected IRepository repo;
public static Struct createStruct() throws MetadataException {
public static Struct createStruct() throws AtlasException {
StructType structType = (StructType) TypeSystem.getInstance()
.getDataType(StructType.class, STRUCT_TYPE_1);
Struct s = new Struct(structType.getName());
......@@ -120,7 +120,7 @@ public abstract class BaseTest {
}
protected Map<String, IDataType> defineTraits(HierarchicalTypeDefinition... tDefs)
throws MetadataException {
throws AtlasException {
return getTypeSystem().defineTraitTypes(tDefs);
}
......@@ -133,7 +133,7 @@ public abstract class BaseTest {
*
* Persons can have SecurityClearance(level : Int) clearance.
*/
protected void defineDeptEmployeeTypes(TypeSystem ts) throws MetadataException {
protected void defineDeptEmployeeTypes(TypeSystem ts) throws AtlasException {
HierarchicalTypeDefinition<ClassType> deptTypeDef =
TypesUtil.createClassTypeDef("Department", ImmutableList.<String>of(),
......@@ -179,7 +179,7 @@ public abstract class BaseTest {
}
protected Referenceable createDeptEg1(TypeSystem ts) throws MetadataException {
protected Referenceable createDeptEg1(TypeSystem ts) throws AtlasException {
Referenceable hrDept = new Referenceable("Department");
Referenceable john = new Referenceable("Person");
Referenceable jane = new Referenceable("Manager", "SecurityClearance");
......
......@@ -223,7 +223,6 @@ public class GraphRepoMapperScaleTest {
EnumTypeDefinition enumTypeDefinition = new EnumTypeDefinition("table_type", values);
final EnumType enumType = typeSystem.defineEnumType(enumTypeDefinition);
searchIndexer.onAdd(new ArrayList<IDataType>() {{ add(enumType); }});
HierarchicalTypeDefinition<ClassType> columnsDefinition =
TypesUtil.createClassTypeDef("hive_column_type",
......@@ -275,10 +274,11 @@ public class GraphRepoMapperScaleTest {
ImmutableList.of(classificationTypeDefinition),
ImmutableList.of(databaseTypeDefinition, columnsDefinition, tableTypeDefinition));
for (final Map.Entry<String, IDataType> entry : types.entrySet()) {
searchIndexer.onAdd(new ArrayList<IDataType>() {{ add(entry.getValue()); }});
}
searchIndexer.commit();
ArrayList<IDataType> typesAdded = new ArrayList<IDataType>();
typesAdded.add(enumType);
typesAdded.addAll(types.values());
searchIndexer.onAdd(typesAdded);
}
private ITypedReferenceableInstance createHiveTableInstance(
......
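This test change is the listener improvement named in the commit message: rather than firing the indexer once per type (and then committing), the test now collects every newly defined type and notifies the indexer with a single batch. The pattern, extracted as a sketch (the collection-accepting onAdd signature is taken from the test itself):

    // Batch type-definition events into one listener callback (sketch).
    List<IDataType> typesAdded = new ArrayList<>();
    typesAdded.add(enumType);            // the enum defined earlier in the test
    typesAdded.addAll(types.values());   // class/trait types defined afterwards
    searchIndexer.onAdd(typesAdded);     // one notification for the whole batch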
......@@ -18,7 +18,7 @@
package org.apache.atlas.repository.memory;
import org.apache.atlas.MetadataException;
import org.apache.atlas.AtlasException;
import org.apache.atlas.repository.BaseTest;
import org.apache.atlas.typesystem.ITypedReferenceableInstance;
import org.apache.atlas.typesystem.Referenceable;
......@@ -37,7 +37,7 @@ public class ClassTest extends BaseTest {
}
@Test
public void test1() throws MetadataException {
public void test1() throws AtlasException {
TypeSystem ts = getTypeSystem();
......
......@@ -21,7 +21,7 @@ package org.apache.atlas.repository.memory;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.Lists;
import com.google.common.collect.Maps;
import org.apache.atlas.MetadataException;
import org.apache.atlas.AtlasException;
import org.apache.atlas.repository.BaseTest;
import org.apache.atlas.typesystem.IReferenceableInstance;
import org.apache.atlas.typesystem.ITypedReferenceableInstance;
......@@ -54,7 +54,7 @@ public class EnumTest extends BaseTest {
super.setup();
}
void defineEnums(TypeSystem ts) throws MetadataException {
void defineEnums(TypeSystem ts) throws AtlasException {
ts.defineEnumType("HiveObjectType",
new EnumValue("GLOBAL", 1),
new EnumValue("DATABASE", 2),
......@@ -79,7 +79,7 @@ public class EnumTest extends BaseTest {
}
protected void fillStruct(Struct s) throws MetadataException {
protected void fillStruct(Struct s) throws AtlasException {
s.set("a", 1);
s.set("b", true);
s.set("c", (byte) 1);
......@@ -105,19 +105,19 @@ public class EnumTest extends BaseTest {
s.set("enum4", 3);
}
protected Struct createStructWithEnum(String typeName) throws MetadataException {
protected Struct createStructWithEnum(String typeName) throws AtlasException {
Struct s = new Struct(typeName);
fillStruct(s);
return s;
}
protected Referenceable createInstanceWithEnum(String typeName) throws MetadataException {
protected Referenceable createInstanceWithEnum(String typeName) throws AtlasException {
Referenceable r = new Referenceable(typeName);
fillStruct(r);
return r;
}
protected ClassType defineClassTypeWithEnum(TypeSystem ts) throws MetadataException {
protected ClassType defineClassTypeWithEnum(TypeSystem ts) throws AtlasException {
return ts.defineClassType(TypesUtil.createClassTypeDef("t4",
ImmutableList.<String>of(),
TypesUtil.createRequiredAttrDef("a", DataTypes.INT_TYPE),
......@@ -147,7 +147,7 @@ public class EnumTest extends BaseTest {
}
@Test
public void testStruct() throws MetadataException {
public void testStruct() throws AtlasException {
TypeSystem ts = getTypeSystem();
defineEnums(ts);
StructType structType = ts.defineStructType("t3",
......@@ -204,7 +204,7 @@ public class EnumTest extends BaseTest {
}
@Test
public void testClass() throws MetadataException {
public void testClass() throws AtlasException {
TypeSystem ts = getTypeSystem();
defineEnums(ts);
ClassType clsType = defineClassTypeWithEnum(ts);
......@@ -236,7 +236,7 @@ public class EnumTest extends BaseTest {
}
@Test
public void testStorage() throws MetadataException {
public void testStorage() throws AtlasException {
TypeSystem ts = getTypeSystem();
defineEnums(ts);
......@@ -273,7 +273,7 @@ public class EnumTest extends BaseTest {
}
@Test
public void testJson() throws MetadataException {
public void testJson() throws AtlasException {
TypeSystem ts = getTypeSystem();
defineEnums(ts);
......
......@@ -19,7 +19,7 @@
package org.apache.atlas.repository.memory;
import com.google.common.collect.ImmutableList;
import org.apache.atlas.MetadataException;
import org.apache.atlas.AtlasException;
import org.apache.atlas.repository.BaseTest;
import org.apache.atlas.typesystem.ITypedReferenceableInstance;
import org.apache.atlas.typesystem.Referenceable;
......@@ -45,7 +45,7 @@ import java.util.List;
public class InstanceE2ETest extends BaseTest {
protected List<HierarchicalTypeDefinition> createHiveTypes(TypeSystem typeSystem)
throws MetadataException {
throws AtlasException {
ArrayList<HierarchicalTypeDefinition> typeDefinitions = new ArrayList<>();
HierarchicalTypeDefinition<ClassType> databaseTypeDefinition =
......@@ -80,7 +80,7 @@ public class InstanceE2ETest extends BaseTest {
}
protected Referenceable createHiveTableReferenceable()
throws MetadataException {
throws AtlasException {
Referenceable databaseInstance = new Referenceable("hive_database");
databaseInstance.set("name", "hive_database");
databaseInstance.set("description", "foo database");
......@@ -100,13 +100,13 @@ public class InstanceE2ETest extends BaseTest {
}
protected ITypedReferenceableInstance createHiveTableInstance(TypeSystem typeSystem)
throws MetadataException {
throws AtlasException {
ClassType tableType = typeSystem.getDataType(ClassType.class, "hive_table");
return tableType.convert(createHiveTableReferenceable(), Multiplicity.REQUIRED);
}
@Test
public void testType() throws MetadataException {
public void testType() throws AtlasException {
TypeSystem ts = getTypeSystem();
......@@ -128,7 +128,7 @@ public class InstanceE2ETest extends BaseTest {
}
@Test
public void testInstance() throws MetadataException {
public void testInstance() throws AtlasException {
TypeSystem ts = getTypeSystem();
......@@ -144,7 +144,7 @@ public class InstanceE2ETest extends BaseTest {
}
@Test
public void testInstanceSerialization() throws MetadataException {
public void testInstanceSerialization() throws AtlasException {
TypeSystem ts = getTypeSystem();
......
......@@ -18,7 +18,7 @@
package org.apache.atlas.repository.memory;
import org.apache.atlas.MetadataException;
import org.apache.atlas.AtlasException;
import org.apache.atlas.repository.BaseTest;
import org.apache.atlas.repository.RepositoryException;
import org.apache.atlas.typesystem.ITypedReferenceableInstance;
......@@ -37,7 +37,7 @@ public class StorageTest extends BaseTest {
}
@Test
public void test1() throws MetadataException {
public void test1() throws AtlasException {
TypeSystem ts = getTypeSystem();
......@@ -68,7 +68,7 @@ public class StorageTest extends BaseTest {
}
@Test
public void testGetPerson() throws MetadataException {
public void testGetPerson() throws AtlasException {
TypeSystem ts = getTypeSystem();
defineDeptEmployeeTypes(ts);
......@@ -86,7 +86,7 @@ public class StorageTest extends BaseTest {
}
@Test
public void testInvalidTypeName() throws MetadataException {
public void testInvalidTypeName() throws AtlasException {
TypeSystem ts = getTypeSystem();
defineDeptEmployeeTypes(ts);
......@@ -104,7 +104,7 @@ public class StorageTest extends BaseTest {
}
@Test
public void testGetManager() throws MetadataException {
public void testGetManager() throws AtlasException {
TypeSystem ts = getTypeSystem();
defineDeptEmployeeTypes(ts);
......
......@@ -18,7 +18,7 @@
package org.apache.atlas.repository.memory;
import org.apache.atlas.MetadataException;
import org.apache.atlas.AtlasException;
import org.apache.atlas.repository.BaseTest;
import org.apache.atlas.typesystem.ITypedStruct;
import org.apache.atlas.typesystem.Struct;
......@@ -43,7 +43,7 @@ public class StructTest extends BaseTest {
}
@Test
public void test1() throws MetadataException {
public void test1() throws AtlasException {
Struct s = createStruct();
ITypedStruct ts = structType.convert(s, Multiplicity.REQUIRED);
Assert.assertEquals(ts.toString(), "{\n" +
......@@ -66,7 +66,7 @@ public class StructTest extends BaseTest {
}
@Test
public void testRecursive() throws MetadataException {
public void testRecursive() throws AtlasException {
Struct s1 = new Struct(recursiveStructType.getName());
s1.set("a", 1);
Struct s2 = new Struct(recursiveStructType.getName());
......@@ -84,7 +84,7 @@ public class StructTest extends BaseTest {
}
@Test
public void testSerialization() throws MetadataException {
public void testSerialization() throws AtlasException {
Struct s = createStruct();
String jsonStr = InstanceSerialization$.MODULE$.toJson(s, true);
Struct s1 = InstanceSerialization$.MODULE$.fromJsonStruct(jsonStr, true);
......
......@@ -19,7 +19,7 @@
package org.apache.atlas.repository.memory;
import com.google.common.collect.ImmutableList;
import org.apache.atlas.MetadataException;
import org.apache.atlas.AtlasException;
import org.apache.atlas.repository.BaseTest;
import org.apache.atlas.typesystem.IStruct;
import org.apache.atlas.typesystem.ITypedStruct;
......@@ -64,7 +64,7 @@ public class TraitTest extends BaseTest {
* available as 'b'.
*/
@Test
public void test1() throws MetadataException {
public void test1() throws AtlasException {
HierarchicalTypeDefinition A = createTraitTypeDef("A", null,
createRequiredAttrDef("a", DataTypes.INT_TYPE),
createOptionalAttrDef("b", DataTypes.BOOLEAN_TYPE),
......@@ -154,7 +154,7 @@ public class TraitTest extends BaseTest {
}
@Test
public void testRandomOrder() throws MetadataException {
public void testRandomOrder() throws AtlasException {
HierarchicalTypeDefinition A = createTraitTypeDef("A", null,
createRequiredAttrDef("a", DataTypes.INT_TYPE),
createOptionalAttrDef("b", DataTypes.BOOLEAN_TYPE),
......
......@@ -24,7 +24,7 @@ import com.tinkerpop.blueprints.Edge;
import com.tinkerpop.blueprints.Vertex;
import junit.framework.Assert;
import org.apache.atlas.GraphTransaction;
import org.apache.atlas.MetadataException;
import org.apache.atlas.AtlasException;
import org.apache.atlas.RepositoryMetadataModule;
import org.apache.atlas.TestUtils;
import org.apache.atlas.repository.graph.GraphHelper;
......@@ -65,7 +65,7 @@ public class GraphBackedTypeStoreTest {
@Test
@GraphTransaction
public void testStore() throws MetadataException {
public void testStore() throws AtlasException {
typeStore.store(ts);
dumpGraph();
}
......
......@@ -22,8 +22,8 @@ import traceback
import atlas_config as mc
METADATA_LOG_OPTS="-Datlas.log.dir=%s -Datlas.log.file=application.log"
METADATA_COMMAND_OPTS="-Dmetadata.home=%s"
METADATA_CONFIG_OPTS="-Dmetadata.conf=%s"
METADATA_COMMAND_OPTS="-Datlas.home=%s"
METADATA_CONFIG_OPTS="-Datlas.conf=%s"
DEFAULT_JVM_OPTS="-Xmx1024m"
def main():
......
......@@ -50,13 +50,13 @@ class TestMetadata(unittest.TestCase):
'org.apache.atlas.Main',
['-app', 'metadata_home/server/webapp/atlas'],
'metadata_home/conf:metadata_home/server/webapp/atlas/WEB-INF/classes:metadata_home/server/webapp/atlas/WEB-INF/lib\\*:metadata_home/libext\\*',
['-Datlas.log.dir=metadata_home/logs', '-Datlas.log.file=application.log', '-Dmetadata.home=metadata_home', '-Dmetadata.conf=metadata_home/conf', '-Xmx1024m'], 'metadata_home/logs')
['-Datlas.log.dir=metadata_home/logs', '-Datlas.log.file=application.log', '-Datlas.home=metadata_home', '-Datlas.conf=metadata_home/conf', '-Xmx1024m'], 'metadata_home/logs')
else:
java_mock.assert_called_with(
'org.apache.atlas.Main',
['-app', 'metadata_home/server/webapp/atlas'],
'metadata_home/conf:metadata_home/server/webapp/atlas/WEB-INF/classes:metadata_home/server/webapp/atlas/WEB-INF/lib/*:metadata_home/libext/*',
['-Datlas.log.dir=metadata_home/logs', '-Datlas.log.file=application.log', '-Dmetadata.home=metadata_home', '-Dmetadata.conf=metadata_home/conf', '-Xmx1024m'], 'metadata_home/logs')
['-Datlas.log.dir=metadata_home/logs', '-Datlas.log.file=application.log', '-Datlas.home=metadata_home', '-Datlas.conf=metadata_home/conf', '-Xmx1024m'], 'metadata_home/logs')
pass
......
......@@ -69,7 +69,7 @@ package object dsl {
new AttributeDefinition(name, dT.getName, m, isComposite, reverseAttributeName)
}
def listTypes = (ts.getTypeNames -- ts.getCoreTypes).sorted.toList.mkString("[", ",", "]")
def listTypes = ts.getTypeNames.sorted.toList.mkString("[", ",", "]")
def ts = TypeSystem.getInstance
......
......@@ -18,7 +18,7 @@
package org.apache.atlas.tools.thrift
import org.apache.atlas.MetadataException
import org.apache.atlas.AtlasException
import org.apache.atlas.typesystem.types.DataTypes
import scala.util.parsing.combinator.lexical.StdLexical
......@@ -37,7 +37,7 @@ object BASE_TYPES extends Enumeration {
val I64 = Value("i64")
val DOUBLE = Value("double")
@throws[MetadataException]
@throws[AtlasException]
def toPrimitiveTypeName(t : BASE_TYPES.Value) : String = t match {
case STRING => DataTypes.STRING_TYPE.getName
case SLIST => DataTypes.STRING_TYPE.getName
......@@ -47,7 +47,7 @@ object BASE_TYPES extends Enumeration {
case I32 => DataTypes.INT_TYPE.getName
case I64 => DataTypes.LONG_TYPE.getName
case DOUBLE => DataTypes.DOUBLE_TYPE.getName
case _ => throw new MetadataException(s"Thrift BaseType ($t) not supported")
case _ => throw new AtlasException(s"Thrift BaseType ($t) not supported")
}
}
......
......@@ -19,7 +19,7 @@
package org.apache.atlas.tools.thrift
import com.google.common.collect.ImmutableList
import org.apache.atlas.MetadataException
import org.apache.atlas.AtlasException
import org.apache.atlas.typesystem.TypesDef
import org.apache.atlas.typesystem.types.{DataTypes, HierarchicalTypeDefinition, Multiplicity, TraitType, _}
import org.slf4j.{Logger, LoggerFactory}
......@@ -89,12 +89,12 @@ class ThriftTypesGen(val structNames: List[String], val classNames: List[String]
val sDef = thriftDef.structs.find(_.name == cr.typeName)
if (!sDef.isDefined) {
throw new MetadataException(s"Unknown Struct (${cr.typeName}) specified in CompositeRelation")
throw new AtlasException(s"Unknown Struct (${cr.typeName}) specified in CompositeRelation")
}
val fDef = sDef.get.fields.find(_.name == cr.fieldName)
if (!fDef.isDefined) {
throw new MetadataException(s"Unknown Field (${cr.fieldName}) specified in CompositeRelation")
throw new AtlasException(s"Unknown Field (${cr.fieldName}) specified in CompositeRelation")
}
......@@ -104,11 +104,11 @@ class ThriftTypesGen(val structNames: List[String], val classNames: List[String]
val reverseStructName = dataTypeName(fDef.get.fieldType)
val reverseStructDef = thriftDef.structs.find(_.name == reverseStructName)
if (!reverseStructDef.isDefined) {
throw new MetadataException(s"Cannot find Struct $reverseStructName in CompositeRelation $cr")
throw new AtlasException(s"Cannot find Struct $reverseStructName in CompositeRelation $cr")
}
val rfDef = reverseStructDef.get.fields.find(_.name == cr.reverseFieldName)
if (!rfDef.isDefined) {
throw new MetadataException(s"Unknown Reverse Field (${cr.reverseFieldName}) specified in CompositeRelation")
throw new AtlasException(s"Unknown Reverse Field (${cr.reverseFieldName}) specified in CompositeRelation")
}
List(cr, CompositeRelation(reverseStructName, cr.reverseFieldName.get, Some(cr.fieldName)))
......@@ -155,13 +155,13 @@ class ThriftTypesGen(val structNames: List[String], val classNames: List[String]
case Some(s) => s
case None => {
LOG.debug("Parse for thrift resource {} failed", thriftResource)
throw new MetadataException(s"Failed to parse thrift resource: $thriftResource")
throw new AtlasException(s"Failed to parse thrift resource: $thriftResource")
}
}
}
}
@throws[MetadataException]
@throws[AtlasException]
private def dataTypeName(fT: FieldType): String = fT match {
case IdentifierType(n) => n
case BaseType(typ, _) => BASE_TYPES.toPrimitiveTypeName(typ)
......@@ -185,7 +185,7 @@ class ThriftTypesGen(val structNames: List[String], val classNames: List[String]
private def includeDef(td: TypesDef, i: IncludeDef): Try[TypesDef] = {
Try {
if (i.value != FB_INCLUDE) {
throw new MetadataException(s"Unsupported Include ${i.value}, only fb303.thrift is currently allowed.")
throw new AtlasException(s"Unsupported Include ${i.value}, only fb303.thrift is currently allowed.")
}
td
}
......@@ -193,7 +193,7 @@ class ThriftTypesGen(val structNames: List[String], val classNames: List[String]
private def cppIncludeDef(td: TypesDef, i: CppIncludeDef): Try[TypesDef] = {
Try {
throw new MetadataException(s"Unsupported CppInclude ${i.value}.")
throw new AtlasException(s"Unsupported CppInclude ${i.value}.")
}
}
......@@ -213,7 +213,7 @@ class ThriftTypesGen(val structNames: List[String], val classNames: List[String]
private def senumDef(td: TypesDef, i: SEnumDef): Try[TypesDef] = {
Try {
throw new MetadataException(s"Unsupported SEnums ${i}.")
throw new AtlasException(s"Unsupported SEnums ${i}.")
}
}
......
......@@ -85,7 +85,7 @@ class DSLTest {
attrDef("o", mapType(STRING_TYPE, DOUBLE_TYPE)))
// 2. 'mytype' available as a Type
Assert.assertEquals(s"${listTypes}", "[array<bigdecimal>,array<int>,map<string,double>,mytype,t1,t2]")
Assert.assertEquals(s"${listTypes}", "[mytype,t1,t2]")
// 3. Create a 'mytype' instance from Json
val i = createInstance("mytype", """
......@@ -125,7 +125,7 @@ class DSLTest {
@Test def test2 {
// 1. Existing Types in System
Assert.assertEquals(s"${listTypes}", "[array<bigdecimal>,array<int>,map<string,double>,t1,t2]")
Assert.assertEquals(s"${listTypes}", "[t1,t2]")
val addrType = defineStructType("addressType",
attrDef("houseNum", INT_TYPE, ATTR_REQUIRED),
......@@ -143,7 +143,7 @@ class DSLTest {
)
// 2. updated Types in System
Assert.assertEquals(s"${listTypes}", "[addressType,array<bigdecimal>,array<int>,map<string,double>,personType,t1,t2]")
Assert.assertEquals(s"${listTypes}", "[addressType,personType,t1,t2]")
// 3. Construct a Person in Code
......
......@@ -21,25 +21,24 @@ package org.apache.atlas;
/**
* Base Exception class for metadata API.
*/
public class MetadataException extends Exception {
public class AtlasException extends Exception {
public MetadataException() {
public AtlasException() {
}
public MetadataException(String message) {
public AtlasException(String message) {
super(message);
}
public MetadataException(String message, Throwable cause) {
public AtlasException(String message, Throwable cause) {
super(message, cause);
}
public MetadataException(Throwable cause) {
public AtlasException(Throwable cause) {
super(cause);
}
public MetadataException(String message, Throwable cause, boolean enableSuppression,
boolean writableStackTrace) {
public AtlasException(String message, Throwable cause, boolean enableSuppression, boolean writableStackTrace) {
super(message, cause, enableSuppression, writableStackTrace);
}
}
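Subclasses elsewhere in this diff (StorageException, TypeNotFoundException) extend the renamed base class. A sketch of the pattern with a hypothetical subclass name:

    // Hypothetical domain-specific subclass, mirroring StorageException above.
    public class LineageException extends AtlasException {
        public LineageException(String entityName) {
            super("Failure computing lineage for entity " + entityName);
        }
    }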
......@@ -21,7 +21,7 @@ package org.apache.atlas;
/**
* A simple wrapper for 404.
*/
public class TypeNotFoundException extends MetadataException {
public class TypeNotFoundException extends AtlasException {
public TypeNotFoundException() {
}
......
......@@ -18,7 +18,7 @@
package org.apache.atlas.typesystem;
import org.apache.atlas.MetadataException;
import org.apache.atlas.AtlasException;
import java.util.Map;
......@@ -29,10 +29,10 @@ public interface IInstance {
String getTypeName();
Object get(String attrName) throws MetadataException;
Object get(String attrName) throws AtlasException;
void set(String attrName, Object val) throws MetadataException;
void set(String attrName, Object val) throws AtlasException;
Map<String, Object> getValuesMap() throws MetadataException;
Map<String, Object> getValuesMap() throws AtlasException;
}
......@@ -18,7 +18,7 @@
package org.apache.atlas.typesystem;
import org.apache.atlas.MetadataException;
import org.apache.atlas.AtlasException;
import org.apache.atlas.typesystem.types.FieldMapping;
import java.math.BigDecimal;
......@@ -37,49 +37,49 @@ public interface ITypedInstance extends IInstance {
FieldMapping fieldMapping();
void setNull(String attrName) throws MetadataException;
void setNull(String attrName) throws AtlasException;
boolean getBoolean(String attrName) throws MetadataException;
boolean getBoolean(String attrName) throws AtlasException;
byte getByte(String attrName) throws MetadataException;
byte getByte(String attrName) throws AtlasException;
short getShort(String attrName) throws MetadataException;
short getShort(String attrName) throws AtlasException;
int getInt(String attrName) throws MetadataException;
int getInt(String attrName) throws AtlasException;
long getLong(String attrName) throws MetadataException;
long getLong(String attrName) throws AtlasException;
float getFloat(String attrName) throws MetadataException;
float getFloat(String attrName) throws AtlasException;
double getDouble(String attrName) throws MetadataException;
double getDouble(String attrName) throws AtlasException;
BigInteger getBigInt(String attrName) throws MetadataException;
BigInteger getBigInt(String attrName) throws AtlasException;
BigDecimal getBigDecimal(String attrName) throws MetadataException;
BigDecimal getBigDecimal(String attrName) throws AtlasException;
Date getDate(String attrName) throws MetadataException;
Date getDate(String attrName) throws AtlasException;
String getString(String attrName) throws MetadataException;
String getString(String attrName) throws AtlasException;
void setBoolean(String attrName, boolean val) throws MetadataException;
void setBoolean(String attrName, boolean val) throws AtlasException;
void setByte(String attrName, byte val) throws MetadataException;
void setByte(String attrName, byte val) throws AtlasException;
void setShort(String attrName, short val) throws MetadataException;
void setShort(String attrName, short val) throws AtlasException;
void setInt(String attrName, int val) throws MetadataException;
void setInt(String attrName, int val) throws AtlasException;
void setLong(String attrName, long val) throws MetadataException;
void setLong(String attrName, long val) throws AtlasException;
void setFloat(String attrName, float val) throws MetadataException;
void setFloat(String attrName, float val) throws AtlasException;
void setDouble(String attrName, double val) throws MetadataException;
void setDouble(String attrName, double val) throws AtlasException;
void setBigInt(String attrName, BigInteger val) throws MetadataException;
void setBigInt(String attrName, BigInteger val) throws AtlasException;
void setBigDecimal(String attrName, BigDecimal val) throws MetadataException;
void setBigDecimal(String attrName, BigDecimal val) throws AtlasException;
void setDate(String attrName, Date val) throws MetadataException;
void setDate(String attrName, Date val) throws AtlasException;
void setString(String attrName, String val) throws MetadataException;
void setString(String attrName, String val) throws AtlasException;
}
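A short sketch of the typed accessors declared above; "level" is a hypothetical int attribute, and the instance is assumed to come from a createInstance call like the ones later in this diff:

    // Typed get/set round trip; every accessor now throws AtlasException.
    static int bumpLevel(ITypedInstance instance) throws AtlasException {
        int level = instance.getInt("level");
        instance.setInt("level", level + 1);
        return instance.getInt("level");
    }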
......@@ -18,7 +18,7 @@
package org.apache.atlas.typesystem.persistence;
import org.apache.atlas.MetadataException;
import org.apache.atlas.AtlasException;
import org.apache.atlas.typesystem.IStruct;
import org.apache.atlas.typesystem.types.DownCastFieldMapping;
......@@ -44,12 +44,12 @@ public class DownCastStructInstance implements IStruct {
}
@Override
public Object get(String attrName) throws MetadataException {
public Object get(String attrName) throws AtlasException {
return fieldMapping.get(this, attrName);
}
@Override
public void set(String attrName, Object val) throws MetadataException {
public void set(String attrName, Object val) throws AtlasException {
fieldMapping.set(this, attrName, val);
}
......@@ -58,7 +58,7 @@ public class DownCastStructInstance implements IStruct {
* @nonpublic
*/
@Override
public Map<String, Object> getValuesMap() throws MetadataException {
public Map<String, Object> getValuesMap() throws AtlasException {
Map<String,Object> m = new HashMap<>();
for (String attr : fieldMapping.fieldNameMap.keySet()) {
......
......@@ -19,7 +19,7 @@
package org.apache.atlas.typesystem.persistence;
import com.google.common.collect.ImmutableList;
import org.apache.atlas.MetadataException;
import org.apache.atlas.AtlasException;
import org.apache.atlas.typesystem.IStruct;
import org.apache.atlas.typesystem.ITypedReferenceableInstance;
import org.apache.atlas.typesystem.types.FieldMapping;
......@@ -129,13 +129,13 @@ public class Id implements ITypedReferenceableInstance {
}
@Override
public Object get(String attrName) throws MetadataException {
throw new MetadataException("Get/Set not supported on an Id object");
public Object get(String attrName) throws AtlasException {
throw new AtlasException("Get/Set not supported on an Id object");
}
@Override
public void set(String attrName, Object val) throws MetadataException {
throw new MetadataException("Get/Set not supported on an Id object");
public void set(String attrName, Object val) throws AtlasException {
throw new AtlasException("Get/Set not supported on an Id object");
}
@Override
......@@ -144,99 +144,99 @@ public class Id implements ITypedReferenceableInstance {
}
@Override
public Map<String, Object> getValuesMap() throws MetadataException {
throw new MetadataException("Get/Set not supported on an Id object");
public Map<String, Object> getValuesMap() throws AtlasException {
throw new AtlasException("Get/Set not supported on an Id object");
}
public void setNull(String attrName) throws MetadataException {
public void setNull(String attrName) throws AtlasException {
set(attrName, null);
}
public boolean getBoolean(String attrName) throws MetadataException {
throw new MetadataException("Get/Set not supported on an Id object");
public boolean getBoolean(String attrName) throws AtlasException {
throw new AtlasException("Get/Set not supported on an Id object");
}
public byte getByte(String attrName) throws MetadataException {
throw new MetadataException("Get/Set not supported on an Id object");
public byte getByte(String attrName) throws AtlasException {
throw new AtlasException("Get/Set not supported on an Id object");
}
public short getShort(String attrName) throws MetadataException {
throw new MetadataException("Get/Set not supported on an Id object");
public short getShort(String attrName) throws AtlasException {
throw new AtlasException("Get/Set not supported on an Id object");
}
public int getInt(String attrName) throws MetadataException {
throw new MetadataException("Get/Set not supported on an Id object");
public int getInt(String attrName) throws AtlasException {
throw new AtlasException("Get/Set not supported on an Id object");
}
public long getLong(String attrName) throws MetadataException {
throw new MetadataException("Get/Set not supported on an Id object");
public long getLong(String attrName) throws AtlasException {
throw new AtlasException("Get/Set not supported on an Id object");
}
public float getFloat(String attrName) throws MetadataException {
throw new MetadataException("Get/Set not supported on an Id object");
public float getFloat(String attrName) throws AtlasException {
throw new AtlasException("Get/Set not supported on an Id object");
}
public double getDouble(String attrName) throws MetadataException {
throw new MetadataException("Get/Set not supported on an Id object");
public double getDouble(String attrName) throws AtlasException {
throw new AtlasException("Get/Set not supported on an Id object");
}
public BigInteger getBigInt(String attrName) throws MetadataException {
throw new MetadataException("Get/Set not supported on an Id object");
public BigInteger getBigInt(String attrName) throws AtlasException {
throw new AtlasException("Get/Set not supported on an Id object");
}
public BigDecimal getBigDecimal(String attrName) throws MetadataException {
throw new MetadataException("Get/Set not supported on an Id object");
public BigDecimal getBigDecimal(String attrName) throws AtlasException {
throw new AtlasException("Get/Set not supported on an Id object");
}
public Date getDate(String attrName) throws MetadataException {
throw new MetadataException("Get/Set not supported on an Id object");
public Date getDate(String attrName) throws AtlasException {
throw new AtlasException("Get/Set not supported on an Id object");
}
public String getString(String attrName) throws MetadataException {
throw new MetadataException("Get/Set not supported on an Id object");
public String getString(String attrName) throws AtlasException {
throw new AtlasException("Get/Set not supported on an Id object");
}
public void setBoolean(String attrName, boolean val) throws MetadataException {
throw new MetadataException("Get/Set not supported on an Id object");
public void setBoolean(String attrName, boolean val) throws AtlasException {
throw new AtlasException("Get/Set not supported on an Id object");
}
public void setByte(String attrName, byte val) throws MetadataException {
throw new MetadataException("Get/Set not supported on an Id object");
public void setByte(String attrName, byte val) throws AtlasException {
throw new AtlasException("Get/Set not supported on an Id object");
}
public void setShort(String attrName, short val) throws MetadataException {
throw new MetadataException("Get/Set not supported on an Id object");
public void setShort(String attrName, short val) throws AtlasException {
throw new AtlasException("Get/Set not supported on an Id object");
}
public void setInt(String attrName, int val) throws MetadataException {
throw new MetadataException("Get/Set not supported on an Id object");
public void setInt(String attrName, int val) throws AtlasException {
throw new AtlasException("Get/Set not supported on an Id object");
}
public void setLong(String attrName, long val) throws MetadataException {
throw new MetadataException("Get/Set not supported on an Id object");
public void setLong(String attrName, long val) throws AtlasException {
throw new AtlasException("Get/Set not supported on an Id object");
}
public void setFloat(String attrName, float val) throws MetadataException {
throw new MetadataException("Get/Set not supported on an Id object");
public void setFloat(String attrName, float val) throws AtlasException {
throw new AtlasException("Get/Set not supported on an Id object");
}
public void setDouble(String attrName, double val) throws MetadataException {
throw new MetadataException("Get/Set not supported on an Id object");
public void setDouble(String attrName, double val) throws AtlasException {
throw new AtlasException("Get/Set not supported on an Id object");
}
public void setBigInt(String attrName, BigInteger val) throws MetadataException {
throw new MetadataException("Get/Set not supported on an Id object");
public void setBigInt(String attrName, BigInteger val) throws AtlasException {
throw new AtlasException("Get/Set not supported on an Id object");
}
public void setBigDecimal(String attrName, BigDecimal val) throws MetadataException {
throw new MetadataException("Get/Set not supported on an Id object");
public void setBigDecimal(String attrName, BigDecimal val) throws AtlasException {
throw new AtlasException("Get/Set not supported on an Id object");
}
public void setDate(String attrName, Date val) throws MetadataException {
throw new MetadataException("Get/Set not supported on an Id object");
public void setDate(String attrName, Date val) throws AtlasException {
throw new AtlasException("Get/Set not supported on an Id object");
}
public void setString(String attrName, String val) throws MetadataException {
throw new MetadataException("Get/Set not supported on an Id object");
public void setString(String attrName, String val) throws AtlasException {
throw new AtlasException("Get/Set not supported on an Id object");
}
}
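Every accessor on Id now fails uniformly with AtlasException: an Id is a pure reference, and attributes can only be read after it is resolved to a full instance (ReplaceIdWithInstance.getInstance earlier in this diff plays that role during graph walks). A sketch of the failure mode:

    // Any attribute access on an Id object throws (sketch).
    static boolean isUnresolvedReference(Id id) {
        try {
            id.getString("name");   // "name" is an arbitrary attribute; all fail alike
            return false;
        } catch (AtlasException e) {
            return true;            // "Get/Set not supported on an Id object"
        }
    }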
......@@ -20,7 +20,7 @@ package org.apache.atlas.typesystem.persistence;
import com.google.common.collect.ImmutableCollection;
import com.google.common.collect.ImmutableMap;
import org.apache.atlas.MetadataException;
import org.apache.atlas.AtlasException;
import org.apache.atlas.typesystem.IReferenceableInstance;
import org.apache.atlas.typesystem.types.DataTypes;
import org.apache.atlas.typesystem.types.ObjectGraphWalker;
......@@ -36,7 +36,7 @@ public class MapIds implements ObjectGraphWalker.NodeProcessor {
}
@Override
public void processNode(ObjectGraphWalker.Node nd) throws MetadataException {
public void processNode(ObjectGraphWalker.Node nd) throws AtlasException {
IReferenceableInstance ref = null;
Id id = null;
......
......@@ -20,7 +20,7 @@ package org.apache.atlas.typesystem.persistence;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableMap;
import org.apache.atlas.MetadataException;
import org.apache.atlas.AtlasException;
import org.apache.atlas.typesystem.IStruct;
import org.apache.atlas.typesystem.ITypedReferenceableInstance;
import org.apache.atlas.typesystem.ITypedStruct;
......@@ -96,7 +96,7 @@ public class ReferenceableInstance extends StructInstance implements ITypedRefer
fieldMapping.output(this, buf, prefix);
return buf.toString();
} catch (MetadataException me) {
} catch (AtlasException me) {
throw new RuntimeException(me);
}
}
......
......@@ -18,11 +18,11 @@
package org.apache.atlas.typesystem.types;
import org.apache.atlas.MetadataException;
import org.apache.atlas.AtlasException;
abstract class AbstractDataType<T> implements IDataType<T> {
protected T convertNull(Multiplicity m) throws MetadataException {
protected T convertNull(Multiplicity m) throws AtlasException {
if (!m.nullAllowed()) {
throw new ValueConversionException.NullConversionException(m);
}
......@@ -30,7 +30,7 @@ abstract class AbstractDataType<T> implements IDataType<T> {
}
@Override
public void output(T val, Appendable buf, String prefix) throws MetadataException {
public void output(T val, Appendable buf, String prefix) throws AtlasException {
TypeUtils.outputVal(val == null ? "<null>" : val.toString(), buf, prefix);
}
}
......
......@@ -18,7 +18,7 @@
package org.apache.atlas.typesystem.types;
import org.apache.atlas.MetadataException;
import org.apache.atlas.AtlasException;
import org.codehaus.jettison.json.JSONException;
import org.codehaus.jettison.json.JSONObject;
......@@ -37,7 +37,7 @@ public class AttributeInfo {
public final String reverseAttributeName;
private IDataType dataType;
AttributeInfo(TypeSystem t, AttributeDefinition def, Map<String, IDataType> tempTypes) throws MetadataException {
AttributeInfo(TypeSystem t, AttributeDefinition def, Map<String, IDataType> tempTypes) throws AtlasException {
this.name = def.name;
this.dataType = (tempTypes != null && tempTypes.containsKey(def.dataTypeName)) ?
tempTypes.get(def.dataTypeName) : t.getDataType(IDataType.class, def.dataTypeName);
......
......@@ -21,7 +21,7 @@ package org.apache.atlas.typesystem.types;
import com.google.common.collect.ImmutableBiMap;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableMap;
import org.apache.atlas.MetadataException;
import org.apache.atlas.AtlasException;
import org.apache.atlas.typesystem.IReferenceableInstance;
import org.apache.atlas.typesystem.IStruct;
import org.apache.atlas.typesystem.ITypedReferenceableInstance;
......@@ -55,7 +55,7 @@ public class ClassType extends HierarchicalType<ClassType, IReferenceableInstanc
ClassType(TypeSystem typeSystem, String name, ImmutableList<String> superTypes,
AttributeInfo... fields)
throws MetadataException {
throws AtlasException {
super(typeSystem, ClassType.class, name, superTypes, fields);
infoToNameMap = TypeUtils.buildAttrInfoToNameMap(fieldMapping);
}
......@@ -65,27 +65,27 @@ public class ClassType extends HierarchicalType<ClassType, IReferenceableInstanc
return DataTypes.TypeCategory.CLASS;
}
public void validateId(Id id) throws MetadataException {
public void validateId(Id id) throws AtlasException {
if (id != null) {
ClassType cType = typeSystem.getDataType(ClassType.class, id.className);
if (isSubType(cType.getName())) {
return;
}
throw new MetadataException(
throw new AtlasException(
String.format("Id %s is not valid for class %s", id, getName()));
}
}
protected Id getId(Object val) throws MetadataException {
protected Id getId(Object val) throws AtlasException {
if (val instanceof Referenceable) {
return ((Referenceable) val).getId();
}
throw new MetadataException(String.format("Cannot get id from class %s", val.getClass()));
throw new AtlasException(String.format("Cannot get id from class %s", val.getClass()));
}
@Override
public ITypedReferenceableInstance convert(Object val, Multiplicity m)
throws MetadataException {
throws AtlasException {
if (val != null) {
if (val instanceof ITypedReferenceableInstance) {
......@@ -163,23 +163,23 @@ public class ClassType extends HierarchicalType<ClassType, IReferenceableInstanc
}
@Override
public ITypedReferenceableInstance createInstance() throws MetadataException {
public ITypedReferenceableInstance createInstance() throws AtlasException {
return createInstance((String[])null);
}
public ITypedReferenceableInstance createInstance(String... traitNames)
throws MetadataException {
throws AtlasException {
return createInstance(null, traitNames);
}
public ITypedReferenceableInstance createInstance(Id id, String... traitNames)
throws MetadataException {
throws AtlasException {
return createInstanceWithTraits(id, null, traitNames);
}
public ITypedReferenceableInstance createInstanceWithTraits(Id id, Referenceable r,
String... traitNames)
throws MetadataException {
throws AtlasException {
ImmutableMap.Builder<String, ITypedStruct> b
= new ImmutableBiMap.Builder<String, ITypedStruct>();
......@@ -220,7 +220,7 @@ public class ClassType extends HierarchicalType<ClassType, IReferenceableInstanc
@Override
public void output(IReferenceableInstance s, Appendable buf, String prefix)
throws MetadataException {
throws AtlasException {
fieldMapping.output(s, buf, prefix);
}
......
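The createInstance overloads above are the entry point for building typed instances. A sketch using the Manager/SecurityClearance fixtures defined elsewhere in this diff (the "name" attribute is assumed from those fixtures):

    // Build a typed instance carrying a trait, then set an attribute (sketch).
    ClassType managerType = TypeSystem.getInstance()
            .getDataType(ClassType.class, "Manager");
    ITypedReferenceableInstance jane = managerType.createInstance("SecurityClearance");
    jane.set("name", "jane");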
......@@ -22,7 +22,7 @@ import com.google.common.collect.ImmutableCollection;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableMap;
import com.google.common.collect.ImmutableSet;
import org.apache.atlas.MetadataException;
import org.apache.atlas.AtlasException;
import org.apache.atlas.typesystem.IReferenceableInstance;
import org.apache.atlas.typesystem.persistence.Id;
import org.apache.commons.lang3.StringUtils;
......@@ -103,7 +103,7 @@ public class DataTypes {
}
@Override
public Boolean convert(Object val, Multiplicity m) throws MetadataException {
public Boolean convert(Object val, Multiplicity m) throws AtlasException {
if (val != null) {
if (val instanceof Boolean) {
return (Boolean) val;
......@@ -136,7 +136,7 @@ public class DataTypes {
}
@Override
public Byte convert(Object val, Multiplicity m) throws MetadataException {
public Byte convert(Object val, Multiplicity m) throws AtlasException {
if (val != null) {
if (val instanceof Byte) {
return (Byte) val;
......@@ -169,7 +169,7 @@ public class DataTypes {
}
@Override
public Short convert(Object val, Multiplicity m) throws MetadataException {
public Short convert(Object val, Multiplicity m) throws AtlasException {
if (val != null) {
if (val instanceof Short) {
return (Short) val;
......@@ -202,7 +202,7 @@ public class DataTypes {
}
@Override
public Integer convert(Object val, Multiplicity m) throws MetadataException {
public Integer convert(Object val, Multiplicity m) throws AtlasException {
if (val != null) {
if (val instanceof Integer) {
return (Integer) val;
......@@ -235,7 +235,7 @@ public class DataTypes {
}
@Override
public Long convert(Object val, Multiplicity m) throws MetadataException {
public Long convert(Object val, Multiplicity m) throws AtlasException {
if (val != null) {
if (val instanceof Long) {
return (Long) val;
......@@ -268,7 +268,7 @@ public class DataTypes {
}
@Override
public Float convert(Object val, Multiplicity m) throws MetadataException {
public Float convert(Object val, Multiplicity m) throws AtlasException {
if (val != null) {
if (val instanceof Float) {
return (Float) val;
......@@ -301,7 +301,7 @@ public class DataTypes {
}
@Override
public Double convert(Object val, Multiplicity m) throws MetadataException {
public Double convert(Object val, Multiplicity m) throws AtlasException {
if (val != null) {
if (val instanceof Double) {
return (Double) val;
......@@ -334,7 +334,7 @@ public class DataTypes {
}
@Override
public BigInteger convert(Object val, Multiplicity m) throws MetadataException {
public BigInteger convert(Object val, Multiplicity m) throws AtlasException {
if (val != null) {
if (val instanceof BigInteger) {
return (BigInteger) val;
......@@ -373,7 +373,7 @@ public class DataTypes {
}
@Override
public BigDecimal convert(Object val, Multiplicity m) throws MetadataException {
public BigDecimal convert(Object val, Multiplicity m) throws AtlasException {
if (val != null) {
if (val instanceof BigDecimal) {
return (BigDecimal) val;
......@@ -412,7 +412,7 @@ public class DataTypes {
}
@Override
public Date convert(Object val, Multiplicity m) throws MetadataException {
public Date convert(Object val, Multiplicity m) throws AtlasException {
if (val != null) {
if (val instanceof Date) {
return (Date) val;
......@@ -432,7 +432,7 @@ public class DataTypes {
}
@Override
public void output(Date val, Appendable buf, String prefix) throws MetadataException {
public void output(Date val, Appendable buf, String prefix) throws AtlasException {
TypeUtils.outputVal(val == null ? "<null>" :
TypeSystem.getInstance().getDateFormat().format(val), buf, prefix);
}
......@@ -455,8 +455,8 @@ public class DataTypes {
}
@Override
public String convert(Object val, Multiplicity m) throws MetadataException {
if (StringUtils.isNotBlank((CharSequence) val)) {
public String convert(Object val, Multiplicity m) throws AtlasException {
if (val != null && (!(val instanceof String) || StringUtils.isNotBlank((CharSequence) val))) {
return val.toString();
}
return convertNull(m);
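The convert change above is a behavior fix, not just a rename: non-String values are now stringified via toString, and only blank Strings (or nulls) fall through to convertNull. An illustrative sketch (assuming convertNull returns null when the multiplicity allows it):

    // New StringType.convert semantics (sketch).
    static void stringConvertDemo() throws AtlasException {
        String a = DataTypes.STRING_TYPE.convert(42, Multiplicity.REQUIRED);    // "42"
        String b = DataTypes.STRING_TYPE.convert("  ", Multiplicity.OPTIONAL);  // null
        // convert("  ", Multiplicity.REQUIRED) throws NullConversionException
    }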
......@@ -491,7 +491,7 @@ public class DataTypes {
}
@Override
public ImmutableCollection<?> convert(Object val, Multiplicity m) throws MetadataException {
public ImmutableCollection<?> convert(Object val, Multiplicity m) throws AtlasException {
if (val != null) {
Iterator it = null;
if (val instanceof Collection) {
......@@ -528,7 +528,7 @@ public class DataTypes {
public ImmutableCollection<?> mapIds(ImmutableCollection<?> val, Multiplicity m,
Map<Id, Id> transientToNewIds)
throws MetadataException {
throws AtlasException {
if (val == null || elemType.getTypeCategory() != TypeCategory.CLASS) {
return val;
......@@ -591,7 +591,7 @@ public class DataTypes {
}
@Override
public ImmutableMap<?, ?> convert(Object val, Multiplicity m) throws MetadataException {
public ImmutableMap<?, ?> convert(Object val, Multiplicity m) throws AtlasException {
if (val != null) {
Iterator<Map.Entry> it = null;
if (Map.class.isAssignableFrom(val.getClass())) {
......@@ -619,7 +619,7 @@ public class DataTypes {
public ImmutableMap<?, ?> mapIds(ImmutableMap val, Multiplicity m,
Map<Id, Id> transientToNewIds)
throws MetadataException {
throws AtlasException {
if (val == null || (keyType.getTypeCategory() != TypeCategory.CLASS &&
valueType.getTypeCategory() != TypeCategory.CLASS)) {
......
......@@ -19,7 +19,7 @@
package org.apache.atlas.typesystem.types;
import com.google.common.collect.ImmutableMap;
import org.apache.atlas.MetadataException;
import org.apache.atlas.AtlasException;
import org.apache.atlas.typesystem.persistence.DownCastStructInstance;
public class DownCastFieldMapping {
......@@ -31,7 +31,7 @@ public class DownCastFieldMapping {
}
public void set(DownCastStructInstance s, String attrName, Object val)
throws MetadataException {
throws AtlasException {
String mappedNm = fieldNameMap.get(attrName);
if (mappedNm == null) {
......@@ -41,7 +41,7 @@ public class DownCastFieldMapping {
s.backingInstance.set(mappedNm, val);
}
public Object get(DownCastStructInstance s, String attrName) throws MetadataException {
public Object get(DownCastStructInstance s, String attrName) throws AtlasException {
String mappedNm = fieldNameMap.get(attrName);
if (mappedNm == null) {
......
......@@ -20,7 +20,7 @@ package org.apache.atlas.typesystem.types;
import com.google.common.collect.ImmutableCollection;
import com.google.common.collect.ImmutableMap;
import org.apache.atlas.MetadataException;
import org.apache.atlas.AtlasException;
import scala.math.BigInt;
public class EnumType extends AbstractDataType<EnumValue> {
......@@ -50,7 +50,7 @@ public class EnumType extends AbstractDataType<EnumValue> {
}
@Override
public EnumValue convert(Object val, Multiplicity m) throws MetadataException {
public EnumValue convert(Object val, Multiplicity m) throws AtlasException {
if (val != null) {
EnumValue e = null;
if (val instanceof EnumValue) {
......
......@@ -18,7 +18,7 @@
package org.apache.atlas.typesystem.types;
import org.apache.atlas.MetadataException;
import org.apache.atlas.AtlasException;
import org.apache.atlas.typesystem.IReferenceableInstance;
import org.apache.atlas.typesystem.IStruct;
import org.apache.atlas.typesystem.persistence.Id;
......@@ -74,7 +74,7 @@ public class FieldMapping {
}
protected void outputFields(IStruct s, Appendable buf, String fieldPrefix)
throws MetadataException {
throws AtlasException {
for (Map.Entry<String, AttributeInfo> e : fields.entrySet()) {
String attrName = e.getKey();
AttributeInfo i = e.getValue();
......@@ -89,7 +89,7 @@ public class FieldMapping {
}
}
public void output(IStruct s, Appendable buf, String prefix) throws MetadataException {
public void output(IStruct s, Appendable buf, String prefix) throws AtlasException {
if (s == null) {
TypeUtils.outputVal("<null>\n", buf, "");
return;
......@@ -105,7 +105,7 @@ public class FieldMapping {
}
public void output(IReferenceableInstance s, Appendable buf, String prefix)
throws MetadataException {
throws AtlasException {
if (s == null) {
TypeUtils.outputVal("<null>\n", buf, "");
return;
......
......@@ -20,7 +20,7 @@ package org.apache.atlas.typesystem.types;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableMap;
import org.apache.atlas.MetadataException;
import org.apache.atlas.AtlasException;
import org.apache.atlas.typesystem.IStruct;
import org.apache.atlas.typesystem.persistence.DownCastStructInstance;
import org.apache.atlas.typesystem.types.TypeUtils.Pair;
......@@ -75,7 +75,7 @@ public abstract class HierarchicalType<ST extends HierarchicalType, T> extends A
HierarchicalType(TypeSystem typeSystem, Class<ST> superTypeClass,
String name, ImmutableList<String> superTypes, AttributeInfo... fields)
throws MetadataException {
throws AtlasException {
this.typeSystem = typeSystem;
this.superTypeClass = superTypeClass;
this.name = name;
......@@ -100,20 +100,20 @@ public abstract class HierarchicalType<ST extends HierarchicalType, T> extends A
/**
* Given type must be a SubType of this type.
* @param typeName
* @throws MetadataException
* @throws AtlasException
*/
public boolean isSubType(String typeName) throws MetadataException {
public boolean isSubType(String typeName) throws AtlasException {
HierarchicalType cType = typeSystem.getDataType(HierarchicalType.class, typeName);
return (cType == this || cType.superTypePaths.containsKey(getName()));
}
protected void setupSuperTypesGraph()
throws MetadataException {
throws AtlasException {
setupSuperTypesGraph(superTypes);
}
private void setupSuperTypesGraph(ImmutableList<String> superTypes)
throws MetadataException {
throws AtlasException {
Map<String, List<Path>> superTypePaths = new HashMap<String, List<Path>>();
Map<String, Path> pathNameToPathMap = new HashMap<String, Path>();
Queue<Path> queue = new LinkedList<Path>();
......@@ -150,7 +150,7 @@ public abstract class HierarchicalType<ST extends HierarchicalType, T> extends A
protected Pair<FieldMapping, ImmutableMap<String, String>> constructFieldMapping(ImmutableList<String> superTypes,
AttributeInfo... fields)
throws MetadataException {
throws AtlasException {
Map<String, AttributeInfo> fieldsMap = new LinkedHashMap<String, AttributeInfo>();
Map<String, Integer> fieldPos = new HashMap<String, Integer>();
......@@ -190,7 +190,7 @@ public abstract class HierarchicalType<ST extends HierarchicalType, T> extends A
for (AttributeInfo i : superTypeFields) {
if (superType == this) {
if (immediateFields.contains(i.name)) {
throw new MetadataException(
throw new AtlasException(
String.format(
"Struct defintion cannot contain multiple fields with the" +
" same name %s",
......@@ -257,7 +257,7 @@ public abstract class HierarchicalType<ST extends HierarchicalType, T> extends A
fieldPos.put(attrName, numReferenceables);
numReferenceables++;
} else {
throw new MetadataException(String.format("Unknown datatype %s", i.dataType()));
throw new AtlasException(String.format("Unknown datatype %s", i.dataType()));
}
}
}
......@@ -287,23 +287,23 @@ public abstract class HierarchicalType<ST extends HierarchicalType, T> extends A
return new Pair(fm, ImmutableMap.copyOf(attributeNameToType));
}
public IStruct castAs(IStruct s, String superTypeName) throws MetadataException {
public IStruct castAs(IStruct s, String superTypeName) throws AtlasException {
if (!superTypePaths.containsKey(superTypeName)) {
throw new MetadataException(
throw new AtlasException(
String.format("Cannot downcast to %s from type %s", superTypeName, getName()));
}
if (s != null) {
if (s.getTypeName() != getName()) {
throw new MetadataException(
throw new AtlasException(
String.format("Downcast called on wrong type %s, instance type is %s",
getName(), s.getTypeName()));
}
List<Path> pathToSuper = superTypePaths.get(superTypeName);
if (pathToSuper.size() > 1) {
throw new MetadataException(
throw new AtlasException(
String.format(
"Cannot downcast called to %s, from %s: there are multiple paths " +
"to SuperType",
......@@ -321,18 +321,18 @@ public abstract class HierarchicalType<ST extends HierarchicalType, T> extends A
return null;
}
public ST getDefinedType(String attrName) throws MetadataException {
public ST getDefinedType(String attrName) throws AtlasException {
if (!attributeNameToType.containsKey(attrName)) {
throw new MetadataException(String.format("Unknown attribute %s in type %s", attrName, getName()));
throw new AtlasException(String.format("Unknown attribute %s in type %s", attrName, getName()));
}
return typeSystem.getDataType(superTypeClass, attributeNameToType.get(attrName));
}
public String getDefinedTypeName(String attrName) throws MetadataException {
public String getDefinedTypeName(String attrName) throws AtlasException {
return getDefinedType(attrName).getName();
}
public String getQualifiedName(String attrName) throws MetadataException {
public String getQualifiedName(String attrName) throws AtlasException {
String attrTypeName = getDefinedTypeName(attrName);
return attrName.contains(".") ? attrName : String.format("%s.%s", attrTypeName, attrName);
}
......@@ -396,7 +396,7 @@ public abstract class HierarchicalType<ST extends HierarchicalType, T> extends A
*/
Map<String, String> hiddenAttributeMap;
Path(String typeName, Path childPath) throws MetadataException {
Path(String typeName, Path childPath) throws AtlasException {
this.typeName = typeName;
this.subTypePath = childPath;
if (childPath.contains(typeName)) {
......@@ -449,7 +449,7 @@ public abstract class HierarchicalType<ST extends HierarchicalType, T> extends A
}
}
static class CyclicTypeDefinition extends MetadataException {
static class CyclicTypeDefinition extends AtlasException {
CyclicTypeDefinition(Path p) {
super(String.format("Cycle in Type Definition %s", p.pathString(" -> ")));
......@@ -476,7 +476,7 @@ public abstract class HierarchicalType<ST extends HierarchicalType, T> extends A
ST t = null;
try {
t = (ST) typeSystem.getDataType(superTypeClass, p.typeName);
} catch (MetadataException me) {
} catch (AtlasException me) {
throw new RuntimeException(me);
}
if (t.superTypes != null) {
......
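A minimal sketch of the isSubType contract above (assuming the trait names "A" and "B" are free in the TypeSystem, and that TypesUtil.createTraitTypeDef accepts an ImmutableList of supertype names, as its class-type counterpart does elsewhere in this diff):

    HierarchicalTypeDefinition<TraitType> a = TypesUtil.createTraitTypeDef("A", null,
            TypesUtil.createRequiredAttrDef("a", DataTypes.INT_TYPE));
    HierarchicalTypeDefinition<TraitType> b = TypesUtil.createTraitTypeDef("B",
            ImmutableList.of("A"),
            TypesUtil.createOptionalAttrDef("b", DataTypes.BOOLEAN_TYPE));
    TypeSystem ts = TypeSystem.getInstance();
    ts.defineTraitTypes(a, b);
    TraitType aType = ts.getDataType(TraitType.class, "A");
    System.out.println(aType.isSubType("B"));   // true: B's superTypePaths contain "A"
    System.out.println(aType.isSubType("A"));   // true: a type counts as its own subtype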
......@@ -19,14 +19,14 @@
package org.apache.atlas.typesystem.types;
import org.apache.atlas.MetadataException;
import org.apache.atlas.AtlasException;
import org.apache.atlas.typesystem.ITypedInstance;
import java.util.List;
public interface IConstructableType<U, T extends ITypedInstance> extends IDataType<U> {
T createInstance() throws MetadataException;
T createInstance() throws AtlasException;
FieldMapping fieldMapping();
......
......@@ -18,14 +18,14 @@
package org.apache.atlas.typesystem.types;
import org.apache.atlas.MetadataException;
import org.apache.atlas.AtlasException;
public interface IDataType<T> {
String getName();
T convert(Object val, Multiplicity m) throws MetadataException;
T convert(Object val, Multiplicity m) throws AtlasException;
DataTypes.TypeCategory getTypeCategory();
void output(T val, Appendable buf, String prefix) throws MetadataException;
void output(T val, Appendable buf, String prefix) throws AtlasException;
}
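The whole IDataType surface now throws AtlasException. A toy sketch of an implementation (a hypothetical type, assumed to live in the same package as DataTypes and TypeUtils; real types are also registered with the TypeSystem, which is omitted here):

    import org.apache.atlas.AtlasException;

    public class UpperCaseType implements IDataType<String> {
        public String getName() { return "upper_string"; }
        public String convert(Object val, Multiplicity m) throws AtlasException {
            return val == null ? null : val.toString().toUpperCase();   // toy: ignores multiplicity
        }
        public DataTypes.TypeCategory getTypeCategory() {
            return DataTypes.TypeCategory.PRIMITIVE;
        }
        public void output(String val, Appendable buf, String prefix) throws AtlasException {
            TypeUtils.outputVal(val == null ? "<null>" : val, buf, prefix);
        }
    }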
......@@ -20,7 +20,7 @@ package org.apache.atlas.typesystem.types;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableMap;
import org.apache.atlas.MetadataException;
import org.apache.atlas.AtlasException;
import org.apache.atlas.typesystem.IReferenceableInstance;
import org.apache.atlas.typesystem.IStruct;
import org.apache.atlas.typesystem.persistence.Id;
......@@ -40,14 +40,14 @@ public class ObjectGraphTraversal implements Iterator<ObjectGraphTraversal.Insta
Set<Id> processedIds;
public ObjectGraphTraversal(TypeSystem typeSystem, IReferenceableInstance start)
throws MetadataException {
throws AtlasException {
this.typeSystem = typeSystem;
queue = new LinkedList<InstanceTuple>();
processedIds = new HashSet<Id>();
processReferenceableInstance(start);
}
void processValue(IDataType dT, Object val) throws MetadataException {
void processValue(IDataType dT, Object val) throws AtlasException {
if (val != null) {
if (dT.getTypeCategory() == DataTypes.TypeCategory.ARRAY) {
IDataType elemType = ((DataTypes.ArrayType) dT).getElemType();
......@@ -65,7 +65,7 @@ public class ObjectGraphTraversal implements Iterator<ObjectGraphTraversal.Insta
}
}
void processMap(IDataType keyType, IDataType valueType, Object val) throws MetadataException {
void processMap(IDataType keyType, IDataType valueType, Object val) throws AtlasException {
if (keyType.getTypeCategory() == DataTypes.TypeCategory.PRIMITIVE &&
valueType.getTypeCategory() == DataTypes.TypeCategory.PRIMITIVE) {
return;
......@@ -85,7 +85,7 @@ public class ObjectGraphTraversal implements Iterator<ObjectGraphTraversal.Insta
}
}
void processCollection(IDataType elemType, Object val) throws MetadataException {
void processCollection(IDataType elemType, Object val) throws AtlasException {
if (elemType.getTypeCategory() == DataTypes.TypeCategory.PRIMITIVE) {
return;
......@@ -110,7 +110,7 @@ public class ObjectGraphTraversal implements Iterator<ObjectGraphTraversal.Insta
}
}
void processStruct(Object val) throws MetadataException {
void processStruct(Object val) throws AtlasException {
if (val == null || !(val instanceof IStruct)) {
return;
......@@ -129,7 +129,7 @@ public class ObjectGraphTraversal implements Iterator<ObjectGraphTraversal.Insta
}
}
void processReferenceableInstance(Object val) throws MetadataException {
void processReferenceableInstance(Object val) throws AtlasException {
if (val == null || !(val instanceof IReferenceableInstance || val instanceof Id)) {
return;
......@@ -175,7 +175,7 @@ public class ObjectGraphTraversal implements Iterator<ObjectGraphTraversal.Insta
InstanceTuple t = queue.poll();
processReferenceableInstance(t.instance);
return t;
} catch (MetadataException me) {
} catch (AtlasException me) {
throw new RuntimeException(me);
}
}
......
......@@ -20,7 +20,7 @@ package org.apache.atlas.typesystem.types;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableMap;
import org.apache.atlas.MetadataException;
import org.apache.atlas.AtlasException;
import org.apache.atlas.typesystem.IReferenceableInstance;
import org.apache.atlas.typesystem.IStruct;
import org.apache.atlas.typesystem.persistence.Id;
......@@ -48,13 +48,13 @@ public class ObjectGraphWalker {
Set<Id> processedIds;
public ObjectGraphWalker(TypeSystem typeSystem, NodeProcessor nodeProcessor)
throws MetadataException {
throws AtlasException {
this(typeSystem, nodeProcessor, (IReferenceableInstance) null);
}
public ObjectGraphWalker(TypeSystem typeSystem, NodeProcessor nodeProcessor,
IReferenceableInstance start)
throws MetadataException {
throws AtlasException {
this.typeSystem = typeSystem;
this.nodeProcessor = nodeProcessor;
queue = new LinkedList<IReferenceableInstance>();
......@@ -66,7 +66,7 @@ public class ObjectGraphWalker {
public ObjectGraphWalker(TypeSystem typeSystem, NodeProcessor nodeProcessor,
List<? extends IReferenceableInstance> roots)
throws MetadataException {
throws AtlasException {
this.typeSystem = typeSystem;
this.nodeProcessor = nodeProcessor;
queue = new LinkedList<IReferenceableInstance>();
......@@ -76,7 +76,7 @@ public class ObjectGraphWalker {
}
}
public void walk() throws MetadataException {
public void walk() throws AtlasException {
while (!queue.isEmpty()) {
IReferenceableInstance r = queue.poll();
processReferenceableInstance(r);
......@@ -87,7 +87,7 @@ public class ObjectGraphWalker {
visitReferenceableInstance(root);
}
void traverseValue(IDataType dT, Object val) throws MetadataException {
void traverseValue(IDataType dT, Object val) throws AtlasException {
if (val != null) {
if (dT.getTypeCategory() == DataTypes.TypeCategory.ARRAY) {
IDataType elemType = ((DataTypes.ArrayType) dT).getElemType();
......@@ -105,7 +105,7 @@ public class ObjectGraphWalker {
}
}
void visitMap(IDataType keyType, IDataType valueType, Object val) throws MetadataException {
void visitMap(IDataType keyType, IDataType valueType, Object val) throws AtlasException {
if (keyType.getTypeCategory() == DataTypes.TypeCategory.PRIMITIVE &&
valueType.getTypeCategory() == DataTypes.TypeCategory.PRIMITIVE) {
return;
......@@ -125,7 +125,7 @@ public class ObjectGraphWalker {
}
}
void visitCollection(IDataType elemType, Object val) throws MetadataException {
void visitCollection(IDataType elemType, Object val) throws AtlasException {
if (elemType.getTypeCategory() == DataTypes.TypeCategory.PRIMITIVE) {
return;
......@@ -150,7 +150,7 @@ public class ObjectGraphWalker {
}
}
void visitStruct(Object val) throws MetadataException {
void visitStruct(Object val) throws AtlasException {
if (val == null || !(val instanceof IStruct)) {
return;
......@@ -187,7 +187,7 @@ public class ObjectGraphWalker {
}
}
void processReferenceableInstance(IReferenceableInstance ref) throws MetadataException {
void processReferenceableInstance(IReferenceableInstance ref) throws AtlasException {
nodeProcessor.processNode(new Node(ref, null, null, null));
visitStruct(ref);
......@@ -199,7 +199,7 @@ public class ObjectGraphWalker {
public static interface NodeProcessor {
void processNode(Node nd) throws MetadataException;
void processNode(Node nd) throws AtlasException;
}
/**
......
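A minimal usage sketch of the walker above (root is assumed to be a previously built IReferenceableInstance; every visited node is handed to the NodeProcessor, which after this rename declares AtlasException):

    ObjectGraphWalker walker = new ObjectGraphWalker(TypeSystem.getInstance(),
            new ObjectGraphWalker.NodeProcessor() {
                @Override
                public void processNode(ObjectGraphWalker.Node nd) throws AtlasException {
                    System.out.println(nd);   // inspect or mutate each node here
                }
            }, root);
    walker.walk();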
......@@ -19,7 +19,7 @@
package org.apache.atlas.typesystem.types;
import com.google.common.collect.ImmutableList;
import org.apache.atlas.MetadataException;
import org.apache.atlas.AtlasException;
import org.apache.atlas.typesystem.IStruct;
import org.apache.atlas.typesystem.ITypedStruct;
......@@ -52,7 +52,7 @@ public class StructType extends AbstractDataType<IStruct>
protected StructType(TypeSystem typeSystem, String name,
ImmutableList<String> superTypes, AttributeInfo... fields)
throws MetadataException {
throws AtlasException {
this.typeSystem = typeSystem;
this.name = name;
this.fieldMapping = constructFieldMapping(superTypes,
......@@ -73,7 +73,7 @@ public class StructType extends AbstractDataType<IStruct>
protected FieldMapping constructFieldMapping(ImmutableList<String> superTypes,
AttributeInfo... fields)
throws MetadataException {
throws AtlasException {
Map<String, AttributeInfo> fieldsMap = new LinkedHashMap<String, AttributeInfo>();
Map<String, Integer> fieldPos = new HashMap<String, Integer>();
......@@ -96,7 +96,7 @@ public class StructType extends AbstractDataType<IStruct>
for (AttributeInfo i : fields) {
if (fieldsMap.containsKey(i.name)) {
throw new MetadataException(
throw new AtlasException(
String.format(
"Struct defintion cannot contain multiple fields with the same " +
"name %s",
......@@ -154,7 +154,7 @@ public class StructType extends AbstractDataType<IStruct>
fieldPos.put(i.name, numReferenceables);
numReferenceables++;
} else {
throw new MetadataException(String.format("Unknown datatype %s", i.dataType()));
throw new AtlasException(String.format("Unknown datatype %s", i.dataType()));
}
}
......@@ -185,7 +185,7 @@ public class StructType extends AbstractDataType<IStruct>
}
@Override
public ITypedStruct convert(Object val, Multiplicity m) throws MetadataException {
public ITypedStruct convert(Object val, Multiplicity m) throws AtlasException {
return handler.convert(val, m);
}
......@@ -194,7 +194,7 @@ public class StructType extends AbstractDataType<IStruct>
}
@Override
public void output(IStruct s, Appendable buf, String prefix) throws MetadataException {
public void output(IStruct s, Appendable buf, String prefix) throws AtlasException {
handler.output(s, buf, prefix);
}
......
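A short usage sketch of the convert/output pair above, mirroring StructTest later in this diff (structType is assumed to have been defined in, and fetched from, the TypeSystem beforehand):

    Struct s = new Struct(structType.getName());
    s.set("a", 1);
    ITypedStruct typed = structType.convert(s, Multiplicity.REQUIRED);
    StringBuilder buf = new StringBuilder();
    structType.output(typed, buf, "");   // delegates to the TypedStructHandler
    System.out.println(buf);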
......@@ -19,7 +19,7 @@
package org.apache.atlas.typesystem.types;
import com.google.common.collect.ImmutableList;
import org.apache.atlas.MetadataException;
import org.apache.atlas.AtlasException;
import org.apache.atlas.typesystem.IStruct;
import org.apache.atlas.typesystem.ITypedStruct;
......@@ -44,7 +44,7 @@ public class TraitType extends HierarchicalType<TraitType, IStruct>
TraitType(TypeSystem typeSystem, String name, ImmutableList<String> superTraits,
AttributeInfo... fields)
throws MetadataException {
throws AtlasException {
super(typeSystem, TraitType.class, name, superTraits, fields);
handler = new TypedStructHandler(this);
infoToNameMap = TypeUtils.buildAttrInfoToNameMap(fieldMapping);
......@@ -56,7 +56,7 @@ public class TraitType extends HierarchicalType<TraitType, IStruct>
}
@Override
public ITypedStruct convert(Object val, Multiplicity m) throws MetadataException {
public ITypedStruct convert(Object val, Multiplicity m) throws AtlasException {
return handler.convert(val, m);
}
......@@ -65,7 +65,7 @@ public class TraitType extends HierarchicalType<TraitType, IStruct>
}
@Override
public void output(IStruct s, Appendable buf, String prefix) throws MetadataException {
public void output(IStruct s, Appendable buf, String prefix) throws AtlasException {
handler.output(s, buf, prefix);
}
......
......@@ -20,7 +20,7 @@ package org.apache.atlas.typesystem.types;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableMap;
import org.apache.atlas.MetadataException;
import org.apache.atlas.AtlasException;
import org.apache.atlas.typesystem.TypesDef;
import scala.collection.JavaConversions;
......@@ -42,11 +42,11 @@ public class TypeUtils {
Pattern.compile(String.format("map<(%s),(%s)>", NAME_REGEX, NAME_REGEX));
public static void outputVal(String val, Appendable buf, String prefix)
throws MetadataException {
throws AtlasException {
try {
buf.append(prefix).append(val);
} catch (IOException ie) {
throw new MetadataException(ie);
throw new AtlasException(ie);
}
}
......
......@@ -20,7 +20,7 @@ package org.apache.atlas.typesystem.types;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableMap;
import org.apache.atlas.MetadataException;
import org.apache.atlas.AtlasException;
import org.apache.atlas.typesystem.IStruct;
import org.apache.atlas.typesystem.ITypedStruct;
import org.apache.atlas.typesystem.Struct;
......@@ -43,7 +43,7 @@ public class TypedStructHandler {
fieldMapping = structType.fieldMapping();
}
public ITypedStruct convert(Object val, Multiplicity m) throws MetadataException {
public ITypedStruct convert(Object val, Multiplicity m) throws AtlasException {
if (val != null) {
if (val instanceof ITypedStruct) {
ITypedStruct ts = (ITypedStruct) val;
......@@ -110,7 +110,7 @@ public class TypedStructHandler {
: new Id[fieldMapping.numReferenceables]);
}
public void output(IStruct s, Appendable buf, String prefix) throws MetadataException {
public void output(IStruct s, Appendable buf, String prefix) throws AtlasException {
fieldMapping.output(s, buf, prefix);
}
......
......@@ -18,9 +18,9 @@
package org.apache.atlas.typesystem.types;
import org.apache.atlas.MetadataException;
import org.apache.atlas.AtlasException;
public class ValueConversionException extends MetadataException {
public class ValueConversionException extends AtlasException {
public ValueConversionException(IDataType typ, Object val) {
this(typ, val, (Throwable) null);
......
......@@ -21,7 +21,7 @@ package org.apache.atlas.typesystem.json
import java.text.SimpleDateFormat
import com.google.common.collect.ImmutableList
import org.apache.atlas.MetadataException
import org.apache.atlas.AtlasException
import org.apache.atlas.typesystem.TypesDef
import org.apache.atlas.typesystem.types.DataTypes.{ArrayType, MapType, TypeCategory}
import org.apache.atlas.typesystem.types._
......@@ -234,7 +234,7 @@ trait TypeHelpers {
sts, attrDefs.toArray)
}
@throws(classOf[MetadataException])
@throws(classOf[AtlasException])
def defineClassType(ts: TypeSystem, classDef: HierarchicalTypeDefinition[ClassType]): ClassType = {
ts.defineTypes(ImmutableList.of[StructTypeDefinition],
ImmutableList.of[HierarchicalTypeDefinition[TraitType]],
......
......@@ -19,7 +19,7 @@
package org.apache.atlas.typesystem.json;
import com.google.common.collect.ImmutableList;
import org.apache.atlas.MetadataException;
import org.apache.atlas.AtlasException;
import org.apache.atlas.typesystem.ITypedInstance;
import org.apache.atlas.typesystem.ITypedReferenceableInstance;
import org.apache.atlas.typesystem.Referenceable;
......@@ -58,7 +58,7 @@ public class SerializationJavaTest extends BaseTest {
* Persons can have SecurityClearance(level : Int) clearance.
*/
@Test
public void test1() throws MetadataException {
public void test1() throws AtlasException {
TypeSystem ts = getTypeSystem();
......@@ -152,7 +152,7 @@ public class SerializationJavaTest extends BaseTest {
}
@Test
public void testTrait() throws MetadataException {
public void testTrait() throws AtlasException {
TypeSystem ts = getTypeSystem();
HierarchicalTypeDefinition<TraitType> securityClearanceTypeDef = createTraitTypeDef(
......
......@@ -21,7 +21,7 @@ package org.apache.atlas.typesystem.types;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.Lists;
import com.google.common.collect.Maps;
import org.apache.atlas.MetadataException;
import org.apache.atlas.AtlasException;
import org.apache.atlas.typesystem.Referenceable;
import org.apache.atlas.typesystem.Struct;
import org.apache.atlas.typesystem.types.utils.TypesUtil;
......@@ -39,7 +39,7 @@ public abstract class BaseTest {
public static final String TEST_DATE = "2014-12-11T02:35:58.440Z";
public static final long TEST_DATE_IN_LONG=1418265358440L;
public static Struct createStruct() throws MetadataException {
public static Struct createStruct() throws AtlasException {
StructType structType = TypeSystem.getInstance().getDataType(
StructType.class, STRUCT_TYPE_1);
Struct s = new Struct(structType.getName());
......@@ -102,13 +102,13 @@ public abstract class BaseTest {
}
protected Map<String, IDataType> defineTraits(HierarchicalTypeDefinition... tDefs)
throws MetadataException {
throws AtlasException {
return getTypeSystem().defineTraitTypes(tDefs);
}
protected Map<String, IDataType> defineClasses(
HierarchicalTypeDefinition<ClassType>... classDefs) throws MetadataException {
HierarchicalTypeDefinition<ClassType>... classDefs) throws AtlasException {
return getTypeSystem().defineClassTypes(classDefs);
}
......@@ -120,7 +120,7 @@ public abstract class BaseTest {
*
* Persons can have SecurityClearance(level : Int) clearance.
*/
protected void defineDeptEmployeeTypes(TypeSystem ts) throws MetadataException {
protected void defineDeptEmployeeTypes(TypeSystem ts) throws AtlasException {
HierarchicalTypeDefinition<ClassType> deptTypeDef = TypesUtil
.createClassTypeDef("Department",
......@@ -166,7 +166,7 @@ public abstract class BaseTest {
);
}
protected Referenceable createDeptEg1(TypeSystem ts) throws MetadataException {
protected Referenceable createDeptEg1(TypeSystem ts) throws AtlasException {
Referenceable hrDept = new Referenceable("Department");
Referenceable john = new Referenceable("Person");
Referenceable jane = new Referenceable("Manager", "SecurityClearance");
......
......@@ -18,7 +18,7 @@
package org.apache.atlas.typesystem.types;
import org.apache.atlas.MetadataException;
import org.apache.atlas.AtlasException;
import org.apache.atlas.typesystem.ITypedReferenceableInstance;
import org.apache.atlas.typesystem.Referenceable;
import org.junit.Assert;
......@@ -33,7 +33,7 @@ public class ClassTest extends BaseTest {
}
@Test
public void test1() throws MetadataException {
public void test1() throws AtlasException {
TypeSystem ts = getTypeSystem();
......
......@@ -21,7 +21,7 @@ package org.apache.atlas.typesystem.types;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.Lists;
import com.google.common.collect.Maps;
import org.apache.atlas.MetadataException;
import org.apache.atlas.AtlasException;
import org.apache.atlas.typesystem.IReferenceableInstance;
import org.apache.atlas.typesystem.ITypedReferenceableInstance;
import org.apache.atlas.typesystem.ITypedStruct;
......@@ -47,7 +47,7 @@ public class EnumTest extends BaseTest {
super.setup();
}
void defineEnums(TypeSystem ts) throws MetadataException {
void defineEnums(TypeSystem ts) throws AtlasException {
ts.defineEnumType("HiveObjectType",
new EnumValue("GLOBAL", 1),
new EnumValue("DATABASE", 2),
......@@ -72,7 +72,7 @@ public class EnumTest extends BaseTest {
}
protected void fillStruct(Struct s) throws MetadataException {
protected void fillStruct(Struct s) throws AtlasException {
s.set("a", 1);
s.set("b", true);
s.set("c", (byte) 1);
......@@ -98,19 +98,19 @@ public class EnumTest extends BaseTest {
s.set("enum4", 3);
}
protected Struct createStructWithEnum(String typeName) throws MetadataException {
protected Struct createStructWithEnum(String typeName) throws AtlasException {
Struct s = new Struct(typeName);
fillStruct(s);
return s;
}
protected Referenceable createInstanceWithEnum(String typeName) throws MetadataException {
protected Referenceable createInstanceWithEnum(String typeName) throws AtlasException {
Referenceable r = new Referenceable(typeName);
fillStruct(r);
return r;
}
protected ClassType defineClassTypeWithEnum(TypeSystem ts) throws MetadataException {
protected ClassType defineClassTypeWithEnum(TypeSystem ts) throws AtlasException {
return ts.defineClassType(createClassTypeDef("t4",
ImmutableList.<String>of(),
createRequiredAttrDef("a", DataTypes.INT_TYPE),
......@@ -136,7 +136,7 @@ public class EnumTest extends BaseTest {
}
@Test
public void testStruct() throws MetadataException {
public void testStruct() throws AtlasException {
TypeSystem ts = getTypeSystem();
defineEnums(ts);
StructType structType = ts.defineStructType("t3",
......@@ -188,7 +188,7 @@ public class EnumTest extends BaseTest {
}
@Test
public void testClass() throws MetadataException {
public void testClass() throws AtlasException {
TypeSystem ts = getTypeSystem();
defineEnums(ts);
ClassType clsType = defineClassTypeWithEnum(ts);
......
......@@ -18,7 +18,7 @@
package org.apache.atlas.typesystem.types;
import org.apache.atlas.MetadataException;
import org.apache.atlas.AtlasException;
import org.apache.atlas.typesystem.ITypedStruct;
import org.apache.atlas.typesystem.Struct;
import org.junit.Assert;
......@@ -39,7 +39,7 @@ public class StructTest extends BaseTest {
}
@Test
public void test1() throws MetadataException {
public void test1() throws AtlasException {
Struct s = createStruct();
ITypedStruct ts = structType.convert(s, Multiplicity.REQUIRED);
Assert.assertEquals(ts.toString(), "{\n" +
......@@ -62,7 +62,7 @@ public class StructTest extends BaseTest {
}
@Test
public void testRecursive() throws MetadataException {
public void testRecursive() throws AtlasException {
Struct s1 = new Struct(recursiveStructType.getName());
s1.set("a", 1);
Struct s2 = new Struct(recursiveStructType.getName());
......
......@@ -19,7 +19,7 @@
package org.apache.atlas.typesystem.types;
import com.google.common.collect.ImmutableList;
import org.apache.atlas.MetadataException;
import org.apache.atlas.AtlasException;
import org.apache.atlas.typesystem.IStruct;
import org.apache.atlas.typesystem.ITypedStruct;
import org.apache.atlas.typesystem.Struct;
......@@ -62,7 +62,7 @@ public class TraitTest extends BaseTest {
* available as 'b'.
*/
@Test
public void test1() throws MetadataException {
public void test1() throws AtlasException {
HierarchicalTypeDefinition A = createTraitTypeDef("A", null,
createRequiredAttrDef("a", DataTypes.INT_TYPE),
createOptionalAttrDef("b", DataTypes.BOOLEAN_TYPE),
......@@ -171,7 +171,7 @@ public class TraitTest extends BaseTest {
}
@Test
public void testRandomOrder() throws MetadataException {
public void testRandomOrder() throws AtlasException {
HierarchicalTypeDefinition A = createTraitTypeDef("A", null,
createRequiredAttrDef("a", DataTypes.INT_TYPE),
createOptionalAttrDef("b", DataTypes.BOOLEAN_TYPE),
......
......@@ -19,7 +19,7 @@
package org.apache.atlas.typesystem.types;
import com.google.common.collect.ImmutableList;
import org.apache.atlas.MetadataException;
import org.apache.atlas.AtlasException;
import org.apache.atlas.typesystem.IStruct;
import org.apache.atlas.typesystem.ITypedInstance;
import org.apache.atlas.typesystem.ITypedStruct;
......@@ -50,7 +50,7 @@ public class TypeInheritanceTest extends BaseTest {
* B(b) extends A
*/
@Test
public void testSimpleInheritance() throws MetadataException {
public void testSimpleInheritance() throws AtlasException {
HierarchicalTypeDefinition A = createClassTypeDef("A", null,
createRequiredAttrDef("a", DataTypes.INT_TYPE));
......@@ -79,7 +79,7 @@ public class TypeInheritanceTest extends BaseTest {
* B(b) extends A
*/
@Test
public void testSimpleInheritanceWithOverrides() throws MetadataException {
public void testSimpleInheritanceWithOverrides() throws AtlasException {
HierarchicalTypeDefinition A = createClassTypeDef("A", null,
createRequiredAttrDef("a", DataTypes.INT_TYPE),
createRequiredAttrDef("b", DataTypes.BOOLEAN_TYPE));
......@@ -113,7 +113,7 @@ public class TypeInheritanceTest extends BaseTest {
* D(d) extends C
*/
@Test
public void testMultiLevelInheritance() throws MetadataException {
public void testMultiLevelInheritance() throws AtlasException {
HierarchicalTypeDefinition A = createClassTypeDef("A", null,
createRequiredAttrDef("a", DataTypes.INT_TYPE));
......@@ -166,7 +166,7 @@ public class TypeInheritanceTest extends BaseTest {
* available as 'b'.
*/
@Test
public void testDiamondInheritance() throws MetadataException {
public void testDiamondInheritance() throws AtlasException {
HierarchicalTypeDefinition A = createTraitTypeDef("A", null,
createRequiredAttrDef("a", DataTypes.INT_TYPE),
createOptionalAttrDef("b", DataTypes.BOOLEAN_TYPE),
......
......@@ -18,7 +18,7 @@
package org.apache.atlas.typesystem.builders
import org.apache.atlas.MetadataException
import org.apache.atlas.AtlasException
import org.apache.atlas.typesystem.types.{ClassType, Multiplicity, TypeSystem}
import org.scalatest.{BeforeAndAfterAll, FunSuite}
......@@ -117,7 +117,7 @@ class MultiplicityTest extends FunSuite with BeforeAndAfterAll {
"stringSet" ~ (string, multiplicty(0, Int.MaxValue, true))
}
}
val me = intercept[MetadataException] {
val me = intercept[AtlasException] {
TypeSystem.getInstance().defineTypes(tDef)
}
assert("A multiplicty of more than one requires a collection type for attribute 'stringSet'" == me.getMessage)
......
......@@ -38,8 +38,8 @@ public final class Main {
private static final Logger LOG = LoggerFactory.getLogger(Main.class);
private static final String APP_PATH = "app";
private static final String APP_PORT = "port";
private static final String METADATA_HOME = "atlas.home";
private static final String METADATA_LOG_DIR = "atlas.log.dir";
private static final String ATLAS_HOME = "atlas.home";
private static final String ATLAS_LOG_DIR = "atlas.log.dir";
/**
* Prevent users from constructing this.
......@@ -85,11 +85,11 @@ public final class Main {
}
private static void setApplicationHome() {
if (System.getProperty(METADATA_HOME) == null) {
System.setProperty(METADATA_HOME, "target");
if (System.getProperty(ATLAS_HOME) == null) {
System.setProperty(ATLAS_HOME, "target");
}
if (System.getProperty(METADATA_LOG_DIR) == null) {
System.setProperty(METADATA_LOG_DIR, "target/logs");
if (System.getProperty(ATLAS_LOG_DIR) == null) {
System.setProperty(ATLAS_LOG_DIR, "target/logs");
}
}
......@@ -102,7 +102,7 @@ public final class Main {
if (cmd.hasOption(APP_PORT)) {
appPort = Integer.valueOf(cmd.getOptionValue(APP_PORT));
} else {
// default : metadata.enableTLS is true
// default : atlas.enableTLS is true
appPort = StringUtils.isEmpty(enableTLSFlag)
|| enableTLSFlag.equals("true") ? 21443 : 21000;
}
......@@ -112,8 +112,7 @@ public final class Main {
private static boolean isTLSEnabled(String enableTLSFlag, int appPort) {
return Boolean.valueOf(StringUtils.isEmpty(enableTLSFlag)
? System
.getProperty("atlas.enableTLS", (appPort % 1000) == 443 ? "true" : "false")
? System.getProperty("atlas.enableTLS", (appPort % 1000) == 443 ? "true" : "false")
: enableTLSFlag);
}
......
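The reflowed expression above combines two defaults that are easy to miss: when atlas.enableTLS is unset the port defaults to 21443 (TLS) rather than 21000, and TLS itself defaults on exactly when the effective port ends in 443. A small sketch of that derivation:

    // With no -Datlas.enableTLS and no CLI flag: appPort = 21443, and
    // 21443 % 1000 == 443, so TLS defaults to true; 21000 yields false.
    int appPort = 21443;
    boolean tls = Boolean.valueOf(
            System.getProperty("atlas.enableTLS", (appPort % 1000) == 443 ? "true" : "false"));
    System.out.println(tls);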
......@@ -20,7 +20,7 @@ package org.apache.atlas.examples;
import com.google.common.base.Preconditions;
import com.google.common.collect.ImmutableList;
import org.apache.atlas.MetadataServiceClient;
import org.apache.atlas.AtlasClient;
import org.apache.atlas.typesystem.Referenceable;
import org.apache.atlas.typesystem.TypesDef;
import org.apache.atlas.typesystem.json.InstanceSerialization;
......@@ -84,10 +84,10 @@ public class QuickStart {
"JdbcAccess", "ETL", "Metric", "PII", "Fact", "Dimension"
};
private final MetadataServiceClient metadataServiceClient;
private final AtlasClient metadataServiceClient;
QuickStart(String baseUrl) {
metadataServiceClient = new MetadataServiceClient(baseUrl);
metadataServiceClient = new AtlasClient(baseUrl);
}
void createTypes() throws Exception {
......@@ -291,7 +291,7 @@ public class QuickStart {
String entityJSON = InstanceSerialization.toJson(referenceable, true);
System.out.println("Submitting new entity= " + entityJSON);
JSONObject jsonObject = metadataServiceClient.createEntity(entityJSON);
String guid = jsonObject.getString(MetadataServiceClient.GUID);
String guid = jsonObject.getString(AtlasClient.GUID);
System.out.println("created instance for type " + typeName + ", guid: " + guid);
// return the Id for created instance with guid
......@@ -466,7 +466,7 @@ public class QuickStart {
private void search() throws Exception {
for (String dslQuery : getDSLQueries()) {
JSONObject response = metadataServiceClient.searchEntity(dslQuery);
JSONObject results = response.getJSONObject(MetadataServiceClient.RESULTS);
JSONObject results = response.getJSONObject(AtlasClient.RESULTS);
if (!results.isNull("rows")) {
JSONArray rows = results.getJSONArray("rows");
System.out.println("query [" + dslQuery + "] returned [" + rows.length() + "] rows");
......
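A minimal sketch of the renamed client as QuickStart uses it (assumes an Atlas server at the given URL and a JSON entity string produced by InstanceSerialization.toJson as above; the DSL query string is hypothetical):

    AtlasClient client = new AtlasClient("http://localhost:21000");
    JSONObject created = client.createEntity(entityJSON);
    String guid = created.getString(AtlasClient.GUID);       // response key shown above
    JSONObject search = client.searchEntity("from DB");      // hypothetical DSL query
    JSONObject results = search.getJSONObject(AtlasClient.RESULTS);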
......@@ -42,8 +42,8 @@ import java.util.Properties;
* todo: Subclass of {@link org.apache.hadoop.security.authentication.server.AuthenticationFilter}.
*/
@Singleton
public class MetadataAuthenticationFilter extends AuthenticationFilter {
private static final Logger LOG = LoggerFactory.getLogger(MetadataAuthenticationFilter.class);
public class AtlasAuthenticationFilter extends AuthenticationFilter {
private static final Logger LOG = LoggerFactory.getLogger(AtlasAuthenticationFilter.class);
static final String PREFIX = "atlas.http.authentication.";
@Override
......
......@@ -19,7 +19,7 @@
package org.apache.atlas.web.filters;
import com.google.inject.Singleton;
import org.apache.atlas.MetadataServiceClient;
import org.apache.atlas.AtlasClient;
import org.apache.atlas.web.util.DateTimeHelper;
import org.apache.atlas.web.util.Servlets;
import org.slf4j.Logger;
......@@ -68,7 +68,7 @@ public class AuditFilter implements Filter {
filterChain.doFilter(request, response);
} finally {
// put the request id into the response so users can trace logs for this request
((HttpServletResponse) response).setHeader(MetadataServiceClient.REQUEST_ID, requestId);
((HttpServletResponse) response).setHeader(AtlasClient.REQUEST_ID, requestId);
currentThread.setName(oldName);
}
}
......
......@@ -24,12 +24,12 @@ import com.google.inject.servlet.GuiceServletContextListener;
import com.sun.jersey.api.core.PackagesResourceConfig;
import com.sun.jersey.guice.JerseyServletModule;
import com.sun.jersey.guice.spi.container.servlet.GuiceContainer;
import org.apache.atlas.MetadataException;
import org.apache.atlas.MetadataServiceClient;
import org.apache.atlas.AtlasException;
import org.apache.atlas.AtlasClient;
import org.apache.atlas.PropertiesUtil;
import org.apache.atlas.RepositoryMetadataModule;
import org.apache.atlas.web.filters.AuditFilter;
import org.apache.atlas.web.filters.MetadataAuthenticationFilter;
import org.apache.atlas.web.filters.AtlasAuthenticationFilter;
import org.apache.commons.configuration.ConfigurationException;
import org.apache.commons.configuration.PropertiesConfiguration;
import org.slf4j.Logger;
......@@ -75,16 +75,16 @@ public class GuiceServletConfig extends GuiceServletContextListener {
Map<String, String> params = new HashMap<>();
params.put(PackagesResourceConfig.PROPERTY_PACKAGES, packages);
serve("/" + MetadataServiceClient.BASE_URI + "*").with(GuiceContainer.class, params);
serve("/" + AtlasClient.BASE_URI + "*").with(GuiceContainer.class, params);
}
private void configureAuthenticationFilter() throws ConfigurationException {
try {
PropertiesConfiguration configuration = PropertiesUtil.getApplicationProperties();
if (Boolean.valueOf(configuration.getString(HTTP_AUTHENTICATION_ENABLED))) {
filter("/*").through(MetadataAuthenticationFilter.class);
filter("/*").through(AtlasAuthenticationFilter.class);
}
} catch (MetadataException e) {
} catch (AtlasException e) {
LOG.warn("Error loading configuration and initializing authentication filter", e);
}
}
......
......@@ -16,7 +16,7 @@
*/
package org.apache.atlas.web.listeners;
import org.apache.atlas.MetadataException;
import org.apache.atlas.AtlasException;
import org.apache.atlas.PropertiesUtil;
import org.apache.atlas.security.SecurityProperties;
import org.apache.commons.configuration.ConfigurationException;
......@@ -139,7 +139,7 @@ public class LoginProcessor {
protected PropertiesConfiguration getPropertiesConfiguration() throws ConfigurationException {
try {
return PropertiesUtil.getApplicationProperties();
} catch (MetadataException e) {
} catch (AtlasException e) {
throw new ConfigurationException(e);
}
}
......
......@@ -19,8 +19,8 @@
package org.apache.atlas.web.resources;
import com.google.common.base.Preconditions;
import org.apache.atlas.MetadataException;
import org.apache.atlas.MetadataServiceClient;
import org.apache.atlas.AtlasException;
import org.apache.atlas.AtlasClient;
import org.apache.atlas.ParamChecker;
import org.apache.atlas.TypeNotFoundException;
import org.apache.atlas.repository.EntityNotFoundException;
......@@ -99,9 +99,9 @@ public class EntityResource {
URI locationURI = ub.path(guid).build();
JSONObject response = new JSONObject();
response.put(MetadataServiceClient.REQUEST_ID, Servlets.getRequestId());
response.put(MetadataServiceClient.GUID, guid);
response.put(MetadataServiceClient.DEFINITION,
response.put(AtlasClient.REQUEST_ID, Servlets.getRequestId());
response.put(AtlasClient.GUID, guid);
response.put(AtlasClient.DEFINITION,
metadataService.getEntityDefinition(guid));
return Response.created(locationURI).entity(response).build();
......@@ -110,7 +110,7 @@ public class EntityResource {
LOG.error("Unable to persist entity instance due to a desrialization error ", ve);
throw new WebApplicationException(
Servlets.getErrorResponse(ve.getCause(), Response.Status.BAD_REQUEST));
} catch (MetadataException | IllegalArgumentException e) {
} catch (AtlasException | IllegalArgumentException e) {
LOG.error("Unable to persist entity instance", e);
throw new WebApplicationException(
Servlets.getErrorResponse(e, Response.Status.BAD_REQUEST));
......@@ -136,15 +136,15 @@ public class EntityResource {
final String entityDefinition = metadataService.getEntityDefinition(guid);
JSONObject response = new JSONObject();
response.put(MetadataServiceClient.REQUEST_ID, Servlets.getRequestId());
response.put(MetadataServiceClient.GUID, guid);
response.put(AtlasClient.REQUEST_ID, Servlets.getRequestId());
response.put(AtlasClient.GUID, guid);
Response.Status status = Response.Status.NOT_FOUND;
if (entityDefinition != null) {
response.put(MetadataServiceClient.DEFINITION, entityDefinition);
response.put(AtlasClient.DEFINITION, entityDefinition);
status = Response.Status.OK;
} else {
response.put(MetadataServiceClient.ERROR, Servlets.escapeJsonString(
response.put(AtlasClient.ERROR, Servlets.escapeJsonString(
String.format("An entity with GUID={%s} does not exist", guid)));
}
......@@ -154,7 +154,7 @@ public class EntityResource {
LOG.error("An entity with GUID={} does not exist", guid, e);
throw new WebApplicationException(
Servlets.getErrorResponse(e, Response.Status.NOT_FOUND));
} catch (MetadataException | IllegalArgumentException e) {
} catch (AtlasException | IllegalArgumentException e) {
LOG.error("Bad GUID={}", guid, e);
throw new WebApplicationException(
Servlets.getErrorResponse(e, Response.Status.BAD_REQUEST));
......@@ -180,17 +180,17 @@ public class EntityResource {
final List<String> entityList = metadataService.getEntityList(entityType);
JSONObject response = new JSONObject();
response.put(MetadataServiceClient.REQUEST_ID, Servlets.getRequestId());
response.put(MetadataServiceClient.TYPENAME, entityType);
response.put(MetadataServiceClient.RESULTS, new JSONArray(entityList));
response.put(MetadataServiceClient.COUNT, entityList.size());
response.put(AtlasClient.REQUEST_ID, Servlets.getRequestId());
response.put(AtlasClient.TYPENAME, entityType);
response.put(AtlasClient.RESULTS, new JSONArray(entityList));
response.put(AtlasClient.COUNT, entityList.size());
return Response.ok(response).build();
} catch (NullPointerException e) {
LOG.error("Entity type cannot be null", e);
throw new WebApplicationException(
Servlets.getErrorResponse(e, Response.Status.BAD_REQUEST));
} catch (MetadataException | IllegalArgumentException e) {
} catch (AtlasException | IllegalArgumentException e) {
LOG.error("Unable to get entity list for type {}", entityType, e);
throw new WebApplicationException(
Servlets.getErrorResponse(e, Response.Status.BAD_REQUEST));
......@@ -222,13 +222,13 @@ public class EntityResource {
metadataService.updateEntity(guid, property, value);
JSONObject response = new JSONObject();
response.put(MetadataServiceClient.REQUEST_ID, Thread.currentThread().getName());
response.put(AtlasClient.REQUEST_ID, Thread.currentThread().getName());
return Response.ok(response).build();
} catch (EntityNotFoundException e) {
LOG.error("An entity with GUID={} does not exist", guid, e);
throw new WebApplicationException(
Servlets.getErrorResponse(e, Response.Status.NOT_FOUND));
} catch (MetadataException | IllegalArgumentException e) {
} catch (AtlasException | IllegalArgumentException e) {
LOG.error("Unable to add property {} to entity id {}", property, guid, e);
throw new WebApplicationException(
Servlets.getErrorResponse(e, Response.Status.BAD_REQUEST));
......@@ -255,17 +255,17 @@ public class EntityResource {
final List<String> traitNames = metadataService.getTraitNames(guid);
JSONObject response = new JSONObject();
response.put(MetadataServiceClient.REQUEST_ID, Servlets.getRequestId());
response.put(MetadataServiceClient.GUID, guid);
response.put(MetadataServiceClient.RESULTS, new JSONArray(traitNames));
response.put(MetadataServiceClient.COUNT, traitNames.size());
response.put(AtlasClient.REQUEST_ID, Servlets.getRequestId());
response.put(AtlasClient.GUID, guid);
response.put(AtlasClient.RESULTS, new JSONArray(traitNames));
response.put(AtlasClient.COUNT, traitNames.size());
return Response.ok(response).build();
} catch (EntityNotFoundException e) {
LOG.error("An entity with GUID={} does not exist", guid, e);
throw new WebApplicationException(
Servlets.getErrorResponse(e, Response.Status.NOT_FOUND));
} catch (MetadataException | IllegalArgumentException e) {
} catch (AtlasException | IllegalArgumentException e) {
LOG.error("Unable to get trait names for entity {}", guid, e);
throw new WebApplicationException(
Servlets.getErrorResponse(e, Response.Status.BAD_REQUEST));
......@@ -296,15 +296,15 @@ public class EntityResource {
URI locationURI = ub.path(guid).build();
JSONObject response = new JSONObject();
response.put(MetadataServiceClient.REQUEST_ID, Servlets.getRequestId());
response.put(MetadataServiceClient.GUID, guid);
response.put(AtlasClient.REQUEST_ID, Servlets.getRequestId());
response.put(AtlasClient.GUID, guid);
return Response.created(locationURI).entity(response).build();
} catch (EntityNotFoundException | TypeNotFoundException e) {
LOG.error("An entity with GUID={} does not exist", guid, e);
throw new WebApplicationException(
Servlets.getErrorResponse(e, Response.Status.NOT_FOUND));
} catch (MetadataException | IllegalArgumentException e) {
} catch (AtlasException | IllegalArgumentException e) {
LOG.error("Unable to add trait for entity={}", guid, e);
throw new WebApplicationException(
Servlets.getErrorResponse(e, Response.Status.BAD_REQUEST));
......@@ -333,8 +333,8 @@ public class EntityResource {
metadataService.deleteTrait(guid, traitName);
JSONObject response = new JSONObject();
response.put(MetadataServiceClient.REQUEST_ID, Servlets.getRequestId());
response.put(MetadataServiceClient.GUID, guid);
response.put(AtlasClient.REQUEST_ID, Servlets.getRequestId());
response.put(AtlasClient.GUID, guid);
response.put(TRAIT_NAME, traitName);
return Response.ok(response).build();
......@@ -342,7 +342,7 @@ public class EntityResource {
LOG.error("An entity with GUID={} does not exist", guid, e);
throw new WebApplicationException(
Servlets.getErrorResponse(e, Response.Status.NOT_FOUND));
} catch (MetadataException | IllegalArgumentException e) {
} catch (AtlasException | IllegalArgumentException e) {
LOG.error("Unable to delete trait name={} for entity={}", traitName, guid, e);
throw new WebApplicationException(
Servlets.getErrorResponse(e, Response.Status.BAD_REQUEST));
......
......@@ -18,7 +18,7 @@
package org.apache.atlas.web.resources;
import org.apache.atlas.MetadataServiceClient;
import org.apache.atlas.AtlasClient;
import org.apache.atlas.ParamChecker;
import org.apache.atlas.discovery.DiscoveryException;
import org.apache.atlas.discovery.LineageService;
......@@ -80,9 +80,9 @@ public class HiveLineageResource {
final String jsonResult = lineageService.getInputsGraph(tableName);
JSONObject response = new JSONObject();
response.put(MetadataServiceClient.REQUEST_ID, Servlets.getRequestId());
response.put(AtlasClient.REQUEST_ID, Servlets.getRequestId());
response.put("tableName", tableName);
response.put(MetadataServiceClient.RESULTS, new JSONObject(jsonResult));
response.put(AtlasClient.RESULTS, new JSONObject(jsonResult));
return Response.ok(response).build();
} catch (EntityNotFoundException e) {
......@@ -118,9 +118,9 @@ public class HiveLineageResource {
final String jsonResult = lineageService.getOutputsGraph(tableName);
JSONObject response = new JSONObject();
response.put(MetadataServiceClient.REQUEST_ID, Servlets.getRequestId());
response.put(AtlasClient.REQUEST_ID, Servlets.getRequestId());
response.put("tableName", tableName);
response.put(MetadataServiceClient.RESULTS, new JSONObject(jsonResult));
response.put(AtlasClient.RESULTS, new JSONObject(jsonResult));
return Response.ok(response).build();
} catch (EntityNotFoundException e) {
......@@ -156,9 +156,9 @@ public class HiveLineageResource {
final String jsonResult = lineageService.getSchema(tableName);
JSONObject response = new JSONObject();
response.put(MetadataServiceClient.REQUEST_ID, Servlets.getRequestId());
response.put(AtlasClient.REQUEST_ID, Servlets.getRequestId());
response.put("tableName", tableName);
response.put(MetadataServiceClient.RESULTS, new JSONObject(jsonResult));
response.put(AtlasClient.RESULTS, new JSONObject(jsonResult));
return Response.ok(response).build();
} catch (EntityNotFoundException e) {
......