Commit f379c9ff authored by ashutoshm, committed by Madhan Neethiraj

ATLAS-1666: updated exception handling to avoid use of generic exceptions

parent 1612b305
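The change applies a single pattern throughout: replace a generic "throws Exception" with a narrow checked exception that wraps the original failure, so callers catch one meaningful type and the root cause stays on the exception chain. A minimal, self-contained sketch of that pattern (all names below are hypothetical, not from this commit):

    // Sketch of the wrap-and-rethrow pattern applied throughout this commit.
    public class WrapAndRethrowExample {
        // A narrow checked exception replaces a generic "throws Exception".
        static class RegistrationException extends Exception {
            RegistrationException(String message, Throwable cause) {
                super(message, cause);
            }
        }

        static String register(String entityName) throws RegistrationException {
            try {
                if (entityName == null || entityName.isEmpty()) {
                    throw new IllegalArgumentException("entity name is empty");
                }
                return "registered:" + entityName;
            } catch (Exception e) {
                // Preserve the original failure as the cause.
                throw new RegistrationException("register(" + entityName + ") failed", e);
            }
        }

        public static void main(String[] args) throws RegistrationException {
            System.out.println(register("sales_db"));
        }
    }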
@@ -73,8 +73,7 @@ public class FalconBridge {
* @param cluster ClusterEntity
* @return cluster instance reference
*/
public static Referenceable createClusterEntity(final org.apache.falcon.entity.v0.cluster.Cluster cluster)
throws Exception {
public static Referenceable createClusterEntity(final org.apache.falcon.entity.v0.cluster.Cluster cluster) {
LOG.info("Creating cluster Entity : {}", cluster.getName());
Referenceable clusterRef = new Referenceable(FalconDataTypes.FALCON_CLUSTER.getName());
@@ -97,7 +96,7 @@ public class FalconBridge {
return clusterRef;
}
private static Referenceable createFeedEntity(Feed feed, Referenceable clusterReferenceable) throws Exception {
private static Referenceable createFeedEntity(Feed feed, Referenceable clusterReferenceable) {
LOG.info("Creating feed dataset: {}", feed.getName());
Referenceable feedEntity = new Referenceable(FalconDataTypes.FALCON_FEED.getName());
@@ -338,8 +337,7 @@ public class FalconBridge {
return entities;
}
private static Referenceable createHiveDatabaseInstance(String clusterName, String dbName)
throws Exception {
private static Referenceable createHiveDatabaseInstance(String clusterName, String dbName) {
Referenceable dbRef = new Referenceable(HiveDataTypes.HIVE_DB.getName());
dbRef.set(AtlasConstants.CLUSTER_NAME_ATTRIBUTE, clusterName);
dbRef.set(AtlasClient.NAME, dbName);
@@ -349,7 +347,7 @@ public class FalconBridge {
}
private static List<Referenceable> createHiveTableInstance(String clusterName, String dbName,
String tableName) throws Exception {
String tableName) {
List<Referenceable> entities = new ArrayList<>();
Referenceable dbRef = createHiveDatabaseInstance(clusterName, dbName);
entities.add(dbRef);
......
@@ -18,16 +18,15 @@
package org.apache.atlas.hive.bridge;
import java.util.ArrayList;
import java.util.Date;
import java.util.List;
import com.google.common.annotations.VisibleForTesting;
import com.sun.jersey.api.client.ClientResponse;
import org.apache.atlas.ApplicationProperties;
import org.apache.atlas.AtlasClient;
import org.apache.atlas.AtlasConstants;
import org.apache.atlas.AtlasServiceException;
import org.apache.atlas.hive.hook.HiveHook;
import org.apache.atlas.hive.model.HiveDataTypes;
import org.apache.atlas.hook.AtlasHookException;
import org.apache.atlas.typesystem.Referenceable;
import org.apache.atlas.typesystem.Struct;
import org.apache.atlas.typesystem.json.InstanceSerialization;
@@ -55,8 +54,9 @@ import org.apache.hadoop.security.UserGroupInformation;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import com.google.common.annotations.VisibleForTesting;
import com.sun.jersey.api.client.ClientResponse;
import java.util.ArrayList;
import java.util.Date;
import java.util.List;
/**
* A Bridge Utility that imports metadata from the Hive Meta Store
@@ -420,7 +420,7 @@ public class HiveMetaStoreBridge {
* @throws AtlasHookException
*/
public Referenceable createTableInstance(Referenceable dbReference, Table hiveTable)
throws Exception {
throws AtlasHookException {
return createOrUpdateTableInstance(dbReference, null, hiveTable);
}
@@ -429,7 +429,7 @@ public class HiveMetaStoreBridge {
}
private Referenceable createOrUpdateTableInstance(Referenceable dbReference, Referenceable tableReference,
final Table hiveTable) throws Exception {
final Table hiveTable) throws AtlasHookException {
LOG.info("Importing objects from {}.{}", hiveTable.getDbName(), hiveTable.getTableName());
if (tableReference == null) {
@@ -494,22 +494,26 @@ public class HiveMetaStoreBridge {
return entityQualifiedName + "_storage";
}
private Referenceable registerTable(Referenceable dbReference, Table table) throws Exception {
private Referenceable registerTable(Referenceable dbReference, Table table) throws AtlasHookException {
try {
String dbName = table.getDbName();
String tableName = table.getTableName();
LOG.info("Attempting to register table [{}]", tableName);
Referenceable tableReference = getTableReference(table);
LOG.info("Found result {}", tableReference);
if (tableReference == null) {
tableReference = createTableInstance(dbReference, table);
tableReference = registerInstance(tableReference);
} else {
LOG.info("Table {}.{} is already registered with id {}. Updating entity.", dbName, tableName,
tableReference.getId().id);
tableReference = createOrUpdateTableInstance(dbReference, tableReference, table);
updateInstance(tableReference);
}
return tableReference;
} catch (Exception e) {
throw new AtlasHookException("HiveMetaStoreBridge.getStorageDescQFName() failed.", e);
}
}
private void updateInstance(Referenceable referenceable) throws AtlasServiceException {
@@ -523,7 +527,7 @@ public class HiveMetaStoreBridge {
}
public Referenceable fillStorageDesc(StorageDescriptor storageDesc, String tableQualifiedName,
String sdQualifiedName, Id tableId) throws Exception {
String sdQualifiedName, Id tableId) throws AtlasHookException {
LOG.debug("Filling storage descriptor information for {}", storageDesc);
Referenceable sdReferenceable = new Referenceable(HiveDataTypes.HIVE_STORAGEDESC.getName());
@@ -590,7 +594,7 @@ public class HiveMetaStoreBridge {
return String.format("%s.%s@%s", tableName, colName.toLowerCase(), clusterName);
}
public List<Referenceable> getColumns(List<FieldSchema> schemaList, Referenceable tableReference) throws Exception {
public List<Referenceable> getColumns(List<FieldSchema> schemaList, Referenceable tableReference) throws AtlasHookException {
List<Referenceable> colList = new ArrayList<>();
int columnPosition = 0;
for (FieldSchema fs : schemaList) {
@@ -612,8 +616,8 @@ public class HiveMetaStoreBridge {
}
public static void main(String[] args) throws Exception {
public static void main(String[] args) throws AtlasHookException {
try {
Configuration atlasConf = ApplicationProperties.get();
String[] atlasEndpoint = atlasConf.getStringArray(ATLAS_ENDPOINT);
if (atlasEndpoint == null || atlasEndpoint.length == 0){
@@ -640,5 +644,9 @@ public class HiveMetaStoreBridge {
HiveMetaStoreBridge hiveMetaStoreBridge = new HiveMetaStoreBridge(atlasConf, new HiveConf(), atlasClient);
hiveMetaStoreBridge.importHiveMetadata(failOnError);
}
catch(Exception e) {
throw new AtlasHookException("HiveMetaStoreBridge.main() failed.", e);
}
}
}
@@ -25,6 +25,7 @@ import org.apache.atlas.AtlasConstants;
import org.apache.atlas.hive.bridge.HiveMetaStoreBridge;
import org.apache.atlas.hive.model.HiveDataTypes;
import org.apache.atlas.hook.AtlasHook;
import org.apache.atlas.hook.AtlasHookException;
import org.apache.atlas.notification.hook.HookNotification;
import org.apache.atlas.sqoop.model.SqoopDataTypes;
import org.apache.atlas.typesystem.Referenceable;
@@ -71,8 +72,7 @@ public class SqoopHook extends SqoopJobDataPublisher {
org.apache.hadoop.conf.Configuration.addDefaultResource("sqoop-site.xml");
}
public Referenceable createHiveDatabaseInstance(String clusterName, String dbName)
throws Exception {
public Referenceable createHiveDatabaseInstance(String clusterName, String dbName) {
Referenceable dbRef = new Referenceable(HiveDataTypes.HIVE_DB.getName());
dbRef.set(AtlasConstants.CLUSTER_NAME_ATTRIBUTE, clusterName);
dbRef.set(AtlasClient.NAME, dbName);
@@ -82,14 +82,14 @@ public class SqoopHook extends SqoopJobDataPublisher {
}
public Referenceable createHiveTableInstance(String clusterName, Referenceable dbRef,
String tableName, String dbName) throws Exception {
String tableName, String dbName) {
Referenceable tableRef = new Referenceable(HiveDataTypes.HIVE_TABLE.getName());
tableRef.set(AtlasClient.REFERENCEABLE_ATTRIBUTE_NAME,
HiveMetaStoreBridge.getTableQualifiedName(clusterName, dbName, tableName));
tableRef.set(AtlasClient.NAME, tableName.toLowerCase());
tableRef.set(HiveMetaStoreBridge.DB, dbRef);
return tableRef;
}
private Referenceable createDBStoreInstance(SqoopJobDataPublisher.Data data)
throws ImportException {
@@ -173,19 +173,24 @@ public class SqoopHook extends SqoopJobDataPublisher {
}
@Override
public void publish(SqoopJobDataPublisher.Data data) throws Exception {
public void publish(SqoopJobDataPublisher.Data data) throws AtlasHookException {
try {
Configuration atlasProperties = ApplicationProperties.get();
String clusterName = atlasProperties.getString(ATLAS_CLUSTER_NAME, DEFAULT_CLUSTER_NAME);
Referenceable dbStoreRef = createDBStoreInstance(data);
Referenceable dbRef = createHiveDatabaseInstance(clusterName, data.getHiveDB());
Referenceable hiveTableRef = createHiveTableInstance(clusterName, dbRef,
data.getHiveTable(), data.getHiveDB());
Referenceable procRef = createSqoopProcessInstance(dbStoreRef, hiveTableRef, data, clusterName);
int maxRetries = atlasProperties.getInt(HOOK_NUM_RETRIES, 3);
HookNotification.HookNotificationMessage message =
new HookNotification.EntityCreateRequest(AtlasHook.getUser(), dbStoreRef, dbRef, hiveTableRef, procRef);
AtlasHook.notifyEntities(Arrays.asList(message), maxRetries);
}
catch(Exception e) {
throw new AtlasHookException("SqoopHook.publish() failed.", e);
}
}
}
@@ -34,6 +34,7 @@ import java.util.Map;
public class StormAtlasHook implements ISubmitterHook {
private static final Logger LOG = LoggerFactory.getLogger(StormAtlasHook.class);
private static final String ATLAS_PLUGIN_TYPE = "storm";
private static final String ATLAS_STORM_HOOK_IMPL_CLASSNAME = "org.apache.atlas.storm.hook.StormAtlasHook";
......
@@ -42,7 +42,7 @@ public final class StormTopologyUtil {
private StormTopologyUtil() {
}
public static Set<String> getTerminalUserBoltNames(StormTopology topology) throws Exception {
public static Set<String> getTerminalUserBoltNames(StormTopology topology) {
Set<String> terminalBolts = new HashSet<>();
Set<String> inputs = new HashSet<>();
for (Map.Entry<String, Bolt> entry : topology.get_bolts().entrySet()) {
......
@@ -67,9 +67,10 @@ public enum AtlasErrorCode {
INSTANCE_LINEAGE_INVALID_PARAMS(400, "ATLAS-400-00-026", "Invalid lineage query parameters passed {0}: {1}"),
ATTRIBUTE_UPDATE_NOT_SUPPORTED(400, "ATLAS-400-00-027", "{0}.{1} : attribute update not supported"),
INVALID_VALUE(400, "ATLAS-400-00-028", "invalid value: {0}"),
BAD_REQUEST(400, "ATLAS-400-00-020", "{0}"),
BAD_REQUEST(400, "ATLAS-400-00-029", "{0}"),
PARAMETER_PARSING_FAILED(400, "ATLAS-400-00-02A", "Parameter parsing failed at: {0}"),
// All Not found enums go here
TYPE_NAME_NOT_FOUND(404, "ATLAS-404-00-001", "Given typename {0} was invalid"),
TYPE_GUID_NOT_FOUND(404, "ATLAS-404-00-002", "Given type guid {0} was invalid"),
EMPTY_RESULTS(404, "ATLAS-404-00-004", "No result found for {0}"),
@@ -96,7 +97,15 @@ public enum AtlasErrorCode {
NOTIFICATION_FAILED(500, "ATLAS-500-00-007", "Failed to notify for change {0}"),
GREMLIN_GROOVY_SCRIPT_ENGINE_FAILED(500, "ATLAS-500-00-008", "scriptEngine cannot be initialized for: {0}"),
JSON_ERROR_OBJECT_MAPPER_NULL_RETURNED(500, "ATLAS-500-00-009", "ObjectMapper.readValue returned NULL for class: {0}"),
GREMLIN_SCRIPT_EXECUTION_FAILED(500, "ATLAS-500-00-00A", "Script execution failed for: {0}");
GREMLIN_SCRIPT_EXECUTION_FAILED(500, "ATLAS-500-00-00A", "Script execution failed for: {0}"),
CURATOR_FRAMEWORK_UPDATE(500, "ATLAS-500-00-00B", "ActiveInstanceState.update resulted in exception."),
QUICK_START(500, "ATLAS-500-00-00C", "Failed to run QuickStart: {0}"),
EMBEDDED_SERVER_START(500, "ATLAS-500-00-00D", "EmbeddedServer.Start: failed!"),
STORM_TOPOLOGY_UTIL(500, "ATLAS-500-00-00E", "StormTopologyUtil: {0}"),
SQOOP_HOOK(500, "ATLAS-500-00-00F", "SqoopHook: {0}"),
HIVE_HOOK(500, "ATLAS-500-00-010", "HiveHook: {0}"),
HIVE_HOOK_METASTORE_BRIDGE(500, "ATLAS-500-00-011", "HiveHookMetaStoreBridge: {0}");
private String errorCode;
private String errorMessage;
......
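The new entries continue the existing hexadecimal code sequence (ATLAS-500-00-00A through -00F, then -010 and -011), each carrying an HTTP status, a unique code, and a positional message template. Assuming the {0}-style placeholders are filled positionally, as the existing templates suggest, formatting works roughly like this stand-in sketch (not Atlas code):

    import java.text.MessageFormat;

    public class ErrorCodeFormatExample {
        // Stand-in mirroring the enum shape above: HTTP status, unique code, message template.
        enum Code {
            SQOOP_HOOK(500, "ATLAS-500-00-00F", "SqoopHook: {0}");

            final int status;
            final String errorCode;
            final String template;

            Code(int status, String errorCode, String template) {
                this.status = status;
                this.errorCode = errorCode;
                this.template = template;
            }

            // Assumption: parameters fill the {0}-style placeholders positionally.
            String format(Object... params) {
                return MessageFormat.format(template, params);
            }
        }

        public static void main(String[] args) {
            System.out.println(Code.SQOOP_HOOK.errorCode + ": " + Code.SQOOP_HOOK.format("publish() failed"));
            // prints: ATLAS-500-00-00F: SqoopHook: publish() failed
        }
    }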
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.atlas.hook;
/**
* Exception class for Atlas Hooks.
*/
public class AtlasHookException extends Exception {
public AtlasHookException() {
}
public AtlasHookException(String message) {
super(message);
}
public AtlasHookException(String message, Throwable cause) {
super(message, cause);
}
public AtlasHookException(Throwable cause) {
super(cause);
}
public AtlasHookException(String message, Throwable cause, boolean enableSuppression, boolean writableStackTrace) {
super(message, cause, enableSuppression, writableStackTrace);
}
}
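Because every constructor delegates to java.lang.Exception, a wrapped failure stays reachable through the standard cause chain. A small usage sketch (the caller here is hypothetical, not part of the commit):

    import org.apache.atlas.hook.AtlasHookException;

    public class AtlasHookExceptionDemo {
        static void publish() throws AtlasHookException {
            try {
                throw new IllegalStateException("notification endpoint unreachable");
            } catch (Exception e) {
                throw new AtlasHookException("publish() failed.", e);
            }
        }

        public static void main(String[] args) {
            try {
                publish();
            } catch (AtlasHookException e) {
                // The original exception is still available as the cause.
                System.err.println(e.getMessage() + " caused by: " + e.getCause());
            }
        }
    }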
@@ -24,7 +24,9 @@ import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableSet;
import org.apache.atlas.ApplicationProperties;
import org.apache.atlas.AtlasClient;
import org.apache.atlas.AtlasErrorCode;
import org.apache.atlas.AtlasException;
import org.apache.atlas.exception.AtlasBaseException;
import org.apache.atlas.typesystem.Referenceable;
import org.apache.atlas.typesystem.TypesDef;
import org.apache.atlas.typesystem.json.InstanceSerialization;
@@ -323,39 +325,48 @@ public class QuickStart {
}
Id database(String name, String description, String owner, String locationUri, String... traitNames)
throws Exception {
throws AtlasBaseException {
try {
Referenceable referenceable = new Referenceable(DATABASE_TYPE, traitNames);
referenceable.set("name", name);
referenceable.set("description", description);
referenceable.set("owner", owner);
referenceable.set("locationUri", locationUri);
referenceable.set("createTime", System.currentTimeMillis());
return createInstance(referenceable);
} catch (Exception e) {
throw new AtlasBaseException(AtlasErrorCode.QUICK_START, e, String.format("%s database entity creation failed", name));
}
}
Referenceable rawStorageDescriptor(String location, String inputFormat, String outputFormat, boolean compressed)
throws Exception {
Referenceable rawStorageDescriptor(String location, String inputFormat, String outputFormat, boolean compressed) {
Referenceable referenceable = new Referenceable(STORAGE_DESC_TYPE);
referenceable.set("location", location);
referenceable.set("inputFormat", inputFormat);
referenceable.set("outputFormat", outputFormat);
referenceable.set("compressed", compressed);
return referenceable;
}
Referenceable rawColumn(String name, String dataType, String comment, String... traitNames) throws Exception {
Referenceable rawColumn(String name, String dataType, String comment, String... traitNames) throws AtlasBaseException {
try {
Referenceable referenceable = new Referenceable(COLUMN_TYPE, traitNames);
referenceable.set("name", name);
referenceable.set("dataType", dataType);
referenceable.set("comment", comment);
return referenceable;
}
catch(Exception e) {
throw new AtlasBaseException(AtlasErrorCode.QUICK_START, e, String.format("%s, column entity creation failed", name));
}
}
Id table(String name, String description, Id dbId, Referenceable sd, String owner, String tableType,
List<Referenceable> columns, String... traitNames) throws Exception {
List<Referenceable> columns, String... traitNames) throws AtlasBaseException {
try {
Referenceable referenceable = new Referenceable(TABLE_TYPE, traitNames);
referenceable.set("name", name);
referenceable.set(AtlasClient.REFERENCEABLE_ATTRIBUTE_NAME, name);
@@ -370,46 +381,61 @@ public class QuickStart {
referenceable.set("columns", columns);
return createInstance(referenceable);
} catch (Exception e) {
throw new AtlasBaseException(AtlasErrorCode.QUICK_START, e, String.format("%s table entity creation failed", name));
}
}
Id loadProcess(String name, String description, String user, List<Id> inputTables, List<Id> outputTables,
String queryText, String queryPlan, String queryId, String queryGraph, String... traitNames)
throws Exception {
String queryText, String queryPlan, String queryId, String queryGraph, String... traitNames)
throws AtlasBaseException {
try {
Referenceable referenceable = new Referenceable(LOAD_PROCESS_TYPE, traitNames);
// super type attributes
referenceable.set(AtlasClient.NAME, name);
referenceable.set(AtlasClient.REFERENCEABLE_ATTRIBUTE_NAME, name);
referenceable.set("description", description);
referenceable.set(INPUTS_ATTRIBUTE, inputTables);
referenceable.set(OUTPUTS_ATTRIBUTE, outputTables);
referenceable.set("user", user);
referenceable.set("startTime", System.currentTimeMillis());
referenceable.set("endTime", System.currentTimeMillis() + 10000);
referenceable.set("queryText", queryText);
referenceable.set("queryPlan", queryPlan);
referenceable.set("queryId", queryId);
referenceable.set("queryGraph", queryGraph);
return createInstance(referenceable);
} catch (Exception e) {
throw new AtlasBaseException(AtlasErrorCode.QUICK_START, e, String.format("%s process entity creation failed", name));
}
}
Id view(String name, Id dbId, List<Id> inputTables, String... traitNames) throws Exception {
Id view(String name, Id dbId, List<Id> inputTables, String... traitNames) throws AtlasBaseException {
try {
Referenceable referenceable = new Referenceable(VIEW_TYPE, traitNames);
referenceable.set("name", name);
referenceable.set(AtlasClient.REFERENCEABLE_ATTRIBUTE_NAME, name);
referenceable.set("db", dbId);
referenceable.set(INPUT_TABLES_ATTRIBUTE, inputTables);
return createInstance(referenceable);
} catch (Exception e) {
throw new AtlasBaseException(AtlasErrorCode.QUICK_START, e, String.format("%s Id creation", name));
}
}
private void verifyTypesCreated() throws Exception {
private void verifyTypesCreated() throws AtlasBaseException {
try {
List<String> types = metadataServiceClient.listTypes();
for (String type : TYPES) {
assert types.contains(type);
}
} catch (Exception e) {
throw new AtlasBaseException(AtlasErrorCode.QUICK_START, e, "view creation failed.");
}
}
@@ -461,14 +487,18 @@ public class QuickStart {
"from DataSet", "from Process",};
}
private void search() throws Exception {
private void search() throws AtlasBaseException {
try {
for (String dslQuery : getDSLQueries()) {
JSONArray results = metadataServiceClient.search(dslQuery, 10, 0);
if (results != null) {
System.out.println("query [" + dslQuery + "] returned [" + results.length() + "] rows");
} else {
System.out.println("query [" + dslQuery + "] failed, results:" + results);
}
}
} catch (Exception e) {
throw new AtlasBaseException(AtlasErrorCode.QUICK_START, e, "one or more dsl queries failed");
}
}
}
@@ -18,6 +18,8 @@
package org.apache.atlas.web.params;
import org.apache.atlas.exception.AtlasBaseException;
import javax.ws.rs.WebApplicationException;
import javax.ws.rs.core.MediaType;
import javax.ws.rs.core.Response;
@@ -100,7 +102,7 @@ public abstract class AbstractParam<T> {
* @return {@code input}, parsed as an instance of {@code T}
* @throws AtlasBaseException if there is an error parsing the input
*/
protected abstract T parse(String input) throws Exception;
protected abstract T parse(String input) throws AtlasBaseException;
/**
* Returns the underlying value.
......
@@ -18,6 +18,9 @@
package org.apache.atlas.web.params;
import org.apache.atlas.AtlasErrorCode;
import org.apache.atlas.exception.AtlasBaseException;
/**
* A parameter encapsulating boolean values. If the query parameter value is {@code "true"},
* regardless of case, the returned value is {@link Boolean#TRUE}. If the query parameter value is
@@ -36,13 +39,13 @@ public class BooleanParam extends AbstractParam<Boolean> {
}
@Override
protected Boolean parse(String input) throws Exception {
protected Boolean parse(String input) throws AtlasBaseException {
if ("true".equalsIgnoreCase(input)) {
return Boolean.TRUE;
}
if ("false".equalsIgnoreCase(input)) {
return Boolean.FALSE;
}
throw new Exception();
throw new AtlasBaseException(AtlasErrorCode.PARAMETER_PARSING_FAILED, "Boolean.parse: input=" + input);
}
}
\ No newline at end of file
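For context, BooleanParam follows the Dropwizard-style AbstractParam contract implied by the imports above. Assuming that contract (the constructor invokes parse(input) and get() returns the parsed value), typical usage looks like this sketch:

    import org.apache.atlas.web.params.BooleanParam;

    public class BooleanParamDemo {
        public static void main(String[] args) {
            // Assumption: the constructor runs parse(input) and get() returns the result.
            BooleanParam failOnError = new BooleanParam("TRUE"); // matching is case-insensitive
            System.out.println(failOnError.get());               // prints: true
        }
    }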
@@ -18,6 +18,7 @@
package org.apache.atlas.web.params;
import org.apache.atlas.exception.AtlasBaseException;
import org.joda.time.DateTime;
import org.joda.time.DateTimeZone;
@@ -32,7 +33,7 @@ public class DateTimeParam extends AbstractParam<DateTime> {
}
@Override
protected DateTime parse(String input) throws Exception {
protected DateTime parse(String input) throws AtlasBaseException {
return new DateTime(input, DateTimeZone.UTC);
}
}
\ No newline at end of file
@@ -18,15 +18,11 @@
package org.apache.atlas.web.security;
import java.util.ArrayList;
import java.util.List;
import java.util.Properties;
import javax.annotation.PostConstruct;
import org.apache.atlas.ApplicationProperties;
import org.apache.atlas.web.model.User;
import org.apache.commons.configuration.Configuration;
import org.apache.commons.configuration.ConfigurationConverter;
import org.apache.commons.lang.StringUtils;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.ldap.core.support.LdapContextSource;
@@ -41,7 +37,10 @@ import org.springframework.security.ldap.authentication.LdapAuthenticationProvider;
import org.springframework.security.ldap.search.FilterBasedLdapUserSearch;
import org.springframework.security.ldap.userdetails.DefaultLdapAuthoritiesPopulator;
import org.springframework.stereotype.Component;
import org.apache.commons.lang.StringUtils;
import javax.annotation.PostConstruct;
import java.util.List;
import java.util.Properties;
@Component
public class AtlasLdapAuthenticationProvider extends
@@ -87,7 +86,7 @@ public class AtlasLdapAuthenticationProvider extends
}
private Authentication getLdapBindAuthentication(
Authentication authentication) throws Exception {
Authentication authentication) {
try {
if (isDebugEnabled) {
LOG.debug("==> AtlasLdapAuthenticationProvider getLdapBindAuthentication");
......
@@ -20,7 +20,9 @@ package org.apache.atlas.web.service;
import com.google.inject.Inject;
import org.apache.atlas.ApplicationProperties;
import org.apache.atlas.AtlasErrorCode;
import org.apache.atlas.AtlasException;
import org.apache.atlas.exception.AtlasBaseException;
import org.apache.atlas.ha.HAConfiguration;
import org.apache.commons.configuration.Configuration;
import org.apache.curator.framework.CuratorFramework;
@@ -81,23 +83,27 @@ public class ActiveInstanceState {
* @throws AtlasBaseException
* @param serverId ID of this server instance
*/
public void update(String serverId) throws Exception {
public void update(String serverId) throws AtlasBaseException {
try {
CuratorFramework client = curatorFactory.clientInstance();
HAConfiguration.ZookeeperProperties zookeeperProperties =
HAConfiguration.getZookeeperProperties(configuration);
String atlasServerAddress = HAConfiguration.getBoundAddressForId(configuration, serverId);
List<ACL> acls = Arrays.asList(
new ACL[]{AtlasZookeeperSecurityProperties.parseAcl(zookeeperProperties.getAcl(),
ZooDefs.Ids.OPEN_ACL_UNSAFE.get(0))});
Stat serverInfo = client.checkExists().forPath(getZnodePath(zookeeperProperties));
if (serverInfo == null) {
client.create().
withMode(CreateMode.EPHEMERAL).
withACL(acls).
forPath(getZnodePath(zookeeperProperties));
}
client.setData().forPath(getZnodePath(zookeeperProperties),
atlasServerAddress.getBytes(Charset.forName("UTF-8")));
} catch (Exception e) {
throw new AtlasBaseException(AtlasErrorCode.CURATOR_FRAMEWORK_UPDATE, e, "forPath: getZnodePath");
}
}
private String getZnodePath(HAConfiguration.ZookeeperProperties zookeeperProperties) {
......
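The update path above is the standard Curator check-create-set idiom: probe for the znode with checkExists(), create it as an EPHEMERAL node if absent, then write the server address with setData(). A standalone sketch of the same idiom (the connect string and znode path are illustrative):

    import org.apache.curator.framework.CuratorFramework;
    import org.apache.curator.framework.CuratorFrameworkFactory;
    import org.apache.curator.retry.ExponentialBackoffRetry;
    import org.apache.zookeeper.CreateMode;
    import org.apache.zookeeper.data.Stat;

    import java.nio.charset.StandardCharsets;

    public class EnsureZnodeExample {
        public static void main(String[] args) throws Exception {
            // Illustrative connect string; any reachable ZooKeeper ensemble works.
            CuratorFramework client = CuratorFrameworkFactory.newClient(
                    "localhost:2181", new ExponentialBackoffRetry(1000, 3));
            client.start();

            String path = "/demo/active_server_info"; // illustrative znode path
            byte[] address = "host1:21000".getBytes(StandardCharsets.UTF_8);

            // Same check-create-set sequence as ActiveInstanceState.update().
            Stat stat = client.checkExists().forPath(path);
            if (stat == null) {
                client.create()
                        .creatingParentsIfNeeded()
                        .withMode(CreateMode.EPHEMERAL)
                        .forPath(path);
            }
            client.setData().forPath(path, address);

            client.close();
        }
    }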
@@ -19,6 +19,8 @@
package org.apache.atlas.web.service;
import org.apache.atlas.AtlasConfiguration;
import org.apache.atlas.AtlasErrorCode;
import org.apache.atlas.exception.AtlasBaseException;
import org.eclipse.jetty.server.Connector;
import org.eclipse.jetty.server.HttpConfiguration;
import org.eclipse.jetty.server.HttpConnectionFactory;
@@ -88,9 +90,13 @@ public class EmbeddedServer {
return connector;
}
public void start() throws Exception {
public void start() throws AtlasBaseException {
try {
server.start();
server.join();
} catch(Exception e) {
throw new AtlasBaseException(AtlasErrorCode.EMBEDDED_SERVER_START, e);
}
}
public void stop() {
......
@@ -119,11 +119,15 @@ public class SetupSteps {
}
private void clearSetupInProgress(HAConfiguration.ZookeeperProperties zookeeperProperties)
throws Exception {
throws SetupException {
CuratorFramework client = curatorFactory.clientInstance();
String path = lockPath(zookeeperProperties);
try {
client.delete().forPath(path);
LOG.info("Deleted lock path after completing setup {}", path);
} catch (Exception e) {
throw new SetupException(String.format("SetupSteps.clearSetupInProgress: Failed to get Zookeeper node patH: %s", path), e);
}
}
private String lockPath(HAConfiguration.ZookeeperProperties zookeeperProperties) {
......