Commit 1620284e by apoorvnaik Committed by Madhan Neethiraj

ATLAS-1304: Redundant code removal and code simplification

parent f6e27b59
......@@ -20,8 +20,6 @@ package org.apache.atlas.falcon.event;
import org.apache.falcon.entity.v0.Entity;
import java.util.Date;
/**
* Falcon event to interface with Atlas Service.
*/
......
......@@ -178,7 +178,7 @@ public class FalconHookIT {
String inputId = ((List<Id>) processEntity.get("inputs")).get(0).getId()._getId();
Referenceable pathEntity = atlasClient.getEntity(inputId);
assertEquals(pathEntity.getTypeName(), HiveMetaStoreBridge.HDFS_PATH.toString());
assertEquals(pathEntity.getTypeName(), HiveMetaStoreBridge.HDFS_PATH);
List<Location> locations = FeedHelper.getLocations(feedCluster, feed);
Location dataLocation = FileSystemStorage.getLocation(locations, LocationType.DATA);
......
......@@ -575,7 +575,7 @@ public class HiveMetaStoreBridge {
}
public Referenceable fillHDFSDataSet(String pathUri) {
Referenceable ref = new Referenceable(HDFS_PATH.toString());
Referenceable ref = new Referenceable(HDFS_PATH);
ref.set("path", pathUri);
Path path = new Path(pathUri);
ref.set(AtlasClient.NAME, Path.getPathWithoutSchemeAndAuthority(path).toString().toLowerCase());
......
......@@ -37,12 +37,7 @@ import org.apache.hadoop.hive.metastore.TableType;
import org.apache.hadoop.hive.metastore.api.Database;
import org.apache.hadoop.hive.metastore.api.FieldSchema;
import org.apache.hadoop.hive.ql.hooks.*;
import org.apache.hadoop.hive.ql.hooks.Entity;
import org.apache.hadoop.hive.ql.hooks.Entity.Type;
import org.apache.hadoop.hive.ql.hooks.ExecuteWithHookContext;
import org.apache.hadoop.hive.ql.hooks.HookContext;
import org.apache.hadoop.hive.ql.hooks.ReadEntity;
import org.apache.hadoop.hive.ql.hooks.WriteEntity;
import org.apache.hadoop.hive.ql.metadata.HiveException;
import org.apache.hadoop.hive.ql.metadata.Partition;
import org.apache.hadoop.hive.ql.metadata.Table;
......@@ -360,16 +355,16 @@ public class HiveHook extends AtlasHook implements ExecuteWithHookContext {
String changedColStringOldName = oldColList.get(0).getName();
String changedColStringNewName = changedColStringOldName;
for (int i = 0; i < oldColList.size(); i++) {
if (!newColHashMap.containsKey(oldColList.get(i))) {
changedColStringOldName = oldColList.get(i).getName();
for (FieldSchema oldCol : oldColList) {
if (!newColHashMap.containsKey(oldCol)) {
changedColStringOldName = oldCol.getName();
break;
}
}
for (int i = 0; i < newColList.size(); i++) {
if (!oldColHashMap.containsKey(newColList.get(i))) {
changedColStringNewName = newColList.get(i).getName();
for (FieldSchema newCol : newColList) {
if (!oldColHashMap.containsKey(newCol)) {
changedColStringNewName = newCol.getName();
break;
}
}
......@@ -395,7 +390,7 @@ public class HiveHook extends AtlasHook implements ExecuteWithHookContext {
if (writeEntity.getType() == Type.TABLE) {
Table newTable = writeEntity.getTable();
createOrUpdateEntities(dgiBridge, event, writeEntity, true, oldTable);
final String newQualifiedTableName = dgiBridge.getTableQualifiedName(dgiBridge.getClusterName(),
final String newQualifiedTableName = HiveMetaStoreBridge.getTableQualifiedName(dgiBridge.getClusterName(),
newTable);
String oldColumnQFName = HiveMetaStoreBridge.getColumnQualifiedName(newQualifiedTableName, oldColName);
String newColumnQFName = HiveMetaStoreBridge.getColumnQualifiedName(newQualifiedTableName, newColName);
......@@ -424,9 +419,9 @@ public class HiveHook extends AtlasHook implements ExecuteWithHookContext {
Table newTable = writeEntity.getTable();
//Hive sends both the old and the new table names in the outputs, which is odd; the check below skips that case
if (!newTable.getDbName().equals(oldTable.getDbName()) || !newTable.getTableName().equals(oldTable.getTableName())) {
final String oldQualifiedName = dgiBridge.getTableQualifiedName(dgiBridge.getClusterName(),
final String oldQualifiedName = HiveMetaStoreBridge.getTableQualifiedName(dgiBridge.getClusterName(),
oldTable);
final String newQualifiedName = dgiBridge.getTableQualifiedName(dgiBridge.getClusterName(),
final String newQualifiedName = HiveMetaStoreBridge.getTableQualifiedName(dgiBridge.getClusterName(),
newTable);
//Create/update old table entity - create entity with oldQFName and old tableName if it doesn't exist. If exists, will update
......@@ -624,7 +619,7 @@ public class HiveHook extends AtlasHook implements ExecuteWithHookContext {
// filter out select queries which do not modify data
if (!isSelectQuery) {
SortedSet<ReadEntity> sortedHiveInputs = new TreeSet<>(entityComparator);;
SortedSet<ReadEntity> sortedHiveInputs = new TreeSet<>(entityComparator);
if ( event.getInputs() != null) {
sortedHiveInputs.addAll(event.getInputs());
}
......@@ -671,7 +666,7 @@ public class HiveHook extends AtlasHook implements ExecuteWithHookContext {
private <T extends Entity> void processHiveEntity(HiveMetaStoreBridge dgiBridge, HiveEventContext event, T entity, Set<String> dataSetsProcessed,
SortedMap<T, Referenceable> dataSets, Set<Referenceable> entities) throws Exception {
if (entity.getType() == Type.TABLE || entity.getType() == Type.PARTITION) {
final String tblQFName = dgiBridge.getTableQualifiedName(dgiBridge.getClusterName(), entity.getTable());
final String tblQFName = HiveMetaStoreBridge.getTableQualifiedName(dgiBridge.getClusterName(), entity.getTable());
if (!dataSetsProcessed.contains(tblQFName)) {
LinkedHashMap<Type, Referenceable> result = createOrUpdateEntities(dgiBridge, event, entity, false);
dataSets.put(entity, result.get(Type.TABLE));
......@@ -754,14 +749,11 @@ public class HiveHook extends AtlasHook implements ExecuteWithHookContext {
}
private static boolean isCreateOp(HiveEventContext hiveEvent) {
if (HiveOperation.CREATETABLE.equals(hiveEvent.getOperation())
return HiveOperation.CREATETABLE.equals(hiveEvent.getOperation())
|| HiveOperation.CREATEVIEW.equals(hiveEvent.getOperation())
|| HiveOperation.ALTERVIEW_AS.equals(hiveEvent.getOperation())
|| HiveOperation.ALTERTABLE_LOCATION.equals(hiveEvent.getOperation())
|| HiveOperation.CREATETABLE_AS_SELECT.equals(hiveEvent.getOperation())) {
return true;
}
return false;
|| HiveOperation.CREATETABLE_AS_SELECT.equals(hiveEvent.getOperation());
}
private Referenceable getProcessReferenceable(HiveMetaStoreBridge dgiBridge, HiveEventContext hiveEvent,
......@@ -973,8 +965,8 @@ public class HiveHook extends AtlasHook implements ExecuteWithHookContext {
}
private static boolean addQueryType(HiveOperation op, WriteEntity entity) {
if (((WriteEntity) entity).getWriteType() != null && HiveOperation.QUERY.equals(op)) {
switch (((WriteEntity) entity).getWriteType()) {
if (entity.getWriteType() != null && HiveOperation.QUERY.equals(op)) {
switch (entity.getWriteType()) {
case INSERT:
case INSERT_OVERWRITE:
case UPDATE:
......
......@@ -17,8 +17,6 @@
*/
package org.apache.atlas.hive.rewrite;
import org.apache.hadoop.hive.ql.parse.ParseException;
public class RewriteException extends Exception {
public RewriteException(final String message, final Exception exception) {
super(message, exception);
......
......@@ -215,8 +215,8 @@ public class HiveITBase {
protected void validateHDFSPaths(Referenceable processReference, String attributeName, String... testPaths) throws Exception {
List<Id> hdfsPathRefs = (List<Id>) processReference.get(attributeName);
for (int i = 0; i < testPaths.length; i++) {
final Path path = new Path(testPaths[i]);
for (String testPath : testPaths) {
final Path path = new Path(testPath);
final String testPathNormed = lower(path.toString());
String hdfsPathId = assertHDFSPathIsRegistered(testPathNormed);
Assert.assertEquals(hdfsPathRefs.get(0)._getId(), hdfsPathId);
......
......@@ -208,7 +208,7 @@ public class HiveMetaStoreBridgeTest {
when(atlasClient.getEntity("82e06b34-9151-4023-aa9d-b82103a50e77")).thenReturn(createTableReference());
String processQualifiedName = HiveMetaStoreBridge.getTableQualifiedName(CLUSTER_NAME, hiveTables.get(1));
when(atlasClient.getEntity(HiveDataTypes.HIVE_PROCESS.getName(), AtlasClient.REFERENCEABLE_ATTRIBUTE_NAME,
processQualifiedName)).thenReturn(getEntityReference(HiveDataTypes.HIVE_PROCESS.getName(), "82e06b34-9151-4023-aa9d-b82103a50e77"));;
processQualifiedName)).thenReturn(getEntityReference(HiveDataTypes.HIVE_PROCESS.getName(), "82e06b34-9151-4023-aa9d-b82103a50e77"));
HiveMetaStoreBridge bridge = new HiveMetaStoreBridge(CLUSTER_NAME, hiveClient, atlasClient);
try {
......
......@@ -533,7 +533,7 @@ public class HiveHookIT extends HiveITBase {
Referenceable processRef1 = validateProcess(event, expectedInputs, outputs);
//Test sorting of tbl names
SortedSet<String> sortedTblNames = new TreeSet<String>();
SortedSet<String> sortedTblNames = new TreeSet<>();
sortedTblNames.add(inputTable1Name.toLowerCase());
sortedTblNames.add(inputTable2Name.toLowerCase());
......@@ -584,13 +584,13 @@ public class HiveHookIT extends HiveITBase {
Set<ReadEntity> inputs = getInputs(tableName, Entity.Type.TABLE);
final Set<WriteEntity> outputs = getOutputs(pFile1, Entity.Type.DFS_DIR);
((WriteEntity)outputs.iterator().next()).setWriteType(WriteEntity.WriteType.PATH_WRITE);
outputs.iterator().next().setWriteType(WriteEntity.WriteType.PATH_WRITE);
final HiveHook.HiveEventContext hiveEventContext = constructEvent(query, HiveOperation.QUERY, inputs, outputs);
Referenceable processReference = validateProcess(hiveEventContext);
validateHDFSPaths(processReference, OUTPUTS, pFile1);
String tableId = assertTableIsRegistered(DEFAULT_DB, tableName);
assertTableIsRegistered(DEFAULT_DB, tableName);
validateInputTables(processReference, inputs);
//Rerun same query with same HDFS path
......@@ -630,7 +630,7 @@ public class HiveHookIT extends HiveITBase {
Set<ReadEntity> inputs = getInputs(tableName, Entity.Type.TABLE);
final Set<WriteEntity> outputs = getOutputs(pFile1, Entity.Type.DFS_DIR);
((WriteEntity)outputs.iterator().next()).setWriteType(WriteEntity.WriteType.PATH_WRITE);
outputs.iterator().next().setWriteType(WriteEntity.WriteType.PATH_WRITE);
final Set<ReadEntity> partitionIps = new LinkedHashSet<>(inputs);
partitionIps.addAll(getInputs(DEFAULT_DB + "@" + tableName + "@dt='" + PART_FILE + "'", Entity.Type.PARTITION));
......@@ -646,7 +646,7 @@ public class HiveHookIT extends HiveITBase {
runCommand(query);
final Set<WriteEntity> pFile2Outputs = getOutputs(pFile2, Entity.Type.DFS_DIR);
((WriteEntity)pFile2Outputs.iterator().next()).setWriteType(WriteEntity.WriteType.PATH_WRITE);
pFile2Outputs.iterator().next().setWriteType(WriteEntity.WriteType.PATH_WRITE);
//Now the process has 2 paths - one older with deleted reference to partition and another with the latest partition
Set<WriteEntity> p2Outputs = new LinkedHashSet<WriteEntity>() {{
addAll(pFile2Outputs);
......@@ -676,7 +676,7 @@ public class HiveHookIT extends HiveITBase {
Set<ReadEntity> inputs = getInputs(tableName, Entity.Type.TABLE);
Set<WriteEntity> outputs = getOutputs(insertTableName, Entity.Type.TABLE);
outputs.iterator().next().setName(getQualifiedTblName(insertTableName + HiveMetaStoreBridge.TEMP_TABLE_PREFIX + SessionState.get().getSessionId()));
((WriteEntity)outputs.iterator().next()).setWriteType(WriteEntity.WriteType.INSERT);
outputs.iterator().next().setWriteType(WriteEntity.WriteType.INSERT);
validateProcess(constructEvent(query, HiveOperation.QUERY, inputs, outputs));
......@@ -696,7 +696,7 @@ public class HiveHookIT extends HiveITBase {
final Set<ReadEntity> inputs = getInputs(tableName, Entity.Type.TABLE);
final Set<WriteEntity> outputs = getOutputs(insertTableName, Entity.Type.TABLE);
((WriteEntity)outputs.iterator().next()).setWriteType(WriteEntity.WriteType.INSERT);
outputs.iterator().next().setWriteType(WriteEntity.WriteType.INSERT);
final Set<ReadEntity> partitionIps = new LinkedHashSet<ReadEntity>() {
{
......@@ -1673,7 +1673,7 @@ public class HiveHookIT extends HiveITBase {
private void verifyProperties(Struct referenceable, Map<String, String> expectedProps, boolean checkIfNotExists) {
Map<String, String> parameters = (Map<String, String>) referenceable.get(HiveMetaStoreBridge.PARAMETERS);
if (checkIfNotExists == false) {
if (!checkIfNotExists) {
//Check if properties exist
Assert.assertNotNull(parameters);
for (String propKey : expectedProps.keySet()) {
......@@ -1745,11 +1745,11 @@ public class HiveHookIT extends HiveITBase {
}
private String getDSTypeName(Entity entity) {
return Entity.Type.TABLE.equals(entity.getType()) ? HiveDataTypes.HIVE_TABLE.name() : HiveMetaStoreBridge.HDFS_PATH.toString();
return Entity.Type.TABLE.equals(entity.getType()) ? HiveDataTypes.HIVE_TABLE.name() : HiveMetaStoreBridge.HDFS_PATH;
}
private <T extends Entity> SortedMap<T, Referenceable> getSortedProcessDataSets(Set<T> inputTbls) {
SortedMap<T, Referenceable> inputs = new TreeMap<T, Referenceable>(entityComparator);
SortedMap<T, Referenceable> inputs = new TreeMap<>(entityComparator);
if (inputTbls != null) {
for (final T tbl : inputTbls) {
Referenceable inputTableRef = new Referenceable(getDSTypeName(tbl), new HashMap<String, Object>() {{
......
......@@ -18,6 +18,7 @@
package org.apache.atlas.storm.hook;
import org.apache.commons.lang.StringUtils;
import org.apache.storm.generated.Bolt;
import org.apache.storm.generated.GlobalStreamId;
import org.apache.storm.generated.Grouping;
......@@ -82,8 +83,7 @@ public final class StormTopologyUtil {
components.add(boltName);
components = removeSystemComponent ? removeSystemComponents(components)
: components;
if ((removeSystemComponent && !isSystemComponent(inputComponentId)) ||
!removeSystemComponent) {
if (!removeSystemComponent || !isSystemComponent(inputComponentId)) {
adjacencyMap.put(inputComponentId, components);
}
}
......@@ -132,7 +132,7 @@ public final class StormTopologyUtil {
Set<Object> objectsToSkip)
throws IllegalAccessException {
if (objectsToSkip == null) {
objectsToSkip = new HashSet<Object>();
objectsToSkip = new HashSet<>();
}
Map<String, String> output = new HashMap<>();
......@@ -175,9 +175,7 @@ public final class StormTopologyUtil {
String keyStr = getString(mapKey, false, objectsToSkip);
String valStr = getString(mapVal, false, objectsToSkip);
if ((valStr == null) || (valStr.isEmpty())) {
continue;
} else {
if (StringUtils.isNotEmpty(valStr)) {
output.put(String.format("%s.%s", key, keyStr), valStr);
}
}
......
......@@ -18,5 +18,5 @@
package org.apache.atlas.authorize;
public enum AtlasActionTypes {
READ, CREATE, UPDATE, DELETE;
READ, CREATE, UPDATE, DELETE
}
......@@ -25,18 +25,18 @@ public interface AtlasAuthorizer {
/**
* This method will load the policy file and would initialize the required data-structures.
*/
public void init();
void init();
/**
* This method is responsible to perform the actual authorization for every REST API call. It will check if
* user can perform action on resource.
*/
public boolean isAccessAllowed(AtlasAccessRequest request) throws AtlasAuthorizationException;
boolean isAccessAllowed(AtlasAccessRequest request) throws AtlasAuthorizationException;
/**
* This method is responsible to perform the cleanup and release activities. It must be called when you are done
* with the Authorization activity and once it's called a restart would be required. Try to invoke this while
* destroying the context.
*/
public void cleanUp();
void cleanUp();
}
......@@ -19,5 +19,5 @@
package org.apache.atlas.authorize;
public enum AtlasResourceTypes {
UNKNOWN, ENTITY, TYPE, OPERATION, TAXONOMY, TERM;
UNKNOWN, ENTITY, TYPE, OPERATION, TAXONOMY, TERM
}
......@@ -103,7 +103,7 @@ public class AtlasAuthorizationUtils {
* unprotected types are mapped with AtlasResourceTypes.UNKNOWN; access to these is allowed.
*/
public static Set<AtlasResourceTypes> getAtlasResourceType(String contextPath) {
Set<AtlasResourceTypes> resourceTypes = new HashSet<AtlasResourceTypes>();
Set<AtlasResourceTypes> resourceTypes = new HashSet<>();
if (isDebugEnabled) {
LOG.debug("==> getAtlasResourceType for " + contextPath);
}
......
......@@ -36,7 +36,7 @@ public class FileReaderUtil {
if (isDebugEnabled) {
LOG.debug("==> FileReaderUtil readFile");
}
List<String> list = new ArrayList<String>();
List<String> list = new ArrayList<>();
LOG.info("reading the file" + path);
List<String> fileLines = Files.readAllLines(Paths.get(path), Charset.forName("UTF-8"));
if (fileLines != null) {
......
......@@ -51,7 +51,7 @@ public class PolicyParser {
if (isDebugEnabled) {
LOG.debug("==> PolicyParser getListOfAutorities");
}
List<AtlasActionTypes> authorities = new ArrayList<AtlasActionTypes>();
List<AtlasActionTypes> authorities = new ArrayList<>();
for (int i = 0; i < auth.length(); i++) {
char access = auth.toLowerCase().charAt(i);
......@@ -86,7 +86,7 @@ public class PolicyParser {
if (isDebugEnabled) {
LOG.debug("==> PolicyParser parsePolicies");
}
List<PolicyDef> policyDefs = new ArrayList<PolicyDef>();
List<PolicyDef> policyDefs = new ArrayList<>();
for (String policy : policies) {
PolicyDef policyDef = parsePolicy(policy);
if (policyDef != null) {
......@@ -129,7 +129,7 @@ public class PolicyParser {
}
boolean isValidEntity = Pattern.matches("(.+:.+)+", entity);
boolean isEmpty = entity.isEmpty();
if (isValidEntity == false || isEmpty == true) {
if (!isValidEntity || isEmpty) {
if (isDebugEnabled) {
LOG.debug("group/user/resource not properly define in Policy");
LOG.debug("<== PolicyParser validateEntity");
......@@ -150,7 +150,7 @@ public class PolicyParser {
}
String[] users = usersDef.split(",");
String[] userAndRole = null;
Map<String, List<AtlasActionTypes>> usersMap = new HashMap<String, List<AtlasActionTypes>>();
Map<String, List<AtlasActionTypes>> usersMap = new HashMap<>();
if (validateEntity(usersDef)) {
for (String user : users) {
if (!Pattern.matches("(.+:.+)+", user)) {
......@@ -179,7 +179,7 @@ public class PolicyParser {
}
String[] groups = groupsDef.split("\\,");
String[] groupAndRole = null;
Map<String, List<AtlasActionTypes>> groupsMap = new HashMap<String, List<AtlasActionTypes>>();
Map<String, List<AtlasActionTypes>> groupsMap = new HashMap<>();
if (validateEntity(groupsDef.trim())) {
for (String group : groups) {
if (!Pattern.matches("(.+:.+)+", group)) {
......@@ -209,7 +209,7 @@ public class PolicyParser {
}
String[] resources = resourceDef.split(",");
String[] resourceTypeAndName = null;
Map<AtlasResourceTypes, List<String>> resourcesMap = new HashMap<AtlasResourceTypes, List<String>>();
Map<AtlasResourceTypes, List<String>> resourcesMap = new HashMap<>();
if (validateEntity(resourceDef)) {
for (String resource : resources) {
if (!Pattern.matches("(.+:.+)+", resource)) {
......@@ -238,7 +238,7 @@ public class PolicyParser {
List<String> resourceList = resourcesMap.get(resourceType);
if (resourceList == null) {
resourceList = new ArrayList<String>();
resourceList = new ArrayList<>();
}
resourceList.add(resourceTypeAndName[RESOURCE_NAME]);
resourcesMap.put(resourceType, resourceList);
......
......@@ -40,7 +40,7 @@ public class PolicyUtil {
+ " & " + principalType);
}
Map<String, Map<AtlasResourceTypes, List<String>>> userReadMap =
new HashMap<String, Map<AtlasResourceTypes, List<String>>>();
new HashMap<>();
// Iterate over the list of policies to create map
for (PolicyDef policyDef : policyDefList) {
......@@ -63,7 +63,7 @@ public class PolicyUtil {
if (isDebugEnabled) {
LOG.debug("Resource list not found for " + username + ", creating it");
}
userResourceList = new HashMap<AtlasResourceTypes, List<String>>();
userResourceList = new HashMap<>();
}
/*
* Iterate over resources from the current policy def and update the resource list for the current user
......@@ -77,7 +77,7 @@ public class PolicyUtil {
if (resourceList == null) {
// if the resource list was not added for this type then
// create and add all the resources in this policy
resourceList = new ArrayList<String>();
resourceList = new ArrayList<>();
resourceList.addAll(resourceTypeMap.getValue());
} else {
// if the resource list is present then merge both the
......
......@@ -45,7 +45,7 @@ import com.google.common.annotations.VisibleForTesting;
public final class SimpleAtlasAuthorizer implements AtlasAuthorizer {
public enum AtlasAccessorTypes {
USER, GROUP;
USER, GROUP
}
private static final Logger LOG = LoggerFactory.getLogger(SimpleAtlasAuthorizer.class);
......@@ -133,8 +133,8 @@ public final class SimpleAtlasAuthorizer implements AtlasAuthorizer {
+ "\nResource :: " + resource);
boolean isAccessAllowed = false;
boolean isUser = user == null ? false : true;
boolean isGroup = groups == null ? false : true;
boolean isUser = user != null;
boolean isGroup = groups != null;
if ((!isUser && !isGroup) || action == null || resource == null) {
if (isDebugEnabled) {
......@@ -149,26 +149,22 @@ public final class SimpleAtlasAuthorizer implements AtlasAuthorizer {
case READ:
isAccessAllowed = checkAccess(user, resourceTypes, resource, userReadMap);
isAccessAllowed =
isAccessAllowed == false ? checkAccessForGroups(groups, resourceTypes, resource, groupReadMap)
: isAccessAllowed;
isAccessAllowed || checkAccessForGroups(groups, resourceTypes, resource, groupReadMap);
break;
case CREATE:
isAccessAllowed = checkAccess(user, resourceTypes, resource, userWriteMap);
isAccessAllowed =
isAccessAllowed == false ? checkAccessForGroups(groups, resourceTypes, resource, groupWriteMap)
: isAccessAllowed;
isAccessAllowed || checkAccessForGroups(groups, resourceTypes, resource, groupWriteMap);
break;
case UPDATE:
isAccessAllowed = checkAccess(user, resourceTypes, resource, userUpdateMap);
isAccessAllowed =
isAccessAllowed == false
? checkAccessForGroups(groups, resourceTypes, resource, groupUpdateMap) : isAccessAllowed;
isAccessAllowed || checkAccessForGroups(groups, resourceTypes, resource, groupUpdateMap);
break;
case DELETE:
isAccessAllowed = checkAccess(user, resourceTypes, resource, userDeleteMap);
isAccessAllowed =
isAccessAllowed == false
? checkAccessForGroups(groups, resourceTypes, resource, groupDeleteMap) : isAccessAllowed;
isAccessAllowed || checkAccessForGroups(groups, resourceTypes, resource, groupDeleteMap);
break;
default:
if (isDebugEnabled) {
......@@ -249,7 +245,7 @@ public final class SimpleAtlasAuthorizer implements AtlasAuthorizer {
boolean optWildCard = true;
List<String> policyValues = new ArrayList<String>();
List<String> policyValues = new ArrayList<>();
if (policyResource != null) {
boolean isWildCardPresent = !optWildCard;
......@@ -302,8 +298,7 @@ public final class SimpleAtlasAuthorizer implements AtlasAuthorizer {
}
}
if (isMatch == false) {
if (!isMatch) {
if (isDebugEnabled) {
StringBuilder sb = new StringBuilder();
sb.append("[");
......@@ -327,8 +322,7 @@ public final class SimpleAtlasAuthorizer implements AtlasAuthorizer {
}
private boolean isAllValuesRequested(String resource) {
boolean result = StringUtils.isEmpty(resource) || WILDCARD_ASTERISK.equals(resource);
return result;
return StringUtils.isEmpty(resource) || WILDCARD_ASTERISK.equals(resource);
}
@Override
......
......@@ -33,42 +33,42 @@ public class PolicyParserTest {
@Test
public void testParsePoliciesWithAllProperties() {
List<String> policies = new ArrayList<String>();
List<String> policies = new ArrayList<>();
policies.add("hivePolicy;;usr1:r,usr2:rw;;grp1:rwu,grp2:u;;entity:*abc,operation:*xyz,type:PII");
/* Creating group data */
Map<String, List<AtlasActionTypes>> groupMap = new HashMap<String, List<AtlasActionTypes>>();
List<AtlasActionTypes> accessList1 = new ArrayList<AtlasActionTypes>();
Map<String, List<AtlasActionTypes>> groupMap = new HashMap<>();
List<AtlasActionTypes> accessList1 = new ArrayList<>();
accessList1.add(AtlasActionTypes.READ);
accessList1.add(AtlasActionTypes.CREATE);
accessList1.add(AtlasActionTypes.UPDATE);
groupMap.put("grp1", accessList1);
List<AtlasActionTypes> accessList2 = new ArrayList<AtlasActionTypes>();
List<AtlasActionTypes> accessList2 = new ArrayList<>();
accessList2.add(AtlasActionTypes.UPDATE);
groupMap.put("grp2", accessList2);
/* Creating user data */
Map<String, List<AtlasActionTypes>> usersMap = new HashMap<String, List<AtlasActionTypes>>();
List<AtlasActionTypes> usr1AccessList = new ArrayList<AtlasActionTypes>();
Map<String, List<AtlasActionTypes>> usersMap = new HashMap<>();
List<AtlasActionTypes> usr1AccessList = new ArrayList<>();
usr1AccessList.add(AtlasActionTypes.READ);
usersMap.put("usr1", usr1AccessList);
List<AtlasActionTypes> usr2AccessList = new ArrayList<AtlasActionTypes>();
List<AtlasActionTypes> usr2AccessList = new ArrayList<>();
usr2AccessList.add(AtlasActionTypes.READ);
usr2AccessList.add(AtlasActionTypes.CREATE);
usersMap.put("usr2", usr2AccessList);
/* Creating resources data */
Map<AtlasResourceTypes, List<String>> resourceMap = new HashMap<AtlasResourceTypes, List<String>>();
List<String> resource1List = new ArrayList<String>();
Map<AtlasResourceTypes, List<String>> resourceMap = new HashMap<>();
List<String> resource1List = new ArrayList<>();
resource1List.add("*abc");
resourceMap.put(AtlasResourceTypes.ENTITY, resource1List);
List<String> resource2List = new ArrayList<String>();
List<String> resource2List = new ArrayList<>();
resource2List.add("*xyz");
resourceMap.put(AtlasResourceTypes.OPERATION, resource2List);
List<String> resource3List = new ArrayList<String>();
List<String> resource3List = new ArrayList<>();
resource3List.add("PII");
resourceMap.put(AtlasResourceTypes.TYPE, resource3List);
......@@ -86,34 +86,34 @@ public class PolicyParserTest {
@Test
public void testParsePoliciesWithOutUserProperties() {
List<String> policies = new ArrayList<String>();
List<String> policies = new ArrayList<>();
policies.add("hivePolicy;;;;grp1:rwu,grp2:u;;entity:*abc,operation:*xyz,type:PII");
// Creating group data
Map<String, List<AtlasActionTypes>> groupMap = new HashMap<String, List<AtlasActionTypes>>();
List<AtlasActionTypes> accessList1 = new ArrayList<AtlasActionTypes>();
Map<String, List<AtlasActionTypes>> groupMap = new HashMap<>();
List<AtlasActionTypes> accessList1 = new ArrayList<>();
accessList1.add(AtlasActionTypes.READ);
accessList1.add(AtlasActionTypes.CREATE);
accessList1.add(AtlasActionTypes.UPDATE);
groupMap.put("grp1", accessList1);
List<AtlasActionTypes> accessList2 = new ArrayList<AtlasActionTypes>();
List<AtlasActionTypes> accessList2 = new ArrayList<>();
accessList2.add(AtlasActionTypes.UPDATE);
groupMap.put("grp2", accessList2);
// Creating user data
Map<String, List<AtlasActionTypes>> usersMap = new HashMap<String, List<AtlasActionTypes>>();
Map<String, List<AtlasActionTypes>> usersMap = new HashMap<>();
// Creating resources data
Map<AtlasResourceTypes, List<String>> resourceMap = new HashMap<AtlasResourceTypes, List<String>>();
List<String> resource1List = new ArrayList<String>();
Map<AtlasResourceTypes, List<String>> resourceMap = new HashMap<>();
List<String> resource1List = new ArrayList<>();
resource1List.add("*abc");
resourceMap.put(AtlasResourceTypes.ENTITY, resource1List);
List<String> resource2List = new ArrayList<String>();
List<String> resource2List = new ArrayList<>();
resource2List.add("*xyz");
resourceMap.put(AtlasResourceTypes.OPERATION, resource2List);
List<String> resource3List = new ArrayList<String>();
List<String> resource3List = new ArrayList<>();
resource3List.add("PII");
resourceMap.put(AtlasResourceTypes.TYPE, resource3List);
......@@ -131,33 +131,33 @@ public class PolicyParserTest {
@Test
public void testParsePoliciesWithOutGroupProperties() {
List<String> policies = new ArrayList<String>();
List<String> policies = new ArrayList<>();
policies.add("hivePolicy;;usr1:r,usr2:rw;;;;entity:*abc,operation:*xyz,type:PII");
// Creating group data
Map<String, List<AtlasActionTypes>> groupMap = new HashMap<String, List<AtlasActionTypes>>();
Map<String, List<AtlasActionTypes>> groupMap = new HashMap<>();
// Creating user data
Map<String, List<AtlasActionTypes>> usersMap = new HashMap<String, List<AtlasActionTypes>>();
List<AtlasActionTypes> usr1AccessList = new ArrayList<AtlasActionTypes>();
Map<String, List<AtlasActionTypes>> usersMap = new HashMap<>();
List<AtlasActionTypes> usr1AccessList = new ArrayList<>();
usr1AccessList.add(AtlasActionTypes.READ);
usersMap.put("usr1", usr1AccessList);
List<AtlasActionTypes> usr2AccessList = new ArrayList<AtlasActionTypes>();
List<AtlasActionTypes> usr2AccessList = new ArrayList<>();
usr2AccessList.add(AtlasActionTypes.READ);
usr2AccessList.add(AtlasActionTypes.CREATE);
usersMap.put("usr2", usr2AccessList);
// Creating resources data
Map<AtlasResourceTypes, List<String>> resourceMap = new HashMap<AtlasResourceTypes, List<String>>();
List<String> resource1List = new ArrayList<String>();
Map<AtlasResourceTypes, List<String>> resourceMap = new HashMap<>();
List<String> resource1List = new ArrayList<>();
resource1List.add("*abc");
resourceMap.put(AtlasResourceTypes.ENTITY, resource1List);
List<String> resource2List = new ArrayList<String>();
List<String> resource2List = new ArrayList<>();
resource2List.add("*xyz");
resourceMap.put(AtlasResourceTypes.OPERATION, resource2List);
List<String> resource3List = new ArrayList<String>();
List<String> resource3List = new ArrayList<>();
resource3List.add("PII");
resourceMap.put(AtlasResourceTypes.TYPE, resource3List);
......
......@@ -35,24 +35,24 @@ public class PolicyUtilTest {
@Test
public void testCreatePermissionMap() {
HashMap<AtlasResourceTypes, List<String>> resourceMap = new HashMap<AtlasResourceTypes, List<String>>();
List<String> resource1List = new ArrayList<String>();
HashMap<AtlasResourceTypes, List<String>> resourceMap = new HashMap<>();
List<String> resource1List = new ArrayList<>();
resource1List.add("*abc");
resourceMap.put(AtlasResourceTypes.ENTITY, resource1List);
List<String> resource2List = new ArrayList<String>();
List<String> resource2List = new ArrayList<>();
resource2List.add("*xyz");
resourceMap.put(AtlasResourceTypes.OPERATION, resource2List);
List<String> resource3List = new ArrayList<String>();
List<String> resource3List = new ArrayList<>();
resource3List.add("PII");
resourceMap.put(AtlasResourceTypes.TYPE, resource3List);
Map<String, HashMap<AtlasResourceTypes, List<String>>> permissionMap =
new HashMap<String, HashMap<AtlasResourceTypes, List<String>>>();
new HashMap<>();
permissionMap.put("grp1", resourceMap);
List<String> policies = new ArrayList<String>();
List<String> policies = new ArrayList<>();
policies.add("hivePolicy;;usr1:r,usr2:rw;;grp1:rwu,grp2:u;;entity:*abc,operation:*xyz,type:PII");
List<PolicyDef> policyDefList = new PolicyParser().parsePolicies(policies);
......@@ -66,25 +66,25 @@ public class PolicyUtilTest {
@Test
public void testMergeCreatePermissionMap() {
HashMap<AtlasResourceTypes, List<String>> resourceMap = new HashMap<AtlasResourceTypes, List<String>>();
List<String> resource1List = new ArrayList<String>();
HashMap<AtlasResourceTypes, List<String>> resourceMap = new HashMap<>();
List<String> resource1List = new ArrayList<>();
resource1List.add("*abc");
resourceMap.put(AtlasResourceTypes.ENTITY, resource1List);
List<String> resource2List = new ArrayList<String>();
List<String> resource2List = new ArrayList<>();
resource2List.add("*x");
resource2List.add("*xyz");
resourceMap.put(AtlasResourceTypes.OPERATION, resource2List);
List<String> resource3List = new ArrayList<String>();
List<String> resource3List = new ArrayList<>();
resource3List.add("PII");
resourceMap.put(AtlasResourceTypes.TYPE, resource3List);
Map<String, HashMap<AtlasResourceTypes, List<String>>> permissionMap =
new HashMap<String, HashMap<AtlasResourceTypes, List<String>>>();
new HashMap<>();
permissionMap.put("grp1", resourceMap);
List<String> policies = new ArrayList<String>();
List<String> policies = new ArrayList<>();
policies.add("hivePolicys;;;;grp1:rwu;;entity:*abc,operation:*xyz,operation:*x");
policies.add("hivePolicy;;;;grp1:rwu;;entity:*abc,operation:*xyz");
policies.add("hivePolicy;;usr1:r,usr2:rw;;grp1:rwu;;entity:*abc,operation:*xyz");
......
......@@ -38,7 +38,7 @@ public class SimpleAtlasAuthorizerTest {
Map<String, Map<AtlasResourceTypes, List<String>>> userReadMap = null;
Map<String, Map<AtlasResourceTypes, List<String>>> groupReadMap = null;
List<String> policies = new ArrayList<String>();
List<String> policies = new ArrayList<>();
policies.add("hivePolicy;;usr1:r,usr2:rw;;grp1:rwu,grp2:u;;type:*abc,type:PII");
List<PolicyDef> policyDefs = new PolicyParser().parsePolicies(policies);
......@@ -50,13 +50,13 @@ public class SimpleAtlasAuthorizerTest {
userReadMap = policyUtil.createPermissionMap(policyDefs,
AtlasActionTypes.READ, SimpleAtlasAuthorizer.AtlasAccessorTypes.USER);
Set<AtlasResourceTypes> resourceType = new HashSet<AtlasResourceTypes>();
Set<AtlasResourceTypes> resourceType = new HashSet<>();
resourceType.add(AtlasResourceTypes.TYPE);
String resource = "xsdfhjabc";
AtlasActionTypes action = AtlasActionTypes.READ;
String user = "usr1";
Set<String> userGroups = new HashSet<String>();
Set<String> userGroups = new HashSet<>();
userGroups.add("grp3");
try {
AtlasAccessRequest request = new AtlasAccessRequest(resourceType,
......@@ -83,7 +83,7 @@ public class SimpleAtlasAuthorizerTest {
Map<String, Map<AtlasResourceTypes, List<String>>> userReadMap = null;
Map<String, Map<AtlasResourceTypes, List<String>>> groupReadMap = null;
List<String> policies = new ArrayList<String>();
List<String> policies = new ArrayList<>();
policies.add("hivePolicy;;usr1:r,usr2:rw;;grp1:rwu,grp2:u;;type:PII");
List<PolicyDef> policyDefs = new PolicyParser().parsePolicies(policies);
......@@ -95,12 +95,12 @@ public class SimpleAtlasAuthorizerTest {
userReadMap = policyUtil.createPermissionMap(policyDefs,
AtlasActionTypes.READ, SimpleAtlasAuthorizer.AtlasAccessorTypes.USER);
Set<AtlasResourceTypes> resourceType = new HashSet<AtlasResourceTypes>();
Set<AtlasResourceTypes> resourceType = new HashSet<>();
resourceType.add(AtlasResourceTypes.TYPE);
String resource = "PII";
AtlasActionTypes action = AtlasActionTypes.READ;
String user = "usr3";
Set<String> userGroups = new HashSet<String>();
Set<String> userGroups = new HashSet<>();
userGroups.add("grp1");
AtlasAccessRequest request = new AtlasAccessRequest(resourceType,
resource, action, user, userGroups);
......@@ -126,7 +126,7 @@ public class SimpleAtlasAuthorizerTest {
Map<String, Map<AtlasResourceTypes, List<String>>> userReadMap = null;
Map<String, Map<AtlasResourceTypes, List<String>>> groupReadMap = null;
List<String> policies = new ArrayList<String>();
List<String> policies = new ArrayList<>();
policies.add("hivePolicy;;usr1:r,usr2:rw;;grp1:rwu,grp2:u;;type:PII");
List<PolicyDef> policyDefs = new PolicyParser().parsePolicies(policies);
......@@ -138,12 +138,12 @@ public class SimpleAtlasAuthorizerTest {
userReadMap = policyUtil.createPermissionMap(policyDefs,
AtlasActionTypes.READ, SimpleAtlasAuthorizer.AtlasAccessorTypes.USER);
Set<AtlasResourceTypes> resourceType = new HashSet<AtlasResourceTypes>();
Set<AtlasResourceTypes> resourceType = new HashSet<>();
resourceType.add(AtlasResourceTypes.TYPE);
String resource = "abc";
AtlasActionTypes action = AtlasActionTypes.READ;
String user = "usr1";
Set<String> userGroups = new HashSet<String>();
Set<String> userGroups = new HashSet<>();
userGroups.add("grp1");
AtlasAccessRequest request = new AtlasAccessRequest(resourceType,
resource, action, user, userGroups);
......@@ -168,7 +168,7 @@ public class SimpleAtlasAuthorizerTest {
Map<String, Map<AtlasResourceTypes, List<String>>> userReadMap = null;
Map<String, Map<AtlasResourceTypes, List<String>>> groupReadMap = null;
List<String> policies = new ArrayList<String>();
List<String> policies = new ArrayList<>();
policies.add("hivePolicy;;usr1:r,usr2:rw;;grp1:rwu,grp2:u;;type:PII");
List<PolicyDef> policyDefs = new PolicyParser().parsePolicies(policies);
......@@ -180,12 +180,12 @@ public class SimpleAtlasAuthorizerTest {
userReadMap = policyUtil.createPermissionMap(policyDefs,
AtlasActionTypes.READ, SimpleAtlasAuthorizer.AtlasAccessorTypes.USER);
Set<AtlasResourceTypes> resourceType = new HashSet<AtlasResourceTypes>();
Set<AtlasResourceTypes> resourceType = new HashSet<>();
resourceType.add(AtlasResourceTypes.TYPE);
String resource = "PII";
AtlasActionTypes action = AtlasActionTypes.READ;
String user = "usr3";
Set<String> userGroups = new HashSet<String>();
Set<String> userGroups = new HashSet<>();
userGroups.add("grp3");
AtlasAccessRequest request = new AtlasAccessRequest(resourceType,
resource, action, user, userGroups);
......
......@@ -23,8 +23,6 @@ import org.apache.atlas.catalog.exception.InvalidPayloadException;
import org.apache.atlas.catalog.exception.ResourceNotFoundException;
import org.apache.atlas.catalog.query.QueryFactory;
import java.util.Collections;
/**
* Base class for resource providers.
*/
......
......@@ -85,7 +85,7 @@ public class DefaultTypeSystem implements AtlasTypeSystem {
} catch(TypeNotFoundException tne) {
//Type not found . Create
TypesDef typesDef = TypesUtil.getTypesDef(ImmutableList.<EnumTypeDefinition>of(), ImmutableList.<StructTypeDefinition>of(),
ImmutableList.<HierarchicalTypeDefinition<TraitType>>of(type),
ImmutableList.of(type),
ImmutableList.<HierarchicalTypeDefinition<ClassType>>of());
metadataService.createType(TypesSerialization.toJson(typesDef));
}
......@@ -198,10 +198,10 @@ public class DefaultTypeSystem implements AtlasTypeSystem {
try {
HierarchicalTypeDefinition<T> definition = null;
if ( isTrait) {
definition = new HierarchicalTypeDefinition<T>(type, name, description,
ImmutableSet.<String>of(TaxonomyResourceProvider.TAXONOMY_TERM_TYPE), attributes.toArray(new AttributeDefinition[attributes.size()]));
definition = new HierarchicalTypeDefinition<>(type, name, description,
ImmutableSet.of(TaxonomyResourceProvider.TAXONOMY_TERM_TYPE), attributes.toArray(new AttributeDefinition[attributes.size()]));
} else {
definition = new HierarchicalTypeDefinition<T>(type, name, description,
definition = new HierarchicalTypeDefinition<>(type, name, description,
ImmutableSet.<String>of(), attributes.toArray(new AttributeDefinition[attributes.size()]));
}
......
......@@ -20,10 +20,6 @@ package org.apache.atlas.catalog;
import com.tinkerpop.blueprints.Vertex;
import org.apache.atlas.catalog.definition.EntityTagResourceDefinition;
import org.apache.atlas.catalog.definition.ResourceDefinition;
import org.apache.atlas.repository.Constants;
import java.util.Collections;
/**
* Wrapper for term vertices.
......
......@@ -20,7 +20,6 @@ package org.apache.atlas.catalog.definition;
import com.tinkerpop.pipes.PipeFunction;
import com.tinkerpop.pipes.transform.TransformFunctionPipe;
import org.apache.atlas.AtlasConstants;
import org.apache.atlas.catalog.Request;
import org.apache.atlas.catalog.TaxonomyResourceProvider;
import org.apache.atlas.catalog.VertexWrapper;
......
......@@ -88,7 +88,7 @@ public interface QueryExpression {
*
* @param fieldName field name
*/
public void setField(String fieldName);
void setField(String fieldName);
/**
* Get the expected value for the expression.
......
......@@ -30,8 +30,6 @@ import org.testng.annotations.Test;
import java.util.*;
import static org.easymock.EasyMock.*;
import static org.easymock.EasyMock.createStrictMock;
import static org.easymock.EasyMock.replay;
import static org.testng.Assert.assertEquals;
import static org.testng.Assert.assertNull;
import static org.testng.Assert.assertTrue;
......
......@@ -28,6 +28,8 @@ import org.apache.commons.cli.Options;
import org.apache.commons.cli.ParseException;
import org.apache.commons.configuration.Configuration;
import java.util.Arrays;
/**
* An application that allows users to run admin commands against an Atlas server.
......@@ -83,7 +85,7 @@ public class AtlasAdminClient {
System.out.println(atlasClient.getAdminStatus());
cmdStatus = 0;
} catch (AtlasServiceException e) {
System.err.println("Could not retrieve status of the server at " + atlasServerUri);
System.err.println("Could not retrieve status of the server at " + Arrays.toString(atlasServerUri));
printStandardHttpErrorDetails(e);
}
} else {
......
......@@ -30,7 +30,6 @@ import org.apache.hadoop.security.ssl.SSLFactory;
import org.apache.hadoop.security.token.delegation.web.DelegationTokenAuthenticatedURL;
import org.apache.hadoop.security.token.delegation.web.DelegationTokenAuthenticator;
import org.apache.hadoop.security.token.delegation.web.KerberosDelegationTokenAuthenticator;
import org.apache.hadoop.security.token.delegation.web.PseudoDelegationTokenAuthenticator;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
......@@ -56,7 +55,7 @@ import static org.apache.atlas.security.SecurityProperties.TRUSTSTORE_FILE_KEY;
*/
public class SecureClientUtils {
public final static int DEFAULT_SOCKET_TIMEOUT = 1 * 60 * 1000; // 1 minute
public final static int DEFAULT_SOCKET_TIMEOUT_IN_MSECS = 1 * 60 * 1000; // 1 minute
private static final Logger LOG = LoggerFactory.getLogger(SecureClientUtils.class);
......@@ -120,14 +119,14 @@ public class SecureClientUtils {
private final static ConnectionConfigurator DEFAULT_TIMEOUT_CONN_CONFIGURATOR = new ConnectionConfigurator() {
@Override
public HttpURLConnection configure(HttpURLConnection conn) throws IOException {
setTimeouts(conn, DEFAULT_SOCKET_TIMEOUT);
setTimeouts(conn, DEFAULT_SOCKET_TIMEOUT_IN_MSECS);
return conn;
}
};
private static ConnectionConfigurator newConnConfigurator(Configuration conf) {
try {
return newSslConnConfigurator(DEFAULT_SOCKET_TIMEOUT, conf);
return newSslConnConfigurator(DEFAULT_SOCKET_TIMEOUT_IN_MSECS, conf);
} catch (Exception e) {
LOG.debug("Cannot load customized ssl related configuration. " + "Fallback to system-generic settings.", e);
return DEFAULT_TIMEOUT_CONN_CONFIGURATOR;
......
......@@ -107,8 +107,7 @@ public class AtlasClientTest {
private WebResource.Builder setupBuilder(AtlasClient.API api, WebResource webResource) {
when(webResource.path(api.getPath())).thenReturn(service);
WebResource.Builder builder = getBuilder(service);
return builder;
return getBuilder(service);
}
@Test
......
......@@ -32,7 +32,7 @@ public class FunctionCallExpression extends AbstractGroovyExpression {
private GroovyExpression target;
private String functionName;
private List<GroovyExpression> arguments = new ArrayList<GroovyExpression>();
private List<GroovyExpression> arguments = new ArrayList<>();
public FunctionCallExpression(String functionName, List<? extends GroovyExpression> arguments) {
this.target = null;
......
......@@ -231,7 +231,7 @@ public final class InMemoryJAASConfiguration extends Configuration {
String clientId = tokenizer.nextToken();
SortedSet<Integer> indexList = jaasClients.get(clientId);
if (indexList == null) {
indexList = new TreeSet<Integer>();
indexList = new TreeSet<>();
jaasClients.put(clientId, indexList);
}
String indexStr = tokenizer.nextToken();
......@@ -275,20 +275,26 @@ public final class InMemoryJAASConfiguration extends Configuration {
AppConfigurationEntry.LoginModuleControlFlag loginControlFlag = null;
if (controlFlag != null) {
controlFlag = controlFlag.trim().toLowerCase();
if (controlFlag.equals("optional")) {
switch (controlFlag) {
case "optional":
loginControlFlag = AppConfigurationEntry.LoginModuleControlFlag.OPTIONAL;
} else if (controlFlag.equals("requisite")) {
break;
case "requisite":
loginControlFlag = AppConfigurationEntry.LoginModuleControlFlag.REQUISITE;
} else if (controlFlag.equals("sufficient")) {
break;
case "sufficient":
loginControlFlag = AppConfigurationEntry.LoginModuleControlFlag.SUFFICIENT;
} else if (controlFlag.equals("required")) {
break;
case "required":
loginControlFlag = AppConfigurationEntry.LoginModuleControlFlag.REQUIRED;
} else {
break;
default:
String validValues = "optional|requisite|sufficient|required";
LOG.warn("Unknown JAAS configuration value for (" + keyParam
+ ") = [" + controlFlag + "], valid value are [" + validValues
+ "] using the default value, REQUIRED");
loginControlFlag = AppConfigurationEntry.LoginModuleControlFlag.REQUIRED;
break;
}
} else {
LOG.warn("Unable to find JAAS configuration ("
......@@ -336,7 +342,7 @@ public final class InMemoryJAASConfiguration extends Configuration {
List<AppConfigurationEntry> retList = applicationConfigEntryMap.get(jaasClient);
if (retList == null) {
retList = new ArrayList<AppConfigurationEntry>();
retList = new ArrayList<>();
applicationConfigEntryMap.put(jaasClient, retList);
}
......
......@@ -46,13 +46,7 @@ public final class AuthenticationUtil {
}
public static boolean isKerberosAuthenticationEnabled(Configuration atlasConf) {
boolean isKerberosAuthenticationEnabled;
if ("true".equalsIgnoreCase(atlasConf.getString("atlas.authentication.method.kerberos"))) {
isKerberosAuthenticationEnabled = true;
} else {
isKerberosAuthenticationEnabled = false;
}
return isKerberosAuthenticationEnabled;
return atlasConf.getBoolean("atlas.authentication.method.kerberos", false);
}
public static String[] getBasicAuthenticationInput() {
......
......@@ -32,9 +32,9 @@ import org.springframework.beans.factory.config.PropertyPlaceholderConfigurer;
* Util class for Properties.
*/
public final class PropertiesUtil extends PropertyPlaceholderConfigurer {
private static Map<String, String> propertiesMap = new HashMap<String, String>();
private static Map<String, String> propertiesMap = new HashMap<>();
private static Logger logger = Logger.getLogger(PropertiesUtil.class);
protected List<String> xmlPropertyConfigurer = new ArrayList<String>();
protected List<String> xmlPropertyConfigurer = new ArrayList<>();
private PropertiesUtil() {
......
......@@ -103,11 +103,11 @@ define(['require',
this.tagsCollection();
}, this);
this.listenTo(this.commonCollection, 'reset', function() {
--this.asyncAttrFetchCounter
--this.asyncAttrFetchCounter;
this.subAttributeData();
}, this);
this.listenTo(this.commonCollection, 'error', function() {
--this.asyncAttrFetchCounter
--this.asyncAttrFetchCounter;
this.$('.attrLoader').hide();
}, this);
},
......
......@@ -50,7 +50,7 @@ public interface AtlasGraphQuery<V, E> {
* @param value
* @return
*/
AtlasGraphQuery<V, E> in(String propertyKey, Collection<? extends Object> values);
AtlasGraphQuery<V, E> in(String propertyKey, Collection<?> values);
/**
......
......@@ -41,7 +41,7 @@ public interface AtlasIndexQuery<V, E> {
* @param <V>
* @param <E>
*/
public interface Result<V, E> {
interface Result<V, E> {
/**
* Gets the vertex for this result.
......
......@@ -46,7 +46,7 @@ public interface NativeTitanGraphQuery<V, E> {
* @param propertyName
* @param values
*/
void in(String propertyName, Collection<? extends Object> values);
void in(String propertyName, Collection<?> values);
/**
* Adds a has condition to the query.
......
......@@ -144,7 +144,7 @@ public abstract class TitanGraphQuery<V, E> implements AtlasGraphQuery<V, E> {
@Override
public AtlasGraphQuery<V, E> in(String propertyKey, Collection<? extends Object> values) {
public AtlasGraphQuery<V, E> in(String propertyKey, Collection<?> values) {
queryCondition.andWith(new InPredicate(propertyKey, values));
return this;
}
......
......@@ -28,9 +28,9 @@ import org.apache.atlas.repository.graphdb.titan.query.NativeTitanGraphQuery;
public class InPredicate implements QueryPredicate {
private String propertyName;
private Collection<? extends Object> values;
private Collection<?> values;
public InPredicate(String propertyName, Collection<? extends Object> values) {
public InPredicate(String propertyName, Collection<?> values) {
super();
this.propertyName = propertyName;
this.values = values;
......
......@@ -43,7 +43,7 @@ public class OrCondition {
}
public OrCondition(boolean addInitialTerm) {
this.children = new ArrayList<AndCondition>();
this.children = new ArrayList<>();
if (addInitialTerm) {
children.add(new AndCondition());
}
......@@ -96,7 +96,7 @@ public class OrCondition {
//it creates a new AndCondition that combines the two AndConditions together. These combined
//AndConditions become the new set of AndConditions in this OrCondition.
List<AndCondition> expandedExpressionChildren = new ArrayList<AndCondition>();
List<AndCondition> expandedExpressionChildren = new ArrayList<>();
for (AndCondition otherExprTerm : other.getAndTerms()) {
for (AndCondition currentExpr : children) {
AndCondition currentAndConditionCopy = currentExpr.copy();
......
......@@ -36,7 +36,7 @@ public interface HBaseCompat {
* @param algo
* compression type to use
*/
public void setCompression(HColumnDescriptor cd, String algo);
void setCompression(HColumnDescriptor cd, String algo);
/**
* Create and return a HTableDescriptor instance with the given name. The
......@@ -50,7 +50,7 @@ public interface HBaseCompat {
* HBase table name
* @return a new table descriptor instance
*/
public HTableDescriptor newTableDescriptor(String tableName);
HTableDescriptor newTableDescriptor(String tableName);
ConnectionMask createConnection(Configuration conf) throws IOException;
......
......@@ -67,11 +67,7 @@ public class HBaseCompatLoader {
try {
compat = (HBaseCompat)Class.forName(className).newInstance();
log.info("Instantiated HBase compatibility layer {}: {}", classNameSource, compat.getClass().getCanonicalName());
} catch (IllegalAccessException e) {
throw new RuntimeException(e.getClass().getSimpleName() + errTemplate, e);
} catch (InstantiationException e) {
throw new RuntimeException(e.getClass().getSimpleName() + errTemplate, e);
} catch (ClassNotFoundException e) {
} catch (IllegalAccessException | ClassNotFoundException | InstantiationException e) {
throw new RuntimeException(e.getClass().getSimpleName() + errTemplate, e);
}
......
......@@ -48,7 +48,6 @@ import javax.annotation.Nullable;
import java.io.Closeable;
import java.io.IOException;
import java.util.*;
import java.util.Map;
import java.util.concurrent.TimeUnit;
/**
......@@ -151,7 +150,7 @@ public class HBaseKeyColumnValueStore implements KeyColumnValueStore {
void handleLockFailure(StoreTransaction txh, KeyColumn lockID, int trialCount) throws PermanentLockingException {
if (trialCount < lockMaxRetries) {
try {
Thread.sleep(lockMaxWaitTimeMs.getLength(TimeUnit.DAYS.MILLISECONDS));
Thread.sleep(lockMaxWaitTimeMs.getLength(TimeUnit.MILLISECONDS));
} catch (InterruptedException e) {
throw new PermanentLockingException(
"Interrupted while waiting for acquiring lock for transaction "
......@@ -199,7 +198,7 @@ public class HBaseKeyColumnValueStore implements KeyColumnValueStore {
}
private Map<StaticBuffer,EntryList> getHelper(List<StaticBuffer> keys, Filter getFilter) throws BackendException {
List<Get> requests = new ArrayList<Get>(keys.size());
List<Get> requests = new ArrayList<>(keys.size());
{
for (StaticBuffer key : keys) {
Get g = new Get(key.as(StaticBuffer.ARRAY_FACTORY)).addFamily(columnFamilyBytes).setFilter(getFilter);
......@@ -212,7 +211,7 @@ public class HBaseKeyColumnValueStore implements KeyColumnValueStore {
}
}
Map<StaticBuffer,EntryList> resultMap = new HashMap<StaticBuffer,EntryList>(keys.size());
Map<StaticBuffer,EntryList> resultMap = new HashMap<>(keys.size());
try {
TableMask table = null;
......@@ -336,7 +335,7 @@ public class HBaseKeyColumnValueStore implements KeyColumnValueStore {
@Override
public boolean hasNext() {
ensureOpen();
return kv == null ? false : kv.hasNext();
return kv != null && kv.hasNext();
}
@Override
......
......@@ -99,21 +99,21 @@ public class HBaseStoreManager extends DistributedStoreManager implements KeyCol
new ConfigNamespace(GraphDatabaseConfiguration.STORAGE_NS, "hbase", "HBase storage options");
public static final ConfigOption<Boolean> SHORT_CF_NAMES =
new ConfigOption<Boolean>(HBASE_NS, "short-cf-names",
new ConfigOption<>(HBASE_NS, "short-cf-names",
"Whether to shorten the names of Titan's column families to one-character mnemonics " +
"to conserve storage space", ConfigOption.Type.FIXED, true);
public static final String COMPRESSION_DEFAULT = "-DEFAULT-";
public static final ConfigOption<String> COMPRESSION =
new ConfigOption<String>(HBASE_NS, "compression-algorithm",
new ConfigOption<>(HBASE_NS, "compression-algorithm",
"An HBase Compression.Algorithm enum string which will be applied to newly created column families. " +
"The compression algorithm must be installed and available on the HBase cluster. Titan cannot install " +
"and configure new compression algorithms on the HBase cluster by itself.",
ConfigOption.Type.MASKABLE, "GZ");
public static final ConfigOption<Boolean> SKIP_SCHEMA_CHECK =
new ConfigOption<Boolean>(HBASE_NS, "skip-schema-check",
new ConfigOption<>(HBASE_NS, "skip-schema-check",
"Assume that Titan's HBase table and column families already exist. " +
"When this is true, Titan will not check for the existence of its table/CFs, " +
"nor will it attempt to create them under any circumstances. This is useful " +
......@@ -121,7 +121,7 @@ public class HBaseStoreManager extends DistributedStoreManager implements KeyCol
ConfigOption.Type.MASKABLE, false);
public static final ConfigOption<String> HBASE_TABLE =
new ConfigOption<String>(HBASE_NS, "table",
new ConfigOption<>(HBASE_NS, "table",
"The name of the table Titan will use. When " + ConfigElement.getPath(SKIP_SCHEMA_CHECK) +
" is false, Titan will automatically create this table if it does not already exist.",
ConfigOption.Type.LOCAL, "titan");
......@@ -139,7 +139,7 @@ public class HBaseStoreManager extends DistributedStoreManager implements KeyCol
* Titan connects to an HBase backend for the first time.
*/
public static final ConfigOption<Integer> REGION_COUNT =
new ConfigOption<Integer>(HBASE_NS, "region-count",
new ConfigOption<>(HBASE_NS, "region-count",
"The number of initial regions set when creating Titan's HBase table",
ConfigOption.Type.MASKABLE, Integer.class, new Predicate<Integer>() {
@Override
......@@ -183,7 +183,7 @@ public class HBaseStoreManager extends DistributedStoreManager implements KeyCol
* These considerations may differ for other HBase implementations (e.g. MapR).
*/
public static final ConfigOption<Integer> REGIONS_PER_SERVER =
new ConfigOption<Integer>(HBASE_NS, "regions-per-server",
new ConfigOption<>(HBASE_NS, "regions-per-server",
"The number of regions per regionserver to set when creating Titan's HBase table",
ConfigOption.Type.MASKABLE, Integer.class);
......@@ -217,7 +217,7 @@ public class HBaseStoreManager extends DistributedStoreManager implements KeyCol
*
*/
public static final ConfigOption<String> COMPAT_CLASS =
new ConfigOption<String>(HBASE_NS, "compat-class",
new ConfigOption<>(HBASE_NS, "compat-class",
"The package and class name of the HBaseCompat implementation. HBaseCompat masks version-specific HBase API differences. " +
"When this option is unset, Titan calls HBase's VersionInfo.getVersion() and loads the matching compat class " +
"at runtime. Setting this option forces Titan to instead reflectively load and instantiate the specified class.",
......@@ -266,7 +266,7 @@ public class HBaseStoreManager extends DistributedStoreManager implements KeyCol
private final HBaseCompat compat;
private static final ConcurrentHashMap<HBaseStoreManager, Throwable> openManagers =
new ConcurrentHashMap<HBaseStoreManager, Throwable>();
new ConcurrentHashMap<>();
// Mutable instance state
private final ConcurrentMap<String, HBaseKeyColumnValueStore> openStores;
......@@ -342,7 +342,7 @@ public class HBaseStoreManager extends DistributedStoreManager implements KeyCol
}
logger.debug("End of HBase config key=value pairs");
openStores = new ConcurrentHashMap<String, HBaseKeyColumnValueStore>();
openStores = new ConcurrentHashMap<>();
}
@Override
......@@ -420,7 +420,7 @@ public class HBaseStoreManager extends DistributedStoreManager implements KeyCol
commitTime.getAdditionTime(times.getUnit()),
commitTime.getDeletionTime(times.getUnit()));
List<Row> batch = new ArrayList<Row>(commandsPerKey.size()); // actual batch operation
List<Row> batch = new ArrayList<>(commandsPerKey.size()); // actual batch operation
// convert sorted commands into representation required for 'batch' operation
for (Pair<Put, Delete> commands : commandsPerKey.values()) {
......@@ -442,9 +442,7 @@ public class HBaseStoreManager extends DistributedStoreManager implements KeyCol
} finally {
IOUtils.closeQuietly(table);
}
} catch (IOException e) {
throw new TemporaryBackendException(e);
} catch (InterruptedException e) {
} catch (IOException | InterruptedException e) {
throw new TemporaryBackendException(e);
}
......@@ -466,7 +464,7 @@ public class HBaseStoreManager extends DistributedStoreManager implements KeyCol
final String cfName = shortCfNames ? shortenCfName(longName) : longName;
final String llmPrefix = getName();
llm = LocalLockMediators.INSTANCE.<StoreTransaction>get(llmPrefix, times);
llm = LocalLockMediators.INSTANCE.get(llmPrefix, times);
HBaseKeyColumnValueStore newStore = new HBaseKeyColumnValueStore(this, cnx, tableName, cfName, longName, llm);
store = openStores.putIfAbsent(longName, newStore); // nothing bad happens if we loose to other thread
......@@ -511,7 +509,7 @@ public class HBaseStoreManager extends DistributedStoreManager implements KeyCol
@Override
public List<KeyRange> getLocalKeyPartition() throws BackendException {
List<KeyRange> result = new LinkedList<KeyRange>();
List<KeyRange> result = new LinkedList<>();
TableMask table = null;
try {
......@@ -645,7 +643,7 @@ public class HBaseStoreManager extends DistributedStoreManager implements KeyCol
}
// Require either no null key bounds or a pair of them
Preconditions.checkState(!(null == nullStart ^ null == nullEnd));
Preconditions.checkState((null == nullStart) == (null == nullEnd));
// Check that every key in the result is at least 4 bytes long
Map<KeyRange, ServerName> result = b.build();
......@@ -675,8 +673,7 @@ public class HBaseStoreManager extends DistributedStoreManager implements KeyCol
byte padded[] = new byte[targetLength];
for (int i = 0; i < dataToPad.length; i++)
padded[i] = dataToPad[i];
System.arraycopy(dataToPad, 0, padded, 0, dataToPad.length);
for (int i = dataToPad.length; i < padded.length; i++)
padded[i] = (byte)0;
......@@ -856,7 +853,7 @@ public class HBaseStoreManager extends DistributedStoreManager implements KeyCol
private Map<StaticBuffer, Pair<Put, Delete>> convertToCommands(Map<String, Map<StaticBuffer, KCVMutation>> mutations,
final long putTimestamp,
final long delTimestamp) throws PermanentBackendException {
Map<StaticBuffer, Pair<Put, Delete>> commandsPerKey = new HashMap<StaticBuffer, Pair<Put, Delete>>();
Map<StaticBuffer, Pair<Put, Delete>> commandsPerKey = new HashMap<>();
for (Map.Entry<String, Map<StaticBuffer, KCVMutation>> entry : mutations.entrySet()) {
......@@ -870,7 +867,7 @@ public class HBaseStoreManager extends DistributedStoreManager implements KeyCol
Pair<Put, Delete> commands = commandsPerKey.get(m.getKey());
if (commands == null) {
commands = new Pair<Put, Delete>();
commands = new Pair<>();
commandsPerKey.put(m.getKey(), commands);
}
......@@ -928,7 +925,7 @@ public class HBaseStoreManager extends DistributedStoreManager implements KeyCol
* Similar to {@link Function}, except that the {@code apply} method is allowed
* to throw {@link BackendException}.
*/
private static interface BackendFunction<F, T> {
private interface BackendFunction<F, T> {
T apply(F input) throws BackendException;
}
......
......@@ -77,7 +77,7 @@ public class LocalLockMediator<T> {
* according to {@link AuditRecord#expires}, in which case the lock should
* be considered invalid.
*/
private final ConcurrentHashMap<KeyColumn, AuditRecord<T>> locks = new ConcurrentHashMap<KeyColumn, AuditRecord<T>>();
private final ConcurrentHashMap<KeyColumn, AuditRecord<T>> locks = new ConcurrentHashMap<>();
public LocalLockMediator(String name, TimestampProvider times) {
this.name = name;
......@@ -125,7 +125,7 @@ public class LocalLockMediator<T> {
assert null != kc;
assert null != requestor;
AuditRecord<T> audit = new AuditRecord<T>(requestor, expires);
AuditRecord<T> audit = new AuditRecord<>(requestor, expires);
AuditRecord<T> inmap = locks.putIfAbsent(kc, audit);
boolean success = false;
......@@ -134,7 +134,7 @@ public class LocalLockMediator<T> {
// Uncontended lock succeeded
if (log.isTraceEnabled()) {
log.trace("New local lock created: {} namespace={} txn={}",
new Object[]{kc, name, requestor});
kc, name, requestor);
}
success = true;
} else if (inmap.equals(audit)) {
......@@ -144,13 +144,13 @@ public class LocalLockMediator<T> {
if (success) {
log.trace(
"Updated local lock expiration: {} namespace={} txn={} oldexp={} newexp={}",
new Object[]{kc, name, requestor, inmap.expires,
audit.expires});
kc, name, requestor, inmap.expires,
audit.expires);
} else {
log.trace(
"Failed to update local lock expiration: {} namespace={} txn={} oldexp={} newexp={}",
new Object[]{kc, name, requestor, inmap.expires,
audit.expires});
kc, name, requestor, inmap.expires,
audit.expires);
}
}
} else if (0 > inmap.expires.compareTo(times.getTime())) {
......@@ -159,14 +159,14 @@ public class LocalLockMediator<T> {
if (log.isTraceEnabled()) {
log.trace(
"Discarding expired lock: {} namespace={} txn={} expired={}",
new Object[]{kc, name, inmap.holder, inmap.expires});
kc, name, inmap.holder, inmap.expires);
}
} else {
// we lost to a valid lock
if (log.isTraceEnabled()) {
log.trace(
"Local lock failed: {} namespace={} txn={} (already owned by {})",
new Object[]{kc, name, requestor, inmap});
kc, name, requestor, inmap);
}
}
......@@ -190,13 +190,13 @@ public class LocalLockMediator<T> {
return false;
}
AuditRecord<T> unlocker = new AuditRecord<T>(requestor, null);
AuditRecord<T> unlocker = new AuditRecord<>(requestor, null);
AuditRecord<T> holder = locks.get(kc);
if (!holder.equals(unlocker)) {
log.error("Local unlock of {} by {} failed: it is held by {}",
new Object[]{kc, unlocker, holder});
kc, unlocker, holder);
return false;
}
......@@ -206,7 +206,7 @@ public class LocalLockMediator<T> {
expiryQueue.remove(kc);
if (log.isTraceEnabled()) {
log.trace("Local unlock succeeded: {} namespace={} txn={}",
new Object[]{kc, name, requestor});
kc, name, requestor);
}
} else {
log.warn("Local unlock warning: lock record for {} disappeared "
......
......@@ -19,7 +19,6 @@ import com.google.common.base.Predicate;
import com.google.common.collect.*;
import com.thinkaurelius.titan.core.*;
import com.thinkaurelius.titan.core.attribute.Cmp;
import com.thinkaurelius.titan.core.Cardinality;
import com.thinkaurelius.titan.core.schema.SchemaStatus;
import com.thinkaurelius.titan.core.schema.TitanSchemaType;
import com.thinkaurelius.titan.graphdb.database.IndexSerializer;
......@@ -78,7 +77,7 @@ public class GraphCentricQueryBuilder implements TitanGraphQuery<GraphCentricQue
Preconditions.checkNotNull(serializer);
this.tx = tx;
this.serializer = serializer;
this.constraints = new ArrayList<PredicateCondition<String, TitanElement>>(5);
this.constraints = new ArrayList<>(5);
}
/* ---------------------------------------------------------------
......@@ -90,7 +89,7 @@ public class GraphCentricQueryBuilder implements TitanGraphQuery<GraphCentricQue
Preconditions.checkNotNull(key);
Preconditions.checkNotNull(predicate);
Preconditions.checkArgument(predicate.isValidCondition(condition), "Invalid condition: %s", condition);
constraints.add(new PredicateCondition<String, TitanElement>(key, predicate, condition));
constraints.add(new PredicateCondition<>(key, predicate, condition));
return this;
}
......@@ -172,19 +171,19 @@ public class GraphCentricQueryBuilder implements TitanGraphQuery<GraphCentricQue
@Override
public Iterable<Vertex> vertices() {
GraphCentricQuery query = constructQuery(ElementCategory.VERTEX);
return Iterables.filter(new QueryProcessor<GraphCentricQuery, TitanElement, JointIndexQuery>(query, tx.elementProcessor), Vertex.class);
return Iterables.filter(new QueryProcessor<>(query, tx.elementProcessor), Vertex.class);
}
@Override
public Iterable<Edge> edges() {
GraphCentricQuery query = constructQuery(ElementCategory.EDGE);
return Iterables.filter(new QueryProcessor<GraphCentricQuery, TitanElement, JointIndexQuery>(query, tx.elementProcessor), Edge.class);
return Iterables.filter(new QueryProcessor<>(query, tx.elementProcessor), Edge.class);
}
@Override
public Iterable<TitanProperty> properties() {
GraphCentricQuery query = constructQuery(ElementCategory.PROPERTY);
return Iterables.filter(new QueryProcessor<GraphCentricQuery, TitanElement, JointIndexQuery>(query, tx.elementProcessor), TitanProperty.class);
return Iterables.filter(new QueryProcessor<>(query, tx.elementProcessor), TitanProperty.class);
}
private QueryDescription describe(ElementCategory category) {
......@@ -232,7 +231,7 @@ public class GraphCentricQueryBuilder implements TitanGraphQuery<GraphCentricQue
if (orders.isEmpty()) orders = OrderList.NO_ORDER;
//Compile all indexes that cover at least one of the query conditions
final Set<IndexType> indexCandidates = new HashSet<IndexType>();
final Set<IndexType> indexCandidates = new HashSet<>();
ConditionUtil.traversal(conditions, new Predicate<Condition<TitanElement>>() {
@Override
public boolean apply(@Nullable Condition<TitanElement> condition) {
......@@ -281,7 +280,7 @@ public class GraphCentricQueryBuilder implements TitanGraphQuery<GraphCentricQue
log.warn("The query optimizer currently does not support multiple label constraints in query: {}", this);
continue;
}
if (!type.getName().equals((String)Iterables.getOnlyElement(labels))) continue;
if (!type.getName().equals(Iterables.getOnlyElement(labels))) continue;
subcover.add(equalCon.getKey());
}
......@@ -345,9 +344,9 @@ public class GraphCentricQueryBuilder implements TitanGraphQuery<GraphCentricQue
}
indexLimit = Math.min(HARD_MAX_LIMIT, QueryUtil.adjustLimitForTxModifications(tx, coveredClauses.size(), indexLimit));
jointQuery.setLimit(indexLimit);
query = new BackendQueryHolder<JointIndexQuery>(jointQuery, coveredClauses.size()==conditions.numChildren(), isSorted, null);
query = new BackendQueryHolder<>(jointQuery, coveredClauses.size() == conditions.numChildren(), isSorted, null);
} else {
query = new BackendQueryHolder<JointIndexQuery>(new JointIndexQuery(), false, isSorted, null);
query = new BackendQueryHolder<>(new JointIndexQuery(), false, isSorted, null);
}
return new GraphCentricQuery(resultType, conditions, orders, query, limit);
......@@ -366,8 +365,8 @@ public class GraphCentricQueryBuilder implements TitanGraphQuery<GraphCentricQue
if (index.getStatus()!= SchemaStatus.ENABLED) return null;
IndexField[] fields = index.getFieldKeys();
Object[] indexValues = new Object[fields.length];
Set<Condition> coveredClauses = new HashSet<Condition>(fields.length);
List<Object[]> indexCovers = new ArrayList<Object[]>(4);
Set<Condition> coveredClauses = new HashSet<>(fields.length);
List<Object[]> indexCovers = new ArrayList<>(4);
constructIndexCover(indexValues, 0, fields, condition, indexCovers, coveredClauses);
if (!indexCovers.isEmpty()) {
......@@ -384,7 +383,7 @@ public class GraphCentricQueryBuilder implements TitanGraphQuery<GraphCentricQue
} else {
IndexField field = fields[position];
Map.Entry<Condition, Collection<Object>> equalCon = getEqualityConditionValues(condition, field.getFieldKey());
if (equalCon!=null) {
if (equalCon != null) {
coveredClauses.add(equalCon.getKey());
assert equalCon.getValue().size()>0;
for (Object value : equalCon.getValue()) {
......@@ -392,7 +391,7 @@ public class GraphCentricQueryBuilder implements TitanGraphQuery<GraphCentricQue
newValues[position]=value;
constructIndexCover(newValues, position+1, fields, condition, indexCovers, coveredClauses);
}
} else return;
}
}
}
......@@ -419,7 +418,7 @@ public class GraphCentricQueryBuilder implements TitanGraphQuery<GraphCentricQue
final IndexSerializer indexInfo, final Set<Condition> covered) {
assert QueryUtil.isQueryNormalForm(condition);
assert condition instanceof And;
And<TitanElement> subcondition = new And<TitanElement>(condition.numChildren());
And<TitanElement> subcondition = new And<>(condition.numChildren());
for (Condition<TitanElement> subclause : condition.getChildren()) {
if (coversAll(index, subclause, indexInfo)) {
subcondition.add(subclause);
......@@ -439,9 +438,9 @@ public class GraphCentricQueryBuilder implements TitanGraphQuery<GraphCentricQue
PropertyKey key = (PropertyKey) atom.getKey();
ParameterIndexField[] fields = index.getFieldKeys();
ParameterIndexField match = null;
for (int i = 0; i < fields.length; i++) {
if (fields[i].getStatus()!= SchemaStatus.ENABLED) continue;
if (fields[i].getFieldKey().equals(key)) match = fields[i];
for (ParameterIndexField field : fields) {
if (field.getStatus() != SchemaStatus.ENABLED) continue;
if (field.getFieldKey().equals(key)) match = field;
}
if (match==null) return false;
return indexInfo.supports(index, match, atom.getPredicate());
......
......@@ -319,12 +319,12 @@ public class Titan0Graph implements AtlasGraph<Titan0Vertex, Titan0Edge> {
public Iterable<AtlasEdge<Titan0Vertex, Titan0Edge>> wrapEdges(Iterator<Edge> it) {
Iterable<Edge> iterable = new IteratorToIterableAdapter<Edge>(it);
Iterable<Edge> iterable = new IteratorToIterableAdapter<>(it);
return wrapEdges(iterable);
}
public Iterable<AtlasVertex<Titan0Vertex, Titan0Edge>> wrapVertices(Iterator<Vertex> it) {
Iterable<Vertex> iterable = new IteratorToIterableAdapter<Vertex>(it);
Iterable<Vertex> iterable = new IteratorToIterableAdapter<>(it);
return wrapVertices(iterable);
}
......@@ -341,7 +341,7 @@ public class Titan0Graph implements AtlasGraph<Titan0Vertex, Titan0Edge> {
}
public Iterable<AtlasEdge<Titan0Vertex, Titan0Edge>> wrapEdges(Iterable<Edge> it) {
Iterable<Edge> result = (Iterable<Edge>)it;
Iterable<Edge> result = it;
return Iterables.transform(result, new Function<Edge, AtlasEdge<Titan0Vertex, Titan0Edge>>(){
@Override
......
......@@ -86,7 +86,7 @@ public class Titan0GraphIndex implements AtlasGraphIndex {
@Override
public Set<AtlasPropertyKey> getFieldKeys() {
PropertyKey[] keys = wrappedIndex.getFieldKeys();
Set<AtlasPropertyKey> result = new HashSet<AtlasPropertyKey>();
Set<AtlasPropertyKey> result = new HashSet<>();
for(PropertyKey key : keys) {
result.add(GraphDbObjectFactory.createPropertyKey(key));
}
......
......@@ -103,7 +103,7 @@ public class Titan0Vertex extends Titan0Element<Vertex> implements AtlasVertex<T
public <T> Collection<T> getPropertyValues(String key, Class<T> clazz) {
TitanVertex tv = getAsTitanVertex();
Collection<T> result = new ArrayList<T>();
Collection<T> result = new ArrayList<>();
for (TitanProperty property : tv.getProperties(key)) {
result.add((T) property.getValue());
}
......
......@@ -56,7 +56,7 @@ public class NativeTitan0GraphQuery implements NativeTitanGraphQuery<Titan0Verte
@Override
public void in(String propertyName, Collection<? extends Object> values) {
public void in(String propertyName, Collection<?> values) {
query.has(propertyName, Contain.IN, values);
}
......
......@@ -39,13 +39,13 @@ public class LocalLockMediatorTest {
public void testLock() throws InterruptedException {
TimestampProvider times = Timestamps.MICRO;
LocalLockMediator<HBaseTransaction> llm =
new LocalLockMediator<HBaseTransaction>(LOCK_NAMESPACE, times);
new LocalLockMediator<>(LOCK_NAMESPACE, times);
//Expire immediately
Assert.assertTrue(llm.lock(kc, mockTx1, times.getTime(0, TimeUnit.NANOSECONDS)));
Assert.assertTrue(llm.lock(kc, mockTx2, times.getTime(Long.MAX_VALUE, TimeUnit.NANOSECONDS)));
llm = new LocalLockMediator<HBaseTransaction>(LOCK_NAMESPACE, times);
llm = new LocalLockMediator<>(LOCK_NAMESPACE, times);
//Expire later
Assert.assertTrue(llm.lock(kc, mockTx1, times.getTime(Long.MAX_VALUE, TimeUnit.NANOSECONDS)));
......
......@@ -402,7 +402,7 @@ public class GraphQueryTest extends AbstractGraphDatabaseTest {
}
private static <T> List<T> toList(Iterable<T> itr) {
List<T> result = new ArrayList<T>();
List<T> result = new ArrayList<>();
for(T object : itr) {
result.add(object);
}
......
......@@ -90,38 +90,24 @@ public class Titan0DatabaseTest {
testProperty(graph, "booleanProperty", Boolean.TRUE);
testProperty(graph, "booleanProperty", Boolean.FALSE);
testProperty(graph, "booleanProperty", new Boolean(Boolean.TRUE));
testProperty(graph, "booleanProperty", new Boolean(Boolean.FALSE));
testProperty(graph, "byteProperty", Byte.MAX_VALUE);
testProperty(graph, "byteProperty", Byte.MIN_VALUE);
testProperty(graph, "byteProperty", new Byte(Byte.MAX_VALUE));
testProperty(graph, "byteProperty", new Byte(Byte.MIN_VALUE));
testProperty(graph, "shortProperty", Short.MAX_VALUE);
testProperty(graph, "shortProperty", Short.MIN_VALUE);
testProperty(graph, "shortProperty", new Short(Short.MAX_VALUE));
testProperty(graph, "shortProperty", new Short(Short.MIN_VALUE));
testProperty(graph, "intProperty", Integer.MAX_VALUE);
testProperty(graph, "intProperty", Integer.MIN_VALUE);
testProperty(graph, "intProperty", new Integer(Integer.MAX_VALUE));
testProperty(graph, "intProperty", new Integer(Integer.MIN_VALUE));
testProperty(graph, "longProperty", Long.MIN_VALUE);
testProperty(graph, "longProperty", Long.MAX_VALUE);
testProperty(graph, "longProperty", new Long(Long.MIN_VALUE));
testProperty(graph, "longProperty", new Long(Long.MAX_VALUE));
testProperty(graph, "doubleProperty", Double.MAX_VALUE);
testProperty(graph, "doubleProperty", Double.MIN_VALUE);
testProperty(graph, "doubleProperty", new Double(Double.MAX_VALUE));
testProperty(graph, "doubleProperty", new Double(Double.MIN_VALUE));
testProperty(graph, "floatProperty", Float.MAX_VALUE);
testProperty(graph, "floatProperty", Float.MIN_VALUE);
testProperty(graph, "floatProperty", new Float(Float.MAX_VALUE));
testProperty(graph, "floatProperty", new Float(Float.MIN_VALUE));
// enumerations - TypeCategory
testProperty(graph, "typeCategoryProperty", TypeCategory.CLASS);
......@@ -147,7 +133,7 @@ public class Titan0DatabaseTest {
@Test
public <V, E> void testMultiplicityOnePropertySupport() {
AtlasGraph<V, E> graph = (AtlasGraph<V, E>) getGraph();
AtlasGraph<V, E> graph = getGraph();
AtlasVertex<V, E> vertex = graph.addVertex();
vertex.setProperty("name", "Jeff");
......@@ -183,7 +169,7 @@ public class Titan0DatabaseTest {
@Test
public <V, E> void testRemoveEdge() {
AtlasGraph<V, E> graph = (AtlasGraph<V, E>) getGraph();
AtlasGraph<V, E> graph = getGraph();
AtlasVertex<V, E> v1 = graph.addVertex();
AtlasVertex<V, E> v2 = graph.addVertex();
......@@ -205,7 +191,7 @@ public class Titan0DatabaseTest {
@Test
public <V, E> void testRemoveVertex() {
AtlasGraph<V, E> graph = (AtlasGraph<V, E>) getGraph();
AtlasGraph<V, E> graph = getGraph();
AtlasVertex<V, E> v1 = graph.addVertex();
......@@ -219,7 +205,7 @@ public class Titan0DatabaseTest {
@Test
public <V, E> void testGetEdges() {
AtlasGraph<V, E> graph = (AtlasGraph<V, E>) getGraph();
AtlasGraph<V, E> graph = getGraph();
AtlasVertex<V, E> v1 = graph.addVertex();
AtlasVertex<V, E> v2 = graph.addVertex();
AtlasVertex<V, E> v3 = graph.addVertex();
......@@ -296,7 +282,7 @@ public class Titan0DatabaseTest {
AtlasGraph<V, E> graph = getGraph();
AtlasVertex<V, E> vertex = graph.addVertex();
vertex.setListProperty("colors", Arrays.asList(new String[] { "red", "blue", "green" }));
vertex.setListProperty("colors", Arrays.asList("red", "blue", "green"));
List<String> colors = vertex.getListProperty("colors");
assertTrue(colors.contains("red"));
assertTrue(colors.contains("blue"));
......@@ -419,7 +405,7 @@ public class Titan0DatabaseTest {
}
private static <T> List<T> toList(Iterable<? extends T> iterable) {
List<T> result = new ArrayList<T>();
List<T> result = new ArrayList<>();
for (T item : iterable) {
result.add(item);
}
......
......@@ -48,7 +48,7 @@ public class SearchFilter {
/**
* to specify whether the result should be sorted? If yes, whether asc or desc.
*/
public enum SortType { NONE, ASC, DESC };
public enum SortType { NONE, ASC, DESC }
private MultivaluedMap<String, String> params = null;
private long startIndex = 0;
......
......@@ -55,7 +55,7 @@ public class AtlasEntity extends AtlasStruct implements Serializable {
/**
* Status of the entity - can be active or deleted. Deleted entities are not removed from Atlas store.
*/
public enum Status { STATUS_ACTIVE, STATUS_DELETED };
public enum Status { STATUS_ACTIVE, STATUS_DELETED }
private String guid = null;
private Status status = Status.STATUS_ACTIVE;
......
......@@ -18,7 +18,6 @@
package org.apache.atlas.model.instance;
import java.io.Serializable;
import java.util.Date;
import java.util.List;
import java.util.Map;
import java.util.Objects;
......
......@@ -29,7 +29,6 @@ import javax.xml.bind.annotation.XmlAccessorType;
import javax.xml.bind.annotation.XmlRootElement;
import javax.xml.bind.annotation.XmlSeeAlso;
import java.io.Serializable;
import java.util.Date;
import java.util.List;
import java.util.Map;
import java.util.Objects;
......
......@@ -119,7 +119,7 @@ public class AtlasStruct implements Serializable {
if (a != null) {
a.put(name, value);
} else {
a = new HashMap<String, Object>();
a = new HashMap<>();
a.put(name, value);
this.attributes = a;
......@@ -208,7 +208,7 @@ public class AtlasStruct implements Serializable {
return sb;
}
public static StringBuilder dumpObjects(Collection<? extends Object> objects, StringBuilder sb) {
public static StringBuilder dumpObjects(Collection<?> objects, StringBuilder sb) {
if (sb == null) {
sb = new StringBuilder();
}
......@@ -228,14 +228,14 @@ public class AtlasStruct implements Serializable {
return sb;
}
public static StringBuilder dumpObjects(Map<? extends Object, ? extends Object> objects, StringBuilder sb) {
public static StringBuilder dumpObjects(Map<?, ?> objects, StringBuilder sb) {
if (sb == null) {
sb = new StringBuilder();
}
if (MapUtils.isNotEmpty(objects)) {
int i = 0;
for (Map.Entry<? extends Object, ? extends Object> e : objects.entrySet()) {
for (Map.Entry<?, ?> e : objects.entrySet()) {
if (i > 0) {
sb.append(", ");
}
......
......@@ -29,7 +29,6 @@ import javax.xml.bind.annotation.XmlAccessType;
import javax.xml.bind.annotation.XmlAccessorType;
import javax.xml.bind.annotation.XmlRootElement;
import java.util.ArrayList;
import java.util.Collections;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
......@@ -71,7 +70,7 @@ public class EntityMutationResponse {
public void addEntity(EntityMutations.EntityOperation op, AtlasEntityHeader header) {
if (entitiesMutated == null) {
entitiesMutated = new HashMap<EntityMutations.EntityOperation, List<AtlasEntityHeader>>();
entitiesMutated = new HashMap<>();
}
if (entitiesMutated != null && entitiesMutated.get(op) == null) {
......
......@@ -27,7 +27,6 @@ import javax.xml.bind.annotation.XmlAccessorType;
import javax.xml.bind.annotation.XmlRootElement;
import java.io.Serializable;
import java.util.ArrayList;
import java.util.Collections;
import java.util.List;
import java.util.Objects;
......
......@@ -20,6 +20,7 @@ package org.apache.atlas.model.typedef;
import org.apache.atlas.model.TypeCategory;
import org.apache.commons.collections.CollectionUtils;
import org.apache.commons.collections.MapUtils;
import org.apache.commons.lang.builder.ToStringBuilder;
import org.codehaus.jackson.annotate.JsonAutoDetect;
import org.codehaus.jackson.annotate.JsonIgnoreProperties;
import org.codehaus.jackson.map.annotate.JsonSerialize;
......@@ -278,6 +279,7 @@ public abstract class AtlasBaseTypeDef implements java.io.Serializable {
return sb;
}
@Override
public boolean equals(Object o) {
if (this == o) return true;
......@@ -313,11 +315,6 @@ public abstract class AtlasBaseTypeDef implements java.io.Serializable {
return result;
}
@Override
public String toString() {
return toString(new StringBuilder()).toString();
}
public static String getArrayTypeName(String elemTypeName) {
return ATLAS_TYPE_ARRAY_PREFIX + elemTypeName + ATLAS_TYPE_ARRAY_SUFFIX;
}
......@@ -327,7 +324,7 @@ public abstract class AtlasBaseTypeDef implements java.io.Serializable {
valueTypeName, ATLAS_TYPE_MAP_SUFFIX);
}
public static StringBuilder dumpObjects(Collection<? extends Object> objects, StringBuilder sb) {
public static StringBuilder dumpObjects(Collection<?> objects, StringBuilder sb) {
if (sb == null) {
sb = new StringBuilder();
}
......@@ -347,14 +344,14 @@ public abstract class AtlasBaseTypeDef implements java.io.Serializable {
return sb;
}
public static StringBuilder dumpObjects(Map<? extends Object, ? extends Object> objects, StringBuilder sb) {
public static StringBuilder dumpObjects(Map<?, ?> objects, StringBuilder sb) {
if (sb == null) {
sb = new StringBuilder();
}
if (MapUtils.isNotEmpty(objects)) {
int i = 0;
for (Map.Entry<? extends Object, ? extends Object> e : objects.entrySet()) {
for (Map.Entry<?, ?> e : objects.entrySet()) {
if (i > 0) {
sb.append(", ");
}
......
......@@ -103,9 +103,9 @@ public class AtlasClassificationDef extends AtlasStructDef implements java.io.Se
}
if (CollectionUtils.isEmpty(superTypes)) {
this.superTypes = new HashSet<String>();
this.superTypes = new HashSet<>();
} else {
this.superTypes = new HashSet<String>(superTypes);
this.superTypes = new HashSet<>(superTypes);
}
}
......@@ -117,7 +117,7 @@ public class AtlasClassificationDef extends AtlasStructDef implements java.io.Se
Set<String> s = this.superTypes;
if (!hasSuperType(s, typeName)) {
s = new HashSet<String>(s);
s = new HashSet<>(s);
s.add(typeName);
......@@ -129,7 +129,7 @@ public class AtlasClassificationDef extends AtlasStructDef implements java.io.Se
Set<String> s = this.superTypes;
if (hasSuperType(s, typeName)) {
s = new HashSet<String>(s);
s = new HashSet<>(s);
s.remove(typeName);
......
......@@ -101,9 +101,9 @@ public class AtlasEntityDef extends AtlasStructDef implements java.io.Serializab
}
if (CollectionUtils.isEmpty(superTypes)) {
this.superTypes = new HashSet<String>();
this.superTypes = new HashSet<>();
} else {
this.superTypes = new HashSet<String>(superTypes);
this.superTypes = new HashSet<>(superTypes);
}
}
......@@ -115,7 +115,7 @@ public class AtlasEntityDef extends AtlasStructDef implements java.io.Serializab
Set<String> s = this.superTypes;
if (!hasSuperType(s, typeName)) {
s = new HashSet<String>(s);
s = new HashSet<>(s);
s.add(typeName);
......@@ -127,7 +127,7 @@ public class AtlasEntityDef extends AtlasStructDef implements java.io.Serializab
Set<String> s = this.superTypes;
if (hasSuperType(s, typeName)) {
s = new HashSet<String>(s);
s = new HashSet<>(s);
s.remove(typeName);
......
......@@ -107,11 +107,11 @@ public class AtlasEnumDef extends AtlasBaseTypeDef implements Serializable {
}
if (CollectionUtils.isEmpty(elementDefs)) {
this.elementDefs = new ArrayList<AtlasEnumElementDef>();
this.elementDefs = new ArrayList<>();
} else {
// if multiple elements with same value are present, keep only the last entry
List<AtlasEnumElementDef> tmpList = new ArrayList<AtlasEnumElementDef>(elementDefs.size());
Set<String> elementValues = new HashSet<String>();
List<AtlasEnumElementDef> tmpList = new ArrayList<>(elementDefs.size());
Set<String> elementValues = new HashSet<>();
ListIterator<AtlasEnumElementDef> iter = elementDefs.listIterator(elementDefs.size());
while (iter.hasPrevious()) {
......@@ -149,7 +149,7 @@ public class AtlasEnumDef extends AtlasBaseTypeDef implements Serializable {
public void addElement(AtlasEnumElementDef elementDef) {
List<AtlasEnumElementDef> e = this.elementDefs;
List<AtlasEnumElementDef> tmpList = new ArrayList<AtlasEnumElementDef>();
List<AtlasEnumElementDef> tmpList = new ArrayList<>();
if (CollectionUtils.isNotEmpty(e)) {
// copy existing elements, except ones having same value as the element being added
for (AtlasEnumElementDef existingElem : e) {
......@@ -168,7 +168,7 @@ public class AtlasEnumDef extends AtlasBaseTypeDef implements Serializable {
// if element doesn't exist, no need to create the tmpList below
if (hasElement(e, elemValue)) {
List<AtlasEnumElementDef> tmpList = new ArrayList<AtlasEnumElementDef>();
List<AtlasEnumElementDef> tmpList = new ArrayList<>();
// copy existing elements, except ones having same value as the element being removed
for (AtlasEnumElementDef existingElem : e) {
......
......@@ -106,11 +106,11 @@ public class AtlasStructDef extends AtlasBaseTypeDef implements Serializable {
}
if (CollectionUtils.isEmpty(attributeDefs)) {
this.attributeDefs = new ArrayList<AtlasAttributeDef>();
this.attributeDefs = new ArrayList<>();
} else {
// if multiple attributes with same name are present, keep only the last entry
List<AtlasAttributeDef> tmpList = new ArrayList<AtlasAttributeDef>(attributeDefs.size());
Set<String> attribNames = new HashSet<String>();
List<AtlasAttributeDef> tmpList = new ArrayList<>(attributeDefs.size());
Set<String> attribNames = new HashSet<>();
ListIterator<AtlasAttributeDef> iter = attributeDefs.listIterator(attributeDefs.size());
while (iter.hasPrevious()) {
......@@ -144,7 +144,7 @@ public class AtlasStructDef extends AtlasBaseTypeDef implements Serializable {
List<AtlasAttributeDef> a = this.attributeDefs;
List<AtlasAttributeDef> tmpList = new ArrayList<AtlasAttributeDef>();
List<AtlasAttributeDef> tmpList = new ArrayList<>();
if (CollectionUtils.isNotEmpty(a)) {
// copy existing attributes, except ones having same name as the attribute being added
for (AtlasAttributeDef existingAttrDef : a) {
......@@ -162,7 +162,7 @@ public class AtlasStructDef extends AtlasBaseTypeDef implements Serializable {
List<AtlasAttributeDef> a = this.attributeDefs;
if (hasAttribute(a, attrName)) {
List<AtlasAttributeDef> tmpList = new ArrayList<AtlasAttributeDef>();
List<AtlasAttributeDef> tmpList = new ArrayList<>();
// copy existing attributes, except ones having same name as the attribute being removed
for (AtlasAttributeDef existingAttrDef : a) {
......@@ -256,7 +256,7 @@ public class AtlasStructDef extends AtlasBaseTypeDef implements Serializable {
/**
* single-valued attribute or multi-valued attribute.
*/
public enum Cardinality { SINGLE, LIST, SET };
public enum Cardinality { SINGLE, LIST, SET }
public static final int COUNT_NOT_SET = -1;
......@@ -376,7 +376,7 @@ public class AtlasStructDef extends AtlasBaseTypeDef implements Serializable {
if (CollectionUtils.isEmpty(constraintDefs)) {
this.constraintDefs = null;
} else {
this.constraintDefs = new ArrayList<AtlasConstraintDef>(constraintDefs);
this.constraintDefs = new ArrayList<>(constraintDefs);
}
}
......@@ -482,7 +482,7 @@ public class AtlasStructDef extends AtlasBaseTypeDef implements Serializable {
this.type = type;
if (params != null) {
this.params = new HashMap<String, Object>(params);
this.params = new HashMap<>(params);
}
}
......@@ -491,7 +491,7 @@ public class AtlasStructDef extends AtlasBaseTypeDef implements Serializable {
this.type = that.type;
if (that.params != null) {
this.params = new HashMap<String, Object>(that.params);
this.params = new HashMap<>(that.params);
}
}
}
......
......@@ -111,8 +111,8 @@ public class AtlasArrayType extends AtlasType {
}
@Override
public Collection<? extends Object> createDefaultValue() {
Collection<Object> ret = new ArrayList<Object>();
public Collection<?> createDefaultValue() {
Collection<Object> ret = new ArrayList<>();
ret.add(elementType.createDefaultValue());
......@@ -161,13 +161,13 @@ public class AtlasArrayType extends AtlasType {
}
@Override
public Collection<? extends Object> getNormalizedValue(Object obj) {
public Collection<?> getNormalizedValue(Object obj) {
if (obj == null) {
return null;
}
if (obj instanceof List || obj instanceof Set) {
List<Object> ret = new ArrayList<Object>();
List<Object> ret = new ArrayList<>();
Collection objList = (Collection) obj;
......@@ -191,7 +191,7 @@ public class AtlasArrayType extends AtlasType {
return ret;
} else if (obj.getClass().isArray()) {
List<Object> ret = new ArrayList<Object>();
List<Object> ret = new ArrayList<>();
int arrayLen = Array.getLength(obj);
......
......@@ -73,7 +73,7 @@ public class AtlasBuiltInTypes {
* class that implements behaviour of byte type.
*/
public static class AtlasByteType extends AtlasType {
private static final Byte DEFAULT_VALUE = new Byte((byte)0);
private static final Byte DEFAULT_VALUE = (byte) 0;
public AtlasByteType() {
super(AtlasBaseTypeDef.ATLAS_TYPE_BYTE, TypeCategory.PRIMITIVE);
......@@ -117,7 +117,7 @@ public class AtlasBuiltInTypes {
* class that implements behaviour of short type.
*/
public static class AtlasShortType extends AtlasType {
private static final Short DEFAULT_VALUE = new Short((short)0);
private static final Short DEFAULT_VALUE = (short) 0;
public AtlasShortType() {
super(AtlasBaseTypeDef.ATLAS_TYPE_SHORT, TypeCategory.PRIMITIVE);
......@@ -161,7 +161,7 @@ public class AtlasBuiltInTypes {
* class that implements behaviour of integer type.
*/
public static class AtlasIntType extends AtlasType {
private static final Integer DEFAULT_VALUE = new Integer(0);
private static final Integer DEFAULT_VALUE = 0;
public AtlasIntType() {
super(AtlasBaseTypeDef.ATLAS_TYPE_INT, TypeCategory.PRIMITIVE);
......@@ -205,7 +205,7 @@ public class AtlasBuiltInTypes {
* class that implements behaviour of long type.
*/
public static class AtlasLongType extends AtlasType {
private static final Long DEFAULT_VALUE = new Long(0);
private static final Long DEFAULT_VALUE = 0L;
public AtlasLongType() {
super(AtlasBaseTypeDef.ATLAS_TYPE_LONG, TypeCategory.PRIMITIVE);
......@@ -249,7 +249,7 @@ public class AtlasBuiltInTypes {
* class that implements behaviour of float type.
*/
public static class AtlasFloatType extends AtlasType {
private static final Float DEFAULT_VALUE = new Float(0);
private static final Float DEFAULT_VALUE = 0f;
public AtlasFloatType() {
super(AtlasBaseTypeDef.ATLAS_TYPE_FLOAT, TypeCategory.PRIMITIVE);
......@@ -293,7 +293,7 @@ public class AtlasBuiltInTypes {
* class that implements behaviour of double type.
*/
public static class AtlasDoubleType extends AtlasType {
private static final Double DEFAULT_VALUE = new Double(0);
private static final Double DEFAULT_VALUE = 0d;
public AtlasDoubleType() {
super(AtlasBaseTypeDef.ATLAS_TYPE_DOUBLE, TypeCategory.PRIMITIVE);
......
......@@ -20,7 +20,6 @@ package org.apache.atlas.type;
import org.apache.atlas.AtlasErrorCode;
import org.apache.atlas.exception.AtlasBaseException;
import org.apache.atlas.model.TypeCategory;
import org.apache.atlas.model.instance.AtlasClassification;
import org.apache.atlas.model.typedef.AtlasClassificationDef;
import org.apache.atlas.model.typedef.AtlasStructDef.AtlasAttributeDef;
......@@ -146,11 +145,11 @@ public class AtlasClassificationType extends AtlasStructType {
}
public boolean isSuperTypeOf(AtlasClassificationType classificationType) {
return classificationType != null ? classificationType.getAllSuperTypes().contains(this.getTypeName()) : false;
return classificationType != null && classificationType.getAllSuperTypes().contains(this.getTypeName());
}
public boolean isSubTypeOf(AtlasClassificationType classificationType) {
return classificationType != null ? allSuperTypes.contains(classificationType.getTypeName()) : false;
return classificationType != null && allSuperTypes.contains(classificationType.getTypeName());
}
@Override
......
......@@ -20,7 +20,6 @@ package org.apache.atlas.type;
import org.apache.atlas.AtlasErrorCode;
import org.apache.atlas.exception.AtlasBaseException;
import org.apache.atlas.model.TypeCategory;
import org.apache.atlas.model.instance.AtlasEntity;
import org.apache.atlas.model.typedef.AtlasEntityDef;
import org.apache.atlas.model.typedef.AtlasStructDef.AtlasAttributeDef;
......@@ -144,11 +143,11 @@ public class AtlasEntityType extends AtlasStructType {
}
public boolean isSuperTypeOf(AtlasEntityType entityType) {
return entityType != null ? entityType.getAllSuperTypes().contains(this.getTypeName()) : false;
return entityType != null && entityType.getAllSuperTypes().contains(this.getTypeName());
}
public boolean isSubTypeOf(AtlasEntityType entityType) {
return entityType != null ? allSuperTypes.contains(entityType.getTypeName()) : false;
return entityType != null && allSuperTypes.contains(entityType.getTypeName());
}
@Override
......
......@@ -23,7 +23,6 @@ import java.util.HashMap;
import java.util.Map;
import org.apache.atlas.exception.AtlasBaseException;
import org.apache.atlas.model.TypeCategory;
import org.apache.atlas.model.typedef.AtlasEnumDef;
import org.apache.atlas.model.typedef.AtlasEnumDef.AtlasEnumElementDef;
......@@ -39,7 +38,7 @@ public class AtlasEnumType extends AtlasType {
public AtlasEnumType(AtlasEnumDef enumDef) {
super(enumDef);
Map<String, AtlasEnumElementDef> e = new HashMap<String, AtlasEnumElementDef>();
Map<String, AtlasEnumElementDef> e = new HashMap<>();
for (AtlasEnumElementDef elementDef : enumDef.getElementDefs()) {
e.put(elementDef.getValue().toLowerCase(), elementDef);
......
......@@ -93,7 +93,7 @@ public class AtlasMapType extends AtlasType {
@Override
public Map<Object, Object> createDefaultValue() {
Map<Object, Object> ret = new HashMap<Object, Object>();
Map<Object, Object> ret = new HashMap<>();
ret.put(keyType.createDefaultValue(), valueType.createDefaultValue());
......@@ -126,7 +126,7 @@ public class AtlasMapType extends AtlasType {
}
if (obj instanceof Map) {
Map<Object, Object> ret = new HashMap<Object, Object>();
Map<Object, Object> ret = new HashMap<>();
Map<Object, Objects> map = (Map<Object, Objects>) obj;
......
......@@ -51,7 +51,7 @@ public class AtlasStructType extends AtlasType {
private Map<String, AtlasType> attrTypes = Collections.emptyMap();
private Set<String> foreignKeyAttributes = new HashSet<>();
private Map<String, TypeAttributePair> mappedFromRefAttributes = new HashMap<String, TypeAttributePair>();
private Map<String, TypeAttributePair> mappedFromRefAttributes = new HashMap<>();
public AtlasStructType(AtlasStructDef structDef) {
......@@ -101,7 +101,7 @@ public class AtlasStructType extends AtlasType {
@Override
public void resolveReferences(AtlasTypeRegistry typeRegistry) throws AtlasBaseException {
Map<String, AtlasType> a = new HashMap<String, AtlasType>();
Map<String, AtlasType> a = new HashMap<>();
for (AtlasAttributeDef attributeDef : structDef.getAttributeDefs()) {
AtlasType attrType = typeRegistry.getType(attributeDef.getTypeName());
......@@ -275,7 +275,7 @@ public class AtlasStructType extends AtlasType {
Map<String, Object> attributes = obj.getAttributes();
if (attributes == null) {
attributes = new HashMap<String, Object>();
attributes = new HashMap<>();
}
for (AtlasAttributeDef attributeDef : structDef.getAttributeDefs()) {
......@@ -348,11 +348,14 @@ public class AtlasStructType extends AtlasType {
continue;
}
if (constraintType.equals(AtlasConstraintDef.CONSTRAINT_TYPE_FOREIGN_KEY)) {
switch (constraintType) {
case AtlasConstraintDef.CONSTRAINT_TYPE_FOREIGN_KEY:
resolveForeignKeyConstraint(attribDef, constraintDef, attribType);
} else if (constraintType.equals(CONSTRAINT_TYPE_MAPPED_FROM_REF)) {
break;
case CONSTRAINT_TYPE_MAPPED_FROM_REF:
resolveMappedFromRefConstraint(attribDef, constraintDef, attribType);
} else {
break;
default:
throw new AtlasBaseException(AtlasErrorCode.UNKNOWN_CONSTRAINT, constraintType,
getTypeName(), attribDef.getName());
}
......
......@@ -539,13 +539,13 @@ public class AtlasTypeRegistry {
LOG.debug("==> AtlasTypeRegistry.updateType({})", typeDef);
}
if (typeDef == null) {
// ignore
} else if (StringUtils.isNotBlank(typeDef.getGuid())) {
if (typeDef != null) {
if (StringUtils.isNotBlank(typeDef.getGuid())) {
updateTypeByGuidWithNoRefResolve(typeDef.getGuid(), typeDef);
} else if (StringUtils.isNotBlank(typeDef.getName())) {
updateTypeByNameWithNoRefResolve(typeDef.getName(), typeDef);
}
}
if (LOG.isDebugEnabled()) {
LOG.debug("<== AtlasTypeRegistry.updateType({})", typeDef);
......@@ -694,15 +694,13 @@ class TypeCache {
}
/**
 * Looks up a cached type by its GUID.
 *
 * @param guid the type GUID; may be null
 * @return the cached {@link AtlasType} for the GUID, or null if {@code guid} is
 *         null or no entry exists in the cache
 */
public AtlasType getTypeByGuid(String guid) {
    // Null-guard avoids a NullPointerException from HashMap lookups with null keys
    // in maps that reject them, and makes the "unknown" result explicit.
    return guid != null ? typeGuidMap.get(guid) : null;
}
/**
 * Looks up a cached type by its name.
 *
 * @param name the type name; may be null
 * @return the cached {@link AtlasType} for the name, or null if {@code name} is
 *         null or no entry exists in the cache
 */
public AtlasType getTypeByName(String name) {
    // Mirrors getTypeByGuid: null input short-circuits to a null result.
    return name != null ? typeNameMap.get(name) : null;
}
public void updateGuid(String typeName, String currGuid, String newGuid) {
......@@ -768,15 +766,13 @@ class TypeDefCache<T extends AtlasBaseTypeDef> {
}
/**
 * Looks up a cached type definition by its GUID.
 *
 * @param guid the type-definition GUID; may be null
 * @return the cached type definition for the GUID, or null if {@code guid} is
 *         null or not present in the cache
 */
public T getTypeDefByGuid(String guid) {
    // Single-expression form: no temporary needed for a straight map lookup.
    return guid != null ? typeDefGuidMap.get(guid) : null;
}
/**
 * Looks up a cached type definition by its name.
 *
 * @param name the type-definition name; may be null
 * @return the cached type definition for the name, or null if {@code name} is
 *         null or not present in the cache
 */
public T getTypeDefByName(String name) {
    // Mirrors getTypeDefByGuid: null input short-circuits to a null result.
    return name != null ? typeDefNameMap.get(name) : null;
}
public void updateGuid(String typeName, String newGuid) {
......
......@@ -51,14 +51,14 @@ import static org.apache.atlas.model.typedef.AtlasBaseTypeDef.ATLAS_TYPE_MAP_SUF
* Utility methods for AtlasType/AtlasTypeDef.
*/
public class AtlasTypeUtil {
private static final Set<String> ATLAS_BUILTIN_TYPENAMES = new HashSet<String>();
private static final Set<String> ATLAS_BUILTIN_TYPENAMES = new HashSet<>();
static {
Collections.addAll(ATLAS_BUILTIN_TYPENAMES, AtlasBaseTypeDef.ATLAS_BUILTIN_TYPES);
}
public static Set<String> getReferencedTypeNames(String typeName) {
Set<String> ret = new HashSet<String>();
Set<String> ret = new HashSet<>();
getReferencedTypeNames(typeName, ret);
......
......@@ -656,7 +656,7 @@ public final class TestUtilsV2 {
AtlasTypeUtil.createRequiredAttrDef("level", "int"));
AtlasClassificationDef janitorSecurityClearanceTypeDef =
AtlasTypeUtil.createTraitTypeDef("JanitorClearance", "JanitorClearance_description", ImmutableSet.<String>of("SecurityClearance1"),
AtlasTypeUtil.createTraitTypeDef("JanitorClearance", "JanitorClearance_description", ImmutableSet.of("SecurityClearance1"),
AtlasTypeUtil.createRequiredAttrDef("level", "int"));
return Arrays.asList(securityClearanceTypeDef, janitorSecurityClearanceTypeDef);
......
......@@ -357,28 +357,28 @@ public final class ModelTestUtil {
}
public static List<AtlasAttributeDef> newAttributeDefsWithAllBuiltInTypes(String attrNamePrefix) {
List<AtlasAttributeDef> ret = new ArrayList<AtlasAttributeDef>();
List<AtlasAttributeDef> ret = new ArrayList<>();
// add all built-in types
for (int i = 0; i < ATLAS_BUILTIN_TYPES.length; i++) {
ret.add(getAttributeDef(attrNamePrefix, ATLAS_BUILTIN_TYPES[i]));
for (String ATLAS_BUILTIN_TYPE2 : ATLAS_BUILTIN_TYPES) {
ret.add(getAttributeDef(attrNamePrefix, ATLAS_BUILTIN_TYPE2));
}
// add enum types
ret.add(getAttributeDef(attrNamePrefix, ENUM_DEF.getName()));
ret.add(getAttributeDef(attrNamePrefix, ENUM_DEF_WITH_NO_DEFAULT.getName()));
// add array of built-in types
for (int i = 0; i < ATLAS_BUILTIN_TYPES.length; i++) {
ret.add(getAttributeDef(attrNamePrefix, AtlasBaseTypeDef.getArrayTypeName(ATLAS_BUILTIN_TYPES[i])));
for (String ATLAS_BUILTIN_TYPE1 : ATLAS_BUILTIN_TYPES) {
ret.add(getAttributeDef(attrNamePrefix, AtlasBaseTypeDef.getArrayTypeName(ATLAS_BUILTIN_TYPE1)));
}
// add array of enum types
ret.add(getAttributeDef(attrNamePrefix, AtlasBaseTypeDef.getArrayTypeName(ENUM_DEF.getName())));
ret.add(getAttributeDef(attrNamePrefix, AtlasBaseTypeDef.getArrayTypeName(ENUM_DEF_WITH_NO_DEFAULT.getName())));
// add few map types
for (int i = 0; i < ATLAS_PRIMITIVE_TYPES.length; i++) {
for (String ATLAS_PRIMITIVE_TYPE3 : ATLAS_PRIMITIVE_TYPES) {
ret.add(getAttributeDef(attrNamePrefix,
AtlasBaseTypeDef.getMapTypeName(ATLAS_PRIMITIVE_TYPES[i], getRandomBuiltInType())));
AtlasBaseTypeDef.getMapTypeName(ATLAS_PRIMITIVE_TYPE3, getRandomBuiltInType())));
}
// add map types with enum as key
ret.add(getAttributeDef(attrNamePrefix,
......@@ -392,7 +392,7 @@ public final class ModelTestUtil {
AtlasBaseTypeDef.getMapTypeName(getRandomPrimitiveType(), ENUM_DEF_WITH_NO_DEFAULT.getName())));
// add few array of arrays
for (int i = 0; i < ATLAS_BUILTIN_TYPES.length; i++) {
for (String ATLAS_BUILTIN_TYPE : ATLAS_BUILTIN_TYPES) {
ret.add(getAttributeDef(attrNamePrefix,
AtlasBaseTypeDef.getArrayTypeName(AtlasBaseTypeDef.getArrayTypeName(getRandomBuiltInType()))));
}
......@@ -400,9 +400,9 @@ public final class ModelTestUtil {
ret.add(getAttributeDef(attrNamePrefix, AtlasBaseTypeDef.getArrayTypeName(ENUM_DEF_WITH_NO_DEFAULT.getName())));
// add few array of maps
for (int i = 0; i < ATLAS_PRIMITIVE_TYPES.length; i++) {
for (String ATLAS_PRIMITIVE_TYPE2 : ATLAS_PRIMITIVE_TYPES) {
ret.add(getAttributeDef(attrNamePrefix, AtlasBaseTypeDef.getArrayTypeName(
AtlasBaseTypeDef.getMapTypeName(ATLAS_PRIMITIVE_TYPES[i], getRandomBuiltInType()))));
AtlasBaseTypeDef.getMapTypeName(ATLAS_PRIMITIVE_TYPE2, getRandomBuiltInType()))));
}
ret.add(getAttributeDef(attrNamePrefix, AtlasBaseTypeDef.getArrayTypeName(
AtlasBaseTypeDef.getMapTypeName(ENUM_DEF.getName(), getRandomBuiltInType()))));
......@@ -414,15 +414,15 @@ public final class ModelTestUtil {
AtlasBaseTypeDef.getMapTypeName(getRandomPrimitiveType(), ENUM_DEF_WITH_NO_DEFAULT.getName()))));
// add few map of arrays
for (int i = 0; i < ATLAS_PRIMITIVE_TYPES.length; i++) {
ret.add(getAttributeDef(attrNamePrefix, AtlasBaseTypeDef.getMapTypeName(ATLAS_PRIMITIVE_TYPES[i],
for (String ATLAS_PRIMITIVE_TYPE1 : ATLAS_PRIMITIVE_TYPES) {
ret.add(getAttributeDef(attrNamePrefix, AtlasBaseTypeDef.getMapTypeName(ATLAS_PRIMITIVE_TYPE1,
AtlasBaseTypeDef.getArrayTypeName(getRandomBuiltInType()))));
}
// add few map of maps
for (int i = 0; i < ATLAS_PRIMITIVE_TYPES.length; i++) {
ret.add(getAttributeDef(attrNamePrefix, AtlasBaseTypeDef.getMapTypeName(ATLAS_PRIMITIVE_TYPES[i],
AtlasBaseTypeDef.getMapTypeName(ATLAS_PRIMITIVE_TYPES[i], getRandomBuiltInType()))));
for (String ATLAS_PRIMITIVE_TYPE : ATLAS_PRIMITIVE_TYPES) {
ret.add(getAttributeDef(attrNamePrefix, AtlasBaseTypeDef.getMapTypeName(ATLAS_PRIMITIVE_TYPE,
AtlasBaseTypeDef.getMapTypeName(ATLAS_PRIMITIVE_TYPE, getRandomBuiltInType()))));
}
return ret;
......
......@@ -103,7 +103,7 @@ public class TestAtlasEntityDef {
AtlasEntityDef entityDef = ModelTestUtil.newEntityDefWithSuperTypes();
Set<String> oldSuperTypes = entityDef.getSuperTypes();
Set<String> newSuperTypes = new HashSet<String>();
Set<String> newSuperTypes = new HashSet<>();
newSuperTypes.add("newType-abcd-1234");
......
......@@ -93,7 +93,7 @@ public class TestAtlasEnumDef {
AtlasEnumDef enumDef = ModelTestUtil.newEnumDef();
List<AtlasEnumElementDef> oldElements = enumDef.getElementDefs();
List<AtlasEnumElementDef> newElements = new ArrayList<AtlasEnumElementDef>();
List<AtlasEnumElementDef> newElements = new ArrayList<>();
newElements.add(new AtlasEnumElementDef("newElement", "new Element", 100));
......
......@@ -33,14 +33,14 @@ public class TestAtlasArrayType {
private final Object[] invalidValues;
{
List<Integer> intList = new ArrayList<Integer>();
Set<Integer> intSet = new HashSet<Integer>();
List<Integer> intList = new ArrayList<>();
Set<Integer> intSet = new HashSet<>();
Integer[] intArray = new Integer[] { 1, 2, 3 };
List<Object> objList = new ArrayList<Object>();
Set<Object> objSet = new HashSet<Object>();
List<Object> objList = new ArrayList<>();
Set<Object> objSet = new HashSet<>();
Object[] objArray = new Object[] { 1, 2, 3 };
List<String> strList = new ArrayList<String>();
Set<String> strSet = new HashSet<String>();
List<String> strList = new ArrayList<>();
Set<String> strSet = new HashSet<>();
String[] strArray = new String[] { "1", "2", "3" };
for (int i = 0; i < 10; i++) {
......@@ -105,7 +105,7 @@ public class TestAtlasArrayType {
@Test
public void testArrayTypeValidateValue() {
List<String> messages = new ArrayList<String>();
List<String> messages = new ArrayList<>();
for (Object value : validValues) {
assertTrue(intArrayType.validateValue(value, "testObj", messages));
assertEquals(messages.size(), 0, "value=" + value);
......
......@@ -94,7 +94,7 @@ public class TestAtlasBigDecimalType {
@Test
public void testBigDecimalTypeValidateValue() {
List<String> messages = new ArrayList<String>();
List<String> messages = new ArrayList<>();
for (Object value : validValues) {
assertTrue(bigDecimalType.validateValue(value, "testObj", messages));
assertEquals(messages.size(), 0, "value=" + value);
......
......@@ -95,7 +95,7 @@ public class TestAtlasBigIntegerType {
@Test
public void testBigIntegerTypeValidateValue() {
List<String> messages = new ArrayList<String>();
List<String> messages = new ArrayList<>();
for (Object value : validValues) {
assertTrue(bigIntegerType.validateValue(value, "testObj", messages));
assertEquals(messages.size(), 0, "value=" + value);
......
......@@ -73,7 +73,7 @@ public class TestAtlasBooleanType {
@Test
public void testBooleanTypeValidateValue() {
List<String> messages = new ArrayList<String>();
List<String> messages = new ArrayList<>();
for (Object value : validValues) {
assertTrue(booleanType.validateValue(value, "testObj", messages));
assertEquals(messages.size(), 0, "value=" + value);
......
......@@ -95,7 +95,7 @@ public class TestAtlasByteType {
@Test
public void testByteTypeValidateValue() {
List<String> messages = new ArrayList<String>();
List<String> messages = new ArrayList<>();
for (Object value : validValues) {
assertTrue(byteType.validateValue(value, "testObj", messages));
assertEquals(messages.size(), 0, "value=" + value);
......
......@@ -31,8 +31,8 @@ import static org.testng.Assert.*;
public class TestAtlasClassificationType {
private final AtlasClassificationType classificationType;
private final List<Object> validValues = new ArrayList<Object>();
private final List<Object> invalidValues = new ArrayList<Object>();
private final List<Object> validValues = new ArrayList<>();
private final List<Object> invalidValues = new ArrayList<>();
{
classificationType = getClassificationType(ModelTestUtil.getClassificationDefWithSuperTypes());
......@@ -55,7 +55,7 @@ public class TestAtlasClassificationType {
invalidValues.add(invalidValue2);
invalidValues.add(invalidValue3);
invalidValues.add(new AtlasClassification()); // no values for mandatory attributes
invalidValues.add(new HashMap<Object, Object>()); // no values for mandatory attributes
invalidValues.add(new HashMap<>()); // no values for mandatory attributes
invalidValues.add(1); // incorrect datatype
invalidValues.add(new HashSet()); // incorrect datatype
invalidValues.add(new ArrayList()); // incorrect datatype
......@@ -102,7 +102,7 @@ public class TestAtlasClassificationType {
@Test
public void testClassificationTypeValidateValue() {
List<String> messages = new ArrayList<String>();
List<String> messages = new ArrayList<>();
for (Object value : validValues) {
assertTrue(classificationType.validateValue(value, "testObj", messages));
assertEquals(messages.size(), 0, "value=" + value);
......
......@@ -107,7 +107,7 @@ public class TestAtlasDateType {
@Test
public void testDateTypeValidateValue() {
List<String> messages = new ArrayList<String>();
List<String> messages = new ArrayList<>();
for (Object value : validValues) {
assertTrue(dateType.validateValue(value, "testObj", messages));
assertEquals(messages.size(), 0, "value=" + value);
......
......@@ -95,7 +95,7 @@ public class TestAtlasDoubleType {
@Test
public void testDoubleTypeValidateValue() {
List<String> messages = new ArrayList<String>();
List<String> messages = new ArrayList<>();
for (Object value : validValues) {
assertTrue(doubleType.validateValue(value, "testObj", messages));
assertEquals(messages.size(), 0, "value=" + value);
......
......@@ -38,8 +38,8 @@ import static org.testng.Assert.*;
public class TestAtlasEntityType {
private final AtlasEntityType entityType;
private final List<Object> validValues = new ArrayList<Object>();
private final List<Object> invalidValues = new ArrayList<Object>();
private final List<Object> validValues = new ArrayList<>();
private final List<Object> invalidValues = new ArrayList<>();
{
entityType = getEntityType(ModelTestUtil.getEntityDefWithSuperTypes());
......@@ -62,7 +62,7 @@ public class TestAtlasEntityType {
invalidValues.add(invalidValue2);
invalidValues.add(invalidValue3);
invalidValues.add(new AtlasEntity()); // no values for mandatory attributes
invalidValues.add(new HashMap<Object, Object>()); // no values for mandatory attributes
invalidValues.add(new HashMap<>()); // no values for mandatory attributes
invalidValues.add(1); // incorrect datatype
invalidValues.add(new HashSet()); // incorrect datatype
invalidValues.add(new ArrayList()); // incorrect datatype
......@@ -109,7 +109,7 @@ public class TestAtlasEntityType {
@Test
public void testEntityTypeValidateValue() {
List<String> messages = new ArrayList<String>();
List<String> messages = new ArrayList<>();
for (Object value : validValues) {
assertTrue(entityType.validateValue(value, "testObj", messages));
assertEquals(messages.size(), 0, "value=" + value);
......
......@@ -95,7 +95,7 @@ public class TestAtlasFloatType {
@Test
public void testFloatTypeValidateValue() {
List<String> messages = new ArrayList<String>();
List<String> messages = new ArrayList<>();
for (Object value : validValues) {
assertTrue(floatType.validateValue(value, "testObj", messages));
assertEquals(messages.size(), 0, "value=" + value);
......
......@@ -95,7 +95,7 @@ public class TestAtlasIntType {
@Test
public void testIntTypeValidateValue() {
List<String> messages = new ArrayList<String>();
List<String> messages = new ArrayList<>();
for (Object value : validValues) {
assertTrue(intType.validateValue(value, "testObj", messages));
assertEquals(messages.size(), 0, "value=" + value);
......
......@@ -95,7 +95,7 @@ public class TestAtlasLongType {
@Test
public void testLongTypeValidateValue() {
List<String> messages = new ArrayList<String>();
List<String> messages = new ArrayList<>();
for (Object value : validValues) {
assertTrue(longType.validateValue(value, "testObj", messages));
assertEquals(messages.size(), 0, "value=" + value);
......
......@@ -34,13 +34,13 @@ public class TestAtlasMapType {
private final Object[] invalidValues;
{
Map<String, Integer> strIntMap = new HashMap<String, Integer>();
Map<String, Double> strDoubleMap = new HashMap<String, Double>();
Map<String, String> strStringMap = new HashMap<String, String>();
Map<Integer, Integer> intIntMap = new HashMap<Integer, Integer>();
Map<Object, Object> objObjMap = new HashMap<Object, Object>();
Map<Object, Object> invObjObjMap1 = new HashMap<Object, Object>();
Map<Object, Object> invObjObjMap2 = new HashMap<Object, Object>();
Map<String, Integer> strIntMap = new HashMap<>();
Map<String, Double> strDoubleMap = new HashMap<>();
Map<String, String> strStringMap = new HashMap<>();
Map<Integer, Integer> intIntMap = new HashMap<>();
Map<Object, Object> objObjMap = new HashMap<>();
Map<Object, Object> invObjObjMap1 = new HashMap<>();
Map<Object, Object> invObjObjMap2 = new HashMap<>();
for (int i = 0; i < 10; i++) {
strIntMap.put(Integer.toString(i), i);
......@@ -54,7 +54,7 @@ public class TestAtlasMapType {
invObjObjMap2.put("123", "xyz"); // invalid value
validValues = new Object[] {
null, new HashMap<String, Integer>(), new HashMap<Object, Object>(), strIntMap, strDoubleMap, strStringMap,
null, new HashMap<String, Integer>(), new HashMap<>(), strIntMap, strDoubleMap, strStringMap,
intIntMap, objObjMap,
};
......@@ -101,7 +101,7 @@ public class TestAtlasMapType {
@Test
public void testMapTypeValidateValue() {
List<String> messages = new ArrayList<String>();
List<String> messages = new ArrayList<>();
for (Object value : validValues) {
assertTrue(intIntMapType.validateValue(value, "testObj", messages));
assertEquals(messages.size(), 0, "value=" + value);
......
......@@ -37,11 +37,11 @@ public class TestAtlasObjectIdType {
private final Object[] invalidValues;
{
Map<String, String> objectId1 = new HashMap<String, String>();
Map<Object, Object> objectId2 = new HashMap<Object, Object>();
Map<Object, Object> objectId3 = new HashMap<Object, Object>();
Map<Object, Object> objectId4 = new HashMap<Object, Object>();
Map<Object, Object> objectId5 = new HashMap<Object, Object>();
Map<String, String> objectId1 = new HashMap<>();
Map<Object, Object> objectId2 = new HashMap<>();
Map<Object, Object> objectId3 = new HashMap<>();
Map<Object, Object> objectId4 = new HashMap<>();
Map<Object, Object> objectId5 = new HashMap<>();
objectId1.put(AtlasObjectId.KEY_TYPENAME, "testType");
objectId1.put(AtlasObjectId.KEY_GUID, "guid-1234");
......@@ -107,7 +107,7 @@ public class TestAtlasObjectIdType {
@Test
public void testObjectIdTypeValidateValue() {
List<String> messages = new ArrayList<String>();
List<String> messages = new ArrayList<>();
for (Object value : validValues) {
assertTrue(objectIdType.validateValue(value, "testObj", messages));
assertEquals(messages.size(), 0, "value=" + value);
......
......@@ -95,7 +95,7 @@ public class TestAtlasShortType {
@Test
public void testShortTypeValidateValue() {
List<String> messages = new ArrayList<String>();
List<String> messages = new ArrayList<>();
for (Object value : validValues) {
assertTrue(shortType.validateValue(value, "testObj", messages));
assertEquals(messages.size(), 0, "value=" + value);
......
......@@ -75,7 +75,7 @@ public class TestAtlasStringType {
@Test
public void testStringTypeValidateValue() {
List<String> messages = new ArrayList<String>();
List<String> messages = new ArrayList<>();
for (Object value : validValues) {
assertTrue(stringType.validateValue(value, "testObj", messages));
assertEquals(messages.size(), 0, "value=" + value);
......
......@@ -78,8 +78,8 @@ public class TestAtlasStructType {
structDef.addAttribute(multiValuedAttribMax);
structType = getStructType(structDef);
validValues = new ArrayList<Object>();
invalidValues = new ArrayList<Object>();
validValues = new ArrayList<>();
invalidValues = new ArrayList<>();
AtlasStruct invalidValue1 = structType.createDefaultValue();
AtlasStruct invalidValue2 = structType.createDefaultValue();
......@@ -121,7 +121,7 @@ public class TestAtlasStructType {
invalidValues.add(invalidValue6);
invalidValues.add(invalidValue7);
invalidValues.add(new AtlasStruct()); // no values for mandatory attributes
invalidValues.add(new HashMap<Object, Object>()); // no values for mandatory attributes
invalidValues.add(new HashMap<>()); // no values for mandatory attributes
invalidValues.add(1); // incorrect datatype
invalidValues.add(new HashSet()); // incorrect datatype
invalidValues.add(new ArrayList()); // incorrect datatype
......@@ -168,7 +168,7 @@ public class TestAtlasStructType {
@Test
public void testStructTypeValidateValue() {
List<String> messages = new ArrayList<String>();
List<String> messages = new ArrayList<>();
for (Object value : validValues) {
assertTrue(structType.validateValue(value, "testObj", messages));
assertEquals(messages.size(), 0, "value=" + value);
......
......@@ -277,7 +277,7 @@ public class KafkaNotification extends AbstractNotification implements Service {
protected <T> org.apache.atlas.kafka.KafkaConsumer<T>
createKafkaConsumer(Class<T> type, MessageDeserializer<T> deserializer, KafkaStream stream,
int consumerId, ConsumerConnector consumerConnector, boolean autoCommitEnabled) {
return new org.apache.atlas.kafka.KafkaConsumer<T>(deserializer, stream,
return new org.apache.atlas.kafka.KafkaConsumer<>(deserializer, stream,
consumerId, consumerConnector, autoCommitEnabled);
}
......
......@@ -45,7 +45,7 @@ final class AtlasPluginClassLoaderUtil {
LOG.debug("==> AtlasPluginClassLoaderUtil.getFilesInDirectories()");
}
List<URL> ret = new ArrayList<URL>();
List<URL> ret = new ArrayList<>();
for (String libDir : libDirs) {
getFilesInDirectory(libDir, ret);
......
......@@ -9,6 +9,7 @@ ATLAS-1060 Add composite indexes for exact match performance improvements for al
ATLAS-1127 Modify creation and modification timestamps to Date instead of Long (sumasai)
ALL CHANGES:
ATLAS-1304 Redundant code removal and code simplification (apoorvnaik via mneethiraj)
ATLAS-1345 Enhance search APIs to resolve hierarchical references (apoorvnaik via sumasai)
ATLAS-1287 Subtasks: ATLAS-1288/ATLAS-1289 Integrated V2 API for Lineage,Entity Details,Tag assign to entity,Tags listing,tag create (kevalbhatt)
ATLAS-1303 Update hashCode and equals method to use standard JDK libraries (apoorvnaik via svimal2106)
......
......@@ -101,9 +101,9 @@ public class RepositoryMetadataModule extends com.google.inject.AbstractModule {
Configuration configuration = getConfiguration();
bindAuditRepository(binder(), configuration);
bind(DeleteHandler.class).to((Class<? extends DeleteHandler>) AtlasRepositoryConfiguration.getDeleteHandlerImpl()).asEagerSingleton();
bind(DeleteHandler.class).to(AtlasRepositoryConfiguration.getDeleteHandlerImpl()).asEagerSingleton();
bind(TypeCache.class).to((Class<? extends TypeCache>) AtlasRepositoryConfiguration.getTypeCache()).asEagerSingleton();
bind(TypeCache.class).to(AtlasRepositoryConfiguration.getTypeCache()).asEagerSingleton();
//Add EntityAuditListener as EntityChangeListener
Multibinder<EntityChangeListener> entityChangeListenerBinder =
......
Markdown is supported
0% or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment