Commit 1620284e by apoorvnaik, committed by Madhan Neethiraj

ATLAS-1304: Redundant code removal and code simplification

parent f6e27b59
@@ -20,8 +20,6 @@ package org.apache.atlas.falcon.event;
 import org.apache.falcon.entity.v0.Entity;
-import java.util.Date;
 /**
  * Falcon event to interface with Atlas Service.
  */
...
@@ -178,7 +178,7 @@ public class FalconHookIT {
 String inputId = ((List<Id>) processEntity.get("inputs")).get(0).getId()._getId();
 Referenceable pathEntity = atlasClient.getEntity(inputId);
-assertEquals(pathEntity.getTypeName(), HiveMetaStoreBridge.HDFS_PATH.toString());
+assertEquals(pathEntity.getTypeName(), HiveMetaStoreBridge.HDFS_PATH);
 List<Location> locations = FeedHelper.getLocations(feedCluster, feed);
 Location dataLocation = FileSystemStorage.getLocation(locations, LocationType.DATA);
...
@@ -575,7 +575,7 @@ public class HiveMetaStoreBridge {
 }
 public Referenceable fillHDFSDataSet(String pathUri) {
-    Referenceable ref = new Referenceable(HDFS_PATH.toString());
+    Referenceable ref = new Referenceable(HDFS_PATH);
     ref.set("path", pathUri);
     Path path = new Path(pathUri);
     ref.set(AtlasClient.NAME, Path.getPathWithoutSchemeAndAuthority(path).toString().toLowerCase());
...
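Note on the two `HDFS_PATH.toString()` removals above: once `HiveMetaStoreBridge.HDFS_PATH` is declared as a `String` constant, calling `toString()` on it is a no-op. A minimal illustrative sketch (the constant here is a hypothetical stand-in, not Atlas code):

    class RedundantToStringSketch {
        static final String HDFS_PATH = "hdfs_path"; // hypothetical stand-in for the Atlas constant

        public static void main(String[] args) {
            // String.toString() simply returns "this", so the extra call adds nothing
            System.out.println(HDFS_PATH == HDFS_PATH.toString()); // true: same object
        }
    }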
@@ -37,12 +37,7 @@ import org.apache.hadoop.hive.metastore.TableType;
 import org.apache.hadoop.hive.metastore.api.Database;
 import org.apache.hadoop.hive.metastore.api.FieldSchema;
 import org.apache.hadoop.hive.ql.hooks.*;
-import org.apache.hadoop.hive.ql.hooks.Entity;
 import org.apache.hadoop.hive.ql.hooks.Entity.Type;
-import org.apache.hadoop.hive.ql.hooks.ExecuteWithHookContext;
-import org.apache.hadoop.hive.ql.hooks.HookContext;
-import org.apache.hadoop.hive.ql.hooks.ReadEntity;
-import org.apache.hadoop.hive.ql.hooks.WriteEntity;
 import org.apache.hadoop.hive.ql.metadata.HiveException;
 import org.apache.hadoop.hive.ql.metadata.Partition;
 import org.apache.hadoop.hive.ql.metadata.Table;
@@ -360,16 +355,16 @@ public class HiveHook extends AtlasHook implements ExecuteWithHookContext {
 String changedColStringOldName = oldColList.get(0).getName();
 String changedColStringNewName = changedColStringOldName;
-for (int i = 0; i < oldColList.size(); i++) {
-    if (!newColHashMap.containsKey(oldColList.get(i))) {
-        changedColStringOldName = oldColList.get(i).getName();
+for (FieldSchema oldCol : oldColList) {
+    if (!newColHashMap.containsKey(oldCol)) {
+        changedColStringOldName = oldCol.getName();
         break;
     }
 }
-for (int i = 0; i < newColList.size(); i++) {
-    if (!oldColHashMap.containsKey(newColList.get(i))) {
-        changedColStringNewName = newColList.get(i).getName();
+for (FieldSchema newCol : newColList) {
+    if (!oldColHashMap.containsKey(newCol)) {
+        changedColStringNewName = newCol.getName();
         break;
     }
 }
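Note: the two loop rewrites above (and the similar one in HiveITBase further down) swap index-based iteration for the enhanced for loop, which removes the repeated `get(i)` bookkeeping. A minimal sketch of the pattern, with hypothetical data:

    import java.util.Arrays;
    import java.util.List;

    class ForEachSketch {
        public static void main(String[] args) {
            List<String> cols = Arrays.asList("id", "name", "ts"); // hypothetical column names

            // Before: the index loop repeats cols.get(i) on every use
            for (int i = 0; i < cols.size(); i++) {
                if (cols.get(i).startsWith("n")) {
                    System.out.println(cols.get(i));
                    break;
                }
            }

            // After: the element is bound once per iteration
            for (String col : cols) {
                if (col.startsWith("n")) {
                    System.out.println(col);
                    break;
                }
            }
        }
    }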
@@ -395,7 +390,7 @@ public class HiveHook extends AtlasHook implements ExecuteWithHookContext {
 if (writeEntity.getType() == Type.TABLE) {
     Table newTable = writeEntity.getTable();
     createOrUpdateEntities(dgiBridge, event, writeEntity, true, oldTable);
-    final String newQualifiedTableName = dgiBridge.getTableQualifiedName(dgiBridge.getClusterName(),
+    final String newQualifiedTableName = HiveMetaStoreBridge.getTableQualifiedName(dgiBridge.getClusterName(),
         newTable);
     String oldColumnQFName = HiveMetaStoreBridge.getColumnQualifiedName(newQualifiedTableName, oldColName);
     String newColumnQFName = HiveMetaStoreBridge.getColumnQualifiedName(newQualifiedTableName, newColName);
@@ -424,9 +419,9 @@ public class HiveHook extends AtlasHook implements ExecuteWithHookContext {
 Table newTable = writeEntity.getTable();
 //Hive sends with both old and new table names in the outputs which is weird. So skipping that with the below check
 if (!newTable.getDbName().equals(oldTable.getDbName()) || !newTable.getTableName().equals(oldTable.getTableName())) {
-    final String oldQualifiedName = dgiBridge.getTableQualifiedName(dgiBridge.getClusterName(),
+    final String oldQualifiedName = HiveMetaStoreBridge.getTableQualifiedName(dgiBridge.getClusterName(),
         oldTable);
-    final String newQualifiedName = dgiBridge.getTableQualifiedName(dgiBridge.getClusterName(),
+    final String newQualifiedName = HiveMetaStoreBridge.getTableQualifiedName(dgiBridge.getClusterName(),
         newTable);
     //Create/update old table entity - create entity with oldQFNme and old tableName if it doesnt exist. If exists, will update
@@ -624,7 +619,7 @@ public class HiveHook extends AtlasHook implements ExecuteWithHookContext {
 // filter out select queries which do not modify data
 if (!isSelectQuery) {
-    SortedSet<ReadEntity> sortedHiveInputs = new TreeSet<>(entityComparator);;
+    SortedSet<ReadEntity> sortedHiveInputs = new TreeSet<>(entityComparator);
     if ( event.getInputs() != null) {
         sortedHiveInputs.addAll(event.getInputs());
     }
@@ -671,7 +666,7 @@ public class HiveHook extends AtlasHook implements ExecuteWithHookContext {
 private <T extends Entity> void processHiveEntity(HiveMetaStoreBridge dgiBridge, HiveEventContext event, T entity, Set<String> dataSetsProcessed,
     SortedMap<T, Referenceable> dataSets, Set<Referenceable> entities) throws Exception {
     if (entity.getType() == Type.TABLE || entity.getType() == Type.PARTITION) {
-        final String tblQFName = dgiBridge.getTableQualifiedName(dgiBridge.getClusterName(), entity.getTable());
+        final String tblQFName = HiveMetaStoreBridge.getTableQualifiedName(dgiBridge.getClusterName(), entity.getTable());
         if (!dataSetsProcessed.contains(tblQFName)) {
             LinkedHashMap<Type, Referenceable> result = createOrUpdateEntities(dgiBridge, event, entity, false);
             dataSets.put(entity, result.get(Type.TABLE));
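Note: the `dgiBridge.getTableQualifiedName(...)` to `HiveMetaStoreBridge.getTableQualifiedName(...)` changes in this file invoke the static method through the class rather than an instance reference, making the static dispatch explicit. A minimal sketch of the pattern (hypothetical class, not the Atlas API):

    class StaticCallSketch {
        static String qualify(String cluster, String table) {
            return table + "@" + cluster; // hypothetical qualified-name format
        }

        public static void main(String[] args) {
            StaticCallSketch bridge = new StaticCallSketch();
            // Before: reads like an instance call, but resolves statically at compile time
            System.out.println(bridge.qualify("cl1", "t1"));
            // After: the class name makes it obvious no instance state is involved
            System.out.println(StaticCallSketch.qualify("cl1", "t1"));
        }
    }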
@@ -754,14 +749,11 @@ public class HiveHook extends AtlasHook implements ExecuteWithHookContext {
 }
 private static boolean isCreateOp(HiveEventContext hiveEvent) {
-    if (HiveOperation.CREATETABLE.equals(hiveEvent.getOperation())
+    return HiveOperation.CREATETABLE.equals(hiveEvent.getOperation())
         || HiveOperation.CREATEVIEW.equals(hiveEvent.getOperation())
         || HiveOperation.ALTERVIEW_AS.equals(hiveEvent.getOperation())
         || HiveOperation.ALTERTABLE_LOCATION.equals(hiveEvent.getOperation())
-        || HiveOperation.CREATETABLE_AS_SELECT.equals(hiveEvent.getOperation())) {
-        return true;
-    }
-    return false;
+        || HiveOperation.CREATETABLE_AS_SELECT.equals(hiveEvent.getOperation());
 }
 private Referenceable getProcessReferenceable(HiveMetaStoreBridge dgiBridge, HiveEventContext hiveEvent,
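Note: `isCreateOp` above collapses an `if (...) { return true; } return false;` block into a single `return` of the boolean expression. The same pattern in isolation, with hypothetical operations:

    class DirectReturnSketch {
        enum Op { CREATE, DROP, QUERY }

        // Before: branching on a condition only to return literals
        static boolean isCreateVerbose(Op op) {
            if (Op.CREATE.equals(op)) {
                return true;
            }
            return false;
        }

        // After: the condition already is the result
        static boolean isCreate(Op op) {
            return Op.CREATE.equals(op);
        }

        public static void main(String[] args) {
            System.out.println(isCreateVerbose(Op.CREATE) == isCreate(Op.CREATE)); // true
        }
    }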
@@ -973,8 +965,8 @@ public class HiveHook extends AtlasHook implements ExecuteWithHookContext {
 }
 private static boolean addQueryType(HiveOperation op, WriteEntity entity) {
-    if (((WriteEntity) entity).getWriteType() != null && HiveOperation.QUERY.equals(op)) {
-        switch (((WriteEntity) entity).getWriteType()) {
+    if (entity.getWriteType() != null && HiveOperation.QUERY.equals(op)) {
+        switch (entity.getWriteType()) {
             case INSERT:
             case INSERT_OVERWRITE:
             case UPDATE:
...
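Note: `addQueryType` above drops the `((WriteEntity) entity)` casts; the parameter is already declared as `WriteEntity`, so the casts were no-ops. A sketch of the pattern with hypothetical types:

    class RedundantCastSketch {
        static class Output {
            String writeType() { return "INSERT"; } // hypothetical accessor
        }

        static void handle(Output entity) {
            // Before: String t = ((Output) entity).writeType(); -- casting to the declared type
            // After: the static type already guarantees the member exists
            String t = entity.writeType();
            System.out.println(t);
        }

        public static void main(String[] args) {
            handle(new Output());
        }
    }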
@@ -17,8 +17,6 @@
  */
 package org.apache.atlas.hive.rewrite;
-import org.apache.hadoop.hive.ql.parse.ParseException;
 public class RewriteException extends Exception {
     public RewriteException(final String message, final Exception exception) {
         super(message, exception);
...
@@ -215,8 +215,8 @@ public class HiveITBase {
 protected void validateHDFSPaths(Referenceable processReference, String attributeName, String... testPaths) throws Exception {
     List<Id> hdfsPathRefs = (List<Id>) processReference.get(attributeName);
-    for (int i = 0; i < testPaths.length; i++) {
-        final Path path = new Path(testPaths[i]);
+    for (String testPath : testPaths) {
+        final Path path = new Path(testPath);
         final String testPathNormed = lower(path.toString());
         String hdfsPathId = assertHDFSPathIsRegistered(testPathNormed);
         Assert.assertEquals(hdfsPathRefs.get(0)._getId(), hdfsPathId);
...
@@ -208,7 +208,7 @@ public class HiveMetaStoreBridgeTest {
 when(atlasClient.getEntity("82e06b34-9151-4023-aa9d-b82103a50e77")).thenReturn(createTableReference());
 String processQualifiedName = HiveMetaStoreBridge.getTableQualifiedName(CLUSTER_NAME, hiveTables.get(1));
 when(atlasClient.getEntity(HiveDataTypes.HIVE_PROCESS.getName(), AtlasClient.REFERENCEABLE_ATTRIBUTE_NAME,
-    processQualifiedName)).thenReturn(getEntityReference(HiveDataTypes.HIVE_PROCESS.getName(), "82e06b34-9151-4023-aa9d-b82103a50e77"));;
+    processQualifiedName)).thenReturn(getEntityReference(HiveDataTypes.HIVE_PROCESS.getName(), "82e06b34-9151-4023-aa9d-b82103a50e77"));
 HiveMetaStoreBridge bridge = new HiveMetaStoreBridge(CLUSTER_NAME, hiveClient, atlasClient);
 try {
...
@@ -533,7 +533,7 @@ public class HiveHookIT extends HiveITBase {
 Referenceable processRef1 = validateProcess(event, expectedInputs, outputs);
 //Test sorting of tbl names
-SortedSet<String> sortedTblNames = new TreeSet<String>();
+SortedSet<String> sortedTblNames = new TreeSet<>();
 sortedTblNames.add(inputTable1Name.toLowerCase());
 sortedTblNames.add(inputTable2Name.toLowerCase());
@@ -584,13 +584,13 @@ public class HiveHookIT extends HiveITBase {
 Set<ReadEntity> inputs = getInputs(tableName, Entity.Type.TABLE);
 final Set<WriteEntity> outputs = getOutputs(pFile1, Entity.Type.DFS_DIR);
-((WriteEntity)outputs.iterator().next()).setWriteType(WriteEntity.WriteType.PATH_WRITE);
+outputs.iterator().next().setWriteType(WriteEntity.WriteType.PATH_WRITE);
 final HiveHook.HiveEventContext hiveEventContext = constructEvent(query, HiveOperation.QUERY, inputs, outputs);
 Referenceable processReference = validateProcess(hiveEventContext);
 validateHDFSPaths(processReference, OUTPUTS, pFile1);
-String tableId = assertTableIsRegistered(DEFAULT_DB, tableName);
+assertTableIsRegistered(DEFAULT_DB, tableName);
 validateInputTables(processReference, inputs);
 //Rerun same query with same HDFS path
@@ -630,7 +630,7 @@ public class HiveHookIT extends HiveITBase {
 Set<ReadEntity> inputs = getInputs(tableName, Entity.Type.TABLE);
 final Set<WriteEntity> outputs = getOutputs(pFile1, Entity.Type.DFS_DIR);
-((WriteEntity)outputs.iterator().next()).setWriteType(WriteEntity.WriteType.PATH_WRITE);
+outputs.iterator().next().setWriteType(WriteEntity.WriteType.PATH_WRITE);
 final Set<ReadEntity> partitionIps = new LinkedHashSet<>(inputs);
 partitionIps.addAll(getInputs(DEFAULT_DB + "@" + tableName + "@dt='" + PART_FILE + "'", Entity.Type.PARTITION));
@@ -646,7 +646,7 @@ public class HiveHookIT extends HiveITBase {
 runCommand(query);
 final Set<WriteEntity> pFile2Outputs = getOutputs(pFile2, Entity.Type.DFS_DIR);
-((WriteEntity)pFile2Outputs.iterator().next()).setWriteType(WriteEntity.WriteType.PATH_WRITE);
+pFile2Outputs.iterator().next().setWriteType(WriteEntity.WriteType.PATH_WRITE);
 //Now the process has 2 paths - one older with deleted reference to partition and another with the the latest partition
 Set<WriteEntity> p2Outputs = new LinkedHashSet<WriteEntity>() {{
     addAll(pFile2Outputs);
@@ -676,7 +676,7 @@ public class HiveHookIT extends HiveITBase {
 Set<ReadEntity> inputs = getInputs(tableName, Entity.Type.TABLE);
 Set<WriteEntity> outputs = getOutputs(insertTableName, Entity.Type.TABLE);
 outputs.iterator().next().setName(getQualifiedTblName(insertTableName + HiveMetaStoreBridge.TEMP_TABLE_PREFIX + SessionState.get().getSessionId()));
-((WriteEntity)outputs.iterator().next()).setWriteType(WriteEntity.WriteType.INSERT);
+outputs.iterator().next().setWriteType(WriteEntity.WriteType.INSERT);
 validateProcess(constructEvent(query, HiveOperation.QUERY, inputs, outputs));
@@ -696,7 +696,7 @@ public class HiveHookIT extends HiveITBase {
 final Set<ReadEntity> inputs = getInputs(tableName, Entity.Type.TABLE);
 final Set<WriteEntity> outputs = getOutputs(insertTableName, Entity.Type.TABLE);
-((WriteEntity)outputs.iterator().next()).setWriteType(WriteEntity.WriteType.INSERT);
+outputs.iterator().next().setWriteType(WriteEntity.WriteType.INSERT);
 final Set<ReadEntity> partitionIps = new LinkedHashSet<ReadEntity>() {
     {
@@ -1673,7 +1673,7 @@ public class HiveHookIT extends HiveITBase {
 private void verifyProperties(Struct referenceable, Map<String, String> expectedProps, boolean checkIfNotExists) {
     Map<String, String> parameters = (Map<String, String>) referenceable.get(HiveMetaStoreBridge.PARAMETERS);
-    if (checkIfNotExists == false) {
+    if (!checkIfNotExists) {
         //Check if properties exist
         Assert.assertNotNull(parameters);
         for (String propKey : expectedProps.keySet()) {
@@ -1745,11 +1745,11 @@ public class HiveHookIT extends HiveITBase {
 }
 private String getDSTypeName(Entity entity) {
-    return Entity.Type.TABLE.equals(entity.getType()) ? HiveDataTypes.HIVE_TABLE.name() : HiveMetaStoreBridge.HDFS_PATH.toString();
+    return Entity.Type.TABLE.equals(entity.getType()) ? HiveDataTypes.HIVE_TABLE.name() : HiveMetaStoreBridge.HDFS_PATH;
 }
 private <T extends Entity> SortedMap<T, Referenceable> getSortedProcessDataSets(Set<T> inputTbls) {
-    SortedMap<T, Referenceable> inputs = new TreeMap<T, Referenceable>(entityComparator);
+    SortedMap<T, Referenceable> inputs = new TreeMap<>(entityComparator);
     if (inputTbls != null) {
         for (final T tbl : inputTbls) {
             Referenceable inputTableRef = new Referenceable(getDSTypeName(tbl), new HashMap<String, Object>() {{
...
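Note: the `new TreeMap<T, Referenceable>(...)` to `new TreeMap<>(...)` change above is the diamond-operator cleanup that recurs through the rest of this commit: since Java 7 the compiler infers constructor type arguments from the declaration, so repeating them is redundant. A minimal sketch:

    import java.util.ArrayList;
    import java.util.HashMap;
    import java.util.List;
    import java.util.Map;

    class DiamondSketch {
        public static void main(String[] args) {
            // Before: type arguments repeated on both sides
            Map<String, List<String>> verbose = new HashMap<String, List<String>>();
            // After: inferred from the left-hand side; identical runtime type
            Map<String, List<String>> concise = new HashMap<>();
            concise.put("k", new ArrayList<>());
            System.out.println(concise.getClass() == verbose.getClass()); // true
        }
    }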
@@ -18,6 +18,7 @@
 package org.apache.atlas.storm.hook;
+import org.apache.commons.lang.StringUtils;
 import org.apache.storm.generated.Bolt;
 import org.apache.storm.generated.GlobalStreamId;
 import org.apache.storm.generated.Grouping;
@@ -82,8 +83,7 @@ public final class StormTopologyUtil {
 components.add(boltName);
 components = removeSystemComponent ? removeSystemComponents(components)
     : components;
-if ((removeSystemComponent && !isSystemComponent(inputComponentId)) ||
-    !removeSystemComponent) {
+if (!removeSystemComponent || !isSystemComponent(inputComponentId)) {
     adjacencyMap.put(inputComponentId, components);
 }
 }
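Note: the StormTopologyUtil condition above is a propositional simplification: `(a && !b) || !a` is equivalent to `!a || !b` (distribute `||` over `&&`; the `a || !a` factor is always true). An exhaustive truth-table check of that claim:

    class BooleanSimplifySketch {
        public static void main(String[] args) {
            // Verify (a && !b) || !a  ==  !a || !b  for all four input combinations
            for (boolean a : new boolean[] {false, true}) {
                for (boolean b : new boolean[] {false, true}) {
                    boolean before = (a && !b) || !a;
                    boolean after = !a || !b;
                    System.out.printf("a=%b b=%b -> %b == %b%n", a, b, before, after);
                }
            }
        }
    }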
@@ -132,7 +132,7 @@ public final class StormTopologyUtil {
     Set<Object> objectsToSkip)
     throws IllegalAccessException {
     if (objectsToSkip == null) {
-        objectsToSkip = new HashSet<Object>();
+        objectsToSkip = new HashSet<>();
     }
     Map<String, String> output = new HashMap<>();
@@ -175,9 +175,7 @@ public final class StormTopologyUtil {
 String keyStr = getString(mapKey, false, objectsToSkip);
 String valStr = getString(mapVal, false, objectsToSkip);
-if ((valStr == null) || (valStr.isEmpty())) {
-    continue;
-} else {
+if (StringUtils.isNotEmpty(valStr)) {
     output.put(String.format("%s.%s", key, keyStr), valStr);
 }
 }
...
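Note: the hunk above inverts a `continue`-then-`else` guard into a single positive check; commons-lang's `StringUtils.isNotEmpty(s)` is false for both `null` and `""`, so the behaviour is unchanged. A sketch of the inversion without the library dependency (hand-rolled `isNotEmpty` with the same contract, hypothetical data):

    import java.util.LinkedHashMap;
    import java.util.Map;

    class GuardInversionSketch {
        static boolean isNotEmpty(String s) { // same contract as StringUtils.isNotEmpty
            return s != null && !s.isEmpty();
        }

        public static void main(String[] args) {
            Map<String, String> in = new LinkedHashMap<>();
            in.put("a", "1");
            in.put("b", "");
            in.put("c", null);

            Map<String, String> out = new LinkedHashMap<>();
            for (Map.Entry<String, String> e : in.entrySet()) {
                // Before: if (v == null || v.isEmpty()) { continue; } else { out.put(...); }
                // After: one positive condition, no continue/else
                if (isNotEmpty(e.getValue())) {
                    out.put(e.getKey(), e.getValue());
                }
            }
            System.out.println(out); // {a=1}
        }
    }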
@@ -18,5 +18,5 @@
 package org.apache.atlas.authorize;
 public enum AtlasActionTypes {
-    READ, CREATE, UPDATE, DELETE;
+    READ, CREATE, UPDATE, DELETE
 }
@@ -25,18 +25,18 @@ public interface AtlasAuthorizer {
 /**
  * This method will load the policy file and would initialize the required data-structures.
  */
-public void init();
+void init();
 /**
  * This method is responsible to perform the actual authorization for every REST API call. It will check if
  * user can perform action on resource.
  */
-public boolean isAccessAllowed(AtlasAccessRequest request) throws AtlasAuthorizationException;
+boolean isAccessAllowed(AtlasAccessRequest request) throws AtlasAuthorizationException;
 /**
  * This method is responsible to perform the cleanup and release activities. It must be called when you are done
  * with the Authorization activity and once it's called a restart would be required. Try to invoke this while
  * destroying the context.
  */
-public void cleanUp();
+void cleanUp();
 }
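Note: the `AtlasAuthorizer` hunk above and the enum hunks around it remove tokens the language already implies: interface methods are implicitly `public abstract`, and an enum's trailing semicolon is only required when fields or methods follow the constant list. A minimal sketch of both:

    class ImplicitModifierSketch {
        interface Authorizer {
            // implicitly public abstract; writing "public" is redundant
            boolean isAllowed(String user);
        }

        enum Action { READ, WRITE } // no trailing ';' needed after the constants

        public static void main(String[] args) {
            Authorizer a = user -> "admin".equals(user);
            System.out.println(a.isAllowed("admin") + " " + Action.READ);
        }
    }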
@@ -19,5 +19,5 @@
 package org.apache.atlas.authorize;
 public enum AtlasResourceTypes {
-    UNKNOWN, ENTITY, TYPE, OPERATION, TAXONOMY, TERM;
+    UNKNOWN, ENTITY, TYPE, OPERATION, TAXONOMY, TERM
 }
@@ -103,7 +103,7 @@ public class AtlasAuthorizationUtils {
  * unprotected types are mapped with AtlasResourceTypes.UNKNOWN, access to these are allowed.
  */
 public static Set<AtlasResourceTypes> getAtlasResourceType(String contextPath) {
-    Set<AtlasResourceTypes> resourceTypes = new HashSet<AtlasResourceTypes>();
+    Set<AtlasResourceTypes> resourceTypes = new HashSet<>();
     if (isDebugEnabled) {
         LOG.debug("==> getAtlasResourceType for " + contextPath);
     }
...
@@ -36,7 +36,7 @@ public class FileReaderUtil {
 if (isDebugEnabled) {
     LOG.debug("==> FileReaderUtil readFile");
 }
-List<String> list = new ArrayList<String>();
+List<String> list = new ArrayList<>();
 LOG.info("reading the file" + path);
 List<String> fileLines = Files.readAllLines(Paths.get(path), Charset.forName("UTF-8"));
 if (fileLines != null) {
...
@@ -51,7 +51,7 @@ public class PolicyParser {
 if (isDebugEnabled) {
     LOG.debug("==> PolicyParser getListOfAutorities");
 }
-List<AtlasActionTypes> authorities = new ArrayList<AtlasActionTypes>();
+List<AtlasActionTypes> authorities = new ArrayList<>();
 for (int i = 0; i < auth.length(); i++) {
     char access = auth.toLowerCase().charAt(i);
@@ -86,7 +86,7 @@ public class PolicyParser {
 if (isDebugEnabled) {
     LOG.debug("==> PolicyParser parsePolicies");
 }
-List<PolicyDef> policyDefs = new ArrayList<PolicyDef>();
+List<PolicyDef> policyDefs = new ArrayList<>();
 for (String policy : policies) {
     PolicyDef policyDef = parsePolicy(policy);
     if (policyDef != null) {
@@ -129,7 +129,7 @@ public class PolicyParser {
 }
 boolean isValidEntity = Pattern.matches("(.+:.+)+", entity);
 boolean isEmpty = entity.isEmpty();
-if (isValidEntity == false || isEmpty == true) {
+if (!isValidEntity || isEmpty) {
     if (isDebugEnabled) {
         LOG.debug("group/user/resource not properly define in Policy");
         LOG.debug("<== PolicyParser validateEntity");
@@ -150,7 +150,7 @@ public class PolicyParser {
 }
 String[] users = usersDef.split(",");
 String[] userAndRole = null;
-Map<String, List<AtlasActionTypes>> usersMap = new HashMap<String, List<AtlasActionTypes>>();
+Map<String, List<AtlasActionTypes>> usersMap = new HashMap<>();
 if (validateEntity(usersDef)) {
     for (String user : users) {
         if (!Pattern.matches("(.+:.+)+", user)) {
@@ -179,7 +179,7 @@ public class PolicyParser {
 }
 String[] groups = groupsDef.split("\\,");
 String[] groupAndRole = null;
-Map<String, List<AtlasActionTypes>> groupsMap = new HashMap<String, List<AtlasActionTypes>>();
+Map<String, List<AtlasActionTypes>> groupsMap = new HashMap<>();
 if (validateEntity(groupsDef.trim())) {
     for (String group : groups) {
         if (!Pattern.matches("(.+:.+)+", group)) {
@@ -209,7 +209,7 @@ public class PolicyParser {
 }
 String[] resources = resourceDef.split(",");
 String[] resourceTypeAndName = null;
-Map<AtlasResourceTypes, List<String>> resourcesMap = new HashMap<AtlasResourceTypes, List<String>>();
+Map<AtlasResourceTypes, List<String>> resourcesMap = new HashMap<>();
 if (validateEntity(resourceDef)) {
     for (String resource : resources) {
         if (!Pattern.matches("(.+:.+)+", resource)) {
@@ -238,7 +238,7 @@ public class PolicyParser {
 List<String> resourceList = resourcesMap.get(resourceType);
 if (resourceList == null) {
-    resourceList = new ArrayList<String>();
+    resourceList = new ArrayList<>();
 }
 resourceList.add(resourceTypeAndName[RESOURCE_NAME]);
 resourcesMap.put(resourceType, resourceList);
...
@@ -40,7 +40,7 @@ public class PolicyUtil {
     + " & " + principalType);
 }
 Map<String, Map<AtlasResourceTypes, List<String>>> userReadMap =
-    new HashMap<String, Map<AtlasResourceTypes, List<String>>>();
+    new HashMap<>();
 // Iterate over the list of policies to create map
 for (PolicyDef policyDef : policyDefList) {
@@ -63,7 +63,7 @@ public class PolicyUtil {
 if (isDebugEnabled) {
     LOG.debug("Resource list not found for " + username + ", creating it");
 }
-userResourceList = new HashMap<AtlasResourceTypes, List<String>>();
+userResourceList = new HashMap<>();
 }
 /*
  * Iterate over resources from the current policy def and update the resource list for the current user
@@ -77,7 +77,7 @@ public class PolicyUtil {
 if (resourceList == null) {
     // if the resource list was not added for this type then
     // create and add all the resources in this policy
-    resourceList = new ArrayList<String>();
+    resourceList = new ArrayList<>();
     resourceList.addAll(resourceTypeMap.getValue());
 } else {
     // if the resource list is present then merge both the
...
@@ -45,7 +45,7 @@ import com.google.common.annotations.VisibleForTesting;
 public final class SimpleAtlasAuthorizer implements AtlasAuthorizer {
 public enum AtlasAccessorTypes {
-    USER, GROUP;
+    USER, GROUP
 }
 private static final Logger LOG = LoggerFactory.getLogger(SimpleAtlasAuthorizer.class);
@@ -133,8 +133,8 @@ public final class SimpleAtlasAuthorizer implements AtlasAuthorizer {
     + "\nResource :: " + resource);
 boolean isAccessAllowed = false;
-boolean isUser = user == null ? false : true;
-boolean isGroup = groups == null ? false : true;
+boolean isUser = user != null;
+boolean isGroup = groups != null;
 if ((!isUser && !isGroup) || action == null || resource == null) {
     if (isDebugEnabled) {
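Note: `user == null ? false : true` above is a ternary that merely re-states a boolean; comparing directly with `!=` says the same thing. Sketch:

    class TernaryToComparisonSketch {
        public static void main(String[] args) {
            String user = null;
            // Before: boolean isUser = user == null ? false : true;
            // After: the comparison itself is the boolean
            boolean isUser = user != null;
            System.out.println(isUser); // false
        }
    }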
@@ -149,26 +149,22 @@ public final class SimpleAtlasAuthorizer implements AtlasAuthorizer {
 case READ:
     isAccessAllowed = checkAccess(user, resourceTypes, resource, userReadMap);
     isAccessAllowed =
-        isAccessAllowed == false ? checkAccessForGroups(groups, resourceTypes, resource, groupReadMap)
-        : isAccessAllowed;
+        isAccessAllowed || checkAccessForGroups(groups, resourceTypes, resource, groupReadMap);
     break;
 case CREATE:
     isAccessAllowed = checkAccess(user, resourceTypes, resource, userWriteMap);
     isAccessAllowed =
-        isAccessAllowed == false ? checkAccessForGroups(groups, resourceTypes, resource, groupWriteMap)
-        : isAccessAllowed;
+        isAccessAllowed || checkAccessForGroups(groups, resourceTypes, resource, groupWriteMap);
     break;
 case UPDATE:
     isAccessAllowed = checkAccess(user, resourceTypes, resource, userUpdateMap);
     isAccessAllowed =
-        isAccessAllowed == false
-        ? checkAccessForGroups(groups, resourceTypes, resource, groupUpdateMap) : isAccessAllowed;
+        isAccessAllowed || checkAccessForGroups(groups, resourceTypes, resource, groupUpdateMap);
     break;
 case DELETE:
     isAccessAllowed = checkAccess(user, resourceTypes, resource, userDeleteMap);
     isAccessAllowed =
-        isAccessAllowed == false
-        ? checkAccessForGroups(groups, resourceTypes, resource, groupDeleteMap) : isAccessAllowed;
+        isAccessAllowed || checkAccessForGroups(groups, resourceTypes, resource, groupDeleteMap);
     break;
 default:
     if (isDebugEnabled) {
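Note: each `case` above replaces `x == false ? fallback() : x` with `x || fallback()`. Short-circuit `||` preserves the old behaviour: the group check only runs when the user check failed. Sketch with hypothetical check methods:

    class ShortCircuitSketch {
        static boolean userAllowed() { System.out.println("user check"); return true; }
        static boolean groupAllowed() { System.out.println("group check"); return true; }

        public static void main(String[] args) {
            boolean allowed = userAllowed();
            // Before: allowed = allowed == false ? groupAllowed() : allowed;
            // After: || evaluates groupAllowed() only when allowed is false
            allowed = allowed || groupAllowed();
            System.out.println(allowed); // prints "user check" then true; no group check
        }
    }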
@@ -249,7 +245,7 @@ public final class SimpleAtlasAuthorizer implements AtlasAuthorizer {
 boolean optWildCard = true;
-List<String> policyValues = new ArrayList<String>();
+List<String> policyValues = new ArrayList<>();
 if (policyResource != null) {
     boolean isWildCardPresent = !optWildCard;
@@ -302,8 +298,7 @@ public final class SimpleAtlasAuthorizer implements AtlasAuthorizer {
 }
 }
-if (isMatch == false) {
+if (!isMatch) {
     if (isDebugEnabled) {
         StringBuilder sb = new StringBuilder();
         sb.append("[");
@@ -327,8 +322,7 @@ public final class SimpleAtlasAuthorizer implements AtlasAuthorizer {
 }
 private boolean isAllValuesRequested(String resource) {
-    boolean result = StringUtils.isEmpty(resource) || WILDCARD_ASTERISK.equals(resource);
-    return result;
+    return StringUtils.isEmpty(resource) || WILDCARD_ASTERISK.equals(resource);
 }
 @Override
...
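Note: `isAllValuesRequested` above inlines a single-use local (`boolean result = expr; return result;` becomes `return expr;`). Sketch, with a hypothetical stand-in for `WILDCARD_ASTERISK`:

    class InlineTempSketch {
        static final String WILDCARD = "*"; // hypothetical stand-in for WILDCARD_ASTERISK

        static boolean isAllValuesRequested(String resource) {
            // Before: boolean result = ...; return result;
            return resource == null || resource.isEmpty() || WILDCARD.equals(resource);
        }

        public static void main(String[] args) {
            System.out.println(isAllValuesRequested("*"));   // true
            System.out.println(isAllValuesRequested("PII")); // false
        }
    }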
@@ -33,42 +33,42 @@ public class PolicyParserTest {
 @Test
 public void testParsePoliciesWithAllProperties() {
-    List<String> policies = new ArrayList<String>();
+    List<String> policies = new ArrayList<>();
     policies.add("hivePolicy;;usr1:r,usr2:rw;;grp1:rwu,grp2:u;;entity:*abc,operation:*xyz,type:PII");
     /* Creating group data */
-    Map<String, List<AtlasActionTypes>> groupMap = new HashMap<String, List<AtlasActionTypes>>();
-    List<AtlasActionTypes> accessList1 = new ArrayList<AtlasActionTypes>();
+    Map<String, List<AtlasActionTypes>> groupMap = new HashMap<>();
+    List<AtlasActionTypes> accessList1 = new ArrayList<>();
     accessList1.add(AtlasActionTypes.READ);
     accessList1.add(AtlasActionTypes.CREATE);
     accessList1.add(AtlasActionTypes.UPDATE);
     groupMap.put("grp1", accessList1);
-    List<AtlasActionTypes> accessList2 = new ArrayList<AtlasActionTypes>();
+    List<AtlasActionTypes> accessList2 = new ArrayList<>();
     accessList2.add(AtlasActionTypes.UPDATE);
     groupMap.put("grp2", accessList2);
     /* Creating user data */
-    Map<String, List<AtlasActionTypes>> usersMap = new HashMap<String, List<AtlasActionTypes>>();
-    List<AtlasActionTypes> usr1AccessList = new ArrayList<AtlasActionTypes>();
+    Map<String, List<AtlasActionTypes>> usersMap = new HashMap<>();
+    List<AtlasActionTypes> usr1AccessList = new ArrayList<>();
     usr1AccessList.add(AtlasActionTypes.READ);
     usersMap.put("usr1", usr1AccessList);
-    List<AtlasActionTypes> usr2AccessList = new ArrayList<AtlasActionTypes>();
+    List<AtlasActionTypes> usr2AccessList = new ArrayList<>();
     usr2AccessList.add(AtlasActionTypes.READ);
     usr2AccessList.add(AtlasActionTypes.CREATE);
     usersMap.put("usr2", usr2AccessList);
     /* Creating resources data */
-    Map<AtlasResourceTypes, List<String>> resourceMap = new HashMap<AtlasResourceTypes, List<String>>();
-    List<String> resource1List = new ArrayList<String>();
+    Map<AtlasResourceTypes, List<String>> resourceMap = new HashMap<>();
+    List<String> resource1List = new ArrayList<>();
     resource1List.add("*abc");
     resourceMap.put(AtlasResourceTypes.ENTITY, resource1List);
-    List<String> resource2List = new ArrayList<String>();
+    List<String> resource2List = new ArrayList<>();
     resource2List.add("*xyz");
     resourceMap.put(AtlasResourceTypes.OPERATION, resource2List);
-    List<String> resource3List = new ArrayList<String>();
+    List<String> resource3List = new ArrayList<>();
     resource3List.add("PII");
     resourceMap.put(AtlasResourceTypes.TYPE, resource3List);
@@ -86,34 +86,34 @@ public class PolicyParserTest {
 @Test
 public void testParsePoliciesWithOutUserProperties() {
-    List<String> policies = new ArrayList<String>();
+    List<String> policies = new ArrayList<>();
     policies.add("hivePolicy;;;;grp1:rwu,grp2:u;;entity:*abc,operation:*xyz,type:PII");
     // Creating group data
-    Map<String, List<AtlasActionTypes>> groupMap = new HashMap<String, List<AtlasActionTypes>>();
-    List<AtlasActionTypes> accessList1 = new ArrayList<AtlasActionTypes>();
+    Map<String, List<AtlasActionTypes>> groupMap = new HashMap<>();
+    List<AtlasActionTypes> accessList1 = new ArrayList<>();
     accessList1.add(AtlasActionTypes.READ);
     accessList1.add(AtlasActionTypes.CREATE);
     accessList1.add(AtlasActionTypes.UPDATE);
     groupMap.put("grp1", accessList1);
-    List<AtlasActionTypes> accessList2 = new ArrayList<AtlasActionTypes>();
+    List<AtlasActionTypes> accessList2 = new ArrayList<>();
     accessList2.add(AtlasActionTypes.UPDATE);
     groupMap.put("grp2", accessList2);
     // Creating user data
-    Map<String, List<AtlasActionTypes>> usersMap = new HashMap<String, List<AtlasActionTypes>>();
+    Map<String, List<AtlasActionTypes>> usersMap = new HashMap<>();
     // Creating resources data
-    Map<AtlasResourceTypes, List<String>> resourceMap = new HashMap<AtlasResourceTypes, List<String>>();
-    List<String> resource1List = new ArrayList<String>();
+    Map<AtlasResourceTypes, List<String>> resourceMap = new HashMap<>();
+    List<String> resource1List = new ArrayList<>();
     resource1List.add("*abc");
     resourceMap.put(AtlasResourceTypes.ENTITY, resource1List);
-    List<String> resource2List = new ArrayList<String>();
+    List<String> resource2List = new ArrayList<>();
     resource2List.add("*xyz");
     resourceMap.put(AtlasResourceTypes.OPERATION, resource2List);
-    List<String> resource3List = new ArrayList<String>();
+    List<String> resource3List = new ArrayList<>();
     resource3List.add("PII");
     resourceMap.put(AtlasResourceTypes.TYPE, resource3List);
@@ -131,33 +131,33 @@ public class PolicyParserTest {
 @Test
 public void testParsePoliciesWithOutGroupProperties() {
-    List<String> policies = new ArrayList<String>();
+    List<String> policies = new ArrayList<>();
     policies.add("hivePolicy;;usr1:r,usr2:rw;;;;entity:*abc,operation:*xyz,type:PII");
     // Creating group data
-    Map<String, List<AtlasActionTypes>> groupMap = new HashMap<String, List<AtlasActionTypes>>();
+    Map<String, List<AtlasActionTypes>> groupMap = new HashMap<>();
     // Creating user data
-    Map<String, List<AtlasActionTypes>> usersMap = new HashMap<String, List<AtlasActionTypes>>();
-    List<AtlasActionTypes> usr1AccessList = new ArrayList<AtlasActionTypes>();
+    Map<String, List<AtlasActionTypes>> usersMap = new HashMap<>();
+    List<AtlasActionTypes> usr1AccessList = new ArrayList<>();
     usr1AccessList.add(AtlasActionTypes.READ);
     usersMap.put("usr1", usr1AccessList);
-    List<AtlasActionTypes> usr2AccessList = new ArrayList<AtlasActionTypes>();
+    List<AtlasActionTypes> usr2AccessList = new ArrayList<>();
     usr2AccessList.add(AtlasActionTypes.READ);
     usr2AccessList.add(AtlasActionTypes.CREATE);
     usersMap.put("usr2", usr2AccessList);
     // Creating resources data
-    Map<AtlasResourceTypes, List<String>> resourceMap = new HashMap<AtlasResourceTypes, List<String>>();
-    List<String> resource1List = new ArrayList<String>();
+    Map<AtlasResourceTypes, List<String>> resourceMap = new HashMap<>();
+    List<String> resource1List = new ArrayList<>();
     resource1List.add("*abc");
     resourceMap.put(AtlasResourceTypes.ENTITY, resource1List);
-    List<String> resource2List = new ArrayList<String>();
+    List<String> resource2List = new ArrayList<>();
     resource2List.add("*xyz");
     resourceMap.put(AtlasResourceTypes.OPERATION, resource2List);
-    List<String> resource3List = new ArrayList<String>();
+    List<String> resource3List = new ArrayList<>();
     resource3List.add("PII");
     resourceMap.put(AtlasResourceTypes.TYPE, resource3List);
...
@@ -35,24 +35,24 @@ public class PolicyUtilTest {
 @Test
 public void testCreatePermissionMap() {
-    HashMap<AtlasResourceTypes, List<String>> resourceMap = new HashMap<AtlasResourceTypes, List<String>>();
-    List<String> resource1List = new ArrayList<String>();
+    HashMap<AtlasResourceTypes, List<String>> resourceMap = new HashMap<>();
+    List<String> resource1List = new ArrayList<>();
     resource1List.add("*abc");
     resourceMap.put(AtlasResourceTypes.ENTITY, resource1List);
-    List<String> resource2List = new ArrayList<String>();
+    List<String> resource2List = new ArrayList<>();
     resource2List.add("*xyz");
     resourceMap.put(AtlasResourceTypes.OPERATION, resource2List);
-    List<String> resource3List = new ArrayList<String>();
+    List<String> resource3List = new ArrayList<>();
     resource3List.add("PII");
     resourceMap.put(AtlasResourceTypes.TYPE, resource3List);
     Map<String, HashMap<AtlasResourceTypes, List<String>>> permissionMap =
-        new HashMap<String, HashMap<AtlasResourceTypes, List<String>>>();
+        new HashMap<>();
     permissionMap.put("grp1", resourceMap);
-    List<String> policies = new ArrayList<String>();
+    List<String> policies = new ArrayList<>();
     policies.add("hivePolicy;;usr1:r,usr2:rw;;grp1:rwu,grp2:u;;entity:*abc,operation:*xyz,type:PII");
     List<PolicyDef> policyDefList = new PolicyParser().parsePolicies(policies);
@@ -66,25 +66,25 @@ public class PolicyUtilTest {
 @Test
 public void testMergeCreatePermissionMap() {
-    HashMap<AtlasResourceTypes, List<String>> resourceMap = new HashMap<AtlasResourceTypes, List<String>>();
-    List<String> resource1List = new ArrayList<String>();
+    HashMap<AtlasResourceTypes, List<String>> resourceMap = new HashMap<>();
+    List<String> resource1List = new ArrayList<>();
     resource1List.add("*abc");
     resourceMap.put(AtlasResourceTypes.ENTITY, resource1List);
-    List<String> resource2List = new ArrayList<String>();
+    List<String> resource2List = new ArrayList<>();
     resource2List.add("*x");
     resource2List.add("*xyz");
     resourceMap.put(AtlasResourceTypes.OPERATION, resource2List);
-    List<String> resource3List = new ArrayList<String>();
+    List<String> resource3List = new ArrayList<>();
     resource3List.add("PII");
     resourceMap.put(AtlasResourceTypes.TYPE, resource3List);
     Map<String, HashMap<AtlasResourceTypes, List<String>>> permissionMap =
-        new HashMap<String, HashMap<AtlasResourceTypes, List<String>>>();
+        new HashMap<>();
     permissionMap.put("grp1", resourceMap);
-    List<String> policies = new ArrayList<String>();
+    List<String> policies = new ArrayList<>();
     policies.add("hivePolicys;;;;grp1:rwu;;entity:*abc,operation:*xyz,operation:*x");
     policies.add("hivePolicy;;;;grp1:rwu;;entity:*abc,operation:*xyz");
     policies.add("hivePolicy;;usr1:r,usr2:rw;;grp1:rwu;;entity:*abc,operation:*xyz");
...
@@ -38,7 +38,7 @@ public class SimpleAtlasAuthorizerTest {
 Map<String, Map<AtlasResourceTypes, List<String>>> userReadMap = null;
 Map<String, Map<AtlasResourceTypes, List<String>>> groupReadMap = null;
-List<String> policies = new ArrayList<String>();
+List<String> policies = new ArrayList<>();
 policies.add("hivePolicy;;usr1:r,usr2:rw;;grp1:rwu,grp2:u;;type:*abc,type:PII");
 List<PolicyDef> policyDefs = new PolicyParser().parsePolicies(policies);
@@ -50,13 +50,13 @@ public class SimpleAtlasAuthorizerTest {
 userReadMap = policyUtil.createPermissionMap(policyDefs,
     AtlasActionTypes.READ, SimpleAtlasAuthorizer.AtlasAccessorTypes.USER);
-Set<AtlasResourceTypes> resourceType = new HashSet<AtlasResourceTypes>();
+Set<AtlasResourceTypes> resourceType = new HashSet<>();
 resourceType.add(AtlasResourceTypes.TYPE);
 String resource = "xsdfhjabc";
 AtlasActionTypes action = AtlasActionTypes.READ;
 String user = "usr1";
-Set<String> userGroups = new HashSet<String>();
+Set<String> userGroups = new HashSet<>();
 userGroups.add("grp3");
 try {
     AtlasAccessRequest request = new AtlasAccessRequest(resourceType,
@@ -83,7 +83,7 @@ public class SimpleAtlasAuthorizerTest {
 Map<String, Map<AtlasResourceTypes, List<String>>> userReadMap = null;
 Map<String, Map<AtlasResourceTypes, List<String>>> groupReadMap = null;
-List<String> policies = new ArrayList<String>();
+List<String> policies = new ArrayList<>();
 policies.add("hivePolicy;;usr1:r,usr2:rw;;grp1:rwu,grp2:u;;type:PII");
 List<PolicyDef> policyDefs = new PolicyParser().parsePolicies(policies);
@@ -95,12 +95,12 @@ public class SimpleAtlasAuthorizerTest {
 userReadMap = policyUtil.createPermissionMap(policyDefs,
     AtlasActionTypes.READ, SimpleAtlasAuthorizer.AtlasAccessorTypes.USER);
-Set<AtlasResourceTypes> resourceType = new HashSet<AtlasResourceTypes>();
+Set<AtlasResourceTypes> resourceType = new HashSet<>();
 resourceType.add(AtlasResourceTypes.TYPE);
 String resource = "PII";
 AtlasActionTypes action = AtlasActionTypes.READ;
 String user = "usr3";
-Set<String> userGroups = new HashSet<String>();
+Set<String> userGroups = new HashSet<>();
 userGroups.add("grp1");
 AtlasAccessRequest request = new AtlasAccessRequest(resourceType,
     resource, action, user, userGroups);
...@@ -126,7 +126,7 @@ public class SimpleAtlasAuthorizerTest { ...@@ -126,7 +126,7 @@ public class SimpleAtlasAuthorizerTest {
Map<String, Map<AtlasResourceTypes, List<String>>> userReadMap = null; Map<String, Map<AtlasResourceTypes, List<String>>> userReadMap = null;
Map<String, Map<AtlasResourceTypes, List<String>>> groupReadMap = null; Map<String, Map<AtlasResourceTypes, List<String>>> groupReadMap = null;
List<String> policies = new ArrayList<String>(); List<String> policies = new ArrayList<>();
policies.add("hivePolicy;;usr1:r,usr2:rw;;grp1:rwu,grp2:u;;type:PII"); policies.add("hivePolicy;;usr1:r,usr2:rw;;grp1:rwu,grp2:u;;type:PII");
List<PolicyDef> policyDefs = new PolicyParser().parsePolicies(policies); List<PolicyDef> policyDefs = new PolicyParser().parsePolicies(policies);
...@@ -138,12 +138,12 @@ public class SimpleAtlasAuthorizerTest { ...@@ -138,12 +138,12 @@ public class SimpleAtlasAuthorizerTest {
userReadMap = policyUtil.createPermissionMap(policyDefs, userReadMap = policyUtil.createPermissionMap(policyDefs,
AtlasActionTypes.READ, SimpleAtlasAuthorizer.AtlasAccessorTypes.USER); AtlasActionTypes.READ, SimpleAtlasAuthorizer.AtlasAccessorTypes.USER);
Set<AtlasResourceTypes> resourceType = new HashSet<AtlasResourceTypes>(); Set<AtlasResourceTypes> resourceType = new HashSet<>();
resourceType.add(AtlasResourceTypes.TYPE); resourceType.add(AtlasResourceTypes.TYPE);
String resource = "abc"; String resource = "abc";
AtlasActionTypes action = AtlasActionTypes.READ; AtlasActionTypes action = AtlasActionTypes.READ;
String user = "usr1"; String user = "usr1";
Set<String> userGroups = new HashSet<String>(); Set<String> userGroups = new HashSet<>();
userGroups.add("grp1"); userGroups.add("grp1");
AtlasAccessRequest request = new AtlasAccessRequest(resourceType, AtlasAccessRequest request = new AtlasAccessRequest(resourceType,
resource, action, user, userGroups); resource, action, user, userGroups);
...@@ -168,7 +168,7 @@ public class SimpleAtlasAuthorizerTest { ...@@ -168,7 +168,7 @@ public class SimpleAtlasAuthorizerTest {
Map<String, Map<AtlasResourceTypes, List<String>>> userReadMap = null; Map<String, Map<AtlasResourceTypes, List<String>>> userReadMap = null;
Map<String, Map<AtlasResourceTypes, List<String>>> groupReadMap = null; Map<String, Map<AtlasResourceTypes, List<String>>> groupReadMap = null;
List<String> policies = new ArrayList<String>(); List<String> policies = new ArrayList<>();
policies.add("hivePolicy;;usr1:r,usr2:rw;;grp1:rwu,grp2:u;;type:PII"); policies.add("hivePolicy;;usr1:r,usr2:rw;;grp1:rwu,grp2:u;;type:PII");
List<PolicyDef> policyDefs = new PolicyParser().parsePolicies(policies); List<PolicyDef> policyDefs = new PolicyParser().parsePolicies(policies);
...@@ -180,12 +180,12 @@ public class SimpleAtlasAuthorizerTest { ...@@ -180,12 +180,12 @@ public class SimpleAtlasAuthorizerTest {
userReadMap = policyUtil.createPermissionMap(policyDefs, userReadMap = policyUtil.createPermissionMap(policyDefs,
AtlasActionTypes.READ, SimpleAtlasAuthorizer.AtlasAccessorTypes.USER); AtlasActionTypes.READ, SimpleAtlasAuthorizer.AtlasAccessorTypes.USER);
Set<AtlasResourceTypes> resourceType = new HashSet<AtlasResourceTypes>(); Set<AtlasResourceTypes> resourceType = new HashSet<>();
resourceType.add(AtlasResourceTypes.TYPE); resourceType.add(AtlasResourceTypes.TYPE);
String resource = "PII"; String resource = "PII";
AtlasActionTypes action = AtlasActionTypes.READ; AtlasActionTypes action = AtlasActionTypes.READ;
String user = "usr3"; String user = "usr3";
Set<String> userGroups = new HashSet<String>(); Set<String> userGroups = new HashSet<>();
userGroups.add("grp3"); userGroups.add("grp3");
AtlasAccessRequest request = new AtlasAccessRequest(resourceType, AtlasAccessRequest request = new AtlasAccessRequest(resourceType,
resource, action, user, userGroups); resource, action, user, userGroups);
......
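The recurring edit in the test hunks above, and throughout this commit, is the Java 7 diamond operator: when the declared type on the left already spells out the type arguments, the compiler infers them at the constructor call, so new HashSet<String>() becomes new HashSet<>(). A minimal self-contained sketch; the class and variable names are illustrative, not from the Atlas sources:

    import java.util.ArrayList;
    import java.util.HashMap;
    import java.util.HashSet;
    import java.util.List;
    import java.util.Map;
    import java.util.Set;

    public class DiamondOperatorDemo {
        public static void main(String[] args) {
            // Pre-Java-7 style: type arguments repeated on both sides.
            Set<String> groupsOld = new HashSet<String>();

            // Java 7+ diamond: <String> is inferred from the declaration.
            Set<String> groups = new HashSet<>();
            groups.add("grp1");

            // Inference also handles nested generic types.
            Map<String, List<Integer>> index = new HashMap<>();
            index.put("a", new ArrayList<>());

            System.out.println(groupsOld + " " + groups + " " + index);
        }
    }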
...@@ -23,8 +23,6 @@ import org.apache.atlas.catalog.exception.InvalidPayloadException; ...@@ -23,8 +23,6 @@ import org.apache.atlas.catalog.exception.InvalidPayloadException;
import org.apache.atlas.catalog.exception.ResourceNotFoundException; import org.apache.atlas.catalog.exception.ResourceNotFoundException;
import org.apache.atlas.catalog.query.QueryFactory; import org.apache.atlas.catalog.query.QueryFactory;
import java.util.Collections;
/** /**
* Base class for resource providers. * Base class for resource providers.
*/ */
......
...@@ -85,7 +85,7 @@ public class DefaultTypeSystem implements AtlasTypeSystem { ...@@ -85,7 +85,7 @@ public class DefaultTypeSystem implements AtlasTypeSystem {
} catch(TypeNotFoundException tne) { } catch(TypeNotFoundException tne) {
//Type not found . Create //Type not found . Create
TypesDef typesDef = TypesUtil.getTypesDef(ImmutableList.<EnumTypeDefinition>of(), ImmutableList.<StructTypeDefinition>of(), TypesDef typesDef = TypesUtil.getTypesDef(ImmutableList.<EnumTypeDefinition>of(), ImmutableList.<StructTypeDefinition>of(),
ImmutableList.<HierarchicalTypeDefinition<TraitType>>of(type), ImmutableList.of(type),
ImmutableList.<HierarchicalTypeDefinition<ClassType>>of()); ImmutableList.<HierarchicalTypeDefinition<ClassType>>of());
metadataService.createType(TypesSerialization.toJson(typesDef)); metadataService.createType(TypesSerialization.toJson(typesDef));
} }
...@@ -198,10 +198,10 @@ public class DefaultTypeSystem implements AtlasTypeSystem { ...@@ -198,10 +198,10 @@ public class DefaultTypeSystem implements AtlasTypeSystem {
try { try {
HierarchicalTypeDefinition<T> definition = null; HierarchicalTypeDefinition<T> definition = null;
if ( isTrait) { if ( isTrait) {
definition = new HierarchicalTypeDefinition<T>(type, name, description, definition = new HierarchicalTypeDefinition<>(type, name, description,
ImmutableSet.<String>of(TaxonomyResourceProvider.TAXONOMY_TERM_TYPE), attributes.toArray(new AttributeDefinition[attributes.size()])); ImmutableSet.of(TaxonomyResourceProvider.TAXONOMY_TERM_TYPE), attributes.toArray(new AttributeDefinition[attributes.size()]));
} else { } else {
definition = new HierarchicalTypeDefinition<T>(type, name, description, definition = new HierarchicalTypeDefinition<>(type, name, description,
ImmutableSet.<String>of(), attributes.toArray(new AttributeDefinition[attributes.size()])); ImmutableSet.<String>of(), attributes.toArray(new AttributeDefinition[attributes.size()]));
} }
......
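The DefaultTypeSystem hunk shows the companion cleanup for static factory methods: an explicit type witness such as ImmutableList.<HierarchicalTypeDefinition<TraitType>>of(type) is only required when the compiler has nothing to infer the type argument from; once an argument is present, plain ImmutableList.of(type) resolves to the same type. A JDK-only sketch of the same idea, with an invented listOf factory standing in for the Guava method:

    import java.util.Collections;
    import java.util.List;

    public class TypeWitnessDemo {
        // A static generic factory, analogous in shape to ImmutableList.of(...).
        static <T> List<T> listOf(T element) {
            return Collections.singletonList(element);
        }

        public static void main(String[] args) {
            // Explicit type witness: needed only when nothing constrains T.
            List<String> empty = Collections.<String>emptyList();

            // With an argument present the compiler infers T; the witness is noise.
            List<String> verbose = TypeWitnessDemo.<String>listOf("trait");
            List<String> concise = listOf("trait");

            System.out.println(empty + " " + verbose + " " + concise);
        }
    }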
...@@ -20,10 +20,6 @@ package org.apache.atlas.catalog; ...@@ -20,10 +20,6 @@ package org.apache.atlas.catalog;
import com.tinkerpop.blueprints.Vertex; import com.tinkerpop.blueprints.Vertex;
import org.apache.atlas.catalog.definition.EntityTagResourceDefinition; import org.apache.atlas.catalog.definition.EntityTagResourceDefinition;
import org.apache.atlas.catalog.definition.ResourceDefinition;
import org.apache.atlas.repository.Constants;
import java.util.Collections;
/** /**
* Wrapper for term vertices. * Wrapper for term vertices.
......
...@@ -20,7 +20,6 @@ package org.apache.atlas.catalog.definition; ...@@ -20,7 +20,6 @@ package org.apache.atlas.catalog.definition;
import com.tinkerpop.pipes.PipeFunction; import com.tinkerpop.pipes.PipeFunction;
import com.tinkerpop.pipes.transform.TransformFunctionPipe; import com.tinkerpop.pipes.transform.TransformFunctionPipe;
import org.apache.atlas.AtlasConstants;
import org.apache.atlas.catalog.Request; import org.apache.atlas.catalog.Request;
import org.apache.atlas.catalog.TaxonomyResourceProvider; import org.apache.atlas.catalog.TaxonomyResourceProvider;
import org.apache.atlas.catalog.VertexWrapper; import org.apache.atlas.catalog.VertexWrapper;
......
...@@ -88,7 +88,7 @@ public interface QueryExpression { ...@@ -88,7 +88,7 @@ public interface QueryExpression {
* *
* @param fieldName field name * @param fieldName field name
*/ */
public void setField(String fieldName); void setField(String fieldName);
/** /**
* Get the expected value for the expression. * Get the expected value for the expression.
......
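Dropping public from setField is safe because every method declared in a Java interface is implicitly public (and, absent a default body, abstract), so the two declarations denote the same member; the same cleanup recurs below for the nested Result, HBaseCompat, and BackendFunction interfaces. A small sketch, with invented names:

    public class InterfaceModifierDemo {
        // Interface methods are implicitly public abstract; interface fields
        // are implicitly public static final.
        interface QueryExpression {
            String FIELD_PREFIX = "f_";      // same as: public static final String ...
            void setField(String fieldName); // same as: public abstract void ...
        }

        public static void main(String[] args) {
            // One abstract method, so a lambda can implement it (Java 8+).
            QueryExpression expr = fieldName ->
                    System.out.println("field = " + QueryExpression.FIELD_PREFIX + fieldName);
            expr.setField("name");
        }
    }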
...@@ -30,8 +30,6 @@ import org.testng.annotations.Test; ...@@ -30,8 +30,6 @@ import org.testng.annotations.Test;
import java.util.*; import java.util.*;
import static org.easymock.EasyMock.*; import static org.easymock.EasyMock.*;
import static org.easymock.EasyMock.createStrictMock;
import static org.easymock.EasyMock.replay;
import static org.testng.Assert.assertEquals; import static org.testng.Assert.assertEquals;
import static org.testng.Assert.assertNull; import static org.testng.Assert.assertNull;
import static org.testng.Assert.assertTrue; import static org.testng.Assert.assertTrue;
......
...@@ -28,6 +28,8 @@ import org.apache.commons.cli.Options; ...@@ -28,6 +28,8 @@ import org.apache.commons.cli.Options;
import org.apache.commons.cli.ParseException; import org.apache.commons.cli.ParseException;
import org.apache.commons.configuration.Configuration; import org.apache.commons.configuration.Configuration;
import java.util.Arrays;
/** /**
* An application that allows users to run admin commands against an Atlas server. * An application that allows users to run admin commands against an Atlas server.
...@@ -83,7 +85,7 @@ public class AtlasAdminClient { ...@@ -83,7 +85,7 @@ public class AtlasAdminClient {
System.out.println(atlasClient.getAdminStatus()); System.out.println(atlasClient.getAdminStatus());
cmdStatus = 0; cmdStatus = 0;
} catch (AtlasServiceException e) { } catch (AtlasServiceException e) {
System.err.println("Could not retrieve status of the server at " + atlasServerUri); System.err.println("Could not retrieve status of the server at " + Arrays.toString(atlasServerUri));
printStandardHttpErrorDetails(e); printStandardHttpErrorDetails(e);
} }
} else { } else {
......
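The AtlasAdminClient change fixes real log output, not just style: concatenating an array into a String invokes Object.toString(), which prints the array's type and identity hash rather than its contents, while Arrays.toString renders the elements. Demonstration with an illustrative array:

    import java.util.Arrays;

    public class ArrayToStringDemo {
        public static void main(String[] args) {
            String[] serverUris = { "http://localhost:21000", "http://backup:21000" };

            // Prints something like: [Ljava.lang.String;@1b6d3586 -- useless in a log.
            System.err.println("Could not reach " + serverUris);

            // Prints: [http://localhost:21000, http://backup:21000]
            System.err.println("Could not reach " + Arrays.toString(serverUris));
        }
    }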
...@@ -30,7 +30,6 @@ import org.apache.hadoop.security.ssl.SSLFactory; ...@@ -30,7 +30,6 @@ import org.apache.hadoop.security.ssl.SSLFactory;
import org.apache.hadoop.security.token.delegation.web.DelegationTokenAuthenticatedURL; import org.apache.hadoop.security.token.delegation.web.DelegationTokenAuthenticatedURL;
import org.apache.hadoop.security.token.delegation.web.DelegationTokenAuthenticator; import org.apache.hadoop.security.token.delegation.web.DelegationTokenAuthenticator;
import org.apache.hadoop.security.token.delegation.web.KerberosDelegationTokenAuthenticator; import org.apache.hadoop.security.token.delegation.web.KerberosDelegationTokenAuthenticator;
import org.apache.hadoop.security.token.delegation.web.PseudoDelegationTokenAuthenticator;
import org.slf4j.Logger; import org.slf4j.Logger;
import org.slf4j.LoggerFactory; import org.slf4j.LoggerFactory;
...@@ -56,7 +55,7 @@ import static org.apache.atlas.security.SecurityProperties.TRUSTSTORE_FILE_KEY; ...@@ -56,7 +55,7 @@ import static org.apache.atlas.security.SecurityProperties.TRUSTSTORE_FILE_KEY;
*/ */
public class SecureClientUtils { public class SecureClientUtils {
public final static int DEFAULT_SOCKET_TIMEOUT = 1 * 60 * 1000; // 1 minute public final static int DEFAULT_SOCKET_TIMEOUT_IN_MSECS = 1 * 60 * 1000; // 1 minute
private static final Logger LOG = LoggerFactory.getLogger(SecureClientUtils.class); private static final Logger LOG = LoggerFactory.getLogger(SecureClientUtils.class);
...@@ -120,14 +119,14 @@ public class SecureClientUtils { ...@@ -120,14 +119,14 @@ public class SecureClientUtils {
private final static ConnectionConfigurator DEFAULT_TIMEOUT_CONN_CONFIGURATOR = new ConnectionConfigurator() { private final static ConnectionConfigurator DEFAULT_TIMEOUT_CONN_CONFIGURATOR = new ConnectionConfigurator() {
@Override @Override
public HttpURLConnection configure(HttpURLConnection conn) throws IOException { public HttpURLConnection configure(HttpURLConnection conn) throws IOException {
setTimeouts(conn, DEFAULT_SOCKET_TIMEOUT); setTimeouts(conn, DEFAULT_SOCKET_TIMEOUT_IN_MSECS);
return conn; return conn;
} }
}; };
private static ConnectionConfigurator newConnConfigurator(Configuration conf) { private static ConnectionConfigurator newConnConfigurator(Configuration conf) {
try { try {
return newSslConnConfigurator(DEFAULT_SOCKET_TIMEOUT, conf); return newSslConnConfigurator(DEFAULT_SOCKET_TIMEOUT_IN_MSECS, conf);
} catch (Exception e) { } catch (Exception e) {
LOG.debug("Cannot load customized ssl related configuration. " + "Fallback to system-generic settings.", e); LOG.debug("Cannot load customized ssl related configuration. " + "Fallback to system-generic settings.", e);
return DEFAULT_TIMEOUT_CONN_CONFIGURATOR; return DEFAULT_TIMEOUT_CONN_CONFIGURATOR;
......
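Renaming DEFAULT_SOCKET_TIMEOUT to DEFAULT_SOCKET_TIMEOUT_IN_MSECS bakes the unit into the identifier, which is the cheapest guard against unit mix-ups at call sites that accept a bare int. A sketch of the convention, names invented:

    public class UnitInNameDemo {
        // The unit is part of the contract, so spell it out in the name.
        public static final int DEFAULT_SOCKET_TIMEOUT_IN_MSECS = 60 * 1000; // 1 minute

        static void connect(int timeoutInMsecs) {
            System.out.println("connecting with timeout " + timeoutInMsecs + " ms");
        }

        public static void main(String[] args) {
            connect(DEFAULT_SOCKET_TIMEOUT_IN_MSECS);
        }
    }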
...@@ -107,8 +107,7 @@ public class AtlasClientTest { ...@@ -107,8 +107,7 @@ public class AtlasClientTest {
private WebResource.Builder setupBuilder(AtlasClient.API api, WebResource webResource) { private WebResource.Builder setupBuilder(AtlasClient.API api, WebResource webResource) {
when(webResource.path(api.getPath())).thenReturn(service); when(webResource.path(api.getPath())).thenReturn(service);
WebResource.Builder builder = getBuilder(service); return getBuilder(service);
return builder;
} }
@Test @Test
......
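In setupBuilder the temporary builder variable carried no information, so the call result is returned directly. Inlining a local that is only returned removes a line without changing behavior; a tiny sketch:

    public class InlineReturnDemo {
        static String buildGreeting(String name) {
            // Before: String result = "hello " + name; return result;
            // After: return the expression directly.
            return "hello " + name;
        }

        public static void main(String[] args) {
            System.out.println(buildGreeting("atlas"));
        }
    }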
...@@ -32,7 +32,7 @@ public class FunctionCallExpression extends AbstractGroovyExpression { ...@@ -32,7 +32,7 @@ public class FunctionCallExpression extends AbstractGroovyExpression {
private GroovyExpression target; private GroovyExpression target;
private String functionName; private String functionName;
private List<GroovyExpression> arguments = new ArrayList<GroovyExpression>(); private List<GroovyExpression> arguments = new ArrayList<>();
public FunctionCallExpression(String functionName, List<? extends GroovyExpression> arguments) { public FunctionCallExpression(String functionName, List<? extends GroovyExpression> arguments) {
this.target = null; this.target = null;
......
...@@ -231,7 +231,7 @@ public final class InMemoryJAASConfiguration extends Configuration { ...@@ -231,7 +231,7 @@ public final class InMemoryJAASConfiguration extends Configuration {
String clientId = tokenizer.nextToken(); String clientId = tokenizer.nextToken();
SortedSet<Integer> indexList = jaasClients.get(clientId); SortedSet<Integer> indexList = jaasClients.get(clientId);
if (indexList == null) { if (indexList == null) {
indexList = new TreeSet<Integer>(); indexList = new TreeSet<>();
jaasClients.put(clientId, indexList); jaasClients.put(clientId, indexList);
} }
String indexStr = tokenizer.nextToken(); String indexStr = tokenizer.nextToken();
...@@ -275,20 +275,26 @@ public final class InMemoryJAASConfiguration extends Configuration { ...@@ -275,20 +275,26 @@ public final class InMemoryJAASConfiguration extends Configuration {
AppConfigurationEntry.LoginModuleControlFlag loginControlFlag = null; AppConfigurationEntry.LoginModuleControlFlag loginControlFlag = null;
if (controlFlag != null) { if (controlFlag != null) {
controlFlag = controlFlag.trim().toLowerCase(); controlFlag = controlFlag.trim().toLowerCase();
if (controlFlag.equals("optional")) { switch (controlFlag) {
case "optional":
loginControlFlag = AppConfigurationEntry.LoginModuleControlFlag.OPTIONAL; loginControlFlag = AppConfigurationEntry.LoginModuleControlFlag.OPTIONAL;
} else if (controlFlag.equals("requisite")) { break;
case "requisite":
loginControlFlag = AppConfigurationEntry.LoginModuleControlFlag.REQUISITE; loginControlFlag = AppConfigurationEntry.LoginModuleControlFlag.REQUISITE;
} else if (controlFlag.equals("sufficient")) { break;
case "sufficient":
loginControlFlag = AppConfigurationEntry.LoginModuleControlFlag.SUFFICIENT; loginControlFlag = AppConfigurationEntry.LoginModuleControlFlag.SUFFICIENT;
} else if (controlFlag.equals("required")) { break;
case "required":
loginControlFlag = AppConfigurationEntry.LoginModuleControlFlag.REQUIRED; loginControlFlag = AppConfigurationEntry.LoginModuleControlFlag.REQUIRED;
} else { break;
default:
String validValues = "optional|requisite|sufficient|required"; String validValues = "optional|requisite|sufficient|required";
LOG.warn("Unknown JAAS configuration value for (" + keyParam LOG.warn("Unknown JAAS configuration value for (" + keyParam
+ ") = [" + controlFlag + "], valid value are [" + validValues + ") = [" + controlFlag + "], valid value are [" + validValues
+ "] using the default value, REQUIRED"); + "] using the default value, REQUIRED");
loginControlFlag = AppConfigurationEntry.LoginModuleControlFlag.REQUIRED; loginControlFlag = AppConfigurationEntry.LoginModuleControlFlag.REQUIRED;
break;
} }
} else { } else {
LOG.warn("Unable to find JAAS configuration (" LOG.warn("Unable to find JAAS configuration ("
...@@ -336,7 +342,7 @@ public final class InMemoryJAASConfiguration extends Configuration { ...@@ -336,7 +342,7 @@ public final class InMemoryJAASConfiguration extends Configuration {
List<AppConfigurationEntry> retList = applicationConfigEntryMap.get(jaasClient); List<AppConfigurationEntry> retList = applicationConfigEntryMap.get(jaasClient);
if (retList == null) { if (retList == null) {
retList = new ArrayList<AppConfigurationEntry>(); retList = new ArrayList<>();
applicationConfigEntryMap.put(jaasClient, retList); applicationConfigEntryMap.put(jaasClient, retList);
} }
......
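The JAAS control-flag parsing swaps an equals chain for a switch on String, available since Java 7. String switches compare via hashCode then equals, so behavior matches the chain as long as the selector is non-null; here the surrounding controlFlag != null check preserves that, since switching on null would throw a NullPointerException. A condensed sketch of the same shape, with an invented enum:

    public class StringSwitchDemo {
        enum ControlFlag { OPTIONAL, REQUISITE, SUFFICIENT, REQUIRED }

        static ControlFlag parse(String flag) {
            if (flag == null) {
                return ControlFlag.REQUIRED;   // a null selector would NPE in switch
            }
            switch (flag.trim().toLowerCase()) {
                case "optional":   return ControlFlag.OPTIONAL;
                case "requisite":  return ControlFlag.REQUISITE;
                case "sufficient": return ControlFlag.SUFFICIENT;
                case "required":   return ControlFlag.REQUIRED;
                default:
                    System.err.println("Unknown control flag [" + flag
                            + "], defaulting to REQUIRED");
                    return ControlFlag.REQUIRED;
            }
        }

        public static void main(String[] args) {
            System.out.println(parse("Sufficient")); // SUFFICIENT
            System.out.println(parse("bogus"));      // REQUIRED, with a warning
        }
    }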
...@@ -46,13 +46,7 @@ public final class AuthenticationUtil { ...@@ -46,13 +46,7 @@ public final class AuthenticationUtil {
} }
public static boolean isKerberosAuthenticationEnabled(Configuration atlasConf) { public static boolean isKerberosAuthenticationEnabled(Configuration atlasConf) {
boolean isKerberosAuthenticationEnabled; return atlasConf.getBoolean("atlas.authentication.method.kerberos", false);
if ("true".equalsIgnoreCase(atlasConf.getString("atlas.authentication.method.kerberos"))) {
isKerberosAuthenticationEnabled = true;
} else {
isKerberosAuthenticationEnabled = false;
}
return isKerberosAuthenticationEnabled;
} }
public static String[] getBasicAuthenticationInput() { public static String[] getBasicAuthenticationInput() {
......
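The isKerberosAuthenticationEnabled rewrite collapses an if/else whose only job was assigning a boolean into a single return, and leans on Commons Configuration's getBoolean(key, defaultValue) instead of string comparison; both versions default to false when the property is absent. The general pattern, sketched here with java.util.Properties rather than Commons Configuration:

    import java.util.Properties;

    public class BooleanReturnDemo {
        static final Properties CONF = new Properties();

        // Before: branch just to assign a boolean, then return the variable.
        static boolean isEnabledVerbose() {
            boolean enabled;
            if ("true".equalsIgnoreCase(CONF.getProperty("feature.enabled"))) {
                enabled = true;
            } else {
                enabled = false;
            }
            return enabled;
        }

        // After: return the boolean expression directly.
        static boolean isEnabled() {
            return Boolean.parseBoolean(CONF.getProperty("feature.enabled", "false"));
        }

        public static void main(String[] args) {
            CONF.setProperty("feature.enabled", "TRUE"); // parseBoolean is case-insensitive
            System.out.println(isEnabledVerbose() + " " + isEnabled()); // true true
        }
    }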
...@@ -32,9 +32,9 @@ import org.springframework.beans.factory.config.PropertyPlaceholderConfigurer; ...@@ -32,9 +32,9 @@ import org.springframework.beans.factory.config.PropertyPlaceholderConfigurer;
* Util class for Properties. * Util class for Properties.
*/ */
public final class PropertiesUtil extends PropertyPlaceholderConfigurer { public final class PropertiesUtil extends PropertyPlaceholderConfigurer {
private static Map<String, String> propertiesMap = new HashMap<String, String>(); private static Map<String, String> propertiesMap = new HashMap<>();
private static Logger logger = Logger.getLogger(PropertiesUtil.class); private static Logger logger = Logger.getLogger(PropertiesUtil.class);
protected List<String> xmlPropertyConfigurer = new ArrayList<String>(); protected List<String> xmlPropertyConfigurer = new ArrayList<>();
private PropertiesUtil() { private PropertiesUtil() {
......
...@@ -103,11 +103,11 @@ define(['require', ...@@ -103,11 +103,11 @@ define(['require',
this.tagsCollection(); this.tagsCollection();
}, this); }, this);
this.listenTo(this.commonCollection, 'reset', function() { this.listenTo(this.commonCollection, 'reset', function() {
--this.asyncAttrFetchCounter --this.asyncAttrFetchCounter;
this.subAttributeData(); this.subAttributeData();
}, this); }, this);
this.listenTo(this.commonCollection, 'error', function() { this.listenTo(this.commonCollection, 'error', function() {
--this.asyncAttrFetchCounter --this.asyncAttrFetchCounter;
this.$('.attrLoader').hide(); this.$('.attrLoader').hide();
}, this); }, this);
}, },
......
...@@ -50,7 +50,7 @@ public interface AtlasGraphQuery<V, E> { ...@@ -50,7 +50,7 @@ public interface AtlasGraphQuery<V, E> {
* @param value * @param value
* @return * @return
*/ */
AtlasGraphQuery<V, E> in(String propertyKey, Collection<? extends Object> values); AtlasGraphQuery<V, E> in(String propertyKey, Collection<?> values);
/** /**
......
...@@ -41,7 +41,7 @@ public interface AtlasIndexQuery<V, E> { ...@@ -41,7 +41,7 @@ public interface AtlasIndexQuery<V, E> {
* @param <V> * @param <V>
* @param <E> * @param <E>
*/ */
public interface Result<V, E> { interface Result<V, E> {
/** /**
* Gets the vertex for this result. * Gets the vertex for this result.
......
...@@ -46,7 +46,7 @@ public interface NativeTitanGraphQuery<V, E> { ...@@ -46,7 +46,7 @@ public interface NativeTitanGraphQuery<V, E> {
* @param propertyName * @param propertyName
* @param values * @param values
*/ */
void in(String propertyName, Collection<? extends Object> values); void in(String propertyName, Collection<?> values);
/** /**
* Adds a has condition to the query. * Adds a has condition to the query.
......
...@@ -144,7 +144,7 @@ public abstract class TitanGraphQuery<V, E> implements AtlasGraphQuery<V, E> { ...@@ -144,7 +144,7 @@ public abstract class TitanGraphQuery<V, E> implements AtlasGraphQuery<V, E> {
@Override @Override
public AtlasGraphQuery<V, E> in(String propertyKey, Collection<? extends Object> values) { public AtlasGraphQuery<V, E> in(String propertyKey, Collection<?> values) {
queryCondition.andWith(new InPredicate(propertyKey, values)); queryCondition.andWith(new InPredicate(propertyKey, values));
return this; return this;
} }
......
...@@ -28,9 +28,9 @@ import org.apache.atlas.repository.graphdb.titan.query.NativeTitanGraphQuery; ...@@ -28,9 +28,9 @@ import org.apache.atlas.repository.graphdb.titan.query.NativeTitanGraphQuery;
public class InPredicate implements QueryPredicate { public class InPredicate implements QueryPredicate {
private String propertyName; private String propertyName;
private Collection<? extends Object> values; private Collection<?> values;
public InPredicate(String propertyName, Collection<? extends Object> values) { public InPredicate(String propertyName, Collection<?> values) {
super(); super();
this.propertyName = propertyName; this.propertyName = propertyName;
this.values = values; this.values = values;
......
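Collection<? extends Object> and Collection<?> are the same type: every reference type argument is a subtype of Object, so the bound adds nothing and the unbounded wildcard is the idiomatic spelling, as applied here across AtlasGraphQuery, NativeTitanGraphQuery, TitanGraphQuery, and InPredicate. Sketch:

    import java.util.Arrays;
    import java.util.Collection;

    public class WildcardDemo {
        // Identical signature to: static void printAll(Collection<? extends Object> values)
        static void printAll(Collection<?> values) {
            for (Object v : values) {   // elements of any Collection<?> are Objects
                System.out.println(v);
            }
        }

        public static void main(String[] args) {
            printAll(Arrays.asList("a", "b"));
            printAll(Arrays.asList(1, 2, 3));
        }
    }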
...@@ -43,7 +43,7 @@ public class OrCondition { ...@@ -43,7 +43,7 @@ public class OrCondition {
} }
public OrCondition(boolean addInitialTerm) { public OrCondition(boolean addInitialTerm) {
this.children = new ArrayList<AndCondition>(); this.children = new ArrayList<>();
if (addInitialTerm) { if (addInitialTerm) {
children.add(new AndCondition()); children.add(new AndCondition());
} }
...@@ -96,7 +96,7 @@ public class OrCondition { ...@@ -96,7 +96,7 @@ public class OrCondition {
//it creates a new AndCondition that combines the two AndConditions together. These combined //it creates a new AndCondition that combines the two AndConditions together. These combined
//AndConditions become the new set of AndConditions in this OrCondition. //AndConditions become the new set of AndConditions in this OrCondition.
List<AndCondition> expandedExpressionChildren = new ArrayList<AndCondition>(); List<AndCondition> expandedExpressionChildren = new ArrayList<>();
for (AndCondition otherExprTerm : other.getAndTerms()) { for (AndCondition otherExprTerm : other.getAndTerms()) {
for (AndCondition currentExpr : children) { for (AndCondition currentExpr : children) {
AndCondition currentAndConditionCopy = currentExpr.copy(); AndCondition currentAndConditionCopy = currentExpr.copy();
......
...@@ -36,7 +36,7 @@ public interface HBaseCompat { ...@@ -36,7 +36,7 @@ public interface HBaseCompat {
* @param algo * @param algo
* compression type to use * compression type to use
*/ */
public void setCompression(HColumnDescriptor cd, String algo); void setCompression(HColumnDescriptor cd, String algo);
/** /**
* Create and return a HTableDescriptor instance with the given name. The * Create and return a HTableDescriptor instance with the given name. The
...@@ -50,7 +50,7 @@ public interface HBaseCompat { ...@@ -50,7 +50,7 @@ public interface HBaseCompat {
* HBase table name * HBase table name
* @return a new table descriptor instance * @return a new table descriptor instance
*/ */
public HTableDescriptor newTableDescriptor(String tableName); HTableDescriptor newTableDescriptor(String tableName);
ConnectionMask createConnection(Configuration conf) throws IOException; ConnectionMask createConnection(Configuration conf) throws IOException;
......
...@@ -67,11 +67,7 @@ public class HBaseCompatLoader { ...@@ -67,11 +67,7 @@ public class HBaseCompatLoader {
try { try {
compat = (HBaseCompat)Class.forName(className).newInstance(); compat = (HBaseCompat)Class.forName(className).newInstance();
log.info("Instantiated HBase compatibility layer {}: {}", classNameSource, compat.getClass().getCanonicalName()); log.info("Instantiated HBase compatibility layer {}: {}", classNameSource, compat.getClass().getCanonicalName());
} catch (IllegalAccessException e) { } catch (IllegalAccessException | ClassNotFoundException | InstantiationException e) {
throw new RuntimeException(e.getClass().getSimpleName() + errTemplate, e);
} catch (InstantiationException e) {
throw new RuntimeException(e.getClass().getSimpleName() + errTemplate, e);
} catch (ClassNotFoundException e) {
throw new RuntimeException(e.getClass().getSimpleName() + errTemplate, e); throw new RuntimeException(e.getClass().getSimpleName() + errTemplate, e);
} }
......
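Java 7 multi-catch folds the three identical handlers in HBaseCompatLoader into one; the caught variable is implicitly final and its static type is the least upper bound of the listed exception types. A sketch of the same reflective-instantiation shape (Class.newInstance() is deprecated since Java 9 but shown to mirror the original):

    public class MultiCatchDemo {
        static Object instantiate(String className) {
            try {
                // Reflective construction can fail in several unrelated ways...
                return Class.forName(className).newInstance();
            } catch (ClassNotFoundException | InstantiationException | IllegalAccessException e) {
                // ...but the handling is identical, so one multi-catch suffices.
                throw new RuntimeException(e.getClass().getSimpleName()
                        + " while instantiating " + className, e);
            }
        }

        public static void main(String[] args) {
            System.out.println(instantiate("java.lang.StringBuilder"));
            try {
                instantiate("no.such.Class");
            } catch (RuntimeException e) {
                System.out.println(e.getMessage());
            }
        }
    }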
...@@ -48,7 +48,6 @@ import javax.annotation.Nullable; ...@@ -48,7 +48,6 @@ import javax.annotation.Nullable;
import java.io.Closeable; import java.io.Closeable;
import java.io.IOException; import java.io.IOException;
import java.util.*; import java.util.*;
import java.util.Map;
import java.util.concurrent.TimeUnit; import java.util.concurrent.TimeUnit;
/** /**
...@@ -151,7 +150,7 @@ public class HBaseKeyColumnValueStore implements KeyColumnValueStore { ...@@ -151,7 +150,7 @@ public class HBaseKeyColumnValueStore implements KeyColumnValueStore {
void handleLockFailure(StoreTransaction txh, KeyColumn lockID, int trialCount) throws PermanentLockingException { void handleLockFailure(StoreTransaction txh, KeyColumn lockID, int trialCount) throws PermanentLockingException {
if (trialCount < lockMaxRetries) { if (trialCount < lockMaxRetries) {
try { try {
Thread.sleep(lockMaxWaitTimeMs.getLength(TimeUnit.DAYS.MILLISECONDS)); Thread.sleep(lockMaxWaitTimeMs.getLength(TimeUnit.MILLISECONDS));
} catch (InterruptedException e) { } catch (InterruptedException e) {
throw new PermanentLockingException( throw new PermanentLockingException(
"Interrupted while waiting for acquiring lock for transaction " "Interrupted while waiting for acquiring lock for transaction "
...@@ -199,7 +198,7 @@ public class HBaseKeyColumnValueStore implements KeyColumnValueStore { ...@@ -199,7 +198,7 @@ public class HBaseKeyColumnValueStore implements KeyColumnValueStore {
} }
private Map<StaticBuffer,EntryList> getHelper(List<StaticBuffer> keys, Filter getFilter) throws BackendException { private Map<StaticBuffer,EntryList> getHelper(List<StaticBuffer> keys, Filter getFilter) throws BackendException {
List<Get> requests = new ArrayList<Get>(keys.size()); List<Get> requests = new ArrayList<>(keys.size());
{ {
for (StaticBuffer key : keys) { for (StaticBuffer key : keys) {
Get g = new Get(key.as(StaticBuffer.ARRAY_FACTORY)).addFamily(columnFamilyBytes).setFilter(getFilter); Get g = new Get(key.as(StaticBuffer.ARRAY_FACTORY)).addFamily(columnFamilyBytes).setFilter(getFilter);
...@@ -212,7 +211,7 @@ public class HBaseKeyColumnValueStore implements KeyColumnValueStore { ...@@ -212,7 +211,7 @@ public class HBaseKeyColumnValueStore implements KeyColumnValueStore {
} }
} }
Map<StaticBuffer,EntryList> resultMap = new HashMap<StaticBuffer,EntryList>(keys.size()); Map<StaticBuffer,EntryList> resultMap = new HashMap<>(keys.size());
try { try {
TableMask table = null; TableMask table = null;
...@@ -336,7 +335,7 @@ public class HBaseKeyColumnValueStore implements KeyColumnValueStore { ...@@ -336,7 +335,7 @@ public class HBaseKeyColumnValueStore implements KeyColumnValueStore {
@Override @Override
public boolean hasNext() { public boolean hasNext() {
ensureOpen(); ensureOpen();
return kv == null ? false : kv.hasNext(); return kv != null && kv.hasNext();
} }
@Override @Override
......
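A conditional expression with a boolean literal arm always reduces to plain boolean logic: kv == null ? false : kv.hasNext() is exactly kv != null && kv.hasNext(), with the same short-circuit guarantee that hasNext() is never invoked on null. Sketch:

    import java.util.Arrays;
    import java.util.Iterator;

    public class TernaryToBooleanDemo {
        static boolean hasNext(Iterator<?> kv) {
            // Before: return kv == null ? false : kv.hasNext();
            // After: identical semantics, no literal-valued ternary.
            return kv != null && kv.hasNext();
        }

        public static void main(String[] args) {
            System.out.println(hasNext(null));                        // false
            System.out.println(hasNext(Arrays.asList(1).iterator())); // true
        }
    }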
...@@ -99,21 +99,21 @@ public class HBaseStoreManager extends DistributedStoreManager implements KeyCol ...@@ -99,21 +99,21 @@ public class HBaseStoreManager extends DistributedStoreManager implements KeyCol
new ConfigNamespace(GraphDatabaseConfiguration.STORAGE_NS, "hbase", "HBase storage options"); new ConfigNamespace(GraphDatabaseConfiguration.STORAGE_NS, "hbase", "HBase storage options");
public static final ConfigOption<Boolean> SHORT_CF_NAMES = public static final ConfigOption<Boolean> SHORT_CF_NAMES =
new ConfigOption<Boolean>(HBASE_NS, "short-cf-names", new ConfigOption<>(HBASE_NS, "short-cf-names",
"Whether to shorten the names of Titan's column families to one-character mnemonics " + "Whether to shorten the names of Titan's column families to one-character mnemonics " +
"to conserve storage space", ConfigOption.Type.FIXED, true); "to conserve storage space", ConfigOption.Type.FIXED, true);
public static final String COMPRESSION_DEFAULT = "-DEFAULT-"; public static final String COMPRESSION_DEFAULT = "-DEFAULT-";
public static final ConfigOption<String> COMPRESSION = public static final ConfigOption<String> COMPRESSION =
new ConfigOption<String>(HBASE_NS, "compression-algorithm", new ConfigOption<>(HBASE_NS, "compression-algorithm",
"An HBase Compression.Algorithm enum string which will be applied to newly created column families. " + "An HBase Compression.Algorithm enum string which will be applied to newly created column families. " +
"The compression algorithm must be installed and available on the HBase cluster. Titan cannot install " + "The compression algorithm must be installed and available on the HBase cluster. Titan cannot install " +
"and configure new compression algorithms on the HBase cluster by itself.", "and configure new compression algorithms on the HBase cluster by itself.",
ConfigOption.Type.MASKABLE, "GZ"); ConfigOption.Type.MASKABLE, "GZ");
public static final ConfigOption<Boolean> SKIP_SCHEMA_CHECK = public static final ConfigOption<Boolean> SKIP_SCHEMA_CHECK =
new ConfigOption<Boolean>(HBASE_NS, "skip-schema-check", new ConfigOption<>(HBASE_NS, "skip-schema-check",
"Assume that Titan's HBase table and column families already exist. " + "Assume that Titan's HBase table and column families already exist. " +
"When this is true, Titan will not check for the existence of its table/CFs, " + "When this is true, Titan will not check for the existence of its table/CFs, " +
"nor will it attempt to create them under any circumstances. This is useful " + "nor will it attempt to create them under any circumstances. This is useful " +
...@@ -121,7 +121,7 @@ public class HBaseStoreManager extends DistributedStoreManager implements KeyCol ...@@ -121,7 +121,7 @@ public class HBaseStoreManager extends DistributedStoreManager implements KeyCol
ConfigOption.Type.MASKABLE, false); ConfigOption.Type.MASKABLE, false);
public static final ConfigOption<String> HBASE_TABLE = public static final ConfigOption<String> HBASE_TABLE =
new ConfigOption<String>(HBASE_NS, "table", new ConfigOption<>(HBASE_NS, "table",
"The name of the table Titan will use. When " + ConfigElement.getPath(SKIP_SCHEMA_CHECK) + "The name of the table Titan will use. When " + ConfigElement.getPath(SKIP_SCHEMA_CHECK) +
" is false, Titan will automatically create this table if it does not already exist.", " is false, Titan will automatically create this table if it does not already exist.",
ConfigOption.Type.LOCAL, "titan"); ConfigOption.Type.LOCAL, "titan");
...@@ -139,7 +139,7 @@ public class HBaseStoreManager extends DistributedStoreManager implements KeyCol ...@@ -139,7 +139,7 @@ public class HBaseStoreManager extends DistributedStoreManager implements KeyCol
* Titan connects to an HBase backend for the first time. * Titan connects to an HBase backend for the first time.
*/ */
public static final ConfigOption<Integer> REGION_COUNT = public static final ConfigOption<Integer> REGION_COUNT =
new ConfigOption<Integer>(HBASE_NS, "region-count", new ConfigOption<>(HBASE_NS, "region-count",
"The number of initial regions set when creating Titan's HBase table", "The number of initial regions set when creating Titan's HBase table",
ConfigOption.Type.MASKABLE, Integer.class, new Predicate<Integer>() { ConfigOption.Type.MASKABLE, Integer.class, new Predicate<Integer>() {
@Override @Override
...@@ -183,7 +183,7 @@ public class HBaseStoreManager extends DistributedStoreManager implements KeyCol ...@@ -183,7 +183,7 @@ public class HBaseStoreManager extends DistributedStoreManager implements KeyCol
* These considerations may differ for other HBase implementations (e.g. MapR). * These considerations may differ for other HBase implementations (e.g. MapR).
*/ */
public static final ConfigOption<Integer> REGIONS_PER_SERVER = public static final ConfigOption<Integer> REGIONS_PER_SERVER =
new ConfigOption<Integer>(HBASE_NS, "regions-per-server", new ConfigOption<>(HBASE_NS, "regions-per-server",
"The number of regions per regionserver to set when creating Titan's HBase table", "The number of regions per regionserver to set when creating Titan's HBase table",
ConfigOption.Type.MASKABLE, Integer.class); ConfigOption.Type.MASKABLE, Integer.class);
...@@ -217,7 +217,7 @@ public class HBaseStoreManager extends DistributedStoreManager implements KeyCol ...@@ -217,7 +217,7 @@ public class HBaseStoreManager extends DistributedStoreManager implements KeyCol
* *
*/ */
public static final ConfigOption<String> COMPAT_CLASS = public static final ConfigOption<String> COMPAT_CLASS =
new ConfigOption<String>(HBASE_NS, "compat-class", new ConfigOption<>(HBASE_NS, "compat-class",
"The package and class name of the HBaseCompat implementation. HBaseCompat masks version-specific HBase API differences. " + "The package and class name of the HBaseCompat implementation. HBaseCompat masks version-specific HBase API differences. " +
"When this option is unset, Titan calls HBase's VersionInfo.getVersion() and loads the matching compat class " + "When this option is unset, Titan calls HBase's VersionInfo.getVersion() and loads the matching compat class " +
"at runtime. Setting this option forces Titan to instead reflectively load and instantiate the specified class.", "at runtime. Setting this option forces Titan to instead reflectively load and instantiate the specified class.",
...@@ -266,7 +266,7 @@ public class HBaseStoreManager extends DistributedStoreManager implements KeyCol ...@@ -266,7 +266,7 @@ public class HBaseStoreManager extends DistributedStoreManager implements KeyCol
private final HBaseCompat compat; private final HBaseCompat compat;
private static final ConcurrentHashMap<HBaseStoreManager, Throwable> openManagers = private static final ConcurrentHashMap<HBaseStoreManager, Throwable> openManagers =
new ConcurrentHashMap<HBaseStoreManager, Throwable>(); new ConcurrentHashMap<>();
// Mutable instance state // Mutable instance state
private final ConcurrentMap<String, HBaseKeyColumnValueStore> openStores; private final ConcurrentMap<String, HBaseKeyColumnValueStore> openStores;
...@@ -342,7 +342,7 @@ public class HBaseStoreManager extends DistributedStoreManager implements KeyCol ...@@ -342,7 +342,7 @@ public class HBaseStoreManager extends DistributedStoreManager implements KeyCol
} }
logger.debug("End of HBase config key=value pairs"); logger.debug("End of HBase config key=value pairs");
openStores = new ConcurrentHashMap<String, HBaseKeyColumnValueStore>(); openStores = new ConcurrentHashMap<>();
} }
@Override @Override
...@@ -420,7 +420,7 @@ public class HBaseStoreManager extends DistributedStoreManager implements KeyCol ...@@ -420,7 +420,7 @@ public class HBaseStoreManager extends DistributedStoreManager implements KeyCol
commitTime.getAdditionTime(times.getUnit()), commitTime.getAdditionTime(times.getUnit()),
commitTime.getDeletionTime(times.getUnit())); commitTime.getDeletionTime(times.getUnit()));
List<Row> batch = new ArrayList<Row>(commandsPerKey.size()); // actual batch operation List<Row> batch = new ArrayList<>(commandsPerKey.size()); // actual batch operation
// convert sorted commands into representation required for 'batch' operation // convert sorted commands into representation required for 'batch' operation
for (Pair<Put, Delete> commands : commandsPerKey.values()) { for (Pair<Put, Delete> commands : commandsPerKey.values()) {
...@@ -442,9 +442,7 @@ public class HBaseStoreManager extends DistributedStoreManager implements KeyCol ...@@ -442,9 +442,7 @@ public class HBaseStoreManager extends DistributedStoreManager implements KeyCol
} finally { } finally {
IOUtils.closeQuietly(table); IOUtils.closeQuietly(table);
} }
} catch (IOException e) { } catch (IOException | InterruptedException e) {
throw new TemporaryBackendException(e);
} catch (InterruptedException e) {
throw new TemporaryBackendException(e); throw new TemporaryBackendException(e);
} }
...@@ -466,7 +464,7 @@ public class HBaseStoreManager extends DistributedStoreManager implements KeyCol ...@@ -466,7 +464,7 @@ public class HBaseStoreManager extends DistributedStoreManager implements KeyCol
final String cfName = shortCfNames ? shortenCfName(longName) : longName; final String cfName = shortCfNames ? shortenCfName(longName) : longName;
final String llmPrefix = getName(); final String llmPrefix = getName();
llm = LocalLockMediators.INSTANCE.<StoreTransaction>get(llmPrefix, times); llm = LocalLockMediators.INSTANCE.get(llmPrefix, times);
HBaseKeyColumnValueStore newStore = new HBaseKeyColumnValueStore(this, cnx, tableName, cfName, longName, llm); HBaseKeyColumnValueStore newStore = new HBaseKeyColumnValueStore(this, cnx, tableName, cfName, longName, llm);
store = openStores.putIfAbsent(longName, newStore); // nothing bad happens if we loose to other thread store = openStores.putIfAbsent(longName, newStore); // nothing bad happens if we loose to other thread
...@@ -511,7 +509,7 @@ public class HBaseStoreManager extends DistributedStoreManager implements KeyCol ...@@ -511,7 +509,7 @@ public class HBaseStoreManager extends DistributedStoreManager implements KeyCol
@Override @Override
public List<KeyRange> getLocalKeyPartition() throws BackendException { public List<KeyRange> getLocalKeyPartition() throws BackendException {
List<KeyRange> result = new LinkedList<KeyRange>(); List<KeyRange> result = new LinkedList<>();
TableMask table = null; TableMask table = null;
try { try {
...@@ -645,7 +643,7 @@ public class HBaseStoreManager extends DistributedStoreManager implements KeyCol ...@@ -645,7 +643,7 @@ public class HBaseStoreManager extends DistributedStoreManager implements KeyCol
} }
// Require either no null key bounds or a pair of them // Require either no null key bounds or a pair of them
Preconditions.checkState(!(null == nullStart ^ null == nullEnd)); Preconditions.checkState((null == nullStart) == (null == nullEnd));
// Check that every key in the result is at least 4 bytes long // Check that every key in the result is at least 4 bytes long
Map<KeyRange, ServerName> result = b.build(); Map<KeyRange, ServerName> result = b.build();
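The precondition rewrite above rests on a small logic identity: for booleans, !(a ^ b) is exactly a == b, so "either both key bounds are null or neither is" reads more directly as an equality of the two null checks. A truth-table check:

    public class XorEqualityDemo {
        public static void main(String[] args) {
            boolean[] vals = { false, true };
            for (boolean a : vals) {
                for (boolean b : vals) {
                    // !(a ^ b) and a == b agree on every input.
                    System.out.printf("a=%-5s b=%-5s !(a^b)=%-5s a==b=%s%n",
                            a, b, !(a ^ b), a == b);
                }
            }
        }
    }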
...@@ -675,8 +673,7 @@ public class HBaseStoreManager extends DistributedStoreManager implements KeyCol ...@@ -675,8 +673,7 @@ public class HBaseStoreManager extends DistributedStoreManager implements KeyCol
byte padded[] = new byte[targetLength]; byte padded[] = new byte[targetLength];
for (int i = 0; i < dataToPad.length; i++) System.arraycopy(dataToPad, 0, padded, 0, dataToPad.length);
padded[i] = dataToPad[i];
for (int i = dataToPad.length; i < padded.length; i++) for (int i = dataToPad.length; i < padded.length; i++)
padded[i] = (byte)0; padded[i] = (byte)0;
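The element-by-element copy becomes System.arraycopy, which copies the prefix in one intrinsic call; note also that the retained zero-fill of the tail is arguably redundant in Java, since a freshly allocated byte array is already zero-initialized. Sketch:

    import java.util.Arrays;

    public class ArrayCopyDemo {
        static byte[] pad(byte[] dataToPad, int targetLength) {
            byte[] padded = new byte[targetLength];   // already all zeros in Java

            // Before: for (int i = 0; i < dataToPad.length; i++) padded[i] = dataToPad[i];
            System.arraycopy(dataToPad, 0, padded, 0, dataToPad.length);

            // An explicit zero-fill of the tail would be a no-op here.
            return padded;
        }

        public static void main(String[] args) {
            System.out.println(Arrays.toString(pad(new byte[] { 1, 2, 3 }, 6)));
            // [1, 2, 3, 0, 0, 0]
        }
    }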
...@@ -856,7 +853,7 @@ public class HBaseStoreManager extends DistributedStoreManager implements KeyCol ...@@ -856,7 +853,7 @@ public class HBaseStoreManager extends DistributedStoreManager implements KeyCol
private Map<StaticBuffer, Pair<Put, Delete>> convertToCommands(Map<String, Map<StaticBuffer, KCVMutation>> mutations, private Map<StaticBuffer, Pair<Put, Delete>> convertToCommands(Map<String, Map<StaticBuffer, KCVMutation>> mutations,
final long putTimestamp, final long putTimestamp,
final long delTimestamp) throws PermanentBackendException { final long delTimestamp) throws PermanentBackendException {
Map<StaticBuffer, Pair<Put, Delete>> commandsPerKey = new HashMap<StaticBuffer, Pair<Put, Delete>>(); Map<StaticBuffer, Pair<Put, Delete>> commandsPerKey = new HashMap<>();
for (Map.Entry<String, Map<StaticBuffer, KCVMutation>> entry : mutations.entrySet()) { for (Map.Entry<String, Map<StaticBuffer, KCVMutation>> entry : mutations.entrySet()) {
...@@ -870,7 +867,7 @@ public class HBaseStoreManager extends DistributedStoreManager implements KeyCol ...@@ -870,7 +867,7 @@ public class HBaseStoreManager extends DistributedStoreManager implements KeyCol
Pair<Put, Delete> commands = commandsPerKey.get(m.getKey()); Pair<Put, Delete> commands = commandsPerKey.get(m.getKey());
if (commands == null) { if (commands == null) {
commands = new Pair<Put, Delete>(); commands = new Pair<>();
commandsPerKey.put(m.getKey(), commands); commandsPerKey.put(m.getKey(), commands);
} }
...@@ -928,7 +925,7 @@ public class HBaseStoreManager extends DistributedStoreManager implements KeyCol ...@@ -928,7 +925,7 @@ public class HBaseStoreManager extends DistributedStoreManager implements KeyCol
* Similar to {@link Function}, except that the {@code apply} method is allowed * Similar to {@link Function}, except that the {@code apply} method is allowed
* to throw {@link BackendException}. * to throw {@link BackendException}.
*/ */
private static interface BackendFunction<F, T> { private interface BackendFunction<F, T> {
T apply(F input) throws BackendException; T apply(F input) throws BackendException;
} }
......
...@@ -77,7 +77,7 @@ public class LocalLockMediator<T> { ...@@ -77,7 +77,7 @@ public class LocalLockMediator<T> {
* according to {@link AuditRecord#expires}, in which case the lock should * according to {@link AuditRecord#expires}, in which case the lock should
* be considered invalid. * be considered invalid.
*/ */
private final ConcurrentHashMap<KeyColumn, AuditRecord<T>> locks = new ConcurrentHashMap<KeyColumn, AuditRecord<T>>(); private final ConcurrentHashMap<KeyColumn, AuditRecord<T>> locks = new ConcurrentHashMap<>();
public LocalLockMediator(String name, TimestampProvider times) { public LocalLockMediator(String name, TimestampProvider times) {
this.name = name; this.name = name;
...@@ -125,7 +125,7 @@ public class LocalLockMediator<T> { ...@@ -125,7 +125,7 @@ public class LocalLockMediator<T> {
assert null != kc; assert null != kc;
assert null != requestor; assert null != requestor;
AuditRecord<T> audit = new AuditRecord<T>(requestor, expires); AuditRecord<T> audit = new AuditRecord<>(requestor, expires);
AuditRecord<T> inmap = locks.putIfAbsent(kc, audit); AuditRecord<T> inmap = locks.putIfAbsent(kc, audit);
boolean success = false; boolean success = false;
...@@ -134,7 +134,7 @@ public class LocalLockMediator<T> { ...@@ -134,7 +134,7 @@ public class LocalLockMediator<T> {
// Uncontended lock succeeded // Uncontended lock succeeded
if (log.isTraceEnabled()) { if (log.isTraceEnabled()) {
log.trace("New local lock created: {} namespace={} txn={}", log.trace("New local lock created: {} namespace={} txn={}",
new Object[]{kc, name, requestor}); kc, name, requestor);
} }
success = true; success = true;
} else if (inmap.equals(audit)) { } else if (inmap.equals(audit)) {
...@@ -144,13 +144,13 @@ public class LocalLockMediator<T> { ...@@ -144,13 +144,13 @@ public class LocalLockMediator<T> {
if (success) { if (success) {
log.trace( log.trace(
"Updated local lock expiration: {} namespace={} txn={} oldexp={} newexp={}", "Updated local lock expiration: {} namespace={} txn={} oldexp={} newexp={}",
new Object[]{kc, name, requestor, inmap.expires, kc, name, requestor, inmap.expires,
audit.expires}); audit.expires);
} else { } else {
log.trace( log.trace(
"Failed to update local lock expiration: {} namespace={} txn={} oldexp={} newexp={}", "Failed to update local lock expiration: {} namespace={} txn={} oldexp={} newexp={}",
new Object[]{kc, name, requestor, inmap.expires, kc, name, requestor, inmap.expires,
audit.expires}); audit.expires);
} }
} }
} else if (0 > inmap.expires.compareTo(times.getTime())) { } else if (0 > inmap.expires.compareTo(times.getTime())) {
...@@ -159,14 +159,14 @@ public class LocalLockMediator<T> { ...@@ -159,14 +159,14 @@ public class LocalLockMediator<T> {
if (log.isTraceEnabled()) { if (log.isTraceEnabled()) {
log.trace( log.trace(
"Discarding expired lock: {} namespace={} txn={} expired={}", "Discarding expired lock: {} namespace={} txn={} expired={}",
new Object[]{kc, name, inmap.holder, inmap.expires}); kc, name, inmap.holder, inmap.expires);
} }
} else { } else {
// we lost to a valid lock // we lost to a valid lock
if (log.isTraceEnabled()) { if (log.isTraceEnabled()) {
log.trace( log.trace(
"Local lock failed: {} namespace={} txn={} (already owned by {})", "Local lock failed: {} namespace={} txn={} (already owned by {})",
new Object[]{kc, name, requestor, inmap}); kc, name, requestor, inmap);
} }
} }
...@@ -190,13 +190,13 @@ public class LocalLockMediator<T> { ...@@ -190,13 +190,13 @@ public class LocalLockMediator<T> {
return false; return false;
} }
AuditRecord<T> unlocker = new AuditRecord<T>(requestor, null); AuditRecord<T> unlocker = new AuditRecord<>(requestor, null);
AuditRecord<T> holder = locks.get(kc); AuditRecord<T> holder = locks.get(kc);
if (!holder.equals(unlocker)) { if (!holder.equals(unlocker)) {
log.error("Local unlock of {} by {} failed: it is held by {}", log.error("Local unlock of {} by {} failed: it is held by {}",
new Object[]{kc, unlocker, holder}); kc, unlocker, holder);
return false; return false;
} }
...@@ -206,7 +206,7 @@ public class LocalLockMediator<T> { ...@@ -206,7 +206,7 @@ public class LocalLockMediator<T> {
expiryQueue.remove(kc); expiryQueue.remove(kc);
if (log.isTraceEnabled()) { if (log.isTraceEnabled()) {
log.trace("Local unlock succeeded: {} namespace={} txn={}", log.trace("Local unlock succeeded: {} namespace={} txn={}",
new Object[]{kc, name, requestor}); kc, name, requestor);
} }
} else { } else {
log.warn("Local unlock warning: lock record for {} disappeared " log.warn("Local unlock warning: lock record for {} disappeared "
......
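The LocalLockMediator changes all drop the explicit new Object[]{...} wrapper: since SLF4J 1.7 the Logger methods are declared with Object... varargs, so the compiler builds the array, and calls with one or two arguments avoid it entirely via dedicated overloads. A sketch (requires an SLF4J binding on the classpath to actually print; the message and arguments are illustrative):

    import org.slf4j.Logger;
    import org.slf4j.LoggerFactory;

    public class Slf4jVarargsDemo {
        private static final Logger LOG = LoggerFactory.getLogger(Slf4jVarargsDemo.class);

        public static void main(String[] args) {
            String kc = "key-1", ns = "titan", txn = "tx-42";

            // Pre-1.7 style: three or more placeholders needed an explicit Object[].
            LOG.info("lock created: {} namespace={} txn={}", new Object[] { kc, ns, txn });

            // 1.7+ style: varargs, the array is implicit. Same overloads exist for trace().
            LOG.info("lock created: {} namespace={} txn={}", kc, ns, txn);
        }
    }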
...@@ -19,7 +19,6 @@ import com.google.common.base.Predicate; ...@@ -19,7 +19,6 @@ import com.google.common.base.Predicate;
import com.google.common.collect.*; import com.google.common.collect.*;
import com.thinkaurelius.titan.core.*; import com.thinkaurelius.titan.core.*;
import com.thinkaurelius.titan.core.attribute.Cmp; import com.thinkaurelius.titan.core.attribute.Cmp;
import com.thinkaurelius.titan.core.Cardinality;
import com.thinkaurelius.titan.core.schema.SchemaStatus; import com.thinkaurelius.titan.core.schema.SchemaStatus;
import com.thinkaurelius.titan.core.schema.TitanSchemaType; import com.thinkaurelius.titan.core.schema.TitanSchemaType;
import com.thinkaurelius.titan.graphdb.database.IndexSerializer; import com.thinkaurelius.titan.graphdb.database.IndexSerializer;
...@@ -78,7 +77,7 @@ public class GraphCentricQueryBuilder implements TitanGraphQuery<GraphCentricQue ...@@ -78,7 +77,7 @@ public class GraphCentricQueryBuilder implements TitanGraphQuery<GraphCentricQue
Preconditions.checkNotNull(serializer); Preconditions.checkNotNull(serializer);
this.tx = tx; this.tx = tx;
this.serializer = serializer; this.serializer = serializer;
this.constraints = new ArrayList<PredicateCondition<String, TitanElement>>(5); this.constraints = new ArrayList<>(5);
} }
/* --------------------------------------------------------------- /* ---------------------------------------------------------------
...@@ -90,7 +89,7 @@ public class GraphCentricQueryBuilder implements TitanGraphQuery<GraphCentricQue ...@@ -90,7 +89,7 @@ public class GraphCentricQueryBuilder implements TitanGraphQuery<GraphCentricQue
Preconditions.checkNotNull(key); Preconditions.checkNotNull(key);
Preconditions.checkNotNull(predicate); Preconditions.checkNotNull(predicate);
Preconditions.checkArgument(predicate.isValidCondition(condition), "Invalid condition: %s", condition); Preconditions.checkArgument(predicate.isValidCondition(condition), "Invalid condition: %s", condition);
constraints.add(new PredicateCondition<String, TitanElement>(key, predicate, condition)); constraints.add(new PredicateCondition<>(key, predicate, condition));
return this; return this;
} }
...@@ -172,19 +171,19 @@ public class GraphCentricQueryBuilder implements TitanGraphQuery<GraphCentricQue ...@@ -172,19 +171,19 @@ public class GraphCentricQueryBuilder implements TitanGraphQuery<GraphCentricQue
@Override @Override
public Iterable<Vertex> vertices() { public Iterable<Vertex> vertices() {
GraphCentricQuery query = constructQuery(ElementCategory.VERTEX); GraphCentricQuery query = constructQuery(ElementCategory.VERTEX);
return Iterables.filter(new QueryProcessor<GraphCentricQuery, TitanElement, JointIndexQuery>(query, tx.elementProcessor), Vertex.class); return Iterables.filter(new QueryProcessor<>(query, tx.elementProcessor), Vertex.class);
} }
@Override @Override
public Iterable<Edge> edges() { public Iterable<Edge> edges() {
GraphCentricQuery query = constructQuery(ElementCategory.EDGE); GraphCentricQuery query = constructQuery(ElementCategory.EDGE);
return Iterables.filter(new QueryProcessor<GraphCentricQuery, TitanElement, JointIndexQuery>(query, tx.elementProcessor), Edge.class); return Iterables.filter(new QueryProcessor<>(query, tx.elementProcessor), Edge.class);
} }
@Override @Override
public Iterable<TitanProperty> properties() { public Iterable<TitanProperty> properties() {
GraphCentricQuery query = constructQuery(ElementCategory.PROPERTY); GraphCentricQuery query = constructQuery(ElementCategory.PROPERTY);
return Iterables.filter(new QueryProcessor<GraphCentricQuery, TitanElement, JointIndexQuery>(query, tx.elementProcessor), TitanProperty.class); return Iterables.filter(new QueryProcessor<>(query, tx.elementProcessor), TitanProperty.class);
} }
private QueryDescription describe(ElementCategory category) { private QueryDescription describe(ElementCategory category) {
...@@ -232,7 +231,7 @@ public class GraphCentricQueryBuilder implements TitanGraphQuery<GraphCentricQue ...@@ -232,7 +231,7 @@ public class GraphCentricQueryBuilder implements TitanGraphQuery<GraphCentricQue
if (orders.isEmpty()) orders = OrderList.NO_ORDER; if (orders.isEmpty()) orders = OrderList.NO_ORDER;
//Compile all indexes that cover at least one of the query conditions //Compile all indexes that cover at least one of the query conditions
final Set<IndexType> indexCandidates = new HashSet<IndexType>(); final Set<IndexType> indexCandidates = new HashSet<>();
ConditionUtil.traversal(conditions, new Predicate<Condition<TitanElement>>() { ConditionUtil.traversal(conditions, new Predicate<Condition<TitanElement>>() {
@Override @Override
public boolean apply(@Nullable Condition<TitanElement> condition) { public boolean apply(@Nullable Condition<TitanElement> condition) {
...@@ -281,7 +280,7 @@ public class GraphCentricQueryBuilder implements TitanGraphQuery<GraphCentricQue ...@@ -281,7 +280,7 @@ public class GraphCentricQueryBuilder implements TitanGraphQuery<GraphCentricQue
log.warn("The query optimizer currently does not support multiple label constraints in query: {}", this); log.warn("The query optimizer currently does not support multiple label constraints in query: {}", this);
continue; continue;
} }
if (!type.getName().equals((String)Iterables.getOnlyElement(labels))) continue; if (!type.getName().equals(Iterables.getOnlyElement(labels))) continue;
subcover.add(equalCon.getKey()); subcover.add(equalCon.getKey());
} }
...@@ -345,9 +344,9 @@ public class GraphCentricQueryBuilder implements TitanGraphQuery<GraphCentricQue ...@@ -345,9 +344,9 @@ public class GraphCentricQueryBuilder implements TitanGraphQuery<GraphCentricQue
} }
indexLimit = Math.min(HARD_MAX_LIMIT, QueryUtil.adjustLimitForTxModifications(tx, coveredClauses.size(), indexLimit)); indexLimit = Math.min(HARD_MAX_LIMIT, QueryUtil.adjustLimitForTxModifications(tx, coveredClauses.size(), indexLimit));
jointQuery.setLimit(indexLimit); jointQuery.setLimit(indexLimit);
query = new BackendQueryHolder<JointIndexQuery>(jointQuery, coveredClauses.size()==conditions.numChildren(), isSorted, null); query = new BackendQueryHolder<>(jointQuery, coveredClauses.size() == conditions.numChildren(), isSorted, null);
} else { } else {
query = new BackendQueryHolder<JointIndexQuery>(new JointIndexQuery(), false, isSorted, null); query = new BackendQueryHolder<>(new JointIndexQuery(), false, isSorted, null);
} }
return new GraphCentricQuery(resultType, conditions, orders, query, limit); return new GraphCentricQuery(resultType, conditions, orders, query, limit);
...@@ -366,8 +365,8 @@ public class GraphCentricQueryBuilder implements TitanGraphQuery<GraphCentricQue ...@@ -366,8 +365,8 @@ public class GraphCentricQueryBuilder implements TitanGraphQuery<GraphCentricQue
if (index.getStatus()!= SchemaStatus.ENABLED) return null; if (index.getStatus()!= SchemaStatus.ENABLED) return null;
IndexField[] fields = index.getFieldKeys(); IndexField[] fields = index.getFieldKeys();
Object[] indexValues = new Object[fields.length]; Object[] indexValues = new Object[fields.length];
Set<Condition> coveredClauses = new HashSet<Condition>(fields.length); Set<Condition> coveredClauses = new HashSet<>(fields.length);
List<Object[]> indexCovers = new ArrayList<Object[]>(4); List<Object[]> indexCovers = new ArrayList<>(4);
constructIndexCover(indexValues, 0, fields, condition, indexCovers, coveredClauses); constructIndexCover(indexValues, 0, fields, condition, indexCovers, coveredClauses);
if (!indexCovers.isEmpty()) { if (!indexCovers.isEmpty()) {
...@@ -384,7 +383,7 @@ public class GraphCentricQueryBuilder implements TitanGraphQuery<GraphCentricQue ...@@ -384,7 +383,7 @@ public class GraphCentricQueryBuilder implements TitanGraphQuery<GraphCentricQue
} else { } else {
IndexField field = fields[position]; IndexField field = fields[position];
Map.Entry<Condition, Collection<Object>> equalCon = getEqualityConditionValues(condition, field.getFieldKey()); Map.Entry<Condition, Collection<Object>> equalCon = getEqualityConditionValues(condition, field.getFieldKey());
if (equalCon!=null) { if (equalCon != null) {
coveredClauses.add(equalCon.getKey()); coveredClauses.add(equalCon.getKey());
assert equalCon.getValue().size()>0; assert equalCon.getValue().size()>0;
for (Object value : equalCon.getValue()) { for (Object value : equalCon.getValue()) {
...@@ -392,7 +391,7 @@ public class GraphCentricQueryBuilder implements TitanGraphQuery<GraphCentricQue ...@@ -392,7 +391,7 @@ public class GraphCentricQueryBuilder implements TitanGraphQuery<GraphCentricQue
newValues[position]=value; newValues[position]=value;
constructIndexCover(newValues, position+1, fields, condition, indexCovers, coveredClauses); constructIndexCover(newValues, position+1, fields, condition, indexCovers, coveredClauses);
} }
} else return; }
} }
} }
...@@ -419,7 +418,7 @@ public class GraphCentricQueryBuilder implements TitanGraphQuery<GraphCentricQue ...@@ -419,7 +418,7 @@ public class GraphCentricQueryBuilder implements TitanGraphQuery<GraphCentricQue
final IndexSerializer indexInfo, final Set<Condition> covered) { final IndexSerializer indexInfo, final Set<Condition> covered) {
assert QueryUtil.isQueryNormalForm(condition); assert QueryUtil.isQueryNormalForm(condition);
assert condition instanceof And; assert condition instanceof And;
And<TitanElement> subcondition = new And<TitanElement>(condition.numChildren()); And<TitanElement> subcondition = new And<>(condition.numChildren());
for (Condition<TitanElement> subclause : condition.getChildren()) { for (Condition<TitanElement> subclause : condition.getChildren()) {
if (coversAll(index, subclause, indexInfo)) { if (coversAll(index, subclause, indexInfo)) {
subcondition.add(subclause); subcondition.add(subclause);
...@@ -439,9 +438,9 @@ public class GraphCentricQueryBuilder implements TitanGraphQuery<GraphCentricQue ...@@ -439,9 +438,9 @@ public class GraphCentricQueryBuilder implements TitanGraphQuery<GraphCentricQue
PropertyKey key = (PropertyKey) atom.getKey(); PropertyKey key = (PropertyKey) atom.getKey();
ParameterIndexField[] fields = index.getFieldKeys(); ParameterIndexField[] fields = index.getFieldKeys();
ParameterIndexField match = null; ParameterIndexField match = null;
for (int i = 0; i < fields.length; i++) { for (ParameterIndexField field : fields) {
if (fields[i].getStatus()!= SchemaStatus.ENABLED) continue; if (field.getStatus() != SchemaStatus.ENABLED) continue;
if (fields[i].getFieldKey().equals(key)) match = fields[i]; if (field.getFieldKey().equals(key)) match = field;
} }
if (match==null) return false; if (match==null) return false;
return indexInfo.supports(index, match, atom.getPredicate()); return indexInfo.supports(index, match, atom.getPredicate());
......
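The indexed loop over fields becomes an enhanced for statement, which works over arrays as well as Iterables; when the index itself is never used, it removes the counter and the repeated fields[i] lookups. Sketch:

    public class EnhancedForDemo {
        public static void main(String[] args) {
            String[] fields = { "name", "owner", "createTime" };

            // Before: for (int i = 0; i < fields.length; i++) { use fields[i] ... }
            // After: the loop variable binds each element in order.
            String match = null;
            for (String field : fields) {
                if (field.startsWith("create")) {
                    match = field;
                }
            }
            System.out.println(match);   // createTime
        }
    }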
...@@ -319,12 +319,12 @@ public class Titan0Graph implements AtlasGraph<Titan0Vertex, Titan0Edge> { ...@@ -319,12 +319,12 @@ public class Titan0Graph implements AtlasGraph<Titan0Vertex, Titan0Edge> {
public Iterable<AtlasEdge<Titan0Vertex, Titan0Edge>> wrapEdges(Iterator<Edge> it) { public Iterable<AtlasEdge<Titan0Vertex, Titan0Edge>> wrapEdges(Iterator<Edge> it) {
Iterable<Edge> iterable = new IteratorToIterableAdapter<Edge>(it); Iterable<Edge> iterable = new IteratorToIterableAdapter<>(it);
return wrapEdges(iterable); return wrapEdges(iterable);
} }
public Iterable<AtlasVertex<Titan0Vertex, Titan0Edge>> wrapVertices(Iterator<Vertex> it) { public Iterable<AtlasVertex<Titan0Vertex, Titan0Edge>> wrapVertices(Iterator<Vertex> it) {
Iterable<Vertex> iterable = new IteratorToIterableAdapter<Vertex>(it); Iterable<Vertex> iterable = new IteratorToIterableAdapter<>(it);
return wrapVertices(iterable); return wrapVertices(iterable);
} }
...@@ -341,7 +341,7 @@ public class Titan0Graph implements AtlasGraph<Titan0Vertex, Titan0Edge> { ...@@ -341,7 +341,7 @@ public class Titan0Graph implements AtlasGraph<Titan0Vertex, Titan0Edge> {
} }
public Iterable<AtlasEdge<Titan0Vertex, Titan0Edge>> wrapEdges(Iterable<Edge> it) { public Iterable<AtlasEdge<Titan0Vertex, Titan0Edge>> wrapEdges(Iterable<Edge> it) {
Iterable<Edge> result = (Iterable<Edge>)it; Iterable<Edge> result = it;
return Iterables.transform(result, new Function<Edge, AtlasEdge<Titan0Vertex, Titan0Edge>>(){ return Iterables.transform(result, new Function<Edge, AtlasEdge<Titan0Vertex, Titan0Edge>>(){
@Override @Override
......
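(Iterable<Edge>) it in wrapEdges cast a value to its own declared type; redundant casts compile cleanly but add noise and can mask the casts that actually convert something. Sketch:

    import java.util.Arrays;
    import java.util.List;

    public class RedundantCastDemo {
        public static void main(String[] args) {
            List<String> names = Arrays.asList("a", "b");

            // Redundant: the expression already has this exact static type.
            List<String> same = (List<String>) names;

            // No cast needed; identical bytecode and behavior.
            List<String> better = names;

            System.out.println(same.equals(better));  // true
        }
    }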
...@@ -86,7 +86,7 @@ public class Titan0GraphIndex implements AtlasGraphIndex { ...@@ -86,7 +86,7 @@ public class Titan0GraphIndex implements AtlasGraphIndex {
@Override @Override
public Set<AtlasPropertyKey> getFieldKeys() { public Set<AtlasPropertyKey> getFieldKeys() {
PropertyKey[] keys = wrappedIndex.getFieldKeys(); PropertyKey[] keys = wrappedIndex.getFieldKeys();
Set<AtlasPropertyKey> result = new HashSet<AtlasPropertyKey>(); Set<AtlasPropertyKey> result = new HashSet<>();
for(PropertyKey key : keys) { for(PropertyKey key : keys) {
result.add(GraphDbObjectFactory.createPropertyKey(key)); result.add(GraphDbObjectFactory.createPropertyKey(key));
} }
......
...@@ -103,7 +103,7 @@ public class Titan0Vertex extends Titan0Element<Vertex> implements AtlasVertex<T ...@@ -103,7 +103,7 @@ public class Titan0Vertex extends Titan0Element<Vertex> implements AtlasVertex<T
public <T> Collection<T> getPropertyValues(String key, Class<T> clazz) { public <T> Collection<T> getPropertyValues(String key, Class<T> clazz) {
TitanVertex tv = getAsTitanVertex(); TitanVertex tv = getAsTitanVertex();
Collection<T> result = new ArrayList<T>(); Collection<T> result = new ArrayList<>();
for (TitanProperty property : tv.getProperties(key)) { for (TitanProperty property : tv.getProperties(key)) {
result.add((T) property.getValue()); result.add((T) property.getValue());
} }
......
...@@ -56,7 +56,7 @@ public class NativeTitan0GraphQuery implements NativeTitanGraphQuery<Titan0Verte ...@@ -56,7 +56,7 @@ public class NativeTitan0GraphQuery implements NativeTitanGraphQuery<Titan0Verte
@Override @Override
public void in(String propertyName, Collection<? extends Object> values) { public void in(String propertyName, Collection<?> values) {
query.has(propertyName, Contain.IN, values); query.has(propertyName, Contain.IN, values);
} }
......
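Collection<? extends Object> and Collection<?> are the same type — every type argument is a subtype of Object, so the explicit bound adds nothing and the unbounded wildcard is the idiomatic spelling. A small sketch with illustrative names:

import java.util.Arrays;
import java.util.Collection;

public class WildcardDemo {
    // "? extends Object" is implied by "?"; both accept any Collection.
    static int count(Collection<?> values) {
        return values.size();
    }

    public static void main(String[] args) {
        System.out.println(count(Arrays.asList(1, "two", 3.0))); // 3
    }
}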
...@@ -39,13 +39,13 @@ public class LocalLockMediatorTest { ...@@ -39,13 +39,13 @@ public class LocalLockMediatorTest {
public void testLock() throws InterruptedException { public void testLock() throws InterruptedException {
TimestampProvider times = Timestamps.MICRO; TimestampProvider times = Timestamps.MICRO;
LocalLockMediator<HBaseTransaction> llm = LocalLockMediator<HBaseTransaction> llm =
new LocalLockMediator<HBaseTransaction>(LOCK_NAMESPACE, times); new LocalLockMediator<>(LOCK_NAMESPACE, times);
//Expire immediately //Expire immediately
Assert.assertTrue(llm.lock(kc, mockTx1, times.getTime(0, TimeUnit.NANOSECONDS))); Assert.assertTrue(llm.lock(kc, mockTx1, times.getTime(0, TimeUnit.NANOSECONDS)));
Assert.assertTrue(llm.lock(kc, mockTx2, times.getTime(Long.MAX_VALUE, TimeUnit.NANOSECONDS))); Assert.assertTrue(llm.lock(kc, mockTx2, times.getTime(Long.MAX_VALUE, TimeUnit.NANOSECONDS)));
llm = new LocalLockMediator<HBaseTransaction>(LOCK_NAMESPACE, times); llm = new LocalLockMediator<>(LOCK_NAMESPACE, times);
//Expire later //Expire later
Assert.assertTrue(llm.lock(kc, mockTx1, times.getTime(Long.MAX_VALUE, TimeUnit.NANOSECONDS))); Assert.assertTrue(llm.lock(kc, mockTx1, times.getTime(Long.MAX_VALUE, TimeUnit.NANOSECONDS)));
......
...@@ -402,7 +402,7 @@ public class GraphQueryTest extends AbstractGraphDatabaseTest { ...@@ -402,7 +402,7 @@ public class GraphQueryTest extends AbstractGraphDatabaseTest {
} }
private static <T> List<T> toList(Iterable<T> itr) { private static <T> List<T> toList(Iterable<T> itr) {
List<T> result = new ArrayList<T>(); List<T> result = new ArrayList<>();
for(T object : itr) { for(T object : itr) {
result.add(object); result.add(object);
} }
......
...@@ -90,38 +90,24 @@ public class Titan0DatabaseTest { ...@@ -90,38 +90,24 @@ public class Titan0DatabaseTest {
testProperty(graph, "booleanProperty", Boolean.TRUE); testProperty(graph, "booleanProperty", Boolean.TRUE);
testProperty(graph, "booleanProperty", Boolean.FALSE); testProperty(graph, "booleanProperty", Boolean.FALSE);
testProperty(graph, "booleanProperty", new Boolean(Boolean.TRUE));
testProperty(graph, "booleanProperty", new Boolean(Boolean.FALSE));
testProperty(graph, "byteProperty", Byte.MAX_VALUE); testProperty(graph, "byteProperty", Byte.MAX_VALUE);
testProperty(graph, "byteProperty", Byte.MIN_VALUE); testProperty(graph, "byteProperty", Byte.MIN_VALUE);
testProperty(graph, "byteProperty", new Byte(Byte.MAX_VALUE));
testProperty(graph, "byteProperty", new Byte(Byte.MIN_VALUE));
testProperty(graph, "shortProperty", Short.MAX_VALUE); testProperty(graph, "shortProperty", Short.MAX_VALUE);
testProperty(graph, "shortProperty", Short.MIN_VALUE); testProperty(graph, "shortProperty", Short.MIN_VALUE);
testProperty(graph, "shortProperty", new Short(Short.MAX_VALUE));
testProperty(graph, "shortProperty", new Short(Short.MIN_VALUE));
testProperty(graph, "intProperty", Integer.MAX_VALUE); testProperty(graph, "intProperty", Integer.MAX_VALUE);
testProperty(graph, "intProperty", Integer.MIN_VALUE); testProperty(graph, "intProperty", Integer.MIN_VALUE);
testProperty(graph, "intProperty", new Integer(Integer.MAX_VALUE));
testProperty(graph, "intProperty", new Integer(Integer.MIN_VALUE));
testProperty(graph, "longProperty", Long.MIN_VALUE); testProperty(graph, "longProperty", Long.MIN_VALUE);
testProperty(graph, "longProperty", Long.MAX_VALUE); testProperty(graph, "longProperty", Long.MAX_VALUE);
testProperty(graph, "longProperty", new Long(Long.MIN_VALUE));
testProperty(graph, "longProperty", new Long(Long.MAX_VALUE));
testProperty(graph, "doubleProperty", Double.MAX_VALUE); testProperty(graph, "doubleProperty", Double.MAX_VALUE);
testProperty(graph, "doubleProperty", Double.MIN_VALUE); testProperty(graph, "doubleProperty", Double.MIN_VALUE);
testProperty(graph, "doubleProperty", new Double(Double.MAX_VALUE));
testProperty(graph, "doubleProperty", new Double(Double.MIN_VALUE));
testProperty(graph, "floatProperty", Float.MAX_VALUE); testProperty(graph, "floatProperty", Float.MAX_VALUE);
testProperty(graph, "floatProperty", Float.MIN_VALUE); testProperty(graph, "floatProperty", Float.MIN_VALUE);
testProperty(graph, "floatProperty", new Float(Float.MAX_VALUE));
testProperty(graph, "floatProperty", new Float(Float.MIN_VALUE));
// enumerations - TypeCategory // enumerations - TypeCategory
testProperty(graph, "typeCategoryProperty", TypeCategory.CLASS); testProperty(graph, "typeCategoryProperty", TypeCategory.CLASS);
...@@ -147,7 +133,7 @@ public class Titan0DatabaseTest { ...@@ -147,7 +133,7 @@ public class Titan0DatabaseTest {
@Test @Test
public <V, E> void testMultiplicityOnePropertySupport() { public <V, E> void testMultiplicityOnePropertySupport() {
AtlasGraph<V, E> graph = (AtlasGraph<V, E>) getGraph(); AtlasGraph<V, E> graph = getGraph();
AtlasVertex<V, E> vertex = graph.addVertex(); AtlasVertex<V, E> vertex = graph.addVertex();
vertex.setProperty("name", "Jeff"); vertex.setProperty("name", "Jeff");
...@@ -183,7 +169,7 @@ public class Titan0DatabaseTest { ...@@ -183,7 +169,7 @@ public class Titan0DatabaseTest {
@Test @Test
public <V, E> void testRemoveEdge() { public <V, E> void testRemoveEdge() {
AtlasGraph<V, E> graph = (AtlasGraph<V, E>) getGraph(); AtlasGraph<V, E> graph = getGraph();
AtlasVertex<V, E> v1 = graph.addVertex(); AtlasVertex<V, E> v1 = graph.addVertex();
AtlasVertex<V, E> v2 = graph.addVertex(); AtlasVertex<V, E> v2 = graph.addVertex();
...@@ -205,7 +191,7 @@ public class Titan0DatabaseTest { ...@@ -205,7 +191,7 @@ public class Titan0DatabaseTest {
@Test @Test
public <V, E> void testRemoveVertex() { public <V, E> void testRemoveVertex() {
AtlasGraph<V, E> graph = (AtlasGraph<V, E>) getGraph(); AtlasGraph<V, E> graph = getGraph();
AtlasVertex<V, E> v1 = graph.addVertex(); AtlasVertex<V, E> v1 = graph.addVertex();
...@@ -219,7 +205,7 @@ public class Titan0DatabaseTest { ...@@ -219,7 +205,7 @@ public class Titan0DatabaseTest {
@Test @Test
public <V, E> void testGetEdges() { public <V, E> void testGetEdges() {
AtlasGraph<V, E> graph = (AtlasGraph<V, E>) getGraph(); AtlasGraph<V, E> graph = getGraph();
AtlasVertex<V, E> v1 = graph.addVertex(); AtlasVertex<V, E> v1 = graph.addVertex();
AtlasVertex<V, E> v2 = graph.addVertex(); AtlasVertex<V, E> v2 = graph.addVertex();
AtlasVertex<V, E> v3 = graph.addVertex(); AtlasVertex<V, E> v3 = graph.addVertex();
...@@ -296,7 +282,7 @@ public class Titan0DatabaseTest { ...@@ -296,7 +282,7 @@ public class Titan0DatabaseTest {
AtlasGraph<V, E> graph = getGraph(); AtlasGraph<V, E> graph = getGraph();
AtlasVertex<V, E> vertex = graph.addVertex(); AtlasVertex<V, E> vertex = graph.addVertex();
vertex.setListProperty("colors", Arrays.asList(new String[] { "red", "blue", "green" })); vertex.setListProperty("colors", Arrays.asList("red", "blue", "green"));
List<String> colors = vertex.getListProperty("colors"); List<String> colors = vertex.getListProperty("colors");
assertTrue(colors.contains("red")); assertTrue(colors.contains("red"));
assertTrue(colors.contains("blue")); assertTrue(colors.contains("blue"));
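Arrays.asList is a varargs method, so wrapping the arguments in an explicit String[] is redundant — the compiler builds exactly that array from the argument list. A minimal sketch:

import java.util.Arrays;
import java.util.List;

public class VarargsDemo {
    public static void main(String[] args) {
        // Before: explicit array around varargs arguments.
        List<String> colors1 = Arrays.asList(new String[] { "red", "blue", "green" });
        // After: pass the elements directly; the compiler creates the array.
        List<String> colors2 = Arrays.asList("red", "blue", "green");
        System.out.println(colors1.equals(colors2)); // true
    }
}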
...@@ -419,7 +405,7 @@ public class Titan0DatabaseTest { ...@@ -419,7 +405,7 @@ public class Titan0DatabaseTest {
} }
private static <T> List<T> toList(Iterable<? extends T> iterable) { private static <T> List<T> toList(Iterable<? extends T> iterable) {
List<T> result = new ArrayList<T>(); List<T> result = new ArrayList<>();
for (T item : iterable) { for (T item : iterable) {
result.add(item); result.add(item);
} }
......
...@@ -48,7 +48,7 @@ public class SearchFilter { ...@@ -48,7 +48,7 @@ public class SearchFilter {
/** /**
* to specify whether the result should be sorted? If yes, whether asc or desc. * to specify whether the result should be sorted? If yes, whether asc or desc.
*/ */
public enum SortType { NONE, ASC, DESC }; public enum SortType { NONE, ASC, DESC }
private MultivaluedMap<String, String> params = null; private MultivaluedMap<String, String> params = null;
private long startIndex = 0; private long startIndex = 0;
......
...@@ -55,7 +55,7 @@ public class AtlasEntity extends AtlasStruct implements Serializable { ...@@ -55,7 +55,7 @@ public class AtlasEntity extends AtlasStruct implements Serializable {
/** /**
* Status of the entity - can be active or deleted. Deleted entities are not removed from Atlas store. * Status of the entity - can be active or deleted. Deleted entities are not removed from Atlas store.
*/ */
public enum Status { STATUS_ACTIVE, STATUS_DELETED }; public enum Status { STATUS_ACTIVE, STATUS_DELETED }
private String guid = null; private String guid = null;
private Status status = Status.STATUS_ACTIVE; private Status status = Status.STATUS_ACTIVE;
......
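The SortType and Status hunks both drop a trailing semicolon after the enum body. Inside a class body that semicolon is an empty declaration the compiler ignores, but tools such as Checkstyle flag it, hence the cleanup. Illustrative sketch:

public class EnumDemo {
    public enum SortType { NONE, ASC, DESC } // no semicolon needed after the body

    public static void main(String[] args) {
        System.out.println(SortType.valueOf("ASC")); // ASC
    }
}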
...@@ -18,7 +18,6 @@ ...@@ -18,7 +18,6 @@
package org.apache.atlas.model.instance; package org.apache.atlas.model.instance;
import java.io.Serializable; import java.io.Serializable;
import java.util.Date;
import java.util.List; import java.util.List;
import java.util.Map; import java.util.Map;
import java.util.Objects; import java.util.Objects;
......
...@@ -29,7 +29,6 @@ import javax.xml.bind.annotation.XmlAccessorType; ...@@ -29,7 +29,6 @@ import javax.xml.bind.annotation.XmlAccessorType;
import javax.xml.bind.annotation.XmlRootElement; import javax.xml.bind.annotation.XmlRootElement;
import javax.xml.bind.annotation.XmlSeeAlso; import javax.xml.bind.annotation.XmlSeeAlso;
import java.io.Serializable; import java.io.Serializable;
import java.util.Date;
import java.util.List; import java.util.List;
import java.util.Map; import java.util.Map;
import java.util.Objects; import java.util.Objects;
......
...@@ -119,7 +119,7 @@ public class AtlasStruct implements Serializable { ...@@ -119,7 +119,7 @@ public class AtlasStruct implements Serializable {
if (a != null) { if (a != null) {
a.put(name, value); a.put(name, value);
} else { } else {
a = new HashMap<String, Object>(); a = new HashMap<>();
a.put(name, value); a.put(name, value);
this.attributes = a; this.attributes = a;
...@@ -208,7 +208,7 @@ public class AtlasStruct implements Serializable { ...@@ -208,7 +208,7 @@ public class AtlasStruct implements Serializable {
return sb; return sb;
} }
public static StringBuilder dumpObjects(Collection<? extends Object> objects, StringBuilder sb) { public static StringBuilder dumpObjects(Collection<?> objects, StringBuilder sb) {
if (sb == null) { if (sb == null) {
sb = new StringBuilder(); sb = new StringBuilder();
} }
...@@ -228,14 +228,14 @@ public class AtlasStruct implements Serializable { ...@@ -228,14 +228,14 @@ public class AtlasStruct implements Serializable {
return sb; return sb;
} }
public static StringBuilder dumpObjects(Map<? extends Object, ? extends Object> objects, StringBuilder sb) { public static StringBuilder dumpObjects(Map<?, ?> objects, StringBuilder sb) {
if (sb == null) { if (sb == null) {
sb = new StringBuilder(); sb = new StringBuilder();
} }
if (MapUtils.isNotEmpty(objects)) { if (MapUtils.isNotEmpty(objects)) {
int i = 0; int i = 0;
for (Map.Entry<? extends Object, ? extends Object> e : objects.entrySet()) { for (Map.Entry<?, ?> e : objects.entrySet()) {
if (i > 0) { if (i > 0) {
sb.append(", "); sb.append(", ");
} }
......
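As with the collection overload, Map<? extends Object, ? extends Object> is just a verbose spelling of Map<?, ?>; the wildcard entry type still lets dumpObjects read keys and values as Object. A self-contained sketch of that iteration pattern (MapUtils.isNotEmpty from commons-collections is replaced by a plain isEmpty check so the sketch runs without dependencies):

import java.util.LinkedHashMap;
import java.util.Map;

public class DumpDemo {
    static StringBuilder dumpObjects(Map<?, ?> objects, StringBuilder sb) {
        if (sb == null) {
            sb = new StringBuilder();
        }
        if (objects != null && !objects.isEmpty()) {
            int i = 0;
            for (Map.Entry<?, ?> e : objects.entrySet()) { // wildcard entries read as Object
                if (i > 0) {
                    sb.append(", ");
                }
                sb.append(e.getKey()).append(':').append(e.getValue());
                i++;
            }
        }
        return sb;
    }

    public static void main(String[] args) {
        Map<String, Integer> m = new LinkedHashMap<>();
        m.put("a", 1);
        m.put("b", 2);
        System.out.println(dumpObjects(m, null)); // a:1, b:2
    }
}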
...@@ -29,7 +29,6 @@ import javax.xml.bind.annotation.XmlAccessType; ...@@ -29,7 +29,6 @@ import javax.xml.bind.annotation.XmlAccessType;
import javax.xml.bind.annotation.XmlAccessorType; import javax.xml.bind.annotation.XmlAccessorType;
import javax.xml.bind.annotation.XmlRootElement; import javax.xml.bind.annotation.XmlRootElement;
import java.util.ArrayList; import java.util.ArrayList;
import java.util.Collections;
import java.util.HashMap; import java.util.HashMap;
import java.util.List; import java.util.List;
import java.util.Map; import java.util.Map;
...@@ -71,7 +70,7 @@ public class EntityMutationResponse { ...@@ -71,7 +70,7 @@ public class EntityMutationResponse {
public void addEntity(EntityMutations.EntityOperation op, AtlasEntityHeader header) { public void addEntity(EntityMutations.EntityOperation op, AtlasEntityHeader header) {
if (entitiesMutated == null) { if (entitiesMutated == null) {
entitiesMutated = new HashMap<EntityMutations.EntityOperation, List<AtlasEntityHeader>>(); entitiesMutated = new HashMap<>();
} }
if (entitiesMutated != null && entitiesMutated.get(op) == null) { if (entitiesMutated != null && entitiesMutated.get(op) == null) {
......
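One detail the commit leaves behind: immediately after addEntity assigns entitiesMutated = new HashMap<>(), the next context line still tests entitiesMutated != null, which can no longer be false. A sketch of the fully simplified shape — the step beyond the diamond change is this editor's illustration, not part of the commit:

import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;

public class MutationDemo {
    enum EntityOperation { CREATE, UPDATE, DELETE }

    private Map<EntityOperation, List<String>> entitiesMutated;

    void addEntity(EntityOperation op, String header) {
        if (entitiesMutated == null) {
            entitiesMutated = new HashMap<>();
        }
        List<String> headers = entitiesMutated.get(op); // map is non-null here, no second check
        if (headers == null) {
            headers = new ArrayList<>();
            entitiesMutated.put(op, headers);
        }
        headers.add(header);
    }

    public static void main(String[] args) {
        MutationDemo response = new MutationDemo();
        response.addEntity(EntityOperation.CREATE, "hive_table:t1");
        response.addEntity(EntityOperation.CREATE, "hive_table:t2");
        System.out.println(response.entitiesMutated); // {CREATE=[hive_table:t1, hive_table:t2]}
    }
}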
...@@ -27,7 +27,6 @@ import javax.xml.bind.annotation.XmlAccessorType; ...@@ -27,7 +27,6 @@ import javax.xml.bind.annotation.XmlAccessorType;
import javax.xml.bind.annotation.XmlRootElement; import javax.xml.bind.annotation.XmlRootElement;
import java.io.Serializable; import java.io.Serializable;
import java.util.ArrayList; import java.util.ArrayList;
import java.util.Collections;
import java.util.List; import java.util.List;
import java.util.Objects; import java.util.Objects;
......
...@@ -20,6 +20,7 @@ package org.apache.atlas.model.typedef; ...@@ -20,6 +20,7 @@ package org.apache.atlas.model.typedef;
import org.apache.atlas.model.TypeCategory; import org.apache.atlas.model.TypeCategory;
import org.apache.commons.collections.CollectionUtils; import org.apache.commons.collections.CollectionUtils;
import org.apache.commons.collections.MapUtils; import org.apache.commons.collections.MapUtils;
import org.apache.commons.lang.builder.ToStringBuilder;
import org.codehaus.jackson.annotate.JsonAutoDetect; import org.codehaus.jackson.annotate.JsonAutoDetect;
import org.codehaus.jackson.annotate.JsonIgnoreProperties; import org.codehaus.jackson.annotate.JsonIgnoreProperties;
import org.codehaus.jackson.map.annotate.JsonSerialize; import org.codehaus.jackson.map.annotate.JsonSerialize;
...@@ -278,6 +279,7 @@ public abstract class AtlasBaseTypeDef implements java.io.Serializable { ...@@ -278,6 +279,7 @@ public abstract class AtlasBaseTypeDef implements java.io.Serializable {
return sb; return sb;
} }
@Override @Override
public boolean equals(Object o) { public boolean equals(Object o) {
if (this == o) return true; if (this == o) return true;
...@@ -313,11 +315,6 @@ public abstract class AtlasBaseTypeDef implements java.io.Serializable { ...@@ -313,11 +315,6 @@ public abstract class AtlasBaseTypeDef implements java.io.Serializable {
return result; return result;
} }
@Override
public String toString() {
return toString(new StringBuilder()).toString();
}
public static String getArrayTypeName(String elemTypeName) { public static String getArrayTypeName(String elemTypeName) {
return ATLAS_TYPE_ARRAY_PREFIX + elemTypeName + ATLAS_TYPE_ARRAY_SUFFIX; return ATLAS_TYPE_ARRAY_PREFIX + elemTypeName + ATLAS_TYPE_ARRAY_SUFFIX;
} }
...@@ -327,7 +324,7 @@ public abstract class AtlasBaseTypeDef implements java.io.Serializable { ...@@ -327,7 +324,7 @@ public abstract class AtlasBaseTypeDef implements java.io.Serializable {
valueTypeName, ATLAS_TYPE_MAP_SUFFIX); valueTypeName, ATLAS_TYPE_MAP_SUFFIX);
} }
public static StringBuilder dumpObjects(Collection<? extends Object> objects, StringBuilder sb) { public static StringBuilder dumpObjects(Collection<?> objects, StringBuilder sb) {
if (sb == null) { if (sb == null) {
sb = new StringBuilder(); sb = new StringBuilder();
} }
...@@ -347,14 +344,14 @@ public abstract class AtlasBaseTypeDef implements java.io.Serializable { ...@@ -347,14 +344,14 @@ public abstract class AtlasBaseTypeDef implements java.io.Serializable {
return sb; return sb;
} }
public static StringBuilder dumpObjects(Map<? extends Object, ? extends Object> objects, StringBuilder sb) { public static StringBuilder dumpObjects(Map<?, ?> objects, StringBuilder sb) {
if (sb == null) { if (sb == null) {
sb = new StringBuilder(); sb = new StringBuilder();
} }
if (MapUtils.isNotEmpty(objects)) { if (MapUtils.isNotEmpty(objects)) {
int i = 0; int i = 0;
for (Map.Entry<? extends Object, ? extends Object> e : objects.entrySet()) { for (Map.Entry<?, ?> e : objects.entrySet()) {
if (i > 0) { if (i > 0) {
sb.append(", "); sb.append(", ");
} }
......
...@@ -103,9 +103,9 @@ public class AtlasClassificationDef extends AtlasStructDef implements java.io.Se ...@@ -103,9 +103,9 @@ public class AtlasClassificationDef extends AtlasStructDef implements java.io.Se
} }
if (CollectionUtils.isEmpty(superTypes)) { if (CollectionUtils.isEmpty(superTypes)) {
this.superTypes = new HashSet<String>(); this.superTypes = new HashSet<>();
} else { } else {
this.superTypes = new HashSet<String>(superTypes); this.superTypes = new HashSet<>(superTypes);
} }
} }
...@@ -117,7 +117,7 @@ public class AtlasClassificationDef extends AtlasStructDef implements java.io.Se ...@@ -117,7 +117,7 @@ public class AtlasClassificationDef extends AtlasStructDef implements java.io.Se
Set<String> s = this.superTypes; Set<String> s = this.superTypes;
if (!hasSuperType(s, typeName)) { if (!hasSuperType(s, typeName)) {
s = new HashSet<String>(s); s = new HashSet<>(s);
s.add(typeName); s.add(typeName);
...@@ -129,7 +129,7 @@ public class AtlasClassificationDef extends AtlasStructDef implements java.io.Se ...@@ -129,7 +129,7 @@ public class AtlasClassificationDef extends AtlasStructDef implements java.io.Se
Set<String> s = this.superTypes; Set<String> s = this.superTypes;
if (hasSuperType(s, typeName)) { if (hasSuperType(s, typeName)) {
s = new HashSet<String>(s); s = new HashSet<>(s);
s.remove(typeName); s.remove(typeName);
......
...@@ -101,9 +101,9 @@ public class AtlasEntityDef extends AtlasStructDef implements java.io.Serializab ...@@ -101,9 +101,9 @@ public class AtlasEntityDef extends AtlasStructDef implements java.io.Serializab
} }
if (CollectionUtils.isEmpty(superTypes)) { if (CollectionUtils.isEmpty(superTypes)) {
this.superTypes = new HashSet<String>(); this.superTypes = new HashSet<>();
} else { } else {
this.superTypes = new HashSet<String>(superTypes); this.superTypes = new HashSet<>(superTypes);
} }
} }
...@@ -115,7 +115,7 @@ public class AtlasEntityDef extends AtlasStructDef implements java.io.Serializab ...@@ -115,7 +115,7 @@ public class AtlasEntityDef extends AtlasStructDef implements java.io.Serializab
Set<String> s = this.superTypes; Set<String> s = this.superTypes;
if (!hasSuperType(s, typeName)) { if (!hasSuperType(s, typeName)) {
s = new HashSet<String>(s); s = new HashSet<>(s);
s.add(typeName); s.add(typeName);
...@@ -127,7 +127,7 @@ public class AtlasEntityDef extends AtlasStructDef implements java.io.Serializab ...@@ -127,7 +127,7 @@ public class AtlasEntityDef extends AtlasStructDef implements java.io.Serializab
Set<String> s = this.superTypes; Set<String> s = this.superTypes;
if (hasSuperType(s, typeName)) { if (hasSuperType(s, typeName)) {
s = new HashSet<String>(s); s = new HashSet<>(s);
s.remove(typeName); s.remove(typeName);
......
...@@ -107,11 +107,11 @@ public class AtlasEnumDef extends AtlasBaseTypeDef implements Serializable { ...@@ -107,11 +107,11 @@ public class AtlasEnumDef extends AtlasBaseTypeDef implements Serializable {
} }
if (CollectionUtils.isEmpty(elementDefs)) { if (CollectionUtils.isEmpty(elementDefs)) {
this.elementDefs = new ArrayList<AtlasEnumElementDef>(); this.elementDefs = new ArrayList<>();
} else { } else {
// if multiple elements with same value are present, keep only the last entry // if multiple elements with same value are present, keep only the last entry
List<AtlasEnumElementDef> tmpList = new ArrayList<AtlasEnumElementDef>(elementDefs.size()); List<AtlasEnumElementDef> tmpList = new ArrayList<>(elementDefs.size());
Set<String> elementValues = new HashSet<String>(); Set<String> elementValues = new HashSet<>();
ListIterator<AtlasEnumElementDef> iter = elementDefs.listIterator(elementDefs.size()); ListIterator<AtlasEnumElementDef> iter = elementDefs.listIterator(elementDefs.size());
while (iter.hasPrevious()) { while (iter.hasPrevious()) {
...@@ -149,7 +149,7 @@ public class AtlasEnumDef extends AtlasBaseTypeDef implements Serializable { ...@@ -149,7 +149,7 @@ public class AtlasEnumDef extends AtlasBaseTypeDef implements Serializable {
public void addElement(AtlasEnumElementDef elementDef) { public void addElement(AtlasEnumElementDef elementDef) {
List<AtlasEnumElementDef> e = this.elementDefs; List<AtlasEnumElementDef> e = this.elementDefs;
List<AtlasEnumElementDef> tmpList = new ArrayList<AtlasEnumElementDef>(); List<AtlasEnumElementDef> tmpList = new ArrayList<>();
if (CollectionUtils.isNotEmpty(e)) { if (CollectionUtils.isNotEmpty(e)) {
// copy existing elements, except ones having same value as the element being added // copy existing elements, except ones having same value as the element being added
for (AtlasEnumElementDef existingElem : e) { for (AtlasEnumElementDef existingElem : e) {
...@@ -168,7 +168,7 @@ public class AtlasEnumDef extends AtlasBaseTypeDef implements Serializable { ...@@ -168,7 +168,7 @@ public class AtlasEnumDef extends AtlasBaseTypeDef implements Serializable {
// if element doesn't exist, no need to create the tmpList below // if element doesn't exist, no need to create the tmpList below
if (hasElement(e, elemValue)) { if (hasElement(e, elemValue)) {
List<AtlasEnumElementDef> tmpList = new ArrayList<AtlasEnumElementDef>(); List<AtlasEnumElementDef> tmpList = new ArrayList<>();
// copy existing elements, except ones having same value as the element being removed // copy existing elements, except ones having same value as the element being removed
for (AtlasEnumElementDef existingElem : e) { for (AtlasEnumElementDef existingElem : e) {
......
...@@ -106,11 +106,11 @@ public class AtlasStructDef extends AtlasBaseTypeDef implements Serializable { ...@@ -106,11 +106,11 @@ public class AtlasStructDef extends AtlasBaseTypeDef implements Serializable {
} }
if (CollectionUtils.isEmpty(attributeDefs)) { if (CollectionUtils.isEmpty(attributeDefs)) {
this.attributeDefs = new ArrayList<AtlasAttributeDef>(); this.attributeDefs = new ArrayList<>();
} else { } else {
// if multiple attributes with same name are present, keep only the last entry // if multiple attributes with same name are present, keep only the last entry
List<AtlasAttributeDef> tmpList = new ArrayList<AtlasAttributeDef>(attributeDefs.size()); List<AtlasAttributeDef> tmpList = new ArrayList<>(attributeDefs.size());
Set<String> attribNames = new HashSet<String>(); Set<String> attribNames = new HashSet<>();
ListIterator<AtlasAttributeDef> iter = attributeDefs.listIterator(attributeDefs.size()); ListIterator<AtlasAttributeDef> iter = attributeDefs.listIterator(attributeDefs.size());
while (iter.hasPrevious()) { while (iter.hasPrevious()) {
...@@ -144,7 +144,7 @@ public class AtlasStructDef extends AtlasBaseTypeDef implements Serializable { ...@@ -144,7 +144,7 @@ public class AtlasStructDef extends AtlasBaseTypeDef implements Serializable {
List<AtlasAttributeDef> a = this.attributeDefs; List<AtlasAttributeDef> a = this.attributeDefs;
List<AtlasAttributeDef> tmpList = new ArrayList<AtlasAttributeDef>(); List<AtlasAttributeDef> tmpList = new ArrayList<>();
if (CollectionUtils.isNotEmpty(a)) { if (CollectionUtils.isNotEmpty(a)) {
// copy existing attributes, except ones having same name as the attribute being added // copy existing attributes, except ones having same name as the attribute being added
for (AtlasAttributeDef existingAttrDef : a) { for (AtlasAttributeDef existingAttrDef : a) {
...@@ -162,7 +162,7 @@ public class AtlasStructDef extends AtlasBaseTypeDef implements Serializable { ...@@ -162,7 +162,7 @@ public class AtlasStructDef extends AtlasBaseTypeDef implements Serializable {
List<AtlasAttributeDef> a = this.attributeDefs; List<AtlasAttributeDef> a = this.attributeDefs;
if (hasAttribute(a, attrName)) { if (hasAttribute(a, attrName)) {
List<AtlasAttributeDef> tmpList = new ArrayList<AtlasAttributeDef>(); List<AtlasAttributeDef> tmpList = new ArrayList<>();
// copy existing attributes, except ones having same name as the attribute being removed // copy existing attributes, except ones having same name as the attribute being removed
for (AtlasAttributeDef existingAttrDef : a) { for (AtlasAttributeDef existingAttrDef : a) {
...@@ -256,7 +256,7 @@ public class AtlasStructDef extends AtlasBaseTypeDef implements Serializable { ...@@ -256,7 +256,7 @@ public class AtlasStructDef extends AtlasBaseTypeDef implements Serializable {
/** /**
* single-valued attribute or multi-valued attribute. * single-valued attribute or multi-valued attribute.
*/ */
public enum Cardinality { SINGLE, LIST, SET }; public enum Cardinality { SINGLE, LIST, SET }
public static final int COUNT_NOT_SET = -1; public static final int COUNT_NOT_SET = -1;
...@@ -376,7 +376,7 @@ public class AtlasStructDef extends AtlasBaseTypeDef implements Serializable { ...@@ -376,7 +376,7 @@ public class AtlasStructDef extends AtlasBaseTypeDef implements Serializable {
if (CollectionUtils.isEmpty(constraintDefs)) { if (CollectionUtils.isEmpty(constraintDefs)) {
this.constraintDefs = null; this.constraintDefs = null;
} else { } else {
this.constraintDefs = new ArrayList<AtlasConstraintDef>(constraintDefs); this.constraintDefs = new ArrayList<>(constraintDefs);
} }
} }
...@@ -482,7 +482,7 @@ public class AtlasStructDef extends AtlasBaseTypeDef implements Serializable { ...@@ -482,7 +482,7 @@ public class AtlasStructDef extends AtlasBaseTypeDef implements Serializable {
this.type = type; this.type = type;
if (params != null) { if (params != null) {
this.params = new HashMap<String, Object>(params); this.params = new HashMap<>(params);
} }
} }
...@@ -491,7 +491,7 @@ public class AtlasStructDef extends AtlasBaseTypeDef implements Serializable { ...@@ -491,7 +491,7 @@ public class AtlasStructDef extends AtlasBaseTypeDef implements Serializable {
this.type = that.type; this.type = that.type;
if (that.params != null) { if (that.params != null) {
this.params = new HashMap<String, Object>(that.params); this.params = new HashMap<>(that.params);
} }
} }
} }
......
...@@ -111,8 +111,8 @@ public class AtlasArrayType extends AtlasType { ...@@ -111,8 +111,8 @@ public class AtlasArrayType extends AtlasType {
} }
@Override @Override
public Collection<? extends Object> createDefaultValue() { public Collection<?> createDefaultValue() {
Collection<Object> ret = new ArrayList<Object>(); Collection<Object> ret = new ArrayList<>();
ret.add(elementType.createDefaultValue()); ret.add(elementType.createDefaultValue());
...@@ -161,13 +161,13 @@ public class AtlasArrayType extends AtlasType { ...@@ -161,13 +161,13 @@ public class AtlasArrayType extends AtlasType {
} }
@Override @Override
public Collection<? extends Object> getNormalizedValue(Object obj) { public Collection<?> getNormalizedValue(Object obj) {
if (obj == null) { if (obj == null) {
return null; return null;
} }
if (obj instanceof List || obj instanceof Set) { if (obj instanceof List || obj instanceof Set) {
List<Object> ret = new ArrayList<Object>(); List<Object> ret = new ArrayList<>();
Collection objList = (Collection) obj; Collection objList = (Collection) obj;
...@@ -191,7 +191,7 @@ public class AtlasArrayType extends AtlasType { ...@@ -191,7 +191,7 @@ public class AtlasArrayType extends AtlasType {
return ret; return ret;
} else if (obj.getClass().isArray()) { } else if (obj.getClass().isArray()) {
List<Object> ret = new ArrayList<Object>(); List<Object> ret = new ArrayList<>();
int arrayLen = Array.getLength(obj); int arrayLen = Array.getLength(obj);
......
...@@ -73,7 +73,7 @@ public class AtlasBuiltInTypes { ...@@ -73,7 +73,7 @@ public class AtlasBuiltInTypes {
* class that implements behaviour of byte type. * class that implements behaviour of byte type.
*/ */
public static class AtlasByteType extends AtlasType { public static class AtlasByteType extends AtlasType {
private static final Byte DEFAULT_VALUE = new Byte((byte)0); private static final Byte DEFAULT_VALUE = (byte) 0;
public AtlasByteType() { public AtlasByteType() {
super(AtlasBaseTypeDef.ATLAS_TYPE_BYTE, TypeCategory.PRIMITIVE); super(AtlasBaseTypeDef.ATLAS_TYPE_BYTE, TypeCategory.PRIMITIVE);
...@@ -117,7 +117,7 @@ public class AtlasBuiltInTypes { ...@@ -117,7 +117,7 @@ public class AtlasBuiltInTypes {
* class that implements behaviour of short type. * class that implements behaviour of short type.
*/ */
public static class AtlasShortType extends AtlasType { public static class AtlasShortType extends AtlasType {
private static final Short DEFAULT_VALUE = new Short((short)0); private static final Short DEFAULT_VALUE = (short) 0;
public AtlasShortType() { public AtlasShortType() {
super(AtlasBaseTypeDef.ATLAS_TYPE_SHORT, TypeCategory.PRIMITIVE); super(AtlasBaseTypeDef.ATLAS_TYPE_SHORT, TypeCategory.PRIMITIVE);
...@@ -161,7 +161,7 @@ public class AtlasBuiltInTypes { ...@@ -161,7 +161,7 @@ public class AtlasBuiltInTypes {
* class that implements behaviour of integer type. * class that implements behaviour of integer type.
*/ */
public static class AtlasIntType extends AtlasType { public static class AtlasIntType extends AtlasType {
private static final Integer DEFAULT_VALUE = new Integer(0); private static final Integer DEFAULT_VALUE = 0;
public AtlasIntType() { public AtlasIntType() {
super(AtlasBaseTypeDef.ATLAS_TYPE_INT, TypeCategory.PRIMITIVE); super(AtlasBaseTypeDef.ATLAS_TYPE_INT, TypeCategory.PRIMITIVE);
...@@ -205,7 +205,7 @@ public class AtlasBuiltInTypes { ...@@ -205,7 +205,7 @@ public class AtlasBuiltInTypes {
* class that implements behaviour of long type. * class that implements behaviour of long type.
*/ */
public static class AtlasLongType extends AtlasType { public static class AtlasLongType extends AtlasType {
private static final Long DEFAULT_VALUE = new Long(0); private static final Long DEFAULT_VALUE = 0L;
public AtlasLongType() { public AtlasLongType() {
super(AtlasBaseTypeDef.ATLAS_TYPE_LONG, TypeCategory.PRIMITIVE); super(AtlasBaseTypeDef.ATLAS_TYPE_LONG, TypeCategory.PRIMITIVE);
...@@ -249,7 +249,7 @@ public class AtlasBuiltInTypes { ...@@ -249,7 +249,7 @@ public class AtlasBuiltInTypes {
* class that implements behaviour of float type. * class that implements behaviour of float type.
*/ */
public static class AtlasFloatType extends AtlasType { public static class AtlasFloatType extends AtlasType {
private static final Float DEFAULT_VALUE = new Float(0); private static final Float DEFAULT_VALUE = 0f;
public AtlasFloatType() { public AtlasFloatType() {
super(AtlasBaseTypeDef.ATLAS_TYPE_FLOAT, TypeCategory.PRIMITIVE); super(AtlasBaseTypeDef.ATLAS_TYPE_FLOAT, TypeCategory.PRIMITIVE);
...@@ -293,7 +293,7 @@ public class AtlasBuiltInTypes { ...@@ -293,7 +293,7 @@ public class AtlasBuiltInTypes {
* class that implements behaviour of double type. * class that implements behaviour of double type.
*/ */
public static class AtlasDoubleType extends AtlasType { public static class AtlasDoubleType extends AtlasType {
private static final Double DEFAULT_VALUE = new Double(0); private static final Double DEFAULT_VALUE = 0d;
public AtlasDoubleType() { public AtlasDoubleType() {
super(AtlasBaseTypeDef.ATLAS_TYPE_DOUBLE, TypeCategory.PRIMITIVE); super(AtlasBaseTypeDef.ATLAS_TYPE_DOUBLE, TypeCategory.PRIMITIVE);
......
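All six DEFAULT_VALUE constants in AtlasBuiltInTypes get the same rewrite: a boxing constructor becomes a plain literal that autoboxing converts via valueOf — no fresh allocation for cached small values, and no dependence on constructors that Java 9 later deprecated. A compact sketch:

public class DefaultsDemo {
    private static final Byte    BYTE_DEFAULT   = (byte) 0;  // was new Byte((byte) 0)
    private static final Short   SHORT_DEFAULT  = (short) 0; // was new Short((short) 0)
    private static final Integer INT_DEFAULT    = 0;         // was new Integer(0)
    private static final Long    LONG_DEFAULT   = 0L;        // was new Long(0)
    private static final Float   FLOAT_DEFAULT  = 0f;        // was new Float(0)
    private static final Double  DOUBLE_DEFAULT = 0d;        // was new Double(0)

    public static void main(String[] args) {
        System.out.println(BYTE_DEFAULT + " " + SHORT_DEFAULT + " " + INT_DEFAULT + " "
                + LONG_DEFAULT + " " + FLOAT_DEFAULT + " " + DOUBLE_DEFAULT); // 0 0 0 0 0.0 0.0
    }
}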
...@@ -20,7 +20,6 @@ package org.apache.atlas.type; ...@@ -20,7 +20,6 @@ package org.apache.atlas.type;
import org.apache.atlas.AtlasErrorCode; import org.apache.atlas.AtlasErrorCode;
import org.apache.atlas.exception.AtlasBaseException; import org.apache.atlas.exception.AtlasBaseException;
import org.apache.atlas.model.TypeCategory;
import org.apache.atlas.model.instance.AtlasClassification; import org.apache.atlas.model.instance.AtlasClassification;
import org.apache.atlas.model.typedef.AtlasClassificationDef; import org.apache.atlas.model.typedef.AtlasClassificationDef;
import org.apache.atlas.model.typedef.AtlasStructDef.AtlasAttributeDef; import org.apache.atlas.model.typedef.AtlasStructDef.AtlasAttributeDef;
...@@ -146,11 +145,11 @@ public class AtlasClassificationType extends AtlasStructType { ...@@ -146,11 +145,11 @@ public class AtlasClassificationType extends AtlasStructType {
} }
public boolean isSuperTypeOf(AtlasClassificationType classificationType) { public boolean isSuperTypeOf(AtlasClassificationType classificationType) {
return classificationType != null ? classificationType.getAllSuperTypes().contains(this.getTypeName()) : false; return classificationType != null && classificationType.getAllSuperTypes().contains(this.getTypeName());
} }
public boolean isSubTypeOf(AtlasClassificationType classificationType) { public boolean isSubTypeOf(AtlasClassificationType classificationType) {
return classificationType != null ? allSuperTypes.contains(classificationType.getTypeName()) : false; return classificationType != null && allSuperTypes.contains(classificationType.getTypeName());
} }
@Override @Override
......
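The isSuperTypeOf/isSubTypeOf rewrites use the identity that cond ? expr : false is exactly cond && expr: the && short-circuits past the null just as the ternary did, with less ceremony. A sketch mirroring the shape of the Atlas methods (TernaryDemo and its fields are illustrative):

import java.util.Arrays;
import java.util.HashSet;
import java.util.Set;

public class TernaryDemo {
    private final Set<String> allSuperTypes = new HashSet<>(Arrays.asList("Asset", "Referenceable"));
    private final String typeName = "hive_table";

    boolean isSubTypeOf(TernaryDemo other) {
        // Before: return other != null ? allSuperTypes.contains(other.typeName) : false;
        return other != null && allSuperTypes.contains(other.typeName);
    }

    public static void main(String[] args) {
        TernaryDemo t = new TernaryDemo();
        System.out.println(t.isSubTypeOf(t));    // false: "hive_table" is not in the supertype set
        System.out.println(t.isSubTypeOf(null)); // false, and no NullPointerException
    }
}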
...@@ -20,7 +20,6 @@ package org.apache.atlas.type; ...@@ -20,7 +20,6 @@ package org.apache.atlas.type;
import org.apache.atlas.AtlasErrorCode; import org.apache.atlas.AtlasErrorCode;
import org.apache.atlas.exception.AtlasBaseException; import org.apache.atlas.exception.AtlasBaseException;
import org.apache.atlas.model.TypeCategory;
import org.apache.atlas.model.instance.AtlasEntity; import org.apache.atlas.model.instance.AtlasEntity;
import org.apache.atlas.model.typedef.AtlasEntityDef; import org.apache.atlas.model.typedef.AtlasEntityDef;
import org.apache.atlas.model.typedef.AtlasStructDef.AtlasAttributeDef; import org.apache.atlas.model.typedef.AtlasStructDef.AtlasAttributeDef;
...@@ -144,11 +143,11 @@ public class AtlasEntityType extends AtlasStructType { ...@@ -144,11 +143,11 @@ public class AtlasEntityType extends AtlasStructType {
} }
public boolean isSuperTypeOf(AtlasEntityType entityType) { public boolean isSuperTypeOf(AtlasEntityType entityType) {
return entityType != null ? entityType.getAllSuperTypes().contains(this.getTypeName()) : false; return entityType != null && entityType.getAllSuperTypes().contains(this.getTypeName());
} }
public boolean isSubTypeOf(AtlasEntityType entityType) { public boolean isSubTypeOf(AtlasEntityType entityType) {
return entityType != null ? allSuperTypes.contains(entityType.getTypeName()) : false; return entityType != null && allSuperTypes.contains(entityType.getTypeName());
} }
@Override @Override
......
...@@ -23,7 +23,6 @@ import java.util.HashMap; ...@@ -23,7 +23,6 @@ import java.util.HashMap;
import java.util.Map; import java.util.Map;
import org.apache.atlas.exception.AtlasBaseException; import org.apache.atlas.exception.AtlasBaseException;
import org.apache.atlas.model.TypeCategory;
import org.apache.atlas.model.typedef.AtlasEnumDef; import org.apache.atlas.model.typedef.AtlasEnumDef;
import org.apache.atlas.model.typedef.AtlasEnumDef.AtlasEnumElementDef; import org.apache.atlas.model.typedef.AtlasEnumDef.AtlasEnumElementDef;
...@@ -39,7 +38,7 @@ public class AtlasEnumType extends AtlasType { ...@@ -39,7 +38,7 @@ public class AtlasEnumType extends AtlasType {
public AtlasEnumType(AtlasEnumDef enumDef) { public AtlasEnumType(AtlasEnumDef enumDef) {
super(enumDef); super(enumDef);
Map<String, AtlasEnumElementDef> e = new HashMap<String, AtlasEnumElementDef>(); Map<String, AtlasEnumElementDef> e = new HashMap<>();
for (AtlasEnumElementDef elementDef : enumDef.getElementDefs()) { for (AtlasEnumElementDef elementDef : enumDef.getElementDefs()) {
e.put(elementDef.getValue().toLowerCase(), elementDef); e.put(elementDef.getValue().toLowerCase(), elementDef);
......
...@@ -93,7 +93,7 @@ public class AtlasMapType extends AtlasType { ...@@ -93,7 +93,7 @@ public class AtlasMapType extends AtlasType {
@Override @Override
public Map<Object, Object> createDefaultValue() { public Map<Object, Object> createDefaultValue() {
Map<Object, Object> ret = new HashMap<Object, Object>(); Map<Object, Object> ret = new HashMap<>();
ret.put(keyType.createDefaultValue(), valueType.createDefaultValue()); ret.put(keyType.createDefaultValue(), valueType.createDefaultValue());
...@@ -126,7 +126,7 @@ public class AtlasMapType extends AtlasType { ...@@ -126,7 +126,7 @@ public class AtlasMapType extends AtlasType {
} }
if (obj instanceof Map) { if (obj instanceof Map) {
Map<Object, Object> ret = new HashMap<Object, Object>(); Map<Object, Object> ret = new HashMap<>();
Map<Object, Objects> map = (Map<Object, Objects>) obj; Map<Object, Objects> map = (Map<Object, Objects>) obj;
......
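One unchanged context line above casts to Map<Object, Objects> — the value type java.util.Objects (the utility class) is almost certainly a typo for Object that this commit does not touch, and the unchecked cast hides it. A sketch of the normalization step with the presumably intended types and a checked wildcard cast instead:

import java.util.HashMap;
import java.util.Map;

public class MapNormalizeDemo {
    static Map<Object, Object> getNormalizedValue(Object obj) {
        if (obj instanceof Map) {
            Map<Object, Object> ret = new HashMap<>();
            Map<?, ?> map = (Map<?, ?>) obj; // wildcard cast: no unchecked warning
            for (Map.Entry<?, ?> e : map.entrySet()) {
                ret.put(e.getKey(), e.getValue()); // the real code normalizes via keyType/valueType
            }
            return ret;
        }
        return null;
    }

    public static void main(String[] args) {
        Map<String, Integer> in = new HashMap<>();
        in.put("k", 1);
        System.out.println(getNormalizedValue(in));          // {k=1}
        System.out.println(getNormalizedValue("not a map")); // null
    }
}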
...@@ -51,7 +51,7 @@ public class AtlasStructType extends AtlasType { ...@@ -51,7 +51,7 @@ public class AtlasStructType extends AtlasType {
private Map<String, AtlasType> attrTypes = Collections.emptyMap(); private Map<String, AtlasType> attrTypes = Collections.emptyMap();
private Set<String> foreignKeyAttributes = new HashSet<>(); private Set<String> foreignKeyAttributes = new HashSet<>();
private Map<String, TypeAttributePair> mappedFromRefAttributes = new HashMap<String, TypeAttributePair>(); private Map<String, TypeAttributePair> mappedFromRefAttributes = new HashMap<>();
public AtlasStructType(AtlasStructDef structDef) { public AtlasStructType(AtlasStructDef structDef) {
...@@ -101,7 +101,7 @@ public class AtlasStructType extends AtlasType { ...@@ -101,7 +101,7 @@ public class AtlasStructType extends AtlasType {
@Override @Override
public void resolveReferences(AtlasTypeRegistry typeRegistry) throws AtlasBaseException { public void resolveReferences(AtlasTypeRegistry typeRegistry) throws AtlasBaseException {
Map<String, AtlasType> a = new HashMap<String, AtlasType>(); Map<String, AtlasType> a = new HashMap<>();
for (AtlasAttributeDef attributeDef : structDef.getAttributeDefs()) { for (AtlasAttributeDef attributeDef : structDef.getAttributeDefs()) {
AtlasType attrType = typeRegistry.getType(attributeDef.getTypeName()); AtlasType attrType = typeRegistry.getType(attributeDef.getTypeName());
...@@ -275,7 +275,7 @@ public class AtlasStructType extends AtlasType { ...@@ -275,7 +275,7 @@ public class AtlasStructType extends AtlasType {
Map<String, Object> attributes = obj.getAttributes(); Map<String, Object> attributes = obj.getAttributes();
if (attributes == null) { if (attributes == null) {
attributes = new HashMap<String, Object>(); attributes = new HashMap<>();
} }
for (AtlasAttributeDef attributeDef : structDef.getAttributeDefs()) { for (AtlasAttributeDef attributeDef : structDef.getAttributeDefs()) {
...@@ -348,11 +348,14 @@ public class AtlasStructType extends AtlasType { ...@@ -348,11 +348,14 @@ public class AtlasStructType extends AtlasType {
continue; continue;
} }
if (constraintType.equals(AtlasConstraintDef.CONSTRAINT_TYPE_FOREIGN_KEY)) { switch (constraintType) {
case AtlasConstraintDef.CONSTRAINT_TYPE_FOREIGN_KEY:
resolveForeignKeyConstraint(attribDef, constraintDef, attribType); resolveForeignKeyConstraint(attribDef, constraintDef, attribType);
} else if (constraintType.equals(CONSTRAINT_TYPE_MAPPED_FROM_REF)) { break;
case CONSTRAINT_TYPE_MAPPED_FROM_REF:
resolveMappedFromRefConstraint(attribDef, constraintDef, attribType); resolveMappedFromRefConstraint(attribDef, constraintDef, attribType);
} else { break;
default:
throw new AtlasBaseException(AtlasErrorCode.UNKNOWN_CONSTRAINT, constraintType, throw new AtlasBaseException(AtlasErrorCode.UNKNOWN_CONSTRAINT, constraintType,
getTypeName(), attribDef.getName()); getTypeName(), attribDef.getName());
} }
......
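The constraint-resolution hunk swaps an equals chain for a switch on the constraint-type string (Java 7+). Case labels must be compile-time String constants, which both AtlasConstraintDef constants are, and the default branch preserves the unknown-constraint error path. Note that a string switch throws NullPointerException on null, which the surrounding loop appears to rule out by continuing past blank constraint types earlier. A hedged sketch with made-up constraint names:

public class ConstraintDemo {
    static final String CONSTRAINT_TYPE_FOREIGN_KEY     = "foreignKey";
    static final String CONSTRAINT_TYPE_MAPPED_FROM_REF = "mappedFromRef";

    static String resolve(String constraintType) {
        switch (constraintType) { // constant case labels; hashCode + equals under the hood
            case CONSTRAINT_TYPE_FOREIGN_KEY:
                return "resolved foreign-key constraint";
            case CONSTRAINT_TYPE_MAPPED_FROM_REF:
                return "resolved mappedFromRef constraint";
            default:
                throw new IllegalArgumentException("unknown constraint: " + constraintType);
        }
    }

    public static void main(String[] args) {
        System.out.println(resolve(CONSTRAINT_TYPE_FOREIGN_KEY));
        System.out.println(resolve(CONSTRAINT_TYPE_MAPPED_FROM_REF));
    }
}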
...@@ -539,13 +539,13 @@ public class AtlasTypeRegistry { ...@@ -539,13 +539,13 @@ public class AtlasTypeRegistry {
LOG.debug("==> AtlasTypeRegistry.updateType({})", typeDef); LOG.debug("==> AtlasTypeRegistry.updateType({})", typeDef);
} }
if (typeDef == null) { if (typeDef != null) {
// ignore if (StringUtils.isNotBlank(typeDef.getGuid())) {
} else if (StringUtils.isNotBlank(typeDef.getGuid())) {
updateTypeByGuidWithNoRefResolve(typeDef.getGuid(), typeDef); updateTypeByGuidWithNoRefResolve(typeDef.getGuid(), typeDef);
} else if (StringUtils.isNotBlank(typeDef.getName())) { } else if (StringUtils.isNotBlank(typeDef.getName())) {
updateTypeByNameWithNoRefResolve(typeDef.getName(), typeDef); updateTypeByNameWithNoRefResolve(typeDef.getName(), typeDef);
} }
}
if (LOG.isDebugEnabled()) { if (LOG.isDebugEnabled()) {
LOG.debug("<== AtlasTypeRegistry.updateType({})", typeDef); LOG.debug("<== AtlasTypeRegistry.updateType({})", typeDef);
...@@ -694,15 +694,13 @@ class TypeCache { ...@@ -694,15 +694,13 @@ class TypeCache {
} }
public AtlasType getTypeByGuid(String guid) { public AtlasType getTypeByGuid(String guid) {
AtlasType ret = guid != null ? typeGuidMap.get(guid) : null;
return ret; return guid != null ? typeGuidMap.get(guid) : null;
} }
public AtlasType getTypeByName(String name) { public AtlasType getTypeByName(String name) {
AtlasType ret = name != null ? typeNameMap.get(name) : null;
return ret; return name != null ? typeNameMap.get(name) : null;
} }
public void updateGuid(String typeName, String currGuid, String newGuid) { public void updateGuid(String typeName, String currGuid, String newGuid) {
...@@ -768,15 +766,13 @@ class TypeDefCache<T extends AtlasBaseTypeDef> { ...@@ -768,15 +766,13 @@ class TypeDefCache<T extends AtlasBaseTypeDef> {
} }
public T getTypeDefByGuid(String guid) { public T getTypeDefByGuid(String guid) {
T ret = guid != null ? typeDefGuidMap.get(guid) : null;
return ret; return guid != null ? typeDefGuidMap.get(guid) : null;
} }
public T getTypeDefByName(String name) { public T getTypeDefByName(String name) {
T ret = name != null ? typeDefNameMap.get(name) : null;
return ret; return name != null ? typeDefNameMap.get(name) : null;
} }
public void updateGuid(String typeName, String newGuid) { public void updateGuid(String typeName, String newGuid) {
......
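The four cache getters above drop a single-use local: binding the ternary to ret and returning it on the next line says nothing that returning the expression directly does not. Sketch:

import java.util.HashMap;
import java.util.Map;

public class TypeCacheDemo {
    private final Map<String, String> typeGuidMap = new HashMap<>();

    // Before: AtlasType ret = guid != null ? typeGuidMap.get(guid) : null; return ret;
    String getTypeByGuid(String guid) {
        return guid != null ? typeGuidMap.get(guid) : null;
    }

    public static void main(String[] args) {
        TypeCacheDemo cache = new TypeCacheDemo();
        cache.typeGuidMap.put("g1", "hive_table");
        System.out.println(cache.getTypeByGuid("g1")); // hive_table
        System.out.println(cache.getTypeByGuid(null)); // null
    }
}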
...@@ -51,14 +51,14 @@ import static org.apache.atlas.model.typedef.AtlasBaseTypeDef.ATLAS_TYPE_MAP_SUF ...@@ -51,14 +51,14 @@ import static org.apache.atlas.model.typedef.AtlasBaseTypeDef.ATLAS_TYPE_MAP_SUF
* Utility methods for AtlasType/AtlasTypeDef. * Utility methods for AtlasType/AtlasTypeDef.
*/ */
public class AtlasTypeUtil { public class AtlasTypeUtil {
private static final Set<String> ATLAS_BUILTIN_TYPENAMES = new HashSet<String>(); private static final Set<String> ATLAS_BUILTIN_TYPENAMES = new HashSet<>();
static { static {
Collections.addAll(ATLAS_BUILTIN_TYPENAMES, AtlasBaseTypeDef.ATLAS_BUILTIN_TYPES); Collections.addAll(ATLAS_BUILTIN_TYPENAMES, AtlasBaseTypeDef.ATLAS_BUILTIN_TYPES);
} }
public static Set<String> getReferencedTypeNames(String typeName) { public static Set<String> getReferencedTypeNames(String typeName) {
Set<String> ret = new HashSet<String>(); Set<String> ret = new HashSet<>();
getReferencedTypeNames(typeName, ret); getReferencedTypeNames(typeName, ret);
......
...@@ -656,7 +656,7 @@ public final class TestUtilsV2 { ...@@ -656,7 +656,7 @@ public final class TestUtilsV2 {
AtlasTypeUtil.createRequiredAttrDef("level", "int")); AtlasTypeUtil.createRequiredAttrDef("level", "int"));
AtlasClassificationDef janitorSecurityClearanceTypeDef = AtlasClassificationDef janitorSecurityClearanceTypeDef =
AtlasTypeUtil.createTraitTypeDef("JanitorClearance", "JanitorClearance_description", ImmutableSet.<String>of("SecurityClearance1"), AtlasTypeUtil.createTraitTypeDef("JanitorClearance", "JanitorClearance_description", ImmutableSet.of("SecurityClearance1"),
AtlasTypeUtil.createRequiredAttrDef("level", "int")); AtlasTypeUtil.createRequiredAttrDef("level", "int"));
return Arrays.asList(securityClearanceTypeDef, janitorSecurityClearanceTypeDef); return Arrays.asList(securityClearanceTypeDef, janitorSecurityClearanceTypeDef);
......
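ImmutableSet.<String>of("SecurityClearance1") carries an explicit type witness; the compiler infers <String> from the argument on its own, so the witness can go. A sketch using java.util.Collections in place of Guava so it runs without dependencies:

import java.util.Collections;
import java.util.Set;

public class TypeWitnessDemo {
    static int describe(Set<String> superTypes) {
        return superTypes.size();
    }

    public static void main(String[] args) {
        // Explicit witness: legal, but redundant.
        System.out.println(describe(Collections.<String>singleton("SecurityClearance1"))); // 1
        // Inferred from the String argument.
        System.out.println(describe(Collections.singleton("SecurityClearance1")));         // 1
    }
}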
...@@ -357,28 +357,28 @@ public final class ModelTestUtil { ...@@ -357,28 +357,28 @@ public final class ModelTestUtil {
} }
public static List<AtlasAttributeDef> newAttributeDefsWithAllBuiltInTypes(String attrNamePrefix) { public static List<AtlasAttributeDef> newAttributeDefsWithAllBuiltInTypes(String attrNamePrefix) {
List<AtlasAttributeDef> ret = new ArrayList<AtlasAttributeDef>(); List<AtlasAttributeDef> ret = new ArrayList<>();
// add all built-in types // add all built-in types
for (int i = 0; i < ATLAS_BUILTIN_TYPES.length; i++) { for (String ATLAS_BUILTIN_TYPE2 : ATLAS_BUILTIN_TYPES) {
ret.add(getAttributeDef(attrNamePrefix, ATLAS_BUILTIN_TYPES[i])); ret.add(getAttributeDef(attrNamePrefix, ATLAS_BUILTIN_TYPE2));
} }
// add enum types // add enum types
ret.add(getAttributeDef(attrNamePrefix, ENUM_DEF.getName())); ret.add(getAttributeDef(attrNamePrefix, ENUM_DEF.getName()));
ret.add(getAttributeDef(attrNamePrefix, ENUM_DEF_WITH_NO_DEFAULT.getName())); ret.add(getAttributeDef(attrNamePrefix, ENUM_DEF_WITH_NO_DEFAULT.getName()));
// add array of built-in types // add array of built-in types
for (int i = 0; i < ATLAS_BUILTIN_TYPES.length; i++) { for (String ATLAS_BUILTIN_TYPE1 : ATLAS_BUILTIN_TYPES) {
ret.add(getAttributeDef(attrNamePrefix, AtlasBaseTypeDef.getArrayTypeName(ATLAS_BUILTIN_TYPES[i]))); ret.add(getAttributeDef(attrNamePrefix, AtlasBaseTypeDef.getArrayTypeName(ATLAS_BUILTIN_TYPE1)));
} }
// add array of enum types // add array of enum types
ret.add(getAttributeDef(attrNamePrefix, AtlasBaseTypeDef.getArrayTypeName(ENUM_DEF.getName()))); ret.add(getAttributeDef(attrNamePrefix, AtlasBaseTypeDef.getArrayTypeName(ENUM_DEF.getName())));
ret.add(getAttributeDef(attrNamePrefix, AtlasBaseTypeDef.getArrayTypeName(ENUM_DEF_WITH_NO_DEFAULT.getName()))); ret.add(getAttributeDef(attrNamePrefix, AtlasBaseTypeDef.getArrayTypeName(ENUM_DEF_WITH_NO_DEFAULT.getName())));
// add few map types // add few map types
for (int i = 0; i < ATLAS_PRIMITIVE_TYPES.length; i++) { for (String ATLAS_PRIMITIVE_TYPE3 : ATLAS_PRIMITIVE_TYPES) {
ret.add(getAttributeDef(attrNamePrefix, ret.add(getAttributeDef(attrNamePrefix,
AtlasBaseTypeDef.getMapTypeName(ATLAS_PRIMITIVE_TYPES[i], getRandomBuiltInType()))); AtlasBaseTypeDef.getMapTypeName(ATLAS_PRIMITIVE_TYPE3, getRandomBuiltInType())));
} }
// add map types with enum as key // add map types with enum as key
ret.add(getAttributeDef(attrNamePrefix, ret.add(getAttributeDef(attrNamePrefix,
...@@ -392,7 +392,7 @@ public final class ModelTestUtil { ...@@ -392,7 +392,7 @@ public final class ModelTestUtil {
AtlasBaseTypeDef.getMapTypeName(getRandomPrimitiveType(), ENUM_DEF_WITH_NO_DEFAULT.getName()))); AtlasBaseTypeDef.getMapTypeName(getRandomPrimitiveType(), ENUM_DEF_WITH_NO_DEFAULT.getName())));
// add few array of arrays // add few array of arrays
for (int i = 0; i < ATLAS_BUILTIN_TYPES.length; i++) { for (String ATLAS_BUILTIN_TYPE : ATLAS_BUILTIN_TYPES) {
ret.add(getAttributeDef(attrNamePrefix, ret.add(getAttributeDef(attrNamePrefix,
AtlasBaseTypeDef.getArrayTypeName(AtlasBaseTypeDef.getArrayTypeName(getRandomBuiltInType())))); AtlasBaseTypeDef.getArrayTypeName(AtlasBaseTypeDef.getArrayTypeName(getRandomBuiltInType()))));
} }
...@@ -400,9 +400,9 @@ public final class ModelTestUtil { ...@@ -400,9 +400,9 @@ public final class ModelTestUtil {
ret.add(getAttributeDef(attrNamePrefix, AtlasBaseTypeDef.getArrayTypeName(ENUM_DEF_WITH_NO_DEFAULT.getName()))); ret.add(getAttributeDef(attrNamePrefix, AtlasBaseTypeDef.getArrayTypeName(ENUM_DEF_WITH_NO_DEFAULT.getName())));
// add few array of maps // add few array of maps
for (int i = 0; i < ATLAS_PRIMITIVE_TYPES.length; i++) { for (String ATLAS_PRIMITIVE_TYPE2 : ATLAS_PRIMITIVE_TYPES) {
ret.add(getAttributeDef(attrNamePrefix, AtlasBaseTypeDef.getArrayTypeName( ret.add(getAttributeDef(attrNamePrefix, AtlasBaseTypeDef.getArrayTypeName(
AtlasBaseTypeDef.getMapTypeName(ATLAS_PRIMITIVE_TYPES[i], getRandomBuiltInType())))); AtlasBaseTypeDef.getMapTypeName(ATLAS_PRIMITIVE_TYPE2, getRandomBuiltInType()))));
} }
ret.add(getAttributeDef(attrNamePrefix, AtlasBaseTypeDef.getArrayTypeName( ret.add(getAttributeDef(attrNamePrefix, AtlasBaseTypeDef.getArrayTypeName(
AtlasBaseTypeDef.getMapTypeName(ENUM_DEF.getName(), getRandomBuiltInType())))); AtlasBaseTypeDef.getMapTypeName(ENUM_DEF.getName(), getRandomBuiltInType()))));
...@@ -414,15 +414,15 @@ public final class ModelTestUtil { ...@@ -414,15 +414,15 @@ public final class ModelTestUtil {
AtlasBaseTypeDef.getMapTypeName(getRandomPrimitiveType(), ENUM_DEF_WITH_NO_DEFAULT.getName())))); AtlasBaseTypeDef.getMapTypeName(getRandomPrimitiveType(), ENUM_DEF_WITH_NO_DEFAULT.getName()))));
// add few map of arrays // add few map of arrays
for (int i = 0; i < ATLAS_PRIMITIVE_TYPES.length; i++) { for (String ATLAS_PRIMITIVE_TYPE1 : ATLAS_PRIMITIVE_TYPES) {
ret.add(getAttributeDef(attrNamePrefix, AtlasBaseTypeDef.getMapTypeName(ATLAS_PRIMITIVE_TYPES[i], ret.add(getAttributeDef(attrNamePrefix, AtlasBaseTypeDef.getMapTypeName(ATLAS_PRIMITIVE_TYPE1,
AtlasBaseTypeDef.getArrayTypeName(getRandomBuiltInType())))); AtlasBaseTypeDef.getArrayTypeName(getRandomBuiltInType()))));
} }
// add few map of maps // add few map of maps
for (int i = 0; i < ATLAS_PRIMITIVE_TYPES.length; i++) { for (String ATLAS_PRIMITIVE_TYPE : ATLAS_PRIMITIVE_TYPES) {
ret.add(getAttributeDef(attrNamePrefix, AtlasBaseTypeDef.getMapTypeName(ATLAS_PRIMITIVE_TYPES[i], ret.add(getAttributeDef(attrNamePrefix, AtlasBaseTypeDef.getMapTypeName(ATLAS_PRIMITIVE_TYPE,
AtlasBaseTypeDef.getMapTypeName(ATLAS_PRIMITIVE_TYPES[i], getRandomBuiltInType())))); AtlasBaseTypeDef.getMapTypeName(ATLAS_PRIMITIVE_TYPE, getRandomBuiltInType()))));
} }
return ret; return ret;
......
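Every index loop over the type-name arrays in ModelTestUtil becomes an enhanced for loop, dropping the unused index and the repeated subscript. The loop variable names the commit introduces (ATLAS_BUILTIN_TYPE2, ATLAS_PRIMITIVE_TYPE3) look like IDE-generated output and break camelCase convention; a sketch of the same conversion with a conventional name:

import java.util.ArrayList;
import java.util.List;

public class ForEachDemo {
    static final String[] ATLAS_BUILTIN_TYPES = { "boolean", "byte", "int", "string" };

    public static void main(String[] args) {
        List<String> attrDefs = new ArrayList<>();
        // Before: for (int i = 0; i < ATLAS_BUILTIN_TYPES.length; i++) { ...TYPES[i]... }
        for (String builtInType : ATLAS_BUILTIN_TYPES) {
            attrDefs.add("attr." + builtInType);
        }
        System.out.println(attrDefs); // [attr.boolean, attr.byte, attr.int, attr.string]
    }
}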
...@@ -103,7 +103,7 @@ public class TestAtlasEntityDef { ...@@ -103,7 +103,7 @@ public class TestAtlasEntityDef {
AtlasEntityDef entityDef = ModelTestUtil.newEntityDefWithSuperTypes(); AtlasEntityDef entityDef = ModelTestUtil.newEntityDefWithSuperTypes();
Set<String> oldSuperTypes = entityDef.getSuperTypes(); Set<String> oldSuperTypes = entityDef.getSuperTypes();
Set<String> newSuperTypes = new HashSet<String>(); Set<String> newSuperTypes = new HashSet<>();
newSuperTypes.add("newType-abcd-1234"); newSuperTypes.add("newType-abcd-1234");
......
...@@ -93,7 +93,7 @@ public class TestAtlasEnumDef { ...@@ -93,7 +93,7 @@ public class TestAtlasEnumDef {
AtlasEnumDef enumDef = ModelTestUtil.newEnumDef(); AtlasEnumDef enumDef = ModelTestUtil.newEnumDef();
List<AtlasEnumElementDef> oldElements = enumDef.getElementDefs(); List<AtlasEnumElementDef> oldElements = enumDef.getElementDefs();
List<AtlasEnumElementDef> newElements = new ArrayList<AtlasEnumElementDef>(); List<AtlasEnumElementDef> newElements = new ArrayList<>();
newElements.add(new AtlasEnumElementDef("newElement", "new Element", 100)); newElements.add(new AtlasEnumElementDef("newElement", "new Element", 100));
......
...@@ -33,14 +33,14 @@ public class TestAtlasArrayType { ...@@ -33,14 +33,14 @@ public class TestAtlasArrayType {
private final Object[] invalidValues; private final Object[] invalidValues;
{ {
List<Integer> intList = new ArrayList<Integer>(); List<Integer> intList = new ArrayList<>();
Set<Integer> intSet = new HashSet<Integer>(); Set<Integer> intSet = new HashSet<>();
Integer[] intArray = new Integer[] { 1, 2, 3 }; Integer[] intArray = new Integer[] { 1, 2, 3 };
List<Object> objList = new ArrayList<Object>(); List<Object> objList = new ArrayList<>();
Set<Object> objSet = new HashSet<Object>(); Set<Object> objSet = new HashSet<>();
Object[] objArray = new Object[] { 1, 2, 3 }; Object[] objArray = new Object[] { 1, 2, 3 };
List<String> strList = new ArrayList<String>(); List<String> strList = new ArrayList<>();
Set<String> strSet = new HashSet<String>(); Set<String> strSet = new HashSet<>();
String[] strArray = new String[] { "1", "2", "3" }; String[] strArray = new String[] { "1", "2", "3" };
for (int i = 0; i < 10; i++) { for (int i = 0; i < 10; i++) {
...@@ -105,7 +105,7 @@ public class TestAtlasArrayType { ...@@ -105,7 +105,7 @@ public class TestAtlasArrayType {
@Test @Test
public void testArrayTypeValidateValue() { public void testArrayTypeValidateValue() {
List<String> messages = new ArrayList<String>(); List<String> messages = new ArrayList<>();
for (Object value : validValues) { for (Object value : validValues) {
assertTrue(intArrayType.validateValue(value, "testObj", messages)); assertTrue(intArrayType.validateValue(value, "testObj", messages));
assertEquals(messages.size(), 0, "value=" + value); assertEquals(messages.size(), 0, "value=" + value);
......
...@@ -94,7 +94,7 @@ public class TestAtlasBigDecimalType { ...@@ -94,7 +94,7 @@ public class TestAtlasBigDecimalType {
@Test @Test
public void testBigDecimalTypeValidateValue() { public void testBigDecimalTypeValidateValue() {
List<String> messages = new ArrayList<String>(); List<String> messages = new ArrayList<>();
for (Object value : validValues) { for (Object value : validValues) {
assertTrue(bigDecimalType.validateValue(value, "testObj", messages)); assertTrue(bigDecimalType.validateValue(value, "testObj", messages));
assertEquals(messages.size(), 0, "value=" + value); assertEquals(messages.size(), 0, "value=" + value);
......
...@@ -95,7 +95,7 @@ public class TestAtlasBigIntegerType { ...@@ -95,7 +95,7 @@ public class TestAtlasBigIntegerType {
@Test @Test
public void testBigIntegerTypeValidateValue() { public void testBigIntegerTypeValidateValue() {
List<String> messages = new ArrayList<String>(); List<String> messages = new ArrayList<>();
for (Object value : validValues) { for (Object value : validValues) {
assertTrue(bigIntegerType.validateValue(value, "testObj", messages)); assertTrue(bigIntegerType.validateValue(value, "testObj", messages));
assertEquals(messages.size(), 0, "value=" + value); assertEquals(messages.size(), 0, "value=" + value);
......
...@@ -73,7 +73,7 @@ public class TestAtlasBooleanType { ...@@ -73,7 +73,7 @@ public class TestAtlasBooleanType {
@Test @Test
public void testBooleanTypeValidateValue() { public void testBooleanTypeValidateValue() {
List<String> messages = new ArrayList<String>(); List<String> messages = new ArrayList<>();
for (Object value : validValues) { for (Object value : validValues) {
assertTrue(booleanType.validateValue(value, "testObj", messages)); assertTrue(booleanType.validateValue(value, "testObj", messages));
assertEquals(messages.size(), 0, "value=" + value); assertEquals(messages.size(), 0, "value=" + value);
......
...@@ -95,7 +95,7 @@ public class TestAtlasByteType { ...@@ -95,7 +95,7 @@ public class TestAtlasByteType {
@Test @Test
public void testByteTypeValidateValue() { public void testByteTypeValidateValue() {
List<String> messages = new ArrayList<String>(); List<String> messages = new ArrayList<>();
for (Object value : validValues) { for (Object value : validValues) {
assertTrue(byteType.validateValue(value, "testObj", messages)); assertTrue(byteType.validateValue(value, "testObj", messages));
assertEquals(messages.size(), 0, "value=" + value); assertEquals(messages.size(), 0, "value=" + value);
......
...@@ -31,8 +31,8 @@ import static org.testng.Assert.*; ...@@ -31,8 +31,8 @@ import static org.testng.Assert.*;
public class TestAtlasClassificationType { public class TestAtlasClassificationType {
private final AtlasClassificationType classificationType; private final AtlasClassificationType classificationType;
private final List<Object> validValues = new ArrayList<Object>(); private final List<Object> validValues = new ArrayList<>();
private final List<Object> invalidValues = new ArrayList<Object>(); private final List<Object> invalidValues = new ArrayList<>();
{ {
classificationType = getClassificationType(ModelTestUtil.getClassificationDefWithSuperTypes()); classificationType = getClassificationType(ModelTestUtil.getClassificationDefWithSuperTypes());
...@@ -55,7 +55,7 @@ public class TestAtlasClassificationType { ...@@ -55,7 +55,7 @@ public class TestAtlasClassificationType {
invalidValues.add(invalidValue2); invalidValues.add(invalidValue2);
invalidValues.add(invalidValue3); invalidValues.add(invalidValue3);
invalidValues.add(new AtlasClassification()); // no values for mandatory attributes invalidValues.add(new AtlasClassification()); // no values for mandatory attributes
invalidValues.add(new HashMap<Object, Object>()); // no values for mandatory attributes invalidValues.add(new HashMap<>()); // no values for mandatory attributes
invalidValues.add(1); // incorrect datatype invalidValues.add(1); // incorrect datatype
invalidValues.add(new HashSet()); // incorrect datatype invalidValues.add(new HashSet()); // incorrect datatype
invalidValues.add(new ArrayList()); // incorrect datatype invalidValues.add(new ArrayList()); // incorrect datatype
...@@ -102,7 +102,7 @@ public class TestAtlasClassificationType { ...@@ -102,7 +102,7 @@ public class TestAtlasClassificationType {
@Test @Test
public void testClassificationTypeValidateValue() { public void testClassificationTypeValidateValue() {
List<String> messages = new ArrayList<String>(); List<String> messages = new ArrayList<>();
for (Object value : validValues) { for (Object value : validValues) {
assertTrue(classificationType.validateValue(value, "testObj", messages)); assertTrue(classificationType.validateValue(value, "testObj", messages));
assertEquals(messages.size(), 0, "value=" + value); assertEquals(messages.size(), 0, "value=" + value);
......
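Note that the conversion deliberately stops short of the raw `new HashSet()` and `new ArrayList()` lines: those are wrong-datatype inputs for the validator, and they carry no type arguments to begin with. A hedged illustration of the distinction (names are hypothetical):

import java.util.ArrayList;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;

public class RawVsDiamond {
    public static void main(String[] args) {
        List<Object> invalidValues = new ArrayList<>();

        // Diamond form: the compiler infers HashMap<Object, Object> here.
        invalidValues.add(new HashMap<>());

        // Deliberately raw: these stand in for incorrect-datatype inputs,
        // so there are no type arguments for the diamond to abbreviate.
        invalidValues.add(new HashSet());
        invalidValues.add(new ArrayList());

        System.out.println(invalidValues.size()); // 3
    }
}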
...@@ -107,7 +107,7 @@ public class TestAtlasDateType { ...@@ -107,7 +107,7 @@ public class TestAtlasDateType {
@Test @Test
public void testDateTypeValidateValue() { public void testDateTypeValidateValue() {
List<String> messages = new ArrayList<String>(); List<String> messages = new ArrayList<>();
for (Object value : validValues) { for (Object value : validValues) {
assertTrue(dateType.validateValue(value, "testObj", messages)); assertTrue(dateType.validateValue(value, "testObj", messages));
assertEquals(messages.size(), 0, "value=" + value); assertEquals(messages.size(), 0, "value=" + value);
......
...@@ -95,7 +95,7 @@ public class TestAtlasDoubleType { ...@@ -95,7 +95,7 @@ public class TestAtlasDoubleType {
@Test @Test
public void testDoubleTypeValidateValue() { public void testDoubleTypeValidateValue() {
List<String> messages = new ArrayList<String>(); List<String> messages = new ArrayList<>();
for (Object value : validValues) { for (Object value : validValues) {
assertTrue(doubleType.validateValue(value, "testObj", messages)); assertTrue(doubleType.validateValue(value, "testObj", messages));
assertEquals(messages.size(), 0, "value=" + value); assertEquals(messages.size(), 0, "value=" + value);
......
...@@ -38,8 +38,8 @@ import static org.testng.Assert.*; ...@@ -38,8 +38,8 @@ import static org.testng.Assert.*;
public class TestAtlasEntityType { public class TestAtlasEntityType {
private final AtlasEntityType entityType; private final AtlasEntityType entityType;
private final List<Object> validValues = new ArrayList<Object>(); private final List<Object> validValues = new ArrayList<>();
private final List<Object> invalidValues = new ArrayList<Object>(); private final List<Object> invalidValues = new ArrayList<>();
{ {
entityType = getEntityType(ModelTestUtil.getEntityDefWithSuperTypes()); entityType = getEntityType(ModelTestUtil.getEntityDefWithSuperTypes());
...@@ -62,7 +62,7 @@ public class TestAtlasEntityType { ...@@ -62,7 +62,7 @@ public class TestAtlasEntityType {
invalidValues.add(invalidValue2); invalidValues.add(invalidValue2);
invalidValues.add(invalidValue3); invalidValues.add(invalidValue3);
invalidValues.add(new AtlasEntity()); // no values for mandatory attributes invalidValues.add(new AtlasEntity()); // no values for mandatory attributes
invalidValues.add(new HashMap<Object, Object>()); // no values for mandatory attributes invalidValues.add(new HashMap<>()); // no values for mandatory attributes
invalidValues.add(1); // incorrect datatype invalidValues.add(1); // incorrect datatype
invalidValues.add(new HashSet()); // incorrect datatype invalidValues.add(new HashSet()); // incorrect datatype
invalidValues.add(new ArrayList()); // incorrect datatype invalidValues.add(new ArrayList()); // incorrect datatype
...@@ -109,7 +109,7 @@ public class TestAtlasEntityType { ...@@ -109,7 +109,7 @@ public class TestAtlasEntityType {
@Test @Test
public void testEntityTypeValidateValue() { public void testEntityTypeValidateValue() {
List<String> messages = new ArrayList<String>(); List<String> messages = new ArrayList<>();
for (Object value : validValues) { for (Object value : validValues) {
assertTrue(entityType.validateValue(value, "testObj", messages)); assertTrue(entityType.validateValue(value, "testObj", messages));
assertEquals(messages.size(), 0, "value=" + value); assertEquals(messages.size(), 0, "value=" + value);
......
...@@ -95,7 +95,7 @@ public class TestAtlasFloatType { ...@@ -95,7 +95,7 @@ public class TestAtlasFloatType {
@Test @Test
public void testFloatTypeValidateValue() { public void testFloatTypeValidateValue() {
List<String> messages = new ArrayList<String>(); List<String> messages = new ArrayList<>();
for (Object value : validValues) { for (Object value : validValues) {
assertTrue(floatType.validateValue(value, "testObj", messages)); assertTrue(floatType.validateValue(value, "testObj", messages));
assertEquals(messages.size(), 0, "value=" + value); assertEquals(messages.size(), 0, "value=" + value);
......
...@@ -95,7 +95,7 @@ public class TestAtlasIntType { ...@@ -95,7 +95,7 @@ public class TestAtlasIntType {
@Test @Test
public void testIntTypeValidateValue() { public void testIntTypeValidateValue() {
List<String> messages = new ArrayList<String>(); List<String> messages = new ArrayList<>();
for (Object value : validValues) { for (Object value : validValues) {
assertTrue(intType.validateValue(value, "testObj", messages)); assertTrue(intType.validateValue(value, "testObj", messages));
assertEquals(messages.size(), 0, "value=" + value); assertEquals(messages.size(), 0, "value=" + value);
......
...@@ -95,7 +95,7 @@ public class TestAtlasLongType { ...@@ -95,7 +95,7 @@ public class TestAtlasLongType {
@Test @Test
public void testLongTypeValidateValue() { public void testLongTypeValidateValue() {
List<String> messages = new ArrayList<String>(); List<String> messages = new ArrayList<>();
for (Object value : validValues) { for (Object value : validValues) {
assertTrue(longType.validateValue(value, "testObj", messages)); assertTrue(longType.validateValue(value, "testObj", messages));
assertEquals(messages.size(), 0, "value=" + value); assertEquals(messages.size(), 0, "value=" + value);
......
...@@ -34,13 +34,13 @@ public class TestAtlasMapType { ...@@ -34,13 +34,13 @@ public class TestAtlasMapType {
private final Object[] invalidValues; private final Object[] invalidValues;
{ {
Map<String, Integer> strIntMap = new HashMap<String, Integer>(); Map<String, Integer> strIntMap = new HashMap<>();
Map<String, Double> strDoubleMap = new HashMap<String, Double>(); Map<String, Double> strDoubleMap = new HashMap<>();
Map<String, String> strStringMap = new HashMap<String, String>(); Map<String, String> strStringMap = new HashMap<>();
Map<Integer, Integer> intIntMap = new HashMap<Integer, Integer>(); Map<Integer, Integer> intIntMap = new HashMap<>();
Map<Object, Object> objObjMap = new HashMap<Object, Object>(); Map<Object, Object> objObjMap = new HashMap<>();
Map<Object, Object> invObjObjMap1 = new HashMap<Object, Object>(); Map<Object, Object> invObjObjMap1 = new HashMap<>();
Map<Object, Object> invObjObjMap2 = new HashMap<Object, Object>(); Map<Object, Object> invObjObjMap2 = new HashMap<>();
for (int i = 0; i < 10; i++) { for (int i = 0; i < 10; i++) {
strIntMap.put(Integer.toString(i), i); strIntMap.put(Integer.toString(i), i);
...@@ -54,7 +54,7 @@ public class TestAtlasMapType { ...@@ -54,7 +54,7 @@ public class TestAtlasMapType {
invObjObjMap2.put("123", "xyz"); // invalid value invObjObjMap2.put("123", "xyz"); // invalid value
validValues = new Object[] { validValues = new Object[] {
null, new HashMap<String, Integer>(), new HashMap<Object, Object>(), strIntMap, strDoubleMap, strStringMap, null, new HashMap<String, Integer>(), new HashMap<>(), strIntMap, strDoubleMap, strStringMap,
intIntMap, objObjMap, intIntMap, objObjMap,
}; };
...@@ -101,7 +101,7 @@ public class TestAtlasMapType { ...@@ -101,7 +101,7 @@ public class TestAtlasMapType {
@Test @Test
public void testMapTypeValidateValue() { public void testMapTypeValidateValue() {
List<String> messages = new ArrayList<String>(); List<String> messages = new ArrayList<>();
for (Object value : validValues) { for (Object value : validValues) {
assertTrue(intIntMapType.validateValue(value, "testObj", messages)); assertTrue(intIntMapType.validateValue(value, "testObj", messages));
assertEquals(messages.size(), 0, "value=" + value); assertEquals(messages.size(), 0, "value=" + value);
......
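One detail in the `validValues` hunk above: `new HashMap<Object, Object>()` becomes `new HashMap<>()`, while `new HashMap<String, Integer>()` keeps its explicit arguments. A plausible reading is that inside an `Object[]` initializer the target type of each element is `Object`, so the diamond can only infer `HashMap<Object, Object>`; the explicit form is the only way to keep a differently parameterized empty map in that position. A small sketch of that inference behavior (hypothetical names):

import java.util.HashMap;

public class ArrayTargetTyping {
    public static void main(String[] args) {
        Object[] values = {
            // In an Object[] initializer the target type is Object, so the
            // diamond infers HashMap<Object, Object>.
            new HashMap<>(),
            // Explicit type arguments are required to get any other
            // parameterization for an element in this position.
            new HashMap<String, Integer>(),
        };
        System.out.println(values.length); // 2
    }
}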
...@@ -37,11 +37,11 @@ public class TestAtlasObjectIdType { ...@@ -37,11 +37,11 @@ public class TestAtlasObjectIdType {
private final Object[] invalidValues; private final Object[] invalidValues;
{ {
Map<String, String> objectId1 = new HashMap<String, String>(); Map<String, String> objectId1 = new HashMap<>();
Map<Object, Object> objectId2 = new HashMap<Object, Object>(); Map<Object, Object> objectId2 = new HashMap<>();
Map<Object, Object> objectId3 = new HashMap<Object, Object>(); Map<Object, Object> objectId3 = new HashMap<>();
Map<Object, Object> objectId4 = new HashMap<Object, Object>(); Map<Object, Object> objectId4 = new HashMap<>();
Map<Object, Object> objectId5 = new HashMap<Object, Object>(); Map<Object, Object> objectId5 = new HashMap<>();
objectId1.put(AtlasObjectId.KEY_TYPENAME, "testType"); objectId1.put(AtlasObjectId.KEY_TYPENAME, "testType");
objectId1.put(AtlasObjectId.KEY_GUID, "guid-1234"); objectId1.put(AtlasObjectId.KEY_GUID, "guid-1234");
...@@ -107,7 +107,7 @@ public class TestAtlasObjectIdType { ...@@ -107,7 +107,7 @@ public class TestAtlasObjectIdType {
@Test @Test
public void testObjectIdTypeValidateValue() { public void testObjectIdTypeValidateValue() {
List<String> messages = new ArrayList<String>(); List<String> messages = new ArrayList<>();
for (Object value : validValues) { for (Object value : validValues) {
assertTrue(objectIdType.validateValue(value, "testObj", messages)); assertTrue(objectIdType.validateValue(value, "testObj", messages));
assertEquals(messages.size(), 0, "value=" + value); assertEquals(messages.size(), 0, "value=" + value);
......
...@@ -95,7 +95,7 @@ public class TestAtlasShortType { ...@@ -95,7 +95,7 @@ public class TestAtlasShortType {
@Test @Test
public void testShortTypeValidateValue() { public void testShortTypeValidateValue() {
List<String> messages = new ArrayList<String>(); List<String> messages = new ArrayList<>();
for (Object value : validValues) { for (Object value : validValues) {
assertTrue(shortType.validateValue(value, "testObj", messages)); assertTrue(shortType.validateValue(value, "testObj", messages));
assertEquals(messages.size(), 0, "value=" + value); assertEquals(messages.size(), 0, "value=" + value);
......
...@@ -75,7 +75,7 @@ public class TestAtlasStringType { ...@@ -75,7 +75,7 @@ public class TestAtlasStringType {
@Test @Test
public void testStringTypeValidateValue() { public void testStringTypeValidateValue() {
List<String> messages = new ArrayList<String>(); List<String> messages = new ArrayList<>();
for (Object value : validValues) { for (Object value : validValues) {
assertTrue(stringType.validateValue(value, "testObj", messages)); assertTrue(stringType.validateValue(value, "testObj", messages));
assertEquals(messages.size(), 0, "value=" + value); assertEquals(messages.size(), 0, "value=" + value);
......
...@@ -78,8 +78,8 @@ public class TestAtlasStructType { ...@@ -78,8 +78,8 @@ public class TestAtlasStructType {
structDef.addAttribute(multiValuedAttribMax); structDef.addAttribute(multiValuedAttribMax);
structType = getStructType(structDef); structType = getStructType(structDef);
validValues = new ArrayList<Object>(); validValues = new ArrayList<>();
invalidValues = new ArrayList<Object>(); invalidValues = new ArrayList<>();
AtlasStruct invalidValue1 = structType.createDefaultValue(); AtlasStruct invalidValue1 = structType.createDefaultValue();
AtlasStruct invalidValue2 = structType.createDefaultValue(); AtlasStruct invalidValue2 = structType.createDefaultValue();
...@@ -121,7 +121,7 @@ public class TestAtlasStructType { ...@@ -121,7 +121,7 @@ public class TestAtlasStructType {
invalidValues.add(invalidValue6); invalidValues.add(invalidValue6);
invalidValues.add(invalidValue7); invalidValues.add(invalidValue7);
invalidValues.add(new AtlasStruct()); // no values for mandatory attributes invalidValues.add(new AtlasStruct()); // no values for mandatory attributes
invalidValues.add(new HashMap<Object, Object>()); // no values for mandatory attributes invalidValues.add(new HashMap<>()); // no values for mandatory attributes
invalidValues.add(1); // incorrect datatype invalidValues.add(1); // incorrect datatype
invalidValues.add(new HashSet()); // incorrect datatype invalidValues.add(new HashSet()); // incorrect datatype
invalidValues.add(new ArrayList()); // incorrect datatype invalidValues.add(new ArrayList()); // incorrect datatype
...@@ -168,7 +168,7 @@ public class TestAtlasStructType { ...@@ -168,7 +168,7 @@ public class TestAtlasStructType {
@Test @Test
public void testStructTypeValidateValue() { public void testStructTypeValidateValue() {
List<String> messages = new ArrayList<String>(); List<String> messages = new ArrayList<>();
for (Object value : validValues) { for (Object value : validValues) {
assertTrue(structType.validateValue(value, "testObj", messages)); assertTrue(structType.validateValue(value, "testObj", messages));
assertEquals(messages.size(), 0, "value=" + value); assertEquals(messages.size(), 0, "value=" + value);
......
...@@ -277,7 +277,7 @@ public class KafkaNotification extends AbstractNotification implements Service { ...@@ -277,7 +277,7 @@ public class KafkaNotification extends AbstractNotification implements Service {
protected <T> org.apache.atlas.kafka.KafkaConsumer<T> protected <T> org.apache.atlas.kafka.KafkaConsumer<T>
createKafkaConsumer(Class<T> type, MessageDeserializer<T> deserializer, KafkaStream stream, createKafkaConsumer(Class<T> type, MessageDeserializer<T> deserializer, KafkaStream stream,
int consumerId, ConsumerConnector consumerConnector, boolean autoCommitEnabled) { int consumerId, ConsumerConnector consumerConnector, boolean autoCommitEnabled) {
return new org.apache.atlas.kafka.KafkaConsumer<T>(deserializer, stream, return new org.apache.atlas.kafka.KafkaConsumer<>(deserializer, stream,
consumerId, consumerConnector, autoCommitEnabled); consumerId, consumerConnector, autoCommitEnabled);
} }
......
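The `KafkaConsumer` hunk shows that the diamond also works in a return statement: the method's declared return type supplies the target type for inference, so `new org.apache.atlas.kafka.KafkaConsumer<T>(...)` can become `new org.apache.atlas.kafka.KafkaConsumer<>(...)`. A self-contained sketch of the same pattern (the helper below is illustrative, not Atlas code):

import java.util.ArrayList;
import java.util.List;

public class ReturnTargetTyping {
    // The declared return type List<T> supplies the target type,
    // so the diamond infers ArrayList<T> here.
    static <T> List<T> emptyListOf(Class<T> type) {
        return new ArrayList<>();
    }

    public static void main(String[] args) {
        List<String> strings = emptyListOf(String.class);
        strings.add("inferred as ArrayList<String>");
        System.out.println(strings);
    }
}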
...@@ -45,7 +45,7 @@ final class AtlasPluginClassLoaderUtil { ...@@ -45,7 +45,7 @@ final class AtlasPluginClassLoaderUtil {
LOG.debug("==> AtlasPluginClassLoaderUtil.getFilesInDirectories()"); LOG.debug("==> AtlasPluginClassLoaderUtil.getFilesInDirectories()");
} }
List<URL> ret = new ArrayList<URL>(); List<URL> ret = new ArrayList<>();
for (String libDir : libDirs) { for (String libDir : libDirs) {
getFilesInDirectory(libDir, ret); getFilesInDirectory(libDir, ret);
......
...@@ -9,6 +9,7 @@ ATLAS-1060 Add composite indexes for exact match performance improvements for al ...@@ -9,6 +9,7 @@ ATLAS-1060 Add composite indexes for exact match performance improvements for al
ATLAS-1127 Modify creation and modification timestamps to Date instead of Long(sumasai) ATLAS-1127 Modify creation and modification timestamps to Date instead of Long(sumasai)
ALL CHANGES: ALL CHANGES:
ATLAS-1304 Redundant code removal and code simplification (apoorvnaik via mneethiraj)
ATLAS-1345 Enhance search APIs to resolve hierarchical references (apoorvnaik via sumasai) ATLAS-1345 Enhance search APIs to resolve hierarchical references (apoorvnaik via sumasai)
ATLAS-1287 Subtasks: ATLAS-1288/ATLAS-1289 Integrated V2 API for Lineage,Entity Details,Tag assign to entity,Tags listing,tag create (kevalbhatt) ATLAS-1287 Subtasks: ATLAS-1288/ATLAS-1289 Integrated V2 API for Lineage,Entity Details,Tag assign to entity,Tags listing,tag create (kevalbhatt)
ATLAS-1303 Update hashCode and equals method to use standard JDK libraries (apoorvnaik via svimal2106) ATLAS-1303 Update hashCode and equals method to use standard JDK libraries (apoorvnaik via svimal2106)
......
...@@ -101,9 +101,9 @@ public class RepositoryMetadataModule extends com.google.inject.AbstractModule { ...@@ -101,9 +101,9 @@ public class RepositoryMetadataModule extends com.google.inject.AbstractModule {
Configuration configuration = getConfiguration(); Configuration configuration = getConfiguration();
bindAuditRepository(binder(), configuration); bindAuditRepository(binder(), configuration);
bind(DeleteHandler.class).to((Class<? extends DeleteHandler>) AtlasRepositoryConfiguration.getDeleteHandlerImpl()).asEagerSingleton(); bind(DeleteHandler.class).to(AtlasRepositoryConfiguration.getDeleteHandlerImpl()).asEagerSingleton();
bind(TypeCache.class).to((Class<? extends TypeCache>) AtlasRepositoryConfiguration.getTypeCache()).asEagerSingleton(); bind(TypeCache.class).to(AtlasRepositoryConfiguration.getTypeCache()).asEagerSingleton();
//Add EntityAuditListener as EntityChangeListener //Add EntityAuditListener as EntityChangeListener
Multibinder<EntityChangeListener> entityChangeListenerBinder = Multibinder<EntityChangeListener> entityChangeListenerBinder =
......
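The `RepositoryMetadataModule` hunk drops two `(Class<? extends ...>)` casts from the Guice bindings. Those casts are redundant as long as the `AtlasRepositoryConfiguration` helpers already declare bounded return types such as `Class<? extends DeleteHandler>`, which appears to be the premise of this cleanup. A minimal sketch of why the cast becomes unnecessary under that assumption (all names below are hypothetical stand-ins, not the Atlas or Guice API):

public class BindingSketch {
    interface DeleteHandler {}
    static class SoftDeleteHandler implements DeleteHandler {}

    // With a bounded wildcard return type, callers need no cast...
    static Class<? extends DeleteHandler> getDeleteHandlerImpl() {
        return SoftDeleteHandler.class;
    }

    // ...because a binder-style API can accept the wildcard directly.
    static void bindTo(Class<? extends DeleteHandler> impl) {
        System.out.println("bound to " + impl.getName());
    }

    public static void main(String[] args) {
        bindTo(getDeleteHandlerImpl()); // no (Class<? extends DeleteHandler>) cast
    }
}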