Commit 40ee9492 by Shwetha GS

ATLAS-622 Introduce soft delete (shwethags)

parent daf812aa
......@@ -459,7 +459,7 @@ public class HiveMetaStoreBridge {
final String[] parts = tableQualifiedName.split("@");
final String tableName = parts[0];
final String clusterName = parts[1];
return String.format("%s.%s@%s", tableName, colName, clusterName);
return String.format("%s.%s@%s", tableName, colName.toLowerCase(), clusterName);
}
public List<Referenceable> getColumns(List<FieldSchema> schemaList, String tableQualifiedName) throws Exception {
......
......@@ -481,7 +481,7 @@ public class HiveHook extends AtlasHook implements ExecuteWithHookContext {
return entitiesCreatedOrUpdated;
}
private String normalize(String str) {
public static String normalize(String str) {
if (StringUtils.isEmpty(str)) {
return null;
}
......
......@@ -20,8 +20,10 @@ package org.apache.atlas.hive.hook;
import com.google.common.base.Joiner;
import com.google.common.collect.ImmutableList;
import com.sun.jersey.api.client.ClientResponse;
import org.apache.atlas.ApplicationProperties;
import org.apache.atlas.AtlasClient;
import org.apache.atlas.AtlasServiceException;
import org.apache.atlas.fs.model.FSDataTypes;
import org.apache.atlas.hive.bridge.HiveMetaStoreBridge;
import org.apache.atlas.hive.model.HiveDataModelGenerator;
......@@ -32,7 +34,6 @@ import org.apache.atlas.typesystem.persistence.Id;
import org.apache.atlas.utils.ParamChecker;
import org.apache.commons.configuration.Configuration;
import org.apache.commons.lang.RandomStringUtils;
import org.apache.commons.lang.StringEscapeUtils;
import org.apache.commons.lang.StringUtils;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hive.conf.HiveConf;
......@@ -41,7 +42,6 @@ import org.apache.hadoop.hive.ql.Driver;
import org.apache.hadoop.hive.ql.hooks.Entity;
import org.apache.hadoop.hive.ql.processors.CommandProcessorResponse;
import org.apache.hadoop.hive.ql.session.SessionState;
import org.codehaus.jettison.json.JSONArray;
import org.codehaus.jettison.json.JSONObject;
import org.slf4j.Logger;
import org.testng.Assert;
......@@ -53,26 +53,24 @@ import java.util.HashMap;
import java.util.List;
import java.util.Map;
import static org.apache.atlas.hive.hook.HiveHook.normalize;
import static org.testng.Assert.assertEquals;
import static org.testng.Assert.assertNotNull;
import static org.testng.Assert.fail;
public class HiveHookIT {
public static final Logger LOG = org.slf4j.LoggerFactory.getLogger(HiveHookIT.class);
private static final Logger LOG = org.slf4j.LoggerFactory.getLogger(HiveHookIT.class);
private static final String DGI_URL = "http://localhost:21000/";
private static final String CLUSTER_NAME = "test";
public static final String DEFAULT_DB = "default";
private Driver driver;
private AtlasClient dgiCLient;
private AtlasClient atlasClient;
private SessionState ss;
private static final String INPUTS = AtlasClient.PROCESS_ATTRIBUTE_INPUTS;
private static final String OUTPUTS = AtlasClient.PROCESS_ATTRIBUTE_OUTPUTS;
private enum QUERY_TYPE {
GREMLIN,
DSL
}
@BeforeClass
public void setUp() throws Exception {
//Set-up hive session
......@@ -87,9 +85,9 @@ public class HiveHookIT {
SessionState.setCurrentSessionState(ss);
Configuration configuration = ApplicationProperties.get();
dgiCLient = new AtlasClient(configuration.getString(HiveMetaStoreBridge.ATLAS_ENDPOINT, DGI_URL));
atlasClient = new AtlasClient(configuration.getString(HiveMetaStoreBridge.ATLAS_ENDPOINT, DGI_URL));
HiveMetaStoreBridge hiveMetaStoreBridge = new HiveMetaStoreBridge(conf, dgiCLient);
HiveMetaStoreBridge hiveMetaStoreBridge = new HiveMetaStoreBridge(conf, atlasClient);
hiveMetaStoreBridge.registerHiveDataModel();
}
......@@ -107,7 +105,7 @@ public class HiveHookIT {
runCommand("create database " + dbName + " WITH DBPROPERTIES ('p1'='v1', 'p2'='v2')");
String dbId = assertDatabaseIsRegistered(dbName);
Referenceable definition = dgiCLient.getEntity(dbId);
Referenceable definition = atlasClient.getEntity(dbId);
Map params = (Map) definition.get(HiveDataModelGenerator.PARAMETERS);
Assert.assertNotNull(params);
Assert.assertEquals(params.size(), 2);
......@@ -115,11 +113,13 @@ public class HiveHookIT {
//There should be just one entity per dbname
runCommand("drop database " + dbName);
assertDBIsNotRegistered(dbName);
runCommand("create database " + dbName);
String dbid = assertDatabaseIsRegistered(dbName);
//assert on qualified name
Referenceable dbEntity = dgiCLient.getEntity(dbid);
Referenceable dbEntity = atlasClient.getEntity(dbid);
Assert.assertEquals(dbEntity.get("qualifiedName"), dbName.toLowerCase() + "@" + CLUSTER_NAME);
}
......@@ -149,7 +149,7 @@ public class HiveHookIT {
private String createTable(boolean isPartitioned) throws Exception {
String tableName = tableName();
runCommand("create table " + tableName + "(id int, name string) comment 'table comment' " + (isPartitioned ?
" partitioned by(dt string)" : ""));
" partitioned by(dt string)" : ""));
return tableName;
}
......@@ -174,14 +174,15 @@ public class HiveHookIT {
assertTableIsRegistered(dbName, tableName);
//there is only one instance of column registered
String colId = assertColumnIsRegistered(HiveMetaStoreBridge.getColumnQualifiedName(HiveMetaStoreBridge.getTableQualifiedName(CLUSTER_NAME, dbName, tableName), colName));
Referenceable colEntity = dgiCLient.getEntity(colId);
String colId = assertColumnIsRegistered(HiveMetaStoreBridge.getColumnQualifiedName(
HiveMetaStoreBridge.getTableQualifiedName(CLUSTER_NAME, dbName, tableName), colName));
Referenceable colEntity = atlasClient.getEntity(colId);
Assert.assertEquals(colEntity.get("qualifiedName"), String.format("%s.%s.%s@%s", dbName.toLowerCase(),
tableName.toLowerCase(), colName.toLowerCase(), CLUSTER_NAME));
tableName = createTable();
String tableId = assertTableIsRegistered(DEFAULT_DB, tableName);
Referenceable tableRef = dgiCLient.getEntity(tableId);
Referenceable tableRef = atlasClient.getEntity(tableId);
Assert.assertEquals(tableRef.get("tableType"), TableType.MANAGED_TABLE.name());
Assert.assertEquals(tableRef.get(HiveDataModelGenerator.COMMENT), "table comment");
String entityName = HiveMetaStoreBridge.getTableQualifiedName(CLUSTER_NAME, DEFAULT_DB, tableName);
......@@ -227,17 +228,19 @@ public class HiveHookIT {
}
private String assertColumnIsRegistered(String colName) throws Exception {
LOG.debug("Searching for column {}", colName.toLowerCase());
String query =
String.format("%s where qualifiedName = '%s'", HiveDataTypes.HIVE_COLUMN.getName(), colName.toLowerCase());
return assertEntityIsRegistered(query);
return assertColumnIsRegistered(colName, null);
}
private String assertColumnIsRegistered(String colName, AssertPredicate assertPredicate) throws Exception {
LOG.debug("Searching for column {}", colName);
return assertEntityIsRegistered(HiveDataTypes.HIVE_COLUMN.getName(), AtlasClient.REFERENCEABLE_ATTRIBUTE_NAME,
colName, assertPredicate);
}
private void assertColumnIsNotRegistered(String colName) throws Exception {
LOG.debug("Searching for column {}", colName);
String query =
String.format("%s where qualifiedName = '%s'", HiveDataTypes.HIVE_COLUMN.getName(), colName.toLowerCase());
assertEntityIsNotRegistered(QUERY_TYPE.DSL, query);
assertEntityIsNotRegistered(HiveDataTypes.HIVE_COLUMN.getName(), AtlasClient.REFERENCEABLE_ATTRIBUTE_NAME,
colName);
}
@Test
......@@ -277,7 +280,7 @@ public class HiveHookIT {
//Check lineage which includes table1
String datasetName = HiveMetaStoreBridge.getTableQualifiedName(CLUSTER_NAME, DEFAULT_DB, viewName);
JSONObject response = dgiCLient.getInputGraph(datasetName);
JSONObject response = atlasClient.getInputGraph(datasetName);
JSONObject vertices = response.getJSONObject("values").getJSONObject("vertices");
Assert.assertTrue(vertices.has(viewId));
Assert.assertTrue(vertices.has(table1Id));
......@@ -293,7 +296,7 @@ public class HiveHookIT {
Assert.assertEquals(assertTableIsRegistered(DEFAULT_DB, viewName), viewId);
datasetName = HiveMetaStoreBridge.getTableQualifiedName(CLUSTER_NAME, DEFAULT_DB, viewName);
response = dgiCLient.getInputGraph(datasetName);
response = atlasClient.getInputGraph(datasetName);
vertices = response.getJSONObject("values").getJSONObject("vertices");
Assert.assertTrue(vertices.has(viewId));
......@@ -304,7 +307,7 @@ public class HiveHookIT {
Assert.assertTrue(vertices.has(table1Id));
//Outputs dont exist
response = dgiCLient.getOutputGraph(datasetName);
response = atlasClient.getOutputGraph(datasetName);
vertices = response.getJSONObject("values").getJSONObject("vertices");
Assert.assertEquals(vertices.length(), 0);
}
......@@ -358,7 +361,7 @@ public class HiveHookIT {
private Referenceable validateProcess(String query, int numInputs, int numOutputs) throws Exception {
String processId = assertProcessIsRegistered(query);
Referenceable process = dgiCLient.getEntity(processId);
Referenceable process = atlasClient.getEntity(processId);
if (numInputs == 0) {
Assert.assertNull(process.get(INPUTS));
} else {
......@@ -376,7 +379,7 @@ public class HiveHookIT {
private Referenceable validateProcess(String query, String[] inputs, String[] outputs) throws Exception {
String processId = assertProcessIsRegistered(query);
Referenceable process = dgiCLient.getEntity(processId);
Referenceable process = atlasClient.getEntity(processId);
if (inputs == null) {
Assert.assertNull(process.get(INPUTS));
} else {
......@@ -406,7 +409,7 @@ public class HiveHookIT {
String inputTableId = assertTableIsRegistered(DEFAULT_DB, tableName);
String opTableId = assertTableIsRegistered(DEFAULT_DB, insertTableName);
validateProcess(query, new String[] {inputTableId}, new String[] {opTableId});
validateProcess(query, new String[]{inputTableId}, new String[]{opTableId});
}
@Test
......@@ -450,7 +453,7 @@ public class HiveHookIT {
String ipTableId = assertTableIsRegistered(DEFAULT_DB, tableName);
String opTableId = assertTableIsRegistered(DEFAULT_DB, insertTableName);
validateProcess(query, new String[] {ipTableId}, new String[] {opTableId});
validateProcess(query, new String[]{ipTableId}, new String[]{opTableId});
}
@Test
......@@ -465,7 +468,7 @@ public class HiveHookIT {
String ipTableId = assertTableIsRegistered(DEFAULT_DB, tableName);
String opTableId = assertTableIsRegistered(DEFAULT_DB, insertTableName);
validateProcess(query, new String[] {ipTableId}, new String[] {opTableId});
validateProcess(query, new String[]{ipTableId}, new String[]{opTableId});
}
private String random() {
......@@ -571,7 +574,7 @@ public class HiveHookIT {
private List<Referenceable> getColumns(String dbName, String tableName) throws Exception {
String tableId = assertTableIsRegistered(dbName, tableName);
Referenceable tableRef = dgiCLient.getEntity(tableId);
Referenceable tableRef = atlasClient.getEntity(tableId);
return ((List<Referenceable>)tableRef.get(HiveDataModelGenerator.COLUMNS));
}
......@@ -582,7 +585,9 @@ public class HiveHookIT {
String query = "alter table " + tableName + " add columns (" + column + " string)";
runCommand(query);
assertColumnIsRegistered(HiveMetaStoreBridge.getColumnQualifiedName(HiveMetaStoreBridge.getTableQualifiedName(CLUSTER_NAME, DEFAULT_DB, tableName), column));
assertColumnIsRegistered(HiveMetaStoreBridge
.getColumnQualifiedName(HiveMetaStoreBridge.getTableQualifiedName(CLUSTER_NAME, DEFAULT_DB, tableName),
column));
//Verify the number of columns present in the table
final List<Referenceable> columns = getColumns(DEFAULT_DB, tableName);
......@@ -595,13 +600,21 @@ public class HiveHookIT {
final String colDropped = "id";
String query = "alter table " + tableName + " replace columns (name string)";
runCommand(query);
assertColumnIsNotRegistered(HiveMetaStoreBridge.getColumnQualifiedName(HiveMetaStoreBridge.getTableQualifiedName(CLUSTER_NAME, DEFAULT_DB, tableName), colDropped));
assertColumnIsNotRegistered(HiveMetaStoreBridge
.getColumnQualifiedName(HiveMetaStoreBridge.getTableQualifiedName(CLUSTER_NAME, DEFAULT_DB, tableName),
colDropped));
//Verify the number of columns present in the table
final List<Referenceable> columns = getColumns(DEFAULT_DB, tableName);
Assert.assertEquals(columns.size(), 1);
assertTableIsRegistered(DEFAULT_DB, tableName, new AssertPredicate() {
@Override
public void assertOnEntity(Referenceable tableRef) throws Exception {
List<Referenceable> columns = (List<Referenceable>) tableRef.get(HiveDataModelGenerator.COLUMNS);
Assert.assertEquals(columns.size(), 1);
Assert.assertEquals(columns.get(0).get(HiveDataModelGenerator.NAME), "name");
Assert.assertEquals(columns.get(0).get(HiveDataModelGenerator.NAME), "name");
}
});
}
@Test
......@@ -612,12 +625,15 @@ public class HiveHookIT {
String tableName = createTable();
String query = String.format("alter table %s change %s %s string", tableName, oldColName, newColName);
runCommand(query);
assertColumnIsNotRegistered(HiveMetaStoreBridge.getColumnQualifiedName(HiveMetaStoreBridge.getTableQualifiedName(CLUSTER_NAME, DEFAULT_DB, tableName), oldColName));
assertColumnIsRegistered(HiveMetaStoreBridge.getColumnQualifiedName(HiveMetaStoreBridge.getTableQualifiedName(CLUSTER_NAME, DEFAULT_DB, tableName), newColName));
assertColumnIsNotRegistered(HiveMetaStoreBridge.getColumnQualifiedName(
HiveMetaStoreBridge.getTableQualifiedName(CLUSTER_NAME, DEFAULT_DB, tableName), oldColName));
assertColumnIsRegistered(HiveMetaStoreBridge.getColumnQualifiedName(
HiveMetaStoreBridge.getTableQualifiedName(CLUSTER_NAME, DEFAULT_DB, tableName), newColName));
//Verify the number of columns present in the table
List<Referenceable> columns = getColumns(DEFAULT_DB, tableName);
Assert.assertEquals(columns.size(), 2);
//Change column type
oldColName = "name1";
newColName = "name2";
......@@ -627,46 +643,70 @@ public class HiveHookIT {
columns = getColumns(DEFAULT_DB, tableName);
Assert.assertEquals(columns.size(), 2);
assertColumnIsNotRegistered(HiveMetaStoreBridge.getColumnQualifiedName(HiveMetaStoreBridge.getTableQualifiedName(CLUSTER_NAME, DEFAULT_DB, tableName), oldColName));
String newColQualifiedName = HiveMetaStoreBridge.getColumnQualifiedName(HiveMetaStoreBridge.getTableQualifiedName(CLUSTER_NAME, DEFAULT_DB, tableName), newColName);
assertColumnIsRegistered(newColQualifiedName);
assertColumnIsNotRegistered(HiveMetaStoreBridge.getColumnQualifiedName(
HiveMetaStoreBridge.getTableQualifiedName(CLUSTER_NAME, DEFAULT_DB, tableName), oldColName));
Assert.assertEquals(columns.get(1).get("type"), "int");
String newColQualifiedName = HiveMetaStoreBridge.getColumnQualifiedName(
HiveMetaStoreBridge.getTableQualifiedName(CLUSTER_NAME, DEFAULT_DB, tableName), newColName);
assertColumnIsRegistered(newColQualifiedName, new AssertPredicate() {
@Override
public void assertOnEntity(Referenceable entity) throws Exception {
assertEquals(entity.get("type"), "int");
}
});
//Change name and add comment
oldColName = "name2";
newColName = "name3";
final String comment = "added comment";
query = String.format("alter table %s change column %s %s %s COMMENT '%s' after id", tableName, oldColName, newColName, newColType, comment);
query = String.format("alter table %s change column %s %s %s COMMENT '%s' after id", tableName, oldColName,
newColName, newColType, comment);
runCommand(query);
columns = getColumns(DEFAULT_DB, tableName);
Assert.assertEquals(columns.size(), 2);
assertColumnIsNotRegistered(HiveMetaStoreBridge.getColumnQualifiedName(HiveMetaStoreBridge.getTableQualifiedName(CLUSTER_NAME, DEFAULT_DB, tableName), oldColName));
newColQualifiedName = HiveMetaStoreBridge.getColumnQualifiedName(HiveMetaStoreBridge.getTableQualifiedName(CLUSTER_NAME, DEFAULT_DB, tableName), newColName);
assertColumnIsRegistered(newColQualifiedName);
assertColumnIsNotRegistered(HiveMetaStoreBridge.getColumnQualifiedName(
HiveMetaStoreBridge.getTableQualifiedName(CLUSTER_NAME, DEFAULT_DB, tableName), oldColName));
newColQualifiedName = HiveMetaStoreBridge.getColumnQualifiedName(
HiveMetaStoreBridge.getTableQualifiedName(CLUSTER_NAME, DEFAULT_DB, tableName), newColName);
Assert.assertEquals(columns.get(1).get(HiveDataModelGenerator.COMMENT), comment);
assertColumnIsRegistered(newColQualifiedName, new AssertPredicate() {
@Override
public void assertOnEntity(Referenceable entity) throws Exception {
assertEquals(entity.get(HiveDataModelGenerator.COMMENT), comment);
}
});
//Change column position
oldColName = "name3";
newColName = "name4";
query = String.format("alter table %s change column %s %s %s first", tableName, oldColName, newColName, newColType);
query = String.format("alter table %s change column %s %s %s first", tableName, oldColName, newColName,
newColType);
runCommand(query);
columns = getColumns(DEFAULT_DB, tableName);
Assert.assertEquals(columns.size(), 2);
assertColumnIsNotRegistered(HiveMetaStoreBridge.getColumnQualifiedName(HiveMetaStoreBridge.getTableQualifiedName(CLUSTER_NAME, DEFAULT_DB, tableName), oldColName));
newColQualifiedName = HiveMetaStoreBridge.getColumnQualifiedName(HiveMetaStoreBridge.getTableQualifiedName(CLUSTER_NAME, DEFAULT_DB, tableName), newColName);
assertColumnIsNotRegistered(HiveMetaStoreBridge.getColumnQualifiedName(
HiveMetaStoreBridge.getTableQualifiedName(CLUSTER_NAME, DEFAULT_DB, tableName), oldColName));
newColQualifiedName = HiveMetaStoreBridge.getColumnQualifiedName(
HiveMetaStoreBridge.getTableQualifiedName(CLUSTER_NAME, DEFAULT_DB, tableName), newColName);
assertColumnIsRegistered(newColQualifiedName);
//Change col position again
Assert.assertEquals(columns.get(0).get(HiveDataModelGenerator.NAME), newColName);
Assert.assertEquals(columns.get(1).get(HiveDataModelGenerator.NAME), "id");
final String finalNewColName = newColName;
assertTableIsRegistered(DEFAULT_DB, tableName, new AssertPredicate() {
@Override
public void assertOnEntity(Referenceable entity) throws Exception {
List<Referenceable> columns = (List<Referenceable>) entity.get(HiveDataModelGenerator.COLUMNS);
assertEquals(columns.get(0).get(HiveDataModelGenerator.NAME), finalNewColName);
assertEquals(columns.get(1).get(HiveDataModelGenerator.NAME), "id");
}
}
);
//Change col position again
oldColName = "name4";
newColName = "name5";
query = String.format("alter table %s change column %s %s %s after id", tableName, oldColName, newColName, newColType);
......@@ -675,16 +715,27 @@ public class HiveHookIT {
columns = getColumns(DEFAULT_DB, tableName);
Assert.assertEquals(columns.size(), 2);
assertColumnIsNotRegistered(HiveMetaStoreBridge.getColumnQualifiedName(HiveMetaStoreBridge.getTableQualifiedName(CLUSTER_NAME, DEFAULT_DB, tableName), oldColName));
newColQualifiedName = HiveMetaStoreBridge.getColumnQualifiedName(HiveMetaStoreBridge.getTableQualifiedName(CLUSTER_NAME, DEFAULT_DB, tableName), newColName);
assertColumnIsNotRegistered(HiveMetaStoreBridge.getColumnQualifiedName(
HiveMetaStoreBridge.getTableQualifiedName(CLUSTER_NAME, DEFAULT_DB, tableName), oldColName));
newColQualifiedName = HiveMetaStoreBridge.getColumnQualifiedName(
HiveMetaStoreBridge.getTableQualifiedName(CLUSTER_NAME, DEFAULT_DB, tableName), newColName);
assertColumnIsRegistered(newColQualifiedName);
//Check col position
Assert.assertEquals(columns.get(1).get(HiveDataModelGenerator.NAME), newColName);
Assert.assertEquals(columns.get(0).get(HiveDataModelGenerator.NAME), "id");
final String finalNewColName2 = newColName;
assertTableIsRegistered(DEFAULT_DB, tableName, new AssertPredicate() {
@Override
public void assertOnEntity(Referenceable entity) throws Exception {
List<Referenceable> columns = (List<Referenceable>) entity.get(HiveDataModelGenerator.COLUMNS);
assertEquals(columns.get(1).get(HiveDataModelGenerator.NAME), finalNewColName2);
assertEquals(columns.get(0).get(HiveDataModelGenerator.NAME), "id");
}
}
);
}
@Test()
@Test
public void testTruncateTable() throws Exception {
String tableName = createTable(false);
String query = String.format("truncate table %s", tableName);
......@@ -695,7 +746,7 @@ public class HiveHookIT {
//Check lineage
String datasetName = HiveMetaStoreBridge.getTableQualifiedName(CLUSTER_NAME, DEFAULT_DB, tableName);
JSONObject response = dgiCLient.getInputGraph(datasetName);
JSONObject response = atlasClient.getInputGraph(datasetName);
JSONObject vertices = response.getJSONObject("values").getJSONObject("vertices");
//Below should be assertTrue - Fix https://issues.apache.org/jira/browse/ATLAS-653
Assert.assertFalse(vertices.has(tableId));
......@@ -708,15 +759,24 @@ public class HiveHookIT {
String query = String.format("ALTER TABLE %s PARTITION COLUMN (dt %s)", tableName, newType);
runCommand(query);
final String tableId = assertTableIsRegistered(DEFAULT_DB, tableName);
final String dtColId = assertColumnIsRegistered(HiveMetaStoreBridge.getColumnQualifiedName(HiveMetaStoreBridge.getTableQualifiedName(CLUSTER_NAME, DEFAULT_DB, tableName), "dt"));
Referenceable table = dgiCLient.getEntity(tableId);
Referenceable column = dgiCLient.getEntity(dtColId);
Assert.assertEquals(column.get("type"), newType);
String colQualifiedName = HiveMetaStoreBridge.getColumnQualifiedName(
HiveMetaStoreBridge.getTableQualifiedName(CLUSTER_NAME, DEFAULT_DB, tableName), "dt");
final String dtColId = assertColumnIsRegistered(colQualifiedName, new AssertPredicate() {
@Override
public void assertOnEntity(Referenceable column) throws Exception {
Assert.assertEquals(column.get("type"), newType);
}
});
final List<Referenceable> partitionKeys = (List<Referenceable>) table.get("partitionKeys");
Assert.assertEquals(partitionKeys.size(), 1);
Assert.assertEquals(partitionKeys.get(0).getId()._getId(), dtColId);
assertTableIsRegistered(DEFAULT_DB, tableName, new AssertPredicate() {
@Override
public void assertOnEntity(Referenceable table) throws Exception {
final List<Referenceable> partitionKeys = (List<Referenceable>) table.get("partitionKeys");
Assert.assertEquals(partitionKeys.size(), 1);
Assert.assertEquals(partitionKeys.get(0).getId()._getId(), dtColId);
}
});
}
@Test
......@@ -742,17 +802,18 @@ public class HiveHookIT {
String query = "alter table " + tableName + " set location '" + testPath + "'";
runCommand(query);
String tableId = assertTableIsRegistered(DEFAULT_DB, tableName);
//Verify the number of columns present in the table
Referenceable tableRef = dgiCLient.getEntity(tableId);
Referenceable sdRef = (Referenceable)tableRef.get(HiveDataModelGenerator.STORAGE_DESC);
Assert.assertEquals(sdRef.get("location"), testPath);
String tableId = assertTableIsRegistered(DEFAULT_DB, tableName, new AssertPredicate() {
@Override
public void assertOnEntity(Referenceable tableRef) throws Exception {
Referenceable sdRef = (Referenceable) tableRef.get(HiveDataModelGenerator.STORAGE_DESC);
Assert.assertEquals(sdRef.get("location"), testPath);
}
});
Referenceable processReference = validateProcess(query, 1, 1);
validateHDFSPaths(processReference, testPath, INPUTS);
validateOutputTables(processReference, tableId);
}
private String validateHDFSPaths(Referenceable processReference, String testPath, String attributeName) throws Exception {
......@@ -762,7 +823,7 @@ public class HiveHookIT {
String hdfsPathId = assertHDFSPathIsRegistered(testPathNormed);
Assert.assertEquals(hdfsPathRefs.get(0)._getId(), hdfsPathId);
Referenceable hdfsPathRef = dgiCLient.getEntity(hdfsPathId);
Referenceable hdfsPathRef = atlasClient.getEntity(hdfsPathId);
Assert.assertEquals(hdfsPathRef.get("path"), testPathNormed);
Assert.assertEquals(hdfsPathRef.get("name"), testPathNormed);
// Assert.assertEquals(hdfsPathRef.get("name"), new Path(testPath).getName());
......@@ -771,14 +832,9 @@ public class HiveHookIT {
return hdfsPathRef.getId()._getId();
}
private String assertHDFSPathIsRegistered(String path) throws Exception {
final String typeName = FSDataTypes.HDFS_PATH().toString();
final String parentTypeName = FSDataTypes.FS_PATH().toString();
String gremlinQuery =
String.format("g.V.has('__typeName', '%s').has('%s.path', \"%s\").toList()", typeName, parentTypeName,
normalize(path));
return assertEntityIsRegistered(gremlinQuery);
LOG.debug("Searching for hdfs path {}", path);
return assertEntityIsRegistered(FSDataTypes.HDFS_PATH().toString(), "name", path, null);
}
@Test
......@@ -788,18 +844,25 @@ public class HiveHookIT {
String query = "alter table " + tableName + " set FILEFORMAT " + testFormat;
runCommand(query);
String tableId = assertTableIsRegistered(DEFAULT_DB, tableName);
Referenceable tableRef = dgiCLient.getEntity(tableId);
Referenceable sdRef = (Referenceable)tableRef.get(HiveDataModelGenerator.STORAGE_DESC);
Assert.assertEquals(sdRef.get(HiveDataModelGenerator.STORAGE_DESC_INPUT_FMT), "org.apache.hadoop.hive.ql.io.orc.OrcInputFormat");
Assert.assertEquals(sdRef.get(HiveDataModelGenerator.STORAGE_DESC_OUTPUT_FMT), "org.apache.hadoop.hive.ql.io.orc.OrcOutputFormat");
Assert.assertNotNull(sdRef.get("serdeInfo"));
Struct serdeInfo = (Struct) sdRef.get("serdeInfo");
Assert.assertEquals(serdeInfo.get("serializationLib"), "org.apache.hadoop.hive.ql.io.orc.OrcSerde");
Assert.assertNotNull(serdeInfo.get(HiveDataModelGenerator.PARAMETERS));
Assert.assertEquals(((Map<String, String>)serdeInfo.get(HiveDataModelGenerator.PARAMETERS)).get("serialization.format"), "1");
assertTableIsRegistered(DEFAULT_DB, tableName, new AssertPredicate() {
@Override
public void assertOnEntity(Referenceable tableRef) throws Exception {
Referenceable sdRef = (Referenceable) tableRef.get(HiveDataModelGenerator.STORAGE_DESC);
Assert.assertEquals(sdRef.get(HiveDataModelGenerator.STORAGE_DESC_INPUT_FMT),
"org.apache.hadoop.hive.ql.io.orc.OrcInputFormat");
Assert.assertEquals(sdRef.get(HiveDataModelGenerator.STORAGE_DESC_OUTPUT_FMT),
"org.apache.hadoop.hive.ql.io.orc.OrcOutputFormat");
Assert.assertNotNull(sdRef.get("serdeInfo"));
Struct serdeInfo = (Struct) sdRef.get("serdeInfo");
Assert.assertEquals(serdeInfo.get("serializationLib"), "org.apache.hadoop.hive.ql.io.orc.OrcSerde");
Assert.assertNotNull(serdeInfo.get(HiveDataModelGenerator.PARAMETERS));
Assert.assertEquals(
((Map<String, String>) serdeInfo.get(HiveDataModelGenerator.PARAMETERS))
.get("serialization.format"),
"1");
}
});
/**
......@@ -807,7 +870,7 @@ public class HiveHookIT {
* query = "alter table " + tableName + " STORED AS " + testFormat.toUpperCase();
* runCommand(query);
* tableRef = dgiCLient.getEntity(tableId);
* tableRef = atlasClient.getEntity(tableId);
* sdRef = (Referenceable)tableRef.get(HiveDataModelGenerator.STORAGE_DESC);
* Assert.assertEquals(sdRef.get(HiveDataModelGenerator.STORAGE_DESC_INPUT_FMT), "org.apache.hadoop.hive.ql.io.orc.OrcInputFormat");
* Assert.assertEquals(sdRef.get(HiveDataModelGenerator.STORAGE_DESC_OUTPUT_FMT), "org.apache.hadoop.hive.ql.io.orc.OrcOutputFormat");
......@@ -818,31 +881,37 @@ public class HiveHookIT {
@Test
public void testAlterTableBucketingClusterSort() throws Exception {
String tableName = createTable();
ImmutableList<String> cols = ImmutableList.<String>of("id");
ImmutableList<String> cols = ImmutableList.of("id");
runBucketSortQuery(tableName, 5, cols, cols);
cols = ImmutableList.<String>of("id", "name");
cols = ImmutableList.of("id", "name");
runBucketSortQuery(tableName, 2, cols, cols);
}
private void runBucketSortQuery(String tableName, int numBuckets, ImmutableList<String> bucketCols,ImmutableList<String> sortCols) throws Exception {
private void runBucketSortQuery(String tableName, final int numBuckets, final ImmutableList<String> bucketCols,
final ImmutableList<String> sortCols) throws Exception {
final String fmtQuery = "alter table %s CLUSTERED BY (%s) SORTED BY (%s) INTO %s BUCKETS";
String query = String.format(fmtQuery, tableName, stripListBrackets(bucketCols.toString()), stripListBrackets(sortCols.toString()), numBuckets);
String query = String.format(fmtQuery, tableName, stripListBrackets(bucketCols.toString()),
stripListBrackets(sortCols.toString()), numBuckets);
runCommand(query);
verifyBucketSortingProperties(tableName, numBuckets, bucketCols, sortCols);
assertTableIsRegistered(DEFAULT_DB, tableName, new AssertPredicate() {
@Override
public void assertOnEntity(Referenceable entity) throws Exception {
verifyBucketSortingProperties(entity, numBuckets, bucketCols, sortCols);
}
});
}
private String stripListBrackets(String listElements) {
return StringUtils.strip(StringUtils.strip(listElements, "["), "]");
}
private void verifyBucketSortingProperties(String tableName, int numBuckets, ImmutableList<String> bucketColNames, ImmutableList<String> sortcolNames) throws Exception {
String tableId = assertTableIsRegistered(DEFAULT_DB, tableName);
Referenceable tableRef = dgiCLient.getEntity(tableId);
Referenceable sdRef = (Referenceable)tableRef.get(HiveDataModelGenerator.STORAGE_DESC);
Assert.assertEquals(((scala.math.BigInt) sdRef.get(HiveDataModelGenerator.STORAGE_NUM_BUCKETS)).intValue(), numBuckets);
private void verifyBucketSortingProperties(Referenceable tableRef, int numBuckets,
ImmutableList<String> bucketColNames,
ImmutableList<String> sortcolNames) throws Exception {
Referenceable sdRef = (Referenceable) tableRef.get(HiveDataModelGenerator.STORAGE_DESC);
Assert.assertEquals(((scala.math.BigInt) sdRef.get(HiveDataModelGenerator.STORAGE_NUM_BUCKETS)).intValue(),
numBuckets);
Assert.assertEquals(sdRef.get("bucketCols"), bucketColNames);
List<Struct> hiveOrderStructList = (List<Struct>) sdRef.get("sortCols");
......@@ -851,7 +920,7 @@ public class HiveHookIT {
for (int i = 0; i < sortcolNames.size(); i++) {
Assert.assertEquals(hiveOrderStructList.get(i).get("col"), sortcolNames.get(i));
Assert.assertEquals(((scala.math.BigInt)hiveOrderStructList.get(i).get("order")).intValue(), 1);
Assert.assertEquals(((scala.math.BigInt) hiveOrderStructList.get(i).get("order")).intValue(), 1);
}
}
......@@ -882,8 +951,12 @@ public class HiveHookIT {
final String query = String.format("drop table %s ", tableName);
runCommand(query);
assertColumnIsNotRegistered(HiveMetaStoreBridge.getColumnQualifiedName(HiveMetaStoreBridge.getTableQualifiedName(CLUSTER_NAME, DEFAULT_DB, tableName), "id"));
assertColumnIsNotRegistered(HiveMetaStoreBridge.getColumnQualifiedName(HiveMetaStoreBridge.getTableQualifiedName(CLUSTER_NAME, DEFAULT_DB, tableName), "name"));
assertColumnIsNotRegistered(HiveMetaStoreBridge
.getColumnQualifiedName(HiveMetaStoreBridge.getTableQualifiedName(CLUSTER_NAME, DEFAULT_DB, tableName),
"id"));
assertColumnIsNotRegistered(HiveMetaStoreBridge
.getColumnQualifiedName(HiveMetaStoreBridge.getTableQualifiedName(CLUSTER_NAME, DEFAULT_DB, tableName),
"name"));
assertTableIsNotRegistered(DEFAULT_DB, tableName);
}
......@@ -903,8 +976,11 @@ public class HiveHookIT {
runCommand(query);
//Verify columns are not registered for one of the tables
assertColumnIsNotRegistered(HiveMetaStoreBridge.getColumnQualifiedName(HiveMetaStoreBridge.getTableQualifiedName(CLUSTER_NAME, dbName, tableNames[0]), "id"));
assertColumnIsNotRegistered(HiveMetaStoreBridge.getColumnQualifiedName(HiveMetaStoreBridge.getTableQualifiedName(CLUSTER_NAME, dbName, tableNames[0]), "name"));
assertColumnIsNotRegistered(HiveMetaStoreBridge.getColumnQualifiedName(
HiveMetaStoreBridge.getTableQualifiedName(CLUSTER_NAME, dbName, tableNames[0]), "id"));
assertColumnIsNotRegistered(HiveMetaStoreBridge
.getColumnQualifiedName(HiveMetaStoreBridge.getTableQualifiedName(CLUSTER_NAME, dbName, tableNames[0]),
"name"));
for(int i = 0; i < numTables; i++) {
assertTableIsNotRegistered(dbName, tableNames[i]);
......@@ -974,8 +1050,12 @@ public class HiveHookIT {
query = String.format("drop view %s ", viewName);
runCommand(query);
assertColumnIsNotRegistered(HiveMetaStoreBridge.getColumnQualifiedName(HiveMetaStoreBridge.getTableQualifiedName(CLUSTER_NAME, DEFAULT_DB, viewName), "id"));
assertColumnIsNotRegistered(HiveMetaStoreBridge.getColumnQualifiedName(HiveMetaStoreBridge.getTableQualifiedName(CLUSTER_NAME, DEFAULT_DB, viewName), "name"));
assertColumnIsNotRegistered(HiveMetaStoreBridge
.getColumnQualifiedName(HiveMetaStoreBridge.getTableQualifiedName(CLUSTER_NAME, DEFAULT_DB, viewName),
"id"));
assertColumnIsNotRegistered(HiveMetaStoreBridge
.getColumnQualifiedName(HiveMetaStoreBridge.getTableQualifiedName(CLUSTER_NAME, DEFAULT_DB, viewName),
"name"));
assertTableIsNotRegistered(DEFAULT_DB, viewName);
}
......@@ -1006,16 +1086,20 @@ public class HiveHookIT {
@Test
public void testAlterDBOwner() throws Exception {
String dbName = createDatabase();
assertDatabaseIsRegistered(dbName);
final String owner = "testOwner";
String dbId = assertDatabaseIsRegistered(dbName);
final String fmtQuery = "alter database %s set OWNER %s %s";
String query = String.format(fmtQuery, dbName, "USER", owner);
runCommand(query);
assertDatabaseIsRegistered(dbName);
Referenceable entity = dgiCLient.getEntity(dbId);
Assert.assertEquals(entity.get(HiveDataModelGenerator.OWNER), owner);
assertDatabaseIsRegistered(dbName, new AssertPredicate() {
@Override
public void assertOnEntity(Referenceable entity) {
assertEquals(entity.get(HiveDataModelGenerator.OWNER), owner);
}
});
}
@Test
......@@ -1073,30 +1157,38 @@ public class HiveHookIT {
testAlterProperties(Entity.Type.TABLE, viewName, fmtQuery);
}
private void verifyEntityProperties(Entity.Type type, String entityName, Map<String, String> expectedProps, boolean checkIfNotExists) throws Exception {
String entityId = null;
private void verifyEntityProperties(Entity.Type type, String entityName, final Map<String, String> expectedProps,
final boolean checkIfNotExists) throws Exception {
switch(type) {
case TABLE:
entityId = assertTableIsRegistered(DEFAULT_DB, entityName);
assertTableIsRegistered(DEFAULT_DB, entityName, new AssertPredicate() {
@Override
public void assertOnEntity(Referenceable entity) throws Exception {
verifyProperties(entity, expectedProps, checkIfNotExists);
}
});
break;
case DATABASE:
entityId = assertDatabaseIsRegistered(entityName);
assertDatabaseIsRegistered(entityName, new AssertPredicate() {
@Override
public void assertOnEntity(Referenceable entity) throws Exception {
verifyProperties(entity, expectedProps, checkIfNotExists);
}
});
break;
}
Referenceable ref = dgiCLient.getEntity(entityId);
verifyProperties(ref, expectedProps, checkIfNotExists);
}
private void verifyTableSdProperties(String tableName, String serdeLib, Map<String, String> expectedProps) throws Exception {
String tableId = assertTableIsRegistered(DEFAULT_DB, tableName);
Referenceable tableRef = dgiCLient.getEntity(tableId);
Referenceable sdRef = (Referenceable) tableRef.get(HiveDataModelGenerator.STORAGE_DESC);
Struct serdeInfo = (Struct) sdRef.get("serdeInfo");
Assert.assertEquals(serdeInfo.get("serializationLib"), serdeLib);
verifyProperties(serdeInfo, expectedProps, false);
private void verifyTableSdProperties(String tableName, final String serdeLib, final Map<String, String> expectedProps) throws Exception {
assertTableIsRegistered(DEFAULT_DB, tableName, new AssertPredicate() {
@Override
public void assertOnEntity(Referenceable tableRef) throws Exception {
Referenceable sdRef = (Referenceable) tableRef.get(HiveDataModelGenerator.STORAGE_DESC);
Struct serdeInfo = (Struct) sdRef.get("serdeInfo");
Assert.assertEquals(serdeInfo.get("serializationLib"), serdeLib);
verifyProperties(serdeInfo, expectedProps, false);
}
});
}
private void verifyProperties(Struct referenceable, Map<String, String> expectedProps, boolean checkIfNotExists) {
......@@ -1119,108 +1211,80 @@ public class HiveHookIT {
}
private String assertProcessIsRegistered(String queryStr) throws Exception {
// String dslQuery = String.format("%s where queryText = \"%s\"", HiveDataTypes.HIVE_PROCESS.getName(),
// normalize(queryStr));
// assertEntityIsRegistered(dslQuery, true);
//todo replace with DSL
String typeName = HiveDataTypes.HIVE_PROCESS.getName();
String gremlinQuery =
String.format("g.V.has('__typeName', '%s').has('%s.queryText', \"%s\").toList()", typeName, typeName,
normalize(queryStr));
return assertEntityIsRegistered(gremlinQuery);
LOG.debug("Searching for process with query {}", queryStr);
return assertEntityIsRegistered(HiveDataTypes.HIVE_PROCESS.getName(), AtlasClient.NAME, normalize(queryStr), null);
}
private void assertProcessIsNotRegistered(String queryStr) throws Exception {
// String dslQuery = String.format("%s where queryText = \"%s\"", HiveDataTypes.HIVE_PROCESS.getName(),
// normalize(queryStr));
// assertEntityIsRegistered(dslQuery, true);
//todo replace with DSL
String typeName = HiveDataTypes.HIVE_PROCESS.getName();
String gremlinQuery =
String.format("g.V.has('__typeName', '%s').has('%s.queryText', \"%s\").toList()", typeName, typeName,
normalize(queryStr));
assertEntityIsNotRegistered(QUERY_TYPE.GREMLIN, gremlinQuery);
}
private String normalize(String str) {
if (StringUtils.isEmpty(str)) {
return null;
}
return StringEscapeUtils.escapeJava(str.toLowerCase());
LOG.debug("Searching for process with query {}", queryStr);
assertEntityIsNotRegistered(HiveDataTypes.HIVE_PROCESS.getName(), AtlasClient.NAME, normalize(queryStr));
}
private void assertTableIsNotRegistered(String dbName, String tableName) throws Exception {
LOG.debug("Searching for table {}.{}", dbName, tableName);
String query = String.format(
"%s as t where tableName = '%s', db where name = '%s' and clusterName = '%s'" + " select t",
HiveDataTypes.HIVE_TABLE.getName(), tableName.toLowerCase(), dbName.toLowerCase(), CLUSTER_NAME);
assertEntityIsNotRegistered(QUERY_TYPE.DSL, query);
String tableQualifiedName = HiveMetaStoreBridge.getTableQualifiedName(CLUSTER_NAME, dbName, tableName);
assertEntityIsNotRegistered(HiveDataTypes.HIVE_TABLE.getName(), AtlasClient.NAME, tableQualifiedName);
}
private void assertDBIsNotRegistered(String dbName) throws Exception {
LOG.debug("Searching for database {}.{}", dbName);
String query = String.format(
"%s as d where name = '%s' and clusterName = '%s'" + " select d",
HiveDataTypes.HIVE_DB.getName(), dbName.toLowerCase(), CLUSTER_NAME);
assertEntityIsNotRegistered(QUERY_TYPE.DSL, query);
LOG.debug("Searching for database {}", dbName);
String dbQualifiedName = HiveMetaStoreBridge.getDBQualifiedName(CLUSTER_NAME, dbName);
assertEntityIsNotRegistered(HiveDataTypes.HIVE_DB.getName(), AtlasClient.REFERENCEABLE_ATTRIBUTE_NAME, dbQualifiedName);
}
private String assertTableIsRegistered(String dbName, String tableName) throws Exception {
LOG.debug("Searching for table {}.{}", dbName, tableName);
String query = String.format(
"%s as t where tableName = '%s', db where name = '%s' and clusterName = '%s'" + " select t",
HiveDataTypes.HIVE_TABLE.getName(), tableName.toLowerCase(), dbName.toLowerCase(), CLUSTER_NAME);
return assertEntityIsRegistered(query, "t");
return assertTableIsRegistered(dbName, tableName, null);
}
private String getTableEntity(String dbName, String tableName) throws Exception {
private String assertTableIsRegistered(String dbName, String tableName, AssertPredicate assertPredicate) throws Exception {
LOG.debug("Searching for table {}.{}", dbName, tableName);
String query = String.format(
"%s as t where tableName = '%s', db where name = '%s' and clusterName = '%s'" + " select t",
HiveDataTypes.HIVE_TABLE.getName(), tableName.toLowerCase(), dbName.toLowerCase(), CLUSTER_NAME);
return assertEntityIsRegistered(query, "t");
String tableQualifiedName = HiveMetaStoreBridge.getTableQualifiedName(CLUSTER_NAME, dbName, tableName);
return assertEntityIsRegistered(HiveDataTypes.HIVE_TABLE.getName(), AtlasClient.NAME, tableQualifiedName,
assertPredicate);
}
private String assertDatabaseIsRegistered(String dbName) throws Exception {
return assertDatabaseIsRegistered(dbName, null);
}
private String assertDatabaseIsRegistered(String dbName, AssertPredicate assertPredicate) throws Exception {
LOG.debug("Searching for database {}", dbName);
String query = String.format("%s where name = '%s' and clusterName = '%s'", HiveDataTypes.HIVE_DB.getName(),
dbName.toLowerCase(), CLUSTER_NAME);
return assertEntityIsRegistered(query);
String dbQualifiedName = HiveMetaStoreBridge.getDBQualifiedName(CLUSTER_NAME, dbName);
return assertEntityIsRegistered(HiveDataTypes.HIVE_DB.getName(), AtlasClient.REFERENCEABLE_ATTRIBUTE_NAME,
dbQualifiedName, assertPredicate);
}
private String assertEntityIsRegistered(final String query, String... arg) throws Exception {
waitFor(60000, new Predicate() {
private String assertEntityIsRegistered(final String typeName, final String property, final String value,
final AssertPredicate assertPredicate) throws Exception {
waitFor(80000, new Predicate() {
@Override
public boolean evaluate() throws Exception {
JSONArray results = dgiCLient.search(query);
return results.length() == 1;
public void evaluate() throws Exception {
Referenceable entity = atlasClient.getEntity(typeName, property, value);
assertNotNull(entity);
if(assertPredicate != null) {
assertPredicate.assertOnEntity(entity);
}
}
});
String column = (arg.length > 0) ? arg[0] : "_col_0";
JSONArray results = dgiCLient.search(query);
JSONObject row = results.getJSONObject(0);
if (row.has("__guid")) {
return row.getString("__guid");
} else if (row.has("$id$")) {
return row.getJSONObject("$id$").getString("id");
} else {
return row.getJSONObject(column).getString("id");
}
Referenceable entity = atlasClient.getEntity(typeName, property, value);
return entity.getId()._getId();
}
private void assertEntityIsNotRegistered(QUERY_TYPE queryType, String query) throws Exception {
JSONArray results = null;
switch(queryType) {
case DSL :
results = dgiCLient.searchByDSL(query);
break;
case GREMLIN :
results = dgiCLient.searchByGremlin(query);
break;
}
Assert.assertEquals(results.length(), 0);
private void assertEntityIsNotRegistered(final String typeName, final String property, final String value) throws Exception {
waitFor(80000, new Predicate() {
@Override
public void evaluate() throws Exception {
try {
atlasClient.getEntity(typeName, property, value);
} catch (AtlasServiceException e) {
if(e.getStatus() == ClientResponse.Status.NOT_FOUND) {
return;
}
}
fail(String.format("Entity was not supposed to exist for typeName = %s, attributeName = %s, "
+ "attributeValue = %s", typeName, property, value));
}
});
}
@Test
......@@ -1236,13 +1300,13 @@ public class HiveHookIT {
String table2Id = assertTableIsRegistered(db2, table2);
String datasetName = HiveMetaStoreBridge.getTableQualifiedName(CLUSTER_NAME, db2, table2);
JSONObject response = dgiCLient.getInputGraph(datasetName);
JSONObject response = atlasClient.getInputGraph(datasetName);
JSONObject vertices = response.getJSONObject("values").getJSONObject("vertices");
Assert.assertTrue(vertices.has(table1Id));
Assert.assertTrue(vertices.has(table2Id));
datasetName = HiveMetaStoreBridge.getTableQualifiedName(CLUSTER_NAME, DEFAULT_DB, table1);
response = dgiCLient.getOutputGraph(datasetName);
response = atlasClient.getOutputGraph(datasetName);
vertices = response.getJSONObject("values").getJSONObject("vertices");
Assert.assertTrue(vertices.has(table1Id));
Assert.assertTrue(vertices.has(table2Id));
......@@ -1255,15 +1319,18 @@ public class HiveHookIT {
runCommand("show transactions");
}
public interface Predicate {
public interface AssertPredicate {
void assertOnEntity(Referenceable entity) throws Exception;
}
public interface Predicate {
/**
* Perform a predicate evaluation.
*
     * @throws Exception thrown if the predicate evaluation fails or could not be performed.
*/
boolean evaluate() throws Exception;
void evaluate() throws Exception;
}
/**
......@@ -1276,13 +1343,17 @@ public class HiveHookIT {
ParamChecker.notNull(predicate, "predicate");
long mustEnd = System.currentTimeMillis() + timeout;
boolean eval;
while (!(eval = predicate.evaluate()) && System.currentTimeMillis() < mustEnd) {
LOG.info("Waiting up to {} msec", mustEnd - System.currentTimeMillis());
Thread.sleep(100);
}
if (!eval) {
throw new Exception("Waiting timed out after " + timeout + " msec");
while (true) {
try {
predicate.evaluate();
return;
} catch(Error | Exception e) {
if (System.currentTimeMillis() >= mustEnd) {
fail("Assertions failed. Failing after waiting for timeout " + timeout + " msecs", e);
}
LOG.debug("Waiting up to " + (mustEnd - System.currentTimeMillis()) + " msec as assertion failed", e);
Thread.sleep(300);
}
}
}
}
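For reference, the reworked test helpers above replace the old search-then-count polling with a retry-until-assertion-passes pattern. A minimal usage sketch under the signatures shown in this diff (the qualified table name and the asserted attribute are placeholders, not part of the commit):

// Hypothetical snippet: waitFor re-runs the whole lookup and predicate until it passes or the timeout expires.
String tableId = assertEntityIsRegistered(HiveDataTypes.HIVE_TABLE.getName(), AtlasClient.NAME,
        "default.some_table@test", new AssertPredicate() {
            @Override
            public void assertOnEntity(Referenceable entity) throws Exception {
                // Any TestNG assertion here is re-evaluated on each retry.
                assertNotNull(entity.get(HiveDataModelGenerator.COLUMNS));
            }
        });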
......@@ -57,6 +57,7 @@ import static org.apache.atlas.security.SecurityProperties.TLS_ENABLED;
*/
public class AtlasClient {
private static final Logger LOG = LoggerFactory.getLogger(AtlasClient.class);
public static final String NAME = "name";
public static final String GUID = "GUID";
public static final String TYPE = "type";
......@@ -403,6 +404,7 @@ public class AtlasClient {
* @throws AtlasServiceException
*/
public List<String> createType(String typeAsJson) throws AtlasServiceException {
LOG.debug("Creating type definition: {}", typeAsJson);
JSONObject response = callAPI(API.CREATE_TYPE, typeAsJson);
return extractResults(response, AtlasClient.TYPES, new ExtractOperation<String, JSONObject>() {
@Override
......@@ -429,6 +431,7 @@ public class AtlasClient {
* @throws AtlasServiceException
*/
public List<String> updateType(String typeAsJson) throws AtlasServiceException {
LOG.debug("Updating tyep definition: {}", typeAsJson);
JSONObject response = callAPI(API.UPDATE_TYPE, typeAsJson);
return extractResults(response, AtlasClient.TYPES, new ExtractOperation<String, JSONObject>() {
@Override
......@@ -474,6 +477,7 @@ public class AtlasClient {
* @throws AtlasServiceException
*/
public JSONArray createEntity(JSONArray entities) throws AtlasServiceException {
LOG.debug("Creating entities: {}", entities);
JSONObject response = callAPI(API.CREATE_ENTITY, entities.toString());
try {
return response.getJSONArray(GUID);
......@@ -522,6 +526,7 @@ public class AtlasClient {
public JSONArray updateEntities(Collection<Referenceable> entities) throws AtlasServiceException {
JSONArray entitiesArray = getEntitiesArray(entities);
LOG.debug("Updating entities: {}", entitiesArray);
JSONObject response = callAPI(API.UPDATE_ENTITY, entitiesArray.toString());
try {
return response.getJSONArray(GUID);
......@@ -538,6 +543,7 @@ public class AtlasClient {
* @param value property value
*/
public void updateEntityAttribute(final String guid, final String attribute, String value) throws AtlasServiceException {
LOG.debug("Updating entity id: {}, attribute name: {}, attribute value: {}", guid, attribute, value);
callAPIWithRetries(API.UPDATE_ENTITY_PARTIAL, value, new ResourceCreator() {
@Override
public WebResource createResource() {
......@@ -555,7 +561,7 @@ public class AtlasClient {
for (int i = 0; i < getNumberOfRetries(); i++) {
WebResource resource = resourceCreator.createResource();
try {
LOG.info("using resource {} for {} times", resource.getURI(), i);
LOG.debug("Using resource {} for {} times", resource.getURI(), i);
JSONObject result = callAPIWithResource(api, resource, requestObject);
return result;
} catch (ClientHandlerException che) {
......@@ -578,6 +584,7 @@ public class AtlasClient {
*/
public void updateEntity(String guid, Referenceable entity) throws AtlasServiceException {
String entityJson = InstanceSerialization.toJson(entity, true);
LOG.debug("Updating entity id {} with {}", guid, entityJson);
callAPI(API.UPDATE_ENTITY_PARTIAL, entityJson, guid);
}
......@@ -904,6 +911,7 @@ public class AtlasClient {
clientResponse = resource.accept(JSON_MEDIA_TYPE).type(JSON_MEDIA_TYPE)
.method(api.getMethod(), ClientResponse.class, requestObject);
LOG.debug("API {} returned status {}", resource.getURI(), clientResponse.getStatus());
if (clientResponse.getStatus() == api.getExpectedStatus().getStatusCode()) {
String responseAsString = clientResponse.getEntity(String.class);
try {
......
......@@ -91,4 +91,14 @@ public final class ApplicationProperties extends PropertiesConfiguration {
public static Configuration getSubsetConfiguration(Configuration inConf, String prefix) {
return inConf.subset(prefix);
}
public static Class getClass(String propertyName, String defaultValue) {
try {
Configuration configuration = get();
String propertyValue = configuration.getString(propertyName, defaultValue);
return Class.forName(propertyValue);
} catch (Exception e) {
throw new RuntimeException(e);
}
}
}
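A minimal usage sketch of the new ApplicationProperties.getClass helper (the property name and default value are the ones RepositoryMetadataModule passes in below; any other value is assumed to name a class available on the classpath):

// Resolves the configured class name, falling back to the supplied default.
Class deleteHandlerClass = ApplicationProperties.getClass("atlas.DeleteHandler.impl",
        "org.apache.atlas.repository.graph.SoftDeleteHandler");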
......@@ -3,6 +3,7 @@ Apache Atlas Release Notes
--trunk - unreleased
INCOMPATIBLE CHANGES:
ATLAS-622 Introduce soft delete (shwethags)
ATLAS-494 UI Authentication (nixonrodrigues via shwethags)
ATLAS-621 Introduce entity state in Id object (shwethags)
ATLAS-474 Server does not start if the type is updated with same super type class information (dkantor via shwethags)
......
......@@ -35,9 +35,11 @@ import org.apache.atlas.repository.MetadataRepository;
import org.apache.atlas.repository.audit.EntityAuditListener;
import org.apache.atlas.repository.audit.EntityAuditRepository;
import org.apache.atlas.repository.audit.HBaseBasedAuditRepository;
import org.apache.atlas.repository.graph.DeleteHandler;
import org.apache.atlas.repository.graph.GraphBackedMetadataRepository;
import org.apache.atlas.repository.graph.GraphBackedSearchIndexer;
import org.apache.atlas.repository.graph.GraphProvider;
import org.apache.atlas.repository.graph.SoftDeleteHandler;
import org.apache.atlas.repository.graph.TitanGraphProvider;
import org.apache.atlas.repository.typestore.GraphBackedTypeStore;
import org.apache.atlas.repository.typestore.ITypeStore;
......@@ -85,6 +87,8 @@ public class RepositoryMetadataModule extends com.google.inject.AbstractModule {
bindAuditRepository(binder());
bind(DeleteHandler.class).to(getDeleteHandler()).asEagerSingleton();
//Add EntityAuditListener as EntityChangeListener
Multibinder<EntityChangeListener> entityChangeListenerBinder =
Multibinder.newSetBinder(binder(), EntityChangeListener.class);
......@@ -103,4 +107,11 @@ public class RepositoryMetadataModule extends com.google.inject.AbstractModule {
Multibinder<Service> serviceBinder = Multibinder.newSetBinder(binder, Service.class);
serviceBinder.addBinding().to(HBaseBasedAuditRepository.class);
}
private static final String DELETE_HANDLER_IMPLEMENTATION_PROPERTY = "atlas.DeleteHandler.impl";
private Class<? extends DeleteHandler> getDeleteHandler() {
return ApplicationProperties.getClass(DELETE_HANDLER_IMPLEMENTATION_PROPERTY,
SoftDeleteHandler.class.getName());
}
}
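Operationally, the delete behaviour is now configuration-driven. A sketch of the corresponding entry in the Atlas application properties file (the value shown is the default that getDeleteHandler() already falls back to, so the line is optional; any other DeleteHandler subclass name could be substituted):

# Optional: soft delete is the default introduced by this commit.
atlas.DeleteHandler.impl=org.apache.atlas.repository.graph.SoftDeleteHandler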
......@@ -45,7 +45,7 @@ public class EntityAuditListener implements EntityChangeListener {
@Override
public void onEntitiesAdded(Collection<ITypedReferenceableInstance> entities) throws AtlasException {
List<EntityAuditEvent> events = new ArrayList<>();
long currentTime = System.currentTimeMillis();
long currentTime = RequestContext.get().getRequestTime();
for (ITypedReferenceableInstance entity : entities) {
EntityAuditEvent event = createEvent(entity, currentTime, EntityAuditEvent.EntityAuditAction.ENTITY_CREATE,
"Created: " + InstanceSerialization.toJson(entity, true));
......@@ -62,7 +62,7 @@ public class EntityAuditListener implements EntityChangeListener {
@Override
public void onEntitiesUpdated(Collection<ITypedReferenceableInstance> entities) throws AtlasException {
List<EntityAuditEvent> events = new ArrayList<>();
long currentTime = System.currentTimeMillis();
long currentTime = RequestContext.get().getRequestTime();
for (ITypedReferenceableInstance entity : entities) {
EntityAuditEvent event = createEvent(entity, currentTime, EntityAuditEvent.EntityAuditAction.ENTITY_UPDATE,
"Updated: " + InstanceSerialization.toJson(entity, true));
......@@ -73,7 +73,7 @@ public class EntityAuditListener implements EntityChangeListener {
@Override
public void onTraitAdded(ITypedReferenceableInstance entity, IStruct trait) throws AtlasException {
EntityAuditEvent event = createEvent(entity, System.currentTimeMillis(),
EntityAuditEvent event = createEvent(entity, RequestContext.get().getRequestTime(),
EntityAuditEvent.EntityAuditAction.TAG_ADD,
"Added trait: " + InstanceSerialization.toJson(trait, true));
auditRepository.putEvents(event);
......@@ -81,7 +81,7 @@ public class EntityAuditListener implements EntityChangeListener {
@Override
public void onTraitDeleted(ITypedReferenceableInstance entity, String traitName) throws AtlasException {
EntityAuditEvent event = createEvent(entity, System.currentTimeMillis(),
EntityAuditEvent event = createEvent(entity, RequestContext.get().getRequestTime(),
EntityAuditEvent.EntityAuditAction.TAG_DELETE, "Deleted trait: " + traitName);
auditRepository.putEvents(event);
}
......@@ -89,7 +89,7 @@ public class EntityAuditListener implements EntityChangeListener {
@Override
public void onEntitiesDeleted(Collection<ITypedReferenceableInstance> entities) throws AtlasException {
List<EntityAuditEvent> events = new ArrayList<>();
long currentTime = System.currentTimeMillis();
long currentTime = RequestContext.get().getRequestTime();
for (ITypedReferenceableInstance entity : entities) {
EntityAuditEvent event = createEvent(entity, currentTime,
EntityAuditEvent.EntityAuditAction.ENTITY_DELETE, "Deleted entity");
......
......@@ -31,9 +31,6 @@ public class AtlasEdgeLabel {
private final String qualifiedAttributeName_;
public AtlasEdgeLabel(String edgeLabel) {
if (!edgeLabel.startsWith(GraphHelper.EDGE_LABEL_PREFIX)) {
throw new IllegalArgumentException("Invalid edge label " + edgeLabel + ": missing required prefix " + GraphHelper.EDGE_LABEL_PREFIX);
}
String labelWithoutPrefix = edgeLabel.substring(GraphHelper.EDGE_LABEL_PREFIX.length());
String[] fields = labelWithoutPrefix.split("\\.", 3);
if (fields.length < 2 || fields.length > 3) {
......
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
* <p/>
* http://www.apache.org/licenses/LICENSE-2.0
* <p/>
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.atlas.repository.graph;
import com.tinkerpop.blueprints.Direction;
import com.tinkerpop.blueprints.Edge;
import com.tinkerpop.blueprints.Vertex;
import org.apache.atlas.AtlasException;
import org.apache.atlas.RequestContext;
import org.apache.atlas.repository.Constants;
import org.apache.atlas.typesystem.persistence.Id;
import org.apache.atlas.typesystem.types.AttributeInfo;
import org.apache.atlas.typesystem.types.DataTypes;
import org.apache.atlas.typesystem.types.FieldMapping;
import org.apache.atlas.typesystem.types.HierarchicalType;
import org.apache.atlas.typesystem.types.IDataType;
import org.apache.atlas.typesystem.types.StructType;
import org.apache.atlas.typesystem.types.TypeSystem;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.util.ArrayList;
import java.util.Iterator;
import java.util.List;
import static org.apache.atlas.repository.graph.GraphHelper.EDGE_LABEL_PREFIX;
import static org.apache.atlas.repository.graph.GraphHelper.string;
public abstract class DeleteHandler {
public static final Logger LOG = LoggerFactory.getLogger(DeleteHandler.class);
private static final GraphHelper graphHelper = GraphHelper.getInstance();
protected TypeSystem typeSystem;
private boolean shouldUpdateReverseAttribute;
public DeleteHandler(TypeSystem typeSystem, boolean shouldUpdateReverseAttribute) {
this.typeSystem = typeSystem;
this.shouldUpdateReverseAttribute = shouldUpdateReverseAttribute;
}
/**
* Deletes the entity vertex - deletes the traits and all the references
* @param instanceVertex
* @throws AtlasException
*/
public void deleteEntity(Vertex instanceVertex) throws AtlasException {
String guid = GraphHelper.getIdFromVertex(instanceVertex);
String typeName = GraphHelper.getTypeName(instanceVertex);
RequestContext.get().recordDeletedEntity(guid, typeName);
deleteAllTraits(instanceVertex);
deleteTypeVertex(instanceVertex);
}
protected abstract void deleteEdge(Edge edge) throws AtlasException;
/**
* Deletes a type vertex - can be entity(class type) or just vertex(struct/trait type)
* @param instanceVertex
* @param typeCategory
* @throws AtlasException
*/
protected void deleteTypeVertex(Vertex instanceVertex, DataTypes.TypeCategory typeCategory) throws AtlasException {
switch (typeCategory) {
case STRUCT:
case TRAIT:
deleteTypeVertex(instanceVertex);
break;
case CLASS:
deleteEntity(instanceVertex);
break;
default:
throw new IllegalStateException("Type category " + typeCategory + " not handled");
}
}
/**
* Deleting any type vertex. Goes over the complex attributes and removes the references
* @param instanceVertex
* @throws AtlasException
*/
protected void deleteTypeVertex(Vertex instanceVertex) throws AtlasException {
LOG.debug("Deleting {}", string(instanceVertex));
String typeName = GraphHelper.getTypeName(instanceVertex);
IDataType type = typeSystem.getDataType(IDataType.class, typeName);
FieldMapping fieldMapping = getFieldMapping(type);
for (AttributeInfo attributeInfo : fieldMapping.fields.values()) {
LOG.debug("Deleting attribute {} for {}", attributeInfo.name, string(instanceVertex));
String edgeLabel = GraphHelper.getEdgeLabel(type, attributeInfo);
switch (attributeInfo.dataType().getTypeCategory()) {
case CLASS:
//If it's a class attribute, delete the reference
deleteReference(instanceVertex, edgeLabel, DataTypes.TypeCategory.CLASS, attributeInfo.isComposite);
break;
case STRUCT:
//If it's a struct attribute, delete the reference
deleteReference(instanceVertex, edgeLabel, DataTypes.TypeCategory.STRUCT);
break;
case ARRAY:
//For an array attribute, if the element is a struct/class, delete all the references
IDataType elementType = ((DataTypes.ArrayType) attributeInfo.dataType()).getElemType();
DataTypes.TypeCategory elementTypeCategory = elementType.getTypeCategory();
if (elementTypeCategory == DataTypes.TypeCategory.STRUCT ||
elementTypeCategory == DataTypes.TypeCategory.CLASS) {
Iterator<Edge> edges = GraphHelper.getOutGoingEdgesByLabel(instanceVertex, edgeLabel);
if (edges != null) {
while (edges.hasNext()) {
Edge edge = edges.next();
deleteReference(edge, elementType, attributeInfo);
}
}
}
break;
case MAP:
//For a map attribute, if the value type is a struct/class, delete all the references
DataTypes.MapType mapType = (DataTypes.MapType) attributeInfo.dataType();
DataTypes.TypeCategory valueTypeCategory = mapType.getValueType().getTypeCategory();
String propertyName = GraphHelper.getQualifiedFieldName(type, attributeInfo.name);
if (valueTypeCategory == DataTypes.TypeCategory.STRUCT ||
valueTypeCategory == DataTypes.TypeCategory.CLASS) {
List<String> keys = instanceVertex.getProperty(propertyName);
if (keys != null) {
for (String key : keys) {
String mapEdgeLabel = GraphHelper.getQualifiedNameForMapKey(edgeLabel, key);
deleteReference(instanceVertex, mapEdgeLabel, valueTypeCategory, attributeInfo.isComposite);
}
}
}
}
}
deleteVertex(instanceVertex, type.getTypeCategory());
}
public void deleteReference(Edge edge, IDataType dataType, AttributeInfo attributeInfo) throws AtlasException {
deleteReference(edge, dataType.getTypeCategory(), attributeInfo.isComposite);
}
public void deleteReference(Edge edge, DataTypes.TypeCategory typeCategory, boolean isComposite) throws AtlasException {
LOG.debug("Deleting {}", string(edge));
if (typeCategory == DataTypes.TypeCategory.STRUCT || typeCategory == DataTypes.TypeCategory.TRAIT
|| (typeCategory == DataTypes.TypeCategory.CLASS && isComposite)) {
//If the vertex is of type struct/trait, delete the edge and then the reference vertex, as the vertex is not shared by any other entities.
//If the vertex is of type class and it's a composite attribute, this reference vertex's lifecycle is controlled
//through this delete, hence delete the edge and the reference vertex.
Vertex vertexForDelete = edge.getVertex(Direction.IN);
//If deleting the edge and then the in vertex, reverse attribute shouldn't be updated
deleteEdge(edge, false);
deleteTypeVertex(vertexForDelete, typeCategory);
} else {
//If the vertex is of type class and it's not a composite attribute, the reference vertex's lifecycle is not controlled
//through this delete. Hence just remove the reference edge and leave the reference vertex as-is.
//If deleting just the edge, the reverse attribute should be updated for any references.
//For example, in the department type system, if a person's manager edge is deleted, the manager's subordinates should be updated.
deleteEdge(edge, true);
}
}
public void deleteReference(Vertex instanceVertex, String edgeLabel, DataTypes.TypeCategory typeCategory)
throws AtlasException {
deleteReference(instanceVertex, edgeLabel, typeCategory, false);
}
public void deleteReference(Vertex instanceVertex, String edgeLabel, DataTypes.TypeCategory typeCategory,
boolean isComposite) throws AtlasException {
Edge edge = GraphHelper.getEdgeForLabel(instanceVertex, edgeLabel);
if (edge != null) {
deleteReference(edge, typeCategory, isComposite);
}
}
protected void deleteEdge(Edge edge, boolean updateReverseAttribute) throws AtlasException {
//update reverse attribute
if (updateReverseAttribute) {
AttributeInfo attributeInfo = getAttributeForEdge(edge.getLabel());
if (attributeInfo.reverseAttributeName != null) {
deleteEdgeBetweenVertices(edge.getVertex(Direction.IN), edge.getVertex(Direction.OUT),
attributeInfo.reverseAttributeName);
}
}
deleteEdge(edge);
}
protected void deleteVertex(Vertex instanceVertex, DataTypes.TypeCategory typeCategory) throws AtlasException {
//Update external references (incoming edges) to this vertex
LOG.debug("Setting the external references to {} to null (removing edges)", string(instanceVertex));
Iterator<Edge> edges = instanceVertex.getEdges(Direction.IN).iterator();
while(edges.hasNext()) {
Edge edge = edges.next();
String edgeState = edge.getProperty(Constants.STATE_PROPERTY_KEY);
if (Id.EntityState.ACTIVE.name().equals(edgeState)) {
//Delete only the active edge references
AttributeInfo attribute = getAttributeForEdge(edge.getLabel());
deleteEdgeBetweenVertices(edge.getVertex(Direction.OUT), edge.getVertex(Direction.IN), attribute.name);
deleteEdge(edge);
}
}
_deleteVertex(instanceVertex);
}
protected abstract void _deleteVertex(Vertex instanceVertex);
/**
* Deletes the edge between outVertex and inVertex. The edge is for the attribute attributeName of outVertex
* @param outVertex
* @param inVertex
* @param attributeName
* @throws AtlasException
*/
protected void deleteEdgeBetweenVertices(Vertex outVertex, Vertex inVertex, String attributeName) throws AtlasException {
LOG.debug("Removing edge from {} to {} with attribute name {}", string(outVertex), string(inVertex),
attributeName);
String typeName = GraphHelper.getTypeName(outVertex);
String outId = GraphHelper.getIdFromVertex(outVertex);
if (outId != null && RequestContext.get().getDeletedEntityIds().contains(outId)) {
//If the reference vertex is marked for deletion, skip updating the reference
return;
}
IDataType type = typeSystem.getDataType(IDataType.class, typeName);
AttributeInfo attributeInfo = getFieldMapping(type).fields.get(attributeName);
String propertyName = GraphHelper.getQualifiedFieldName(type, attributeName);
String edgeLabel = EDGE_LABEL_PREFIX + propertyName;
Edge edge = null;
switch (attributeInfo.dataType().getTypeCategory()) {
case CLASS:
//If it's a class attribute, it's the only edge between the two vertices
//TODO need to enable this
// if (refAttributeInfo.multiplicity == Multiplicity.REQUIRED) {
// throw new AtlasException("Can't set attribute " + refAttributeName + " to null as its required attribute");
// }
edge = GraphHelper.getEdgeForLabel(outVertex, edgeLabel);
break;
case ARRAY:
//If it's an array attribute, find the right edge between the two vertices and update the array property
List<String> elements = outVertex.getProperty(propertyName);
if (elements != null) {
elements = new ArrayList<>(elements); //Make a copy, else list.remove reflects on titan.getProperty()
for (String elementEdgeId : elements) {
Edge elementEdge = graphHelper.getEdgeById(elementEdgeId);
if (elementEdge == null) {
continue;
}
Vertex elementVertex = elementEdge.getVertex(Direction.IN);
if (elementVertex.getId().toString().equals(inVertex.getId().toString())) {
edge = elementEdge;
if (shouldUpdateReverseAttribute || attributeInfo.isComposite) {
//If it's a composite attribute, remove the reference as well; else just remove the edge.
//For example, when a table is deleted, the process still references the table,
//but when a column is deleted, the table no longer references the deleted column.
LOG.debug("Removing edge {} from the array attribute {}", string(elementEdge),
attributeName);
elements.remove(elementEdge.getId().toString());
GraphHelper.setProperty(outVertex, propertyName, elements);
}
break;
}
}
}
break;
case MAP:
//If it's a map attribute, find the right edge between the two vertices and update the map property
List<String> keys = outVertex.getProperty(propertyName);
if (keys != null) {
keys = new ArrayList<>(keys); //Make a copy, else list.remove reflects on titan.getProperty()
for (String key : keys) {
String keyPropertyName = propertyName + "." + key;
String mapEdgeId = outVertex.getProperty(keyPropertyName);
Edge mapEdge = graphHelper.getEdgeById(mapEdgeId);
Vertex mapVertex = mapEdge.getVertex(Direction.IN);
if (mapVertex.getId().toString().equals(inVertex.getId().toString())) {
edge = mapEdge;
if (shouldUpdateReverseAttribute || attributeInfo.isComposite) {
//remove this key
LOG.debug("Removing edge {}, key {} from the map attribute {}", string(mapEdge), key,
attributeName);
keys.remove(key);
GraphHelper.setProperty(outVertex, propertyName, keys);
GraphHelper.setProperty(outVertex, keyPropertyName, null);
}
break;
}
}
}
break;
case STRUCT:
case TRAIT:
break;
default:
throw new IllegalStateException("There can't be an edge from " + string(outVertex) + " to "
+ string(inVertex) + " with attribute name " + attributeName + " which is not class/array/map attribute");
}
if (edge != null) {
deleteEdge(edge);
GraphHelper.setProperty(outVertex, Constants.MODIFICATION_TIMESTAMP_PROPERTY_KEY,
RequestContext.get().getRequestTime());
}
}
protected AttributeInfo getAttributeForEdge(String edgeLabel) throws AtlasException {
AtlasEdgeLabel atlasEdgeLabel = new AtlasEdgeLabel(edgeLabel);
IDataType referenceType = typeSystem.getDataType(IDataType.class, atlasEdgeLabel.getTypeName());
return getFieldMapping(referenceType).fields.get(atlasEdgeLabel.getAttributeName());
}
protected FieldMapping getFieldMapping(IDataType type) {
switch (type.getTypeCategory()) {
case CLASS:
case TRAIT:
return ((HierarchicalType)type).fieldMapping();
case STRUCT:
return ((StructType)type).fieldMapping();
default:
throw new IllegalStateException("Type " + type + " doesn't have any fields!");
}
}
/**
* Delete all traits from the specified vertex.
* @param instanceVertex
* @throws AtlasException
*/
private void deleteAllTraits(Vertex instanceVertex) throws AtlasException {
List<String> traitNames = GraphHelper.getTraitNames(instanceVertex);
LOG.debug("Deleting traits {} for {}", traitNames, string(instanceVertex));
String typeName = GraphHelper.getTypeName(instanceVertex);
for (String traitNameToBeDeleted : traitNames) {
String relationshipLabel = GraphHelper.getTraitLabel(typeName, traitNameToBeDeleted);
deleteReference(instanceVertex, relationshipLabel, DataTypes.TypeCategory.TRAIT);
}
}
}
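DeleteHandler above is a template: the traversal over traits, composite references, arrays and maps lives in the base class, and a concrete handler only decides what "delete" means for a single edge or vertex. A minimal hypothetical subclass (illustrative only, not part of this commit; the real handlers are HardDeleteHandler and SoftDeleteHandler below) shows the two hooks a handler must provide:
package org.apache.atlas.repository.graph;
import com.tinkerpop.blueprints.Edge;
import com.tinkerpop.blueprints.Vertex;
import org.apache.atlas.AtlasException;
import org.apache.atlas.typesystem.types.TypeSystem;
// Hypothetical sketch - illustrates the DeleteHandler contract only.
public class LoggingDeleteHandler extends DeleteHandler {
public LoggingDeleteHandler(TypeSystem typeSystem) {
super(typeSystem, true);   // true = keep reverse attributes in sync when edges are removed
}
@Override
protected void deleteEdge(Edge edge) throws AtlasException {
LOG.info("Would delete {}", GraphHelper.string(edge));   // a real handler removes or soft-deletes the edge
}
@Override
protected void _deleteVertex(Vertex instanceVertex) {
LOG.info("Would delete {}", GraphHelper.string(instanceVertex));   // a real handler removes or soft-deletes the vertex
}
}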
......@@ -19,14 +19,14 @@
package org.apache.atlas.repository.graph;
import com.google.common.base.Preconditions;
import com.google.inject.Inject;
import com.google.inject.Singleton;
import com.thinkaurelius.titan.core.TitanGraph;
import com.tinkerpop.blueprints.Direction;
import com.tinkerpop.blueprints.Edge;
import com.tinkerpop.blueprints.GraphQuery;
import com.tinkerpop.blueprints.Vertex;
import org.apache.atlas.AtlasException;
import org.apache.atlas.GraphTransaction;
import org.apache.atlas.RequestContext;
import org.apache.atlas.repository.Constants;
import org.apache.atlas.repository.MetadataRepository;
import org.apache.atlas.repository.RepositoryException;
......@@ -37,15 +37,13 @@ import org.apache.atlas.typesystem.exception.EntityNotFoundException;
import org.apache.atlas.typesystem.exception.TraitNotFoundException;
import org.apache.atlas.typesystem.types.AttributeInfo;
import org.apache.atlas.typesystem.types.ClassType;
import org.apache.atlas.typesystem.types.DataTypes;
import org.apache.atlas.typesystem.types.IDataType;
import org.apache.atlas.typesystem.types.TypeSystem;
import org.apache.atlas.typesystem.types.TypeUtils;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import javax.inject.Inject;
import javax.inject.Singleton;
import java.util.ArrayList;
import java.util.Collections;
import java.util.Iterator;
......@@ -60,18 +58,21 @@ public class GraphBackedMetadataRepository implements MetadataRepository {
private static final Logger LOG = LoggerFactory.getLogger(GraphBackedMetadataRepository.class);
private final GraphToTypedInstanceMapper graphToInstanceMapper;
private static TypeSystem typeSystem = TypeSystem.getInstance();
private static final GraphHelper graphHelper = GraphHelper.getInstance();
private final TitanGraph titanGraph;
private DeleteHandler deleteHandler;
private GraphToTypedInstanceMapper graphToInstanceMapper;
@Inject
public GraphBackedMetadataRepository(GraphProvider<TitanGraph> graphProvider) {
public GraphBackedMetadataRepository(GraphProvider<TitanGraph> graphProvider, DeleteHandler deleteHandler) {
this.titanGraph = graphProvider.get();
this.graphToInstanceMapper = new GraphToTypedInstanceMapper(titanGraph);
graphToInstanceMapper = new GraphToTypedInstanceMapper(titanGraph);
this.deleteHandler = deleteHandler;
}
public GraphToTypedInstanceMapper getGraphToInstanceMapper() {
......@@ -122,10 +123,9 @@ public class GraphBackedMetadataRepository implements MetadataRepository {
EntityExistsException {
LOG.info("adding entities={}", entities);
try {
TypedInstanceToGraphMapper instanceToGraphMapper = new TypedInstanceToGraphMapper(graphToInstanceMapper);
TypeUtils.Pair<List<String>, List<String>> idPair =
instanceToGraphMapper.mapTypedInstanceToGraph(TypedInstanceToGraphMapper.Operation.CREATE, entities);
return idPair.left;
TypedInstanceToGraphMapper instanceToGraphMapper = new TypedInstanceToGraphMapper(graphToInstanceMapper, deleteHandler);
instanceToGraphMapper.mapTypedInstanceToGraph(TypedInstanceToGraphMapper.Operation.CREATE, entities);
return RequestContext.get().getCreatedEntityIds();
} catch (EntityExistsException e) {
throw e;
} catch (AtlasException e) {
......@@ -215,14 +215,15 @@ public class GraphBackedMetadataRepository implements MetadataRepository {
// add the trait instance as a new vertex
final String typeName = GraphHelper.getTypeName(instanceVertex);
TypedInstanceToGraphMapper instanceToGraphMapper = new TypedInstanceToGraphMapper(graphToInstanceMapper);
TypedInstanceToGraphMapper instanceToGraphMapper = new TypedInstanceToGraphMapper(graphToInstanceMapper, deleteHandler);
instanceToGraphMapper.mapTraitInstanceToVertex(traitInstance,
typeSystem.getDataType(ClassType.class, typeName), instanceVertex);
// update the traits in entity once adding trait instance is successful
GraphHelper.addProperty(instanceVertex, Constants.TRAIT_NAMES_PROPERTY_KEY, traitName);
GraphHelper.setProperty(instanceVertex, Constants.MODIFICATION_TIMESTAMP_PROPERTY_KEY, Long.valueOf(System.currentTimeMillis()));
GraphHelper.setProperty(instanceVertex, Constants.MODIFICATION_TIMESTAMP_PROPERTY_KEY,
RequestContext.get().getRequestTime());
} catch (RepositoryException e) {
throw e;
......@@ -254,23 +255,12 @@ public class GraphBackedMetadataRepository implements MetadataRepository {
try {
final String entityTypeName = GraphHelper.getTypeName(instanceVertex);
String relationshipLabel = GraphHelper.getTraitLabel(entityTypeName, traitNameToBeDeleted);
Iterator<Edge> results = instanceVertex.getEdges(Direction.OUT, relationshipLabel).iterator();
if (results.hasNext()) { // there should only be one edge for this label
final Edge traitEdge = results.next();
final Vertex traitVertex = traitEdge.getVertex(Direction.IN);
// remove the edge to the trait instance from the repository
titanGraph.removeEdge(traitEdge);
if (traitVertex != null) { // remove the trait instance from the repository
TypedInstanceToGraphMapper instanceToGraphMapper = new TypedInstanceToGraphMapper(graphToInstanceMapper);
instanceToGraphMapper.deleteTraitVertex(traitNameToBeDeleted, traitVertex);
// update the traits in entity once trait removal is successful
traitNames.remove(traitNameToBeDeleted);
updateTraits(instanceVertex, traitNames);
}
}
deleteHandler.deleteReference(instanceVertex, relationshipLabel, DataTypes.TypeCategory.TRAIT);
// update the traits in entity once trait removal is successful
traitNames.remove(traitNameToBeDeleted);
updateTraits(instanceVertex, traitNames);
} catch (Exception e) {
throw new RepositoryException(e);
}
......@@ -285,7 +275,8 @@ public class GraphBackedMetadataRepository implements MetadataRepository {
for (String traitName : traitNames) {
GraphHelper.addProperty(instanceVertex, Constants.TRAIT_NAMES_PROPERTY_KEY, traitName);
}
GraphHelper.setProperty(instanceVertex, Constants.MODIFICATION_TIMESTAMP_PROPERTY_KEY, Long.valueOf(System.currentTimeMillis()));
GraphHelper.setProperty(instanceVertex, Constants.MODIFICATION_TIMESTAMP_PROPERTY_KEY,
RequestContext.get().getRequestTime());
}
@Override
......@@ -293,9 +284,11 @@ public class GraphBackedMetadataRepository implements MetadataRepository {
public TypeUtils.Pair<List<String>, List<String>> updateEntities(ITypedReferenceableInstance... entitiesUpdated) throws RepositoryException {
LOG.info("updating entity {}", entitiesUpdated);
try {
TypedInstanceToGraphMapper instanceToGraphMapper = new TypedInstanceToGraphMapper(graphToInstanceMapper);
return instanceToGraphMapper.mapTypedInstanceToGraph(TypedInstanceToGraphMapper.Operation.UPDATE_FULL,
TypedInstanceToGraphMapper instanceToGraphMapper = new TypedInstanceToGraphMapper(graphToInstanceMapper, deleteHandler);
instanceToGraphMapper.mapTypedInstanceToGraph(TypedInstanceToGraphMapper.Operation.UPDATE_FULL,
entitiesUpdated);
RequestContext requestContext = RequestContext.get();
return TypeUtils.Pair.of(requestContext.getCreatedEntityIds(), requestContext.getUpdatedEntityIds());
} catch (AtlasException e) {
throw new RepositoryException(e);
}
......@@ -306,8 +299,10 @@ public class GraphBackedMetadataRepository implements MetadataRepository {
public TypeUtils.Pair<List<String>, List<String>> updatePartial(ITypedReferenceableInstance entity) throws RepositoryException {
LOG.info("updating entity {}", entity);
try {
TypedInstanceToGraphMapper instanceToGraphMapper = new TypedInstanceToGraphMapper(graphToInstanceMapper);
return instanceToGraphMapper.mapTypedInstanceToGraph(TypedInstanceToGraphMapper.Operation.UPDATE_PARTIAL, entity);
TypedInstanceToGraphMapper instanceToGraphMapper = new TypedInstanceToGraphMapper(graphToInstanceMapper, deleteHandler);
instanceToGraphMapper.mapTypedInstanceToGraph(TypedInstanceToGraphMapper.Operation.UPDATE_PARTIAL, entity);
RequestContext requestContext = RequestContext.get();
return TypeUtils.Pair.of(requestContext.getCreatedEntityIds(), requestContext.getUpdatedEntityIds());
} catch (AtlasException e) {
throw new RepositoryException(e);
}
......@@ -315,13 +310,12 @@ public class GraphBackedMetadataRepository implements MetadataRepository {
@Override
@GraphTransaction
public TypeUtils.Pair<List<String>, List<ITypedReferenceableInstance>> deleteEntities(List<String> guids) throws RepositoryException {
public TypeUtils.Pair<List<String>, List<ITypedReferenceableInstance>> deleteEntities(List<String> guids) throws RepositoryException {
if (guids == null || guids.size() == 0) {
throw new IllegalArgumentException("guids must be non-null and non-empty");
}
TypedInstanceToGraphMapper instanceToGraphMapper = new TypedInstanceToGraphMapper(graphToInstanceMapper);
for (String guid : guids) {
if (guid == null) {
LOG.warn("deleteEntities: Ignoring null guid");
......@@ -329,8 +323,7 @@ public class GraphBackedMetadataRepository implements MetadataRepository {
}
try {
Vertex instanceVertex = graphHelper.getVertexForGUID(guid);
String typeName = GraphHelper.getTypeName(instanceVertex);
instanceToGraphMapper.deleteEntity(typeName, instanceVertex);
deleteHandler.deleteEntity(instanceVertex);
} catch (EntityNotFoundException e) {
// Entity does not exist - treat as non-error, since the caller
// wanted to delete the entity and it's already gone.
......@@ -340,7 +333,7 @@ public class GraphBackedMetadataRepository implements MetadataRepository {
throw new RepositoryException(e);
}
}
return new TypeUtils.Pair<>(
instanceToGraphMapper.getDeletedEntityGuids(), instanceToGraphMapper.getDeletedEntities());
RequestContext requestContext = RequestContext.get();
return new TypeUtils.Pair<>(requestContext.getDeletedEntityIds(), requestContext.getDeletedEntities());
}
}
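Since the per-request bookkeeping now lives in RequestContext, a caller of deleteEntities gets back whatever the configured DeleteHandler recorded during the request. A hedged usage sketch (the repository instance and GUID values are assumed; exception handling omitted):
// Sketch only: 'repository' is an injected GraphBackedMetadataRepository; GUIDs are illustrative.
TypeUtils.Pair<List<String>, List<ITypedReferenceableInstance>> deleted =
repository.deleteEntities(Arrays.asList("guid-1", "guid-2"));
List<String> deletedGuids = deleted.left;                            // ids recorded via RequestContext
List<ITypedReferenceableInstance> deletedInstances = deleted.right;  // instances recorded via RequestContext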
......@@ -100,7 +100,11 @@ public class GraphBackedSearchIndexer implements SearchIndexer, ActiveStateChang
management.buildIndex(Constants.EDGE_INDEX, Edge.class).buildMixedIndex(Constants.BACKING_INDEX);
// create a composite index for guid as its unique
createCompositeAndMixedIndex(management, Constants.GUID_PROPERTY_KEY, String.class, true, Cardinality.SINGLE, true);
createCompositeAndMixedIndex(management, Constants.GUID_PROPERTY_KEY, String.class, true,
Cardinality.SINGLE, true);
// create a composite index for entity state
createCompositeAndMixedIndex(management, Constants.STATE_PROPERTY_KEY, String.class, false, Cardinality.SINGLE, true);
// create a composite and mixed index for type since it can be combined with other keys
createCompositeAndMixedIndex(management, Constants.ENTITY_TYPE_PROPERTY_KEY, String.class, false, Cardinality.SINGLE,
......@@ -223,13 +227,13 @@ public class GraphBackedSearchIndexer implements SearchIndexer, ActiveStateChang
switch (field.dataType().getTypeCategory()) {
case PRIMITIVE:
Cardinality cardinality = getCardinality(field.multiplicity);
createCompositeAndMixedIndex(management, propertyName, getPrimitiveClass(field.dataType()), field.isUnique,
createCompositeAndMixedIndex(management, propertyName, getPrimitiveClass(field.dataType()), false,
cardinality, false);
break;
case ENUM:
cardinality = getCardinality(field.multiplicity);
createCompositeAndMixedIndex(management, propertyName, String.class, field.isUnique, cardinality, false);
createCompositeAndMixedIndex(management, propertyName, String.class, false, cardinality, false);
break;
case ARRAY:
......
......@@ -23,11 +23,12 @@ import com.thinkaurelius.titan.core.TitanProperty;
import com.thinkaurelius.titan.core.TitanVertex;
import com.tinkerpop.blueprints.Direction;
import com.tinkerpop.blueprints.Edge;
import com.tinkerpop.blueprints.Element;
import com.tinkerpop.blueprints.Graph;
import com.tinkerpop.blueprints.GraphQuery;
import com.tinkerpop.blueprints.Vertex;
import org.apache.atlas.AtlasException;
import org.apache.atlas.RequestContext;
import org.apache.atlas.repository.Constants;
import org.apache.atlas.typesystem.IReferenceableInstance;
import org.apache.atlas.typesystem.ITypedInstance;
......@@ -40,7 +41,6 @@ import org.apache.atlas.typesystem.types.DataTypes;
import org.apache.atlas.typesystem.types.HierarchicalType;
import org.apache.atlas.typesystem.types.IDataType;
import org.apache.atlas.typesystem.types.TypeSystem;
import org.apache.atlas.typesystem.types.TypeUtils.Pair;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
......@@ -77,7 +77,7 @@ public final class GraphHelper {
final String guid = UUID.randomUUID().toString();
final Vertex vertexWithIdentity = createVertexWithoutIdentity(typedInstance.getTypeName(),
new Id(guid, 0 , typedInstance.getTypeName()), superTypeNames);
new Id(guid, 0, typedInstance.getTypeName()), superTypeNames);
// add identity
setProperty(vertexWithIdentity, Constants.GUID_PROPERTY_KEY, guid);
......@@ -85,9 +85,6 @@ public final class GraphHelper {
// add version information
setProperty(vertexWithIdentity, Constants.VERSION_PROPERTY_KEY, typedInstance.getId().version);
// add state information
setProperty(vertexWithIdentity, Constants.STATE_PROPERTY_KEY, Id.EntityState.ACTIVE.name());
return vertexWithIdentity;
}
......@@ -99,41 +96,120 @@ public final class GraphHelper {
// add type information
setProperty(vertexWithoutIdentity, Constants.ENTITY_TYPE_PROPERTY_KEY, typeName);
// add super types
for (String superTypeName : superTypeNames) {
addProperty(vertexWithoutIdentity, Constants.SUPER_TYPES_PROPERTY_KEY, superTypeName);
}
// add state information
setProperty(vertexWithoutIdentity, Constants.STATE_PROPERTY_KEY, Id.EntityState.ACTIVE.name());
// add timestamp information
setProperty(vertexWithoutIdentity, Constants.TIMESTAMP_PROPERTY_KEY, System.currentTimeMillis());
setProperty(vertexWithoutIdentity, Constants.TIMESTAMP_PROPERTY_KEY, RequestContext.get().getRequestTime());
return vertexWithoutIdentity;
}
public Edge addEdge(Vertex fromVertex, Vertex toVertex, String edgeLabel) {
LOG.debug("Adding edge for {} -> label {} -> {}", fromVertex, edgeLabel, toVertex);
LOG.debug("Adding edge for {} -> label {} -> {}", string(fromVertex), edgeLabel, string(toVertex));
Edge edge = titanGraph.addEdge(null, fromVertex, toVertex, edgeLabel);
LOG.debug("Added edge for {} -> label {}, id {} -> {}", fromVertex, edgeLabel, edge.getId(), toVertex);
setProperty(edge, Constants.STATE_PROPERTY_KEY, Id.EntityState.ACTIVE.name());
setProperty(edge, Constants.TIMESTAMP_PROPERTY_KEY, RequestContext.get().getRequestTime());
setProperty(edge, Constants.MODIFICATION_TIMESTAMP_PROPERTY_KEY, RequestContext.get().getRequestTime());
LOG.debug("Added {}", string(edge));
return edge;
}
public Vertex findVertex(String propertyKey, Object value) {
LOG.debug("Finding vertex for {}={}", propertyKey, value);
public Edge getOrCreateEdge(Vertex outVertex, Vertex inVertex, String edgeLabel) {
Iterable<Edge> edges = inVertex.getEdges(Direction.IN, edgeLabel);
for (Edge edge : edges) {
if (edge.getVertex(Direction.OUT).getId().toString().equals(outVertex.getId().toString())) {
return edge;
}
}
return addEdge(outVertex, inVertex, edgeLabel);
}
/**
* Args of the format prop1, key1, prop2, key2...
* Searches for a vertex with prop1=key1 && prop2=key2
* @param args
* @return vertex with the given property keys
* @throws EntityNotFoundException
*/
private Vertex findVertex(Object... args) throws EntityNotFoundException {
StringBuilder condition = new StringBuilder();
GraphQuery query = titanGraph.query();
for (int i = 0 ; i < args.length; i+=2) {
query = query.has((String) args[i], args[i+1]);
condition.append(args[i]).append(" = ").append(args[i+1]).append(", ");
}
String conditionStr = condition.toString();
LOG.debug("Finding vertex with {}", conditionStr);
GraphQuery query = titanGraph.query().has(propertyKey, value);
Iterator<Vertex> results = query.vertices().iterator();
// returning one since entityType, qualifiedName should be unique
return results.hasNext() ? results.next() : null;
Vertex vertex = results.hasNext() ? results.next() : null;
if (vertex == null) {
LOG.debug("Could not find a vertex with {}", condition.toString());
throw new EntityNotFoundException("Could not find an entity in the repository with " + conditionStr);
} else {
LOG.debug("Found a vertex {} with {}", string(vertex), conditionStr);
}
return vertex;
}
public static Iterable<Edge> getOutGoingEdgesByLabel(Vertex instanceVertex, String edgeLabel) {
public static Iterator<Edge> getOutGoingEdgesByLabel(Vertex instanceVertex, String edgeLabel) {
LOG.debug("Finding edges for {} with label {}", string(instanceVertex), edgeLabel);
if(instanceVertex != null && edgeLabel != null) {
return instanceVertex.getEdges(Direction.OUT, edgeLabel);
return instanceVertex.getEdges(Direction.OUT, edgeLabel).iterator();
}
return null;
}
public Edge getOutGoingEdgeById(String edgeId) {
/**
* Returns the active edge for the given edge label.
* If the vertex is deleted and there is no active edge, it returns the latest deleted edge
* @param vertex
* @param edgeLabel
* @return
*/
public static Edge getEdgeForLabel(Vertex vertex, String edgeLabel) {
String vertexState = vertex.getProperty(Constants.STATE_PROPERTY_KEY);
Iterator<Edge> iterator = GraphHelper.getOutGoingEdgesByLabel(vertex, edgeLabel);
Edge latestDeletedEdge = null;
long latestDeletedEdgeTime = Long.MIN_VALUE;
while (iterator != null && iterator.hasNext()) {
Edge edge = iterator.next();
String edgeState = edge.getProperty(Constants.STATE_PROPERTY_KEY);
if (edgeState == null || Id.EntityState.ACTIVE.name().equals(edgeState)) {
LOG.debug("Found {}", string(edge));
return edge;
} else {
Long modificationTime = edge.getProperty(Constants.MODIFICATION_TIMESTAMP_PROPERTY_KEY);
if (modificationTime != null && modificationTime >= latestDeletedEdgeTime) {
latestDeletedEdgeTime = modificationTime;
latestDeletedEdge = edge;
}
}
}
//If the vertex is deleted, return latest deleted edge
if (Id.EntityState.DELETED.name().equals(vertexState)) {
LOG.debug("Found {}", string(latestDeletedEdge));
return latestDeletedEdge;
}
return null;
}
public Edge getEdgeById(String edgeId) {
if(edgeId != null) {
return titanGraph.getEdge(edgeId);
}
......@@ -154,60 +230,47 @@ public final class GraphHelper {
+ edge.getVertex(Direction.IN) + "]";
}
public static void setProperty(Vertex vertex, String propertyName, Object value) {
LOG.debug("Setting property {} = \"{}\" to vertex {}", propertyName, value, vertex);
Object existValue = vertex.getProperty(propertyName);
public static <T extends Element> void setProperty(T element, String propertyName, Object value) {
String elementStr = string(element);
LOG.debug("Setting property {} = \"{}\" to {}", propertyName, value, elementStr);
Object existValue = element.getProperty(propertyName);
if(value == null || (value instanceof Collection && ((Collection) value).isEmpty())) {
if(existValue != null) {
LOG.info("Removing property - {} value from vertex {}", propertyName, vertex);
vertex.removeProperty(propertyName);
LOG.info("Removing property - {} value from {}", propertyName, elementStr);
element.removeProperty(propertyName);
}
} else {
if (!value.equals(existValue)) {
vertex.setProperty(propertyName, value);
LOG.debug("Set property {} = \"{}\" to vertex {}", propertyName, value, vertex);
element.setProperty(propertyName, value);
LOG.debug("Set property {} = \"{}\" to {}", propertyName, value, elementStr);
}
}
}
private static <T extends Element> String string(T element) {
if (element instanceof Vertex) {
return string((Vertex) element);
} else if (element instanceof Edge) {
return string((Edge)element);
}
return element.toString();
}
public static void addProperty(Vertex vertex, String propertyName, Object value) {
LOG.debug("Setting property {} = \"{}\" to vertex {}", propertyName, value, vertex);
LOG.debug("Adding property {} = \"{}\" to vertex {}", propertyName, value, string(vertex));
((TitanVertex)vertex).addProperty(propertyName, value);
}
public Edge removeRelation(String edgeId, boolean cascade) {
LOG.debug("Removing edge with id {}", edgeId);
final Edge edge = titanGraph.getEdge(edgeId);
titanGraph.removeEdge(edge);
LOG.info("Removed edge {}", edge);
if (cascade) {
Vertex referredVertex = edge.getVertex(Direction.IN);
removeVertex(referredVertex);
}
return edge;
}
/**
* Remove the specified edge from the graph.
*
* @param edge
*/
public void removeEdge(Edge edge) {
LOG.debug("Removing edge {}", edge);
String edgeString = string(edge);
LOG.debug("Removing {}", edgeString);
titanGraph.removeEdge(edge);
LOG.info("Removed edge {}", edge);
}
/**
* Return the edge and target vertex for the specified edge ID.
*
* @param edgeId
* @return edge and target vertex
*/
public Pair<Edge, Vertex> getEdgeAndTargetVertex(String edgeId) {
final Edge edge = titanGraph.getEdge(edgeId);
Vertex referredVertex = edge.getVertex(Direction.IN);
return Pair.of(edge, referredVertex);
LOG.info("Removed {}", edgeString);
}
/**
......@@ -216,27 +279,22 @@ public final class GraphHelper {
* @param vertex
*/
public void removeVertex(Vertex vertex) {
LOG.debug("Removing vertex {}", vertex);
String vertexString = string(vertex);
LOG.debug("Removing {}", vertexString);
titanGraph.removeVertex(vertex);
LOG.info("Removed vertex {}", vertex);
LOG.info("Removed {}", vertexString);
}
public Vertex getVertexForGUID(String guid) throws EntityNotFoundException {
return getVertexForProperty(Constants.GUID_PROPERTY_KEY, guid);
return findVertex(Constants.GUID_PROPERTY_KEY, guid);
}
public Vertex getVertexForProperty(String propertyKey, Object value) throws EntityNotFoundException {
Vertex instanceVertex = findVertex(propertyKey, value);
if (instanceVertex == null) {
LOG.debug("Could not find a vertex with {}={}", propertyKey, value);
throw new EntityNotFoundException("Could not find an entity in the repository with " + propertyKey + "="
+ value);
} else {
LOG.debug("Found a vertex {} with {}={}", instanceVertex, propertyKey, value);
}
return findVertex(propertyKey, value, Constants.STATE_PROPERTY_KEY, Id.EntityState.ACTIVE.name());
}
return instanceVertex;
public static String getQualifiedNameForMapKey(String prefix, String key) {
return prefix + "." + key;
}
public static String getQualifiedFieldName(ITypedInstance typedInstance, AttributeInfo attributeInfo) throws AtlasException {
......@@ -277,6 +335,10 @@ public final class GraphHelper {
vertex.<Integer>getProperty(Constants.VERSION_PROPERTY_KEY), dataTypeName);
}
public static String getIdFromVertex(Vertex vertex) {
return vertex.<String>getProperty(Constants.GUID_PROPERTY_KEY);
}
public static String getTypeName(Vertex instanceVertex) {
return instanceVertex.getProperty(Constants.ENTITY_TYPE_PROPERTY_KEY);
}
......@@ -292,7 +354,7 @@ public final class GraphHelper {
*/
public Vertex getVertexForInstanceByUniqueAttribute(ClassType classType, IReferenceableInstance instance)
throws AtlasException {
LOG.debug("Checking if there is an instance with the same unique attributes for instance {}", instance);
LOG.debug("Checking if there is an instance with the same unique attributes for instance {}", instance.toShortString());
Vertex result = null;
for (AttributeInfo attributeInfo : classType.fieldMapping().fields.values()) {
if (attributeInfo.isUnique) {
......@@ -322,4 +384,18 @@ public final class GraphHelper {
}
LOG.debug("*******************Graph Dump****************************");
}
public static String string(ITypedReferenceableInstance instance) {
return String.format("entity[type=%s guid=%]", instance.getTypeName(), instance.getId()._getId());
}
public static String string(Vertex vertex) {
return String.format("vertex[id=%s type=%s guid=%s]", vertex.getId().toString(), getTypeName(vertex),
getIdFromVertex(vertex));
}
public static String string(Edge edge) {
return String.format("edge[id=%s label=%s from %s -> to %s]", edge.getId().toString(), edge.getLabel(),
string(edge.getVertex(Direction.OUT)), string(edge.getVertex(Direction.IN)));
}
}
\ No newline at end of file
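A behavioural consequence of the state-aware lookup above: getVertexForProperty now matches only ACTIVE entities, so resolving an entity by a unique attribute fails with EntityNotFoundException once the entity has been soft deleted, while getVertexForGUID still resolves it by GUID. A small hedged sketch (the property name and value are illustrative):
// Sketch only: the property/value pair is illustrative.
GraphHelper helper = GraphHelper.getInstance();
try {
// Matches only vertices whose state property is ACTIVE.
Vertex vertex = helper.getVertexForProperty("Referenceable.qualifiedName", "default.mytable@test");
} catch (EntityNotFoundException e) {
// Never created, hard deleted, or soft deleted.
}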
......@@ -17,6 +17,7 @@
*/
package org.apache.atlas.repository.graph;
import com.google.inject.Singleton;
import com.thinkaurelius.titan.core.TitanGraph;
import com.tinkerpop.blueprints.Direction;
import com.tinkerpop.blueprints.Edge;
......@@ -43,15 +44,19 @@ import java.math.BigInteger;
import java.util.ArrayList;
import java.util.Date;
import java.util.HashMap;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import static org.apache.atlas.repository.graph.GraphHelper.string;
@Singleton
public final class GraphToTypedInstanceMapper {
private static final Logger LOG = LoggerFactory.getLogger(GraphToTypedInstanceMapper.class);
private static TypeSystem typeSystem = TypeSystem.getInstance();
private final TitanGraph titanGraph;
private static final GraphHelper graphHelper = GraphHelper.getInstance();
private TitanGraph titanGraph;
public GraphToTypedInstanceMapper(TitanGraph titanGraph) {
this.titanGraph = titanGraph;
......@@ -97,12 +102,12 @@ public final class GraphToTypedInstanceMapper {
}
}
private void mapVertexToAttribute(Vertex instanceVertex, ITypedInstance typedInstance,
AttributeInfo attributeInfo) throws AtlasException {
LOG.debug("Mapping attributeInfo {}", attributeInfo.name);
final IDataType dataType = attributeInfo.dataType();
final String vertexPropertyName = GraphHelper.getQualifiedFieldName(typedInstance, attributeInfo);
String relationshipLabel = GraphHelper.getEdgeLabel(typedInstance, attributeInfo);
switch (dataType.getTypeCategory()) {
case PRIMITIVE:
......@@ -128,7 +133,9 @@ public final class GraphToTypedInstanceMapper {
break;
case STRUCT:
mapVertexToStructInstance(instanceVertex, typedInstance, attributeInfo);
ITypedStruct structInstance = mapVertexToStructInstance(instanceVertex,
(StructType) attributeInfo.dataType(), relationshipLabel, null);
typedInstance.set(attributeInfo.name, structInstance);
break;
case TRAIT:
......@@ -136,9 +143,8 @@ public final class GraphToTypedInstanceMapper {
break;
case CLASS:
String relationshipLabel = GraphHelper.getEdgeLabel(typedInstance, attributeInfo);
Object idOrInstance = mapVertexToClassReference(instanceVertex, attributeInfo, relationshipLabel,
attributeInfo.dataType());
attributeInfo.dataType(), null);
if (idOrInstance != null) {
typedInstance.set(attributeInfo.name, idOrInstance);
}
......@@ -150,25 +156,30 @@ public final class GraphToTypedInstanceMapper {
}
private Object mapVertexToClassReference(Vertex instanceVertex, AttributeInfo attributeInfo,
String relationshipLabel, IDataType dataType) throws AtlasException {
String relationshipLabel, IDataType dataType, String edgeId) throws AtlasException {
LOG.debug("Finding edge for {} -> label {} ", instanceVertex, relationshipLabel);
Iterator<Edge> results = instanceVertex.getEdges(Direction.OUT, relationshipLabel).iterator();
if (results.hasNext()) {
final Vertex referenceVertex = results.next().getVertex(Direction.IN);
if (referenceVertex != null) {
final String guid = referenceVertex.getProperty(Constants.GUID_PROPERTY_KEY);
LOG.debug("Found vertex {} for label {} with guid {}", referenceVertex, relationshipLabel, guid);
if (attributeInfo.isComposite) {
//Also, when you retrieve a type's instance, you get the complete object graph of the composites
LOG.debug("Found composite, mapping vertex to instance");
return mapGraphToTypedInstance(guid, referenceVertex);
} else {
Id referenceId =
Edge edge;
if (edgeId == null) {
edge = GraphHelper.getEdgeForLabel(instanceVertex, relationshipLabel);
} else {
edge = graphHelper.getEdgeById(edgeId);
}
if (edge != null) {
final Vertex referenceVertex = edge.getVertex(Direction.IN);
final String guid = referenceVertex.getProperty(Constants.GUID_PROPERTY_KEY);
LOG.debug("Found vertex {} for label {} with guid {}", referenceVertex, relationshipLabel, guid);
if (attributeInfo.isComposite) {
//Also, when you retrieve a type's instance, you get the complete object graph of the composites
LOG.debug("Found composite, mapping vertex to instance");
return mapGraphToTypedInstance(guid, referenceVertex);
} else {
Id referenceId =
new Id(guid, referenceVertex.<Integer>getProperty(Constants.VERSION_PROPERTY_KEY),
dataType.getName());
LOG.debug("Found non-composite, adding id {} ", referenceId);
return referenceId;
}
dataType.getName());
LOG.debug("Found non-composite, adding id {} ", referenceId);
return referenceId;
}
}
......@@ -212,8 +223,7 @@ public final class GraphToTypedInstanceMapper {
break;
case STRUCT:
return getStructInstanceFromVertex(instanceVertex, elementType, attributeInfo.name, edgeLabel,
(String) value);
return mapVertexToStructInstance(instanceVertex, (StructType) elementType, edgeLabel, (String) value);
case CLASS:
return mapVertexToClassReference(instanceVertex, attributeInfo, edgeLabel, elementType, (String) value);
......@@ -252,83 +262,27 @@ public final class GraphToTypedInstanceMapper {
}
}
private ITypedStruct getStructInstanceFromVertex(Vertex instanceVertex, IDataType elemType,
String attributeName, String relationshipLabel, String edgeId) throws AtlasException {
LOG.debug("Finding edge for {} -> label {} ", instanceVertex, relationshipLabel);
for (Edge edge : instanceVertex.getEdges(Direction.OUT, relationshipLabel)) {
if (edgeId.equals(String.valueOf(edge.getId()))) {
Vertex structInstanceVertex = edge.getVertex(Direction.IN);
LOG.debug("mapping vertex {} to struct {}", structInstanceVertex, attributeName);
if (structInstanceVertex != null) {
LOG.debug("Found struct instance vertex {}, mapping to instance {} ", structInstanceVertex,
elemType.getName());
StructType structType = typeSystem.getDataType(StructType.class, elemType.getName());
ITypedStruct structInstance = structType.createInstance();
mapVertexToInstance(structInstanceVertex, structInstance, structType.fieldMapping().fields);
return structInstance;
}
break;
}
}
return null;
}
private Object mapVertexToClassReference(Vertex instanceVertex, AttributeInfo attributeInfo,
String relationshipLabel, IDataType dataType, String edgeId) throws AtlasException {
LOG.debug("Finding edge for {} -> label {} ", instanceVertex, relationshipLabel);
for (Edge edge : instanceVertex.getEdges(Direction.OUT, relationshipLabel)) {
if (edgeId.equals(String.valueOf(edge.getId()))) {
final Vertex referenceVertex = edge.getVertex(Direction.IN);
if (referenceVertex != null) {
final String guid = referenceVertex.getProperty(Constants.GUID_PROPERTY_KEY);
LOG.debug("Found vertex {} for label {} with guid {}", referenceVertex, relationshipLabel,
guid);
if (attributeInfo.isComposite) {
//Also, when you retrieve a type's instance, you get the complete object graph of the composites
LOG.debug("Found composite, mapping vertex to instance");
return mapGraphToTypedInstance(guid, referenceVertex);
} else {
Id referenceId =
new Id(guid, referenceVertex.<Integer>getProperty(Constants.VERSION_PROPERTY_KEY),
dataType.getName());
LOG.debug("Found non-composite, adding id {} ", referenceId);
return referenceId;
}
}
break;
}
}
return null;
}
private void mapVertexToStructInstance(Vertex instanceVertex, ITypedInstance typedInstance,
AttributeInfo attributeInfo) throws AtlasException {
LOG.debug("mapping vertex {} to struct {}", instanceVertex, attributeInfo.name);
StructType structType = typeSystem.getDataType(StructType.class, attributeInfo.dataType().getName());
private ITypedStruct mapVertexToStructInstance(Vertex instanceVertex, StructType structType,
String relationshipLabel, String edgeId) throws AtlasException {
LOG.debug("mapping {} to struct {}", string(instanceVertex), relationshipLabel);
ITypedStruct structInstance = null;
String relationshipLabel = GraphHelper.getEdgeLabel(typedInstance, attributeInfo);
LOG.debug("Finding edge for {} -> label {} ", instanceVertex, relationshipLabel);
final Iterable<Edge> edges = instanceVertex.getEdges(Direction.OUT, relationshipLabel);
if (edges.iterator().hasNext()) {
structInstance = structType.createInstance();
typedInstance.set(attributeInfo.name, structInstance);
Edge edge;
if (edgeId == null) {
edge = GraphHelper.getEdgeForLabel(instanceVertex, relationshipLabel);
} else {
edge = graphHelper.getEdgeById(edgeId);
}
for (Edge edge : edges) {
final Vertex structInstanceVertex = edge.getVertex(Direction.IN);
if (structInstanceVertex != null) {
LOG.debug("Found struct instance vertex {}, mapping to instance {} ", structInstanceVertex,
if (edge != null) {
structInstance = structType.createInstance();
Vertex structInstanceVertex = edge.getVertex(Direction.IN);
LOG.debug("Found struct instance {}, mapping to instance {} ", string(structInstanceVertex),
structInstance.getTypeName());
mapVertexToInstance(structInstanceVertex, structInstance, structType.fieldMapping().fields);
break;
}
mapVertexToInstance(structInstanceVertex, structInstance, structType.fieldMapping().fields);
}
return structInstance;
}
private void mapVertexToTraitInstance(Vertex instanceVertex, ITypedReferenceableInstance typedInstance,
......
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
* <p/>
* http://www.apache.org/licenses/LICENSE-2.0
* <p/>
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.atlas.repository.graph;
import com.google.inject.Inject;
import com.tinkerpop.blueprints.Edge;
import com.tinkerpop.blueprints.Vertex;
import org.apache.atlas.AtlasException;
import org.apache.atlas.typesystem.types.TypeSystem;
public class HardDeleteHandler extends DeleteHandler {
private static final GraphHelper graphHelper = GraphHelper.getInstance();
@Inject
public HardDeleteHandler(TypeSystem typeSystem) {
super(typeSystem, true);
}
@Override
protected void _deleteVertex(Vertex instanceVertex) {
graphHelper.removeVertex(instanceVertex);
}
@Override
protected void deleteEdge(Edge edge) throws AtlasException {
graphHelper.removeEdge(edge);
}
}
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
* <p/>
* http://www.apache.org/licenses/LICENSE-2.0
* <p/>
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.atlas.repository.graph;
import com.google.inject.Inject;
import com.tinkerpop.blueprints.Edge;
import com.tinkerpop.blueprints.Vertex;
import org.apache.atlas.AtlasException;
import org.apache.atlas.RequestContext;
import org.apache.atlas.typesystem.persistence.Id;
import org.apache.atlas.typesystem.types.TypeSystem;
import static org.apache.atlas.repository.Constants.MODIFICATION_TIMESTAMP_PROPERTY_KEY;
import static org.apache.atlas.repository.Constants.STATE_PROPERTY_KEY;
public class SoftDeleteHandler extends DeleteHandler {
@Inject
public SoftDeleteHandler(TypeSystem typeSystem) {
super(typeSystem, false);
}
@Override
protected void _deleteVertex(Vertex instanceVertex) {
Id.EntityState state = Id.EntityState.valueOf((String) instanceVertex.getProperty(STATE_PROPERTY_KEY));
if (state != Id.EntityState.DELETED) {
GraphHelper.setProperty(instanceVertex, STATE_PROPERTY_KEY, Id.EntityState.DELETED.name());
GraphHelper.setProperty(instanceVertex, MODIFICATION_TIMESTAMP_PROPERTY_KEY, RequestContext.get().getRequestTime());
}
}
@Override
protected void deleteEdge(Edge edge) throws AtlasException {
Id.EntityState state = Id.EntityState.valueOf((String) edge.getProperty(STATE_PROPERTY_KEY));
if (state != Id.EntityState.DELETED) {
GraphHelper.setProperty(edge, STATE_PROPERTY_KEY, Id.EntityState.DELETED.name());
GraphHelper.setProperty(edge, MODIFICATION_TIMESTAMP_PROPERTY_KEY, RequestContext.get().getRequestTime());
}
}
}
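Because SoftDeleteHandler only flips the state and modification timestamp, soft-deleted vertices and edges remain in the graph; readers distinguish them by the state property. A hedged sketch of that check, using the same property keys as above:
// Sketch only: decides whether an element read from the graph is still live.
String state = vertex.getProperty(Constants.STATE_PROPERTY_KEY);
boolean isActive = Id.EntityState.ACTIVE.name().equals(state);
if (!isActive) {
// A soft-deleted element also records when it was logically removed.
Long deletedAt = vertex.getProperty(Constants.MODIFICATION_TIMESTAMP_PROPERTY_KEY);
}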
......@@ -17,12 +17,13 @@
*/
package org.apache.atlas.repository.graph;
import com.google.inject.Inject;
import com.thinkaurelius.titan.core.SchemaViolationException;
import com.tinkerpop.blueprints.Direction;
import com.tinkerpop.blueprints.Edge;
import com.tinkerpop.blueprints.Vertex;
import org.apache.atlas.AtlasException;
import org.apache.atlas.RequestContext;
import org.apache.atlas.repository.Constants;
import org.apache.atlas.repository.RepositoryException;
import org.apache.atlas.typesystem.IReferenceableInstance;
......@@ -36,17 +37,13 @@ import org.apache.atlas.typesystem.persistence.ReferenceableInstance;
import org.apache.atlas.typesystem.types.AttributeInfo;
import org.apache.atlas.typesystem.types.ClassType;
import org.apache.atlas.typesystem.types.DataTypes;
import org.apache.atlas.typesystem.types.DataTypes.TypeCategory;
import org.apache.atlas.typesystem.types.EnumValue;
import org.apache.atlas.typesystem.types.IConstructableType;
import org.apache.atlas.typesystem.types.IDataType;
import org.apache.atlas.typesystem.types.Multiplicity;
import org.apache.atlas.typesystem.types.ObjectGraphWalker;
import org.apache.atlas.typesystem.types.StructType;
import org.apache.atlas.typesystem.types.TraitType;
import org.apache.atlas.typesystem.types.TypeSystem;
import org.apache.atlas.typesystem.types.TypeUtils;
import org.apache.atlas.typesystem.types.TypeUtils.Pair;
import org.apache.atlas.utils.MD5Utils;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
......@@ -58,70 +55,70 @@ import java.util.Collections;
import java.util.Date;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.Set;
import static org.apache.atlas.repository.graph.GraphHelper.string;
public final class TypedInstanceToGraphMapper {
private static final Logger LOG = LoggerFactory.getLogger(TypedInstanceToGraphMapper.class);
private final Map<Id, Vertex> idToVertexMap = new HashMap<>();
//Maintains a set of Guid based Ids that are referenced/created during graph walk
private final Set<Id> referencedIds = new HashSet<>();
private final TypeSystem typeSystem = TypeSystem.getInstance();
private final List<String> deletedEntityGuids = new ArrayList<>();
private final List<ITypedReferenceableInstance> deletedEntities = new ArrayList<>();
private final GraphToTypedInstanceMapper graphToTypedInstanceMapper;
private static final GraphHelper graphHelper = GraphHelper.getInstance();
private DeleteHandler deleteHandler;
private GraphToTypedInstanceMapper graphToTypedInstanceMapper;
@Inject
public TypedInstanceToGraphMapper(GraphToTypedInstanceMapper graphToTypedInstanceMapper, DeleteHandler deleteHandler) {
this.graphToTypedInstanceMapper = graphToTypedInstanceMapper;
this.deleteHandler = deleteHandler;
}
private final String SIGNATURE_HASH_PROPERTY_KEY = Constants.INTERNAL_PROPERTY_KEY_PREFIX + "signature";
public enum Operation {
CREATE,
UPDATE_PARTIAL,
UPDATE_FULL,
DELETE
UPDATE_FULL
}
public TypedInstanceToGraphMapper(GraphToTypedInstanceMapper graphToTypedInstanceMapper) {
this.graphToTypedInstanceMapper = graphToTypedInstanceMapper;
}
TypeUtils.Pair<List<String>, List<String>> mapTypedInstanceToGraph(Operation operation, ITypedReferenceableInstance... typedInstances)
throws AtlasException {
List<String> createdIds = new ArrayList<>();
List<String> updatedIds = new ArrayList<>();
void mapTypedInstanceToGraph(Operation operation, ITypedReferenceableInstance... typedInstances)
throws AtlasException {
RequestContext requestContext = RequestContext.get();
for (ITypedReferenceableInstance typedInstance : typedInstances) {
LOG.debug("Adding/updating entity {}", typedInstance);
Collection<IReferenceableInstance> newInstances = walkClassInstances(typedInstance);
TypeUtils.Pair<List<ITypedReferenceableInstance>, List<ITypedReferenceableInstance>> instancesPair =
createVerticesAndDiscoverInstances(newInstances);
List<ITypedReferenceableInstance> entitiesToCreate = instancesPair.left;
List<ITypedReferenceableInstance> entitiesToUpdate = instancesPair.right;
switch (operation) {
case CREATE:
List<String> ids = addOrUpdateAttributesAndTraits(operation, instancesPair.left);
createdIds.addAll(ids);
addFullTextProperty(instancesPair.left);
break;
case UPDATE_FULL:
case UPDATE_PARTIAL:
ids = addOrUpdateAttributesAndTraits(Operation.CREATE, instancesPair.left);
createdIds.addAll(ids);
ids = addOrUpdateAttributesAndTraits(operation, instancesPair.right);
updatedIds.addAll(ids);
addFullTextProperty(instancesPair.left);
addFullTextProperty(instancesPair.right);
break;
default:
throw new UnsupportedOperationException("Not handled - " + operation);
case CREATE:
List<String> ids = addOrUpdateAttributesAndTraits(operation, entitiesToCreate);
addFullTextProperty(entitiesToCreate);
requestContext.recordCreatedEntities(ids);
break;
case UPDATE_FULL:
case UPDATE_PARTIAL:
ids = addOrUpdateAttributesAndTraits(Operation.CREATE, entitiesToCreate);
requestContext.recordCreatedEntities(ids);
ids = addOrUpdateAttributesAndTraits(operation, entitiesToUpdate);
requestContext.recordUpdatedEntities(ids);
addFullTextProperty(entitiesToCreate);
addFullTextProperty(entitiesToUpdate);
break;
default:
throw new UnsupportedOperationException("Not handled - " + operation);
}
}
return TypeUtils.Pair.of(createdIds, updatedIds);
}
private Collection<IReferenceableInstance> walkClassInstances(ITypedReferenceableInstance typedInstance)
......@@ -129,7 +126,7 @@ public final class TypedInstanceToGraphMapper {
EntityProcessor entityProcessor = new EntityProcessor();
try {
LOG.debug("Walking the object graph for instance {}", typedInstance.getTypeName());
LOG.debug("Walking the object graph for instance {}", typedInstance.toShortString());
new ObjectGraphWalker(typeSystem, entityProcessor, typedInstance).walk();
} catch (AtlasException me) {
throw new RepositoryException("TypeSystem error when walking the ObjectGraph", me);
......@@ -155,7 +152,7 @@ public final class TypedInstanceToGraphMapper {
private String addOrUpdateAttributesAndTraits(Operation operation, ITypedReferenceableInstance typedInstance)
throws AtlasException {
LOG.debug("Adding/Updating typed instance {}", typedInstance.getTypeName());
LOG.debug("Adding/Updating typed instance {}", typedInstance.toShortString());
Id id = typedInstance.getId();
if (id == null) { // oops
......@@ -173,8 +170,6 @@ public final class TypedInstanceToGraphMapper {
if (Operation.CREATE.equals(operation)) {
//TODO - Handle Trait updates
addTraits(typedInstance, instanceVertex, classType);
} else if (Operation.UPDATE_FULL.equals(operation) || Operation.UPDATE_PARTIAL.equals(operation)) {
GraphHelper.setProperty(instanceVertex, Constants.MODIFICATION_TIMESTAMP_PROPERTY_KEY, Long.valueOf(System.currentTimeMillis()));
}
return getId(typedInstance)._getId();
}
......@@ -182,78 +177,58 @@ public final class TypedInstanceToGraphMapper {
void mapInstanceToVertex(ITypedInstance typedInstance, Vertex instanceVertex,
Map<String, AttributeInfo> fields, boolean mapOnlyUniqueAttributes, Operation operation)
throws AtlasException {
LOG.debug("Mapping instance {} of {} to vertex {}", typedInstance, typedInstance.getTypeName(),
instanceVertex);
LOG.debug("Mapping instance {} to vertex {}", typedInstance.toShortString(), string(instanceVertex));
for (AttributeInfo attributeInfo : fields.values()) {
if (mapOnlyUniqueAttributes && !attributeInfo.isUnique) {
continue;
}
mapAttributesToVertex(typedInstance, instanceVertex, attributeInfo, operation);
mapAttributeToVertex(typedInstance, instanceVertex, attributeInfo, operation);
}
if (operation == Operation.DELETE) {
// Remove uni-directional references to the deletion candidate.
removeUnidirectionalReferences(instanceVertex);
// Remove vertex for deletion candidate.
graphHelper.removeVertex(instanceVertex);
}
GraphHelper.setProperty(instanceVertex, Constants.MODIFICATION_TIMESTAMP_PROPERTY_KEY,
RequestContext.get().getRequestTime());
}
private String getInstanceName(Vertex referencingVertex, IConstructableType referencingType) {
if (referencingType.getTypeCategory() == TypeCategory.CLASS) {
Id idFromVertex = GraphHelper.getIdFromVertex(referencingType.getName(), referencingVertex);
String instanceId = referencingType.getName() + ":" + idFromVertex._getId();
return instanceId;
}
else {
return referencingType.getName();
}
}
void mapAttributesToVertex(ITypedInstance typedInstance, Vertex instanceVertex,
AttributeInfo attributeInfo, Operation operation) throws AtlasException {
void mapAttributeToVertex(ITypedInstance typedInstance, Vertex instanceVertex,
AttributeInfo attributeInfo, Operation operation) throws AtlasException {
Object attrValue = typedInstance.get(attributeInfo.name);
LOG.debug("mapping attribute {} = {}", attributeInfo.name, attrValue);
LOG.debug("Mapping attribute {} = {}", attributeInfo.name, attrValue);
if (attrValue != null || operation == Operation.UPDATE_FULL || operation == Operation.DELETE) {
if (attrValue != null || operation == Operation.UPDATE_FULL) {
switch (attributeInfo.dataType().getTypeCategory()) {
case PRIMITIVE:
case ENUM:
if (operation != Operation.DELETE) {
mapPrimitiveOrEnumToVertex(typedInstance, instanceVertex, attributeInfo);
}
break;
case ARRAY:
mapArrayCollectionToVertex(typedInstance, instanceVertex, attributeInfo, operation);
break;
case MAP:
mapMapCollectionToVertex(typedInstance, instanceVertex, attributeInfo, operation);
break;
case STRUCT:
case CLASS:
final String propertyName = GraphHelper.getQualifiedFieldName(typedInstance, attributeInfo);
String edgeLabel = GraphHelper.getEdgeLabel(typedInstance, attributeInfo);
Iterator<Edge> outGoingEdgesIterator =
GraphHelper.getOutGoingEdgesByLabel(instanceVertex, edgeLabel).iterator();
String currentEntry =
outGoingEdgesIterator.hasNext() ? outGoingEdgesIterator.next().getId().toString() : null;
addOrUpdateCollectionEntry(instanceVertex, attributeInfo, attributeInfo.dataType(), attrValue,
currentEntry, propertyName, operation);
break;
case TRAIT:
// do NOTHING - this is taken care of earlier
break;
default:
throw new IllegalArgumentException("Unknown type category: " + attributeInfo.dataType().getTypeCategory());
case PRIMITIVE:
case ENUM:
mapPrimitiveOrEnumToVertex(typedInstance, instanceVertex, attributeInfo);
break;
case ARRAY:
mapArrayCollectionToVertex(typedInstance, instanceVertex, attributeInfo, operation);
break;
case MAP:
mapMapCollectionToVertex(typedInstance, instanceVertex, attributeInfo, operation);
break;
case STRUCT:
case CLASS:
String edgeLabel = GraphHelper.getEdgeLabel(typedInstance, attributeInfo);
Edge currentEdge = GraphHelper.getEdgeForLabel(instanceVertex, edgeLabel);
String newEdgeId = addOrUpdateReference(instanceVertex, attributeInfo, attributeInfo.dataType(),
attrValue, currentEdge, edgeLabel, operation);
if (currentEdge != null && !currentEdge.getId().toString().equals(newEdgeId)) {
deleteHandler.deleteReference(currentEdge, attributeInfo.dataType().getTypeCategory(),
attributeInfo.isComposite);
}
break;
case TRAIT:
// do NOTHING - this is taken care of earlier
break;
default:
throw new IllegalArgumentException("Unknown type category: " + attributeInfo.dataType().getTypeCategory());
}
}
}
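// Illustration only, not part of this change: how a null attribute value behaves per operation,
// assuming "table" is a typed instance whose class-valued "db" attribute is currently null:
private void illustrateNullAttributeHandling(ITypedInstance table, Vertex tableVertex, AttributeInfo dbAttr)
throws AtlasException {
// UPDATE_PARTIAL skips null values - the existing reference edge is left untouched
mapAttributeToVertex(table, tableVertex, dbAttr, Operation.UPDATE_PARTIAL);
// UPDATE_FULL treats null as "clear": no new edge id is produced, so the current edge is handed to
// the configured DeleteHandler (soft delete marks it, hard delete removes it)
mapAttributeToVertex(table, tableVertex, dbAttr, Operation.UPDATE_FULL);
}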
......@@ -265,18 +240,18 @@ public final class TypedInstanceToGraphMapper {
List<ITypedReferenceableInstance> instancesToUpdate = new ArrayList<>();
for (IReferenceableInstance instance : instances) {
LOG.debug("Discovering instance to create/update for {}", instance);
LOG.debug("Discovering instance to create/update for {}", instance.toShortString());
ITypedReferenceableInstance newInstance;
Id id = instance.getId();
if (!idToVertexMap.containsKey(id)) {
Vertex instanceVertex;
if (id.isAssigned()) { // has a GUID
LOG.debug("Instance {} has an assigned id", instance.getId()._getId());
LOG.debug("Instance has an assigned id {}", instance.getId()._getId());
instanceVertex = graphHelper.getVertexForGUID(id.id);
if (!(instance instanceof ReferenceableInstance)) {
throw new IllegalStateException(
String.format("%s is not of type ITypedReferenceableInstance", instance));
String.format("%s is not of type ITypedReferenceableInstance", instance.toShortString()));
}
newInstance = (ITypedReferenceableInstance) instance;
instancesToUpdate.add(newInstance);
......@@ -288,7 +263,7 @@ public final class TypedInstanceToGraphMapper {
//no entity with the given unique attribute, create new
if (instanceVertex == null) {
LOG.debug("Creating new vertex for instance {}", instance);
LOG.debug("Creating new vertex for instance {}", instance.toShortString());
newInstance = classType.convert(instance, Multiplicity.REQUIRED);
instanceVertex = graphHelper.createVertexWithIdentity(newInstance, classType.getAllSuperTypeNames());
instancesToCreate.add(newInstance);
......@@ -297,21 +272,18 @@ public final class TypedInstanceToGraphMapper {
mapInstanceToVertex(newInstance, instanceVertex, classType.fieldMapping().fields, true, Operation.CREATE);
} else {
LOG.debug("Re-using existing vertex {} for instance {}", instanceVertex.getId(), instance);
LOG.debug("Re-using existing vertex {} for instance {}", string(instanceVertex), instance.toShortString());
if (!(instance instanceof ReferenceableInstance)) {
throw new IllegalStateException(
String.format("%s is not of type ITypedReferenceableInstance", instance));
String.format("%s is not of type ITypedReferenceableInstance", instance.toShortString()));
}
newInstance = (ITypedReferenceableInstance) instance;
instancesToUpdate.add(newInstance);
}
}
//Set the id in the new instance
idToVertexMap.put(id, instanceVertex);
referencedIds.add(GraphHelper.getIdFromVertex(instance.getTypeName(), instanceVertex));
}
}
return TypeUtils.Pair.of(instancesToCreate, instancesToUpdate);
......@@ -338,165 +310,158 @@ public final class TypedInstanceToGraphMapper {
}
}
/******************************************** STRUCT **************************************************/
/******************************************** ARRAY **************************************************/
private TypeUtils.Pair<Vertex, Edge> updateStructVertex(ITypedStruct structInstance, Edge relEdge,
Operation operation) throws AtlasException {
//Already existing vertex. Update
Vertex structInstanceVertex = relEdge.getVertex(Direction.IN);
private void mapArrayCollectionToVertex(ITypedInstance typedInstance, Vertex instanceVertex,
AttributeInfo attributeInfo, Operation operation) throws AtlasException {
LOG.debug("Mapping instance {} for array attribute {} vertex {}", typedInstance.toShortString(),
attributeInfo.name, string(instanceVertex));
// Update attributes
final MessageDigest digester = MD5Utils.getDigester();
String newSignature = structInstance.getSignatureHash(digester);
String curSignature = structInstanceVertex.getProperty(SIGNATURE_HASH_PROPERTY_KEY);
List newElements = (List) typedInstance.get(attributeInfo.name);
boolean newAttributeEmpty = (newElements == null || newElements.isEmpty());
if (!newSignature.equals(curSignature)) {
//Update struct vertex instance only if there is a change
LOG.debug("Updating struct {} since signature has changed {} {} ", structInstance, curSignature, newSignature);
mapInstanceToVertex(structInstance, structInstanceVertex, structInstance.fieldMapping().fields, false, operation);
GraphHelper.setProperty(structInstanceVertex, SIGNATURE_HASH_PROPERTY_KEY, String.valueOf(newSignature));
if (newAttributeEmpty && operation != Operation.UPDATE_FULL) {
return;
}
return TypeUtils.Pair.of(structInstanceVertex, relEdge);
}
private TypeUtils.Pair<Vertex, Edge> addStructVertex(ITypedStruct structInstance, Vertex instanceVertex,
AttributeInfo attributeInfo, String edgeLabel) throws AtlasException {
// add a new vertex for the struct or trait instance
Vertex structInstanceVertex = graphHelper.createVertexWithoutIdentity(structInstance.getTypeName(), null,
Collections.<String>emptySet()); // no super types for struct type
LOG.debug("created vertex {} for struct {} value {}", structInstanceVertex, attributeInfo.name, structInstance);
// map all the attributes to this new vertex
mapInstanceToVertex(structInstance, structInstanceVertex, structInstance.fieldMapping().fields, false, Operation.CREATE);
// add an edge to the newly created vertex from the parent
Edge relEdge = graphHelper.addEdge(instanceVertex, structInstanceVertex, edgeLabel);
return TypeUtils.Pair.of(structInstanceVertex, relEdge);
}
/******************************************** ARRAY **************************************************/
private void mapArrayCollectionToVertex(ITypedInstance typedInstance, Vertex instanceVertex,
AttributeInfo attributeInfo, Operation operation) throws AtlasException {
LOG.debug("Mapping instance {} to vertex {} for name {}", typedInstance.getTypeName(), instanceVertex,
attributeInfo.name);
List newElements = (List) typedInstance.get(attributeInfo.name);
boolean empty = (newElements == null || newElements.isEmpty());
if (!empty || operation == Operation.UPDATE_FULL || operation == Operation.DELETE) {
String propertyName = GraphHelper.getQualifiedFieldName(typedInstance, attributeInfo);
List<String> currentEntries = instanceVertex.getProperty(propertyName);
String propertyName = GraphHelper.getQualifiedFieldName(typedInstance, attributeInfo);
List<String> currentElements = instanceVertex.getProperty(propertyName);
IDataType elementType = ((DataTypes.ArrayType) attributeInfo.dataType()).getElemType();
List<String> newElementsCreated = new ArrayList<>();
IDataType elementType = ((DataTypes.ArrayType) attributeInfo.dataType()).getElemType();
List<String> newEntries = new ArrayList<>();
if (!newAttributeEmpty) {
if (newElements != null && !newElements.isEmpty()) {
int index = 0;
for (; index < newElements.size(); index++) {
String currentEntry =
(currentEntries != null && index < currentEntries.size()) ? currentEntries.get(index) : null;
String currentElement = (currentElements != null && index < currentElements.size()) ?
currentElements.get(index) : null;
LOG.debug("Adding/updating element at position {}, current element {}, new element {}", index,
currentElement, newElements.get(index));
String newEntry = addOrUpdateCollectionEntry(instanceVertex, attributeInfo, elementType,
newElements.get(index), currentEntry, propertyName, operation);
newEntries.add(newEntry);
newElements.get(index), currentElement, propertyName, operation);
newElementsCreated.add(newEntry);
}
}
}
//Remove extra entries in the list
if (currentEntries != null) {
if (index < currentEntries.size()) {
for (; index < currentEntries.size(); index++) {
if (elementType.getTypeCategory() == TypeCategory.CLASS) {
final String edgeId = currentEntries.get(index);
final Pair<Edge, Vertex> edgeAndTargetVertex = GraphHelper.getInstance().getEdgeAndTargetVertex(edgeId);
Id guid = GraphHelper.getIdFromVertex(elementType.getName(), edgeAndTargetVertex.right);
removeUnusedClassReference(edgeId, attributeInfo, elementType, !referencedIds.contains(guid));
} else if (elementType.getTypeCategory() == TypeCategory.STRUCT) {
removeUnusedStructReference(currentEntries.get(index), attributeInfo, elementType);
}
}
}
}
// for dereference on way out
GraphHelper.setProperty(instanceVertex, propertyName, newElementsCreated);
removeUnusedEntries(currentElements, newElementsCreated, elementType, attributeInfo);
}
private void removeUnusedEntries(List<String> currentEntries, List<String> newEntries, IDataType entryType,
AttributeInfo attributeInfo) throws AtlasException {
if (currentEntries == null || currentEntries.isEmpty()) {
return;
}
LOG.debug("Removing unused entries from the old collection");
if (entryType.getTypeCategory() == DataTypes.TypeCategory.STRUCT
|| entryType.getTypeCategory() == DataTypes.TypeCategory.CLASS) {
//Get map of edge id to edge
Map<String, Edge> edgeMap = new HashMap<>();
getEdges(currentEntries, edgeMap);
getEdges(newEntries, edgeMap);
//Get final set of in vertices
Set<String> newInVertices = new HashSet<>();
for (String edgeId : newEntries) {
Vertex inVertex = edgeMap.get(edgeId).getVertex(Direction.IN);
newInVertices.add(inVertex.getId().toString());
}
else if (operation == Operation.UPDATE_FULL || operation == Operation.DELETE) {
// Clear all existing entries
if (currentEntries != null) {
for (String entry : currentEntries) {
if (elementType.getTypeCategory() == TypeCategory.CLASS) {
removeUnusedClassReference(entry, attributeInfo, elementType, true);
} else if(elementType.getTypeCategory() == TypeCategory.STRUCT) {
removeUnusedStructReference(entry, attributeInfo, elementType);
}
//Remove the edges for (current edges - new edges)
List<String> cloneElements = new ArrayList<>(currentEntries);
cloneElements.removeAll(newEntries);
LOG.debug("Removing unused entries from the old collection - {}", cloneElements);
if (!cloneElements.isEmpty()) {
for (String edgeIdForDelete : cloneElements) {
Edge edge = edgeMap.get(edgeIdForDelete);
Vertex inVertex = edge.getVertex(Direction.IN);
if (newInVertices.contains(inVertex.getId().toString())) {
//If the edge.inVertex is in the new set of in vertices, just delete the edge
deleteHandler.deleteEdge(edge, true);
} else {
//else delete the edge + vertex
deleteHandler.deleteReference(edge, entryType.getTypeCategory(), attributeInfo.isComposite);
}
}
}
}
}
// for dereference on way out
GraphHelper.setProperty(instanceVertex, propertyName, newEntries);
private void getEdges(List<String> edgeIds, Map<String, Edge> edgeMap) {
if (edgeIds == null) {
return;
}
for (String edgeId : edgeIds) {
if (!edgeMap.containsKey(edgeId)) {
edgeMap.put(edgeId, graphHelper.getEdgeById(edgeId));
}
}
}
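// Worked illustration, not part of this change: if a table's "columns" array shrinks from edges
// [e1->colA, e2->colB, e3->colC] to [e1->colA, e3->colC], removeUnusedEntries() receives
// currentEntries=[e1,e2,e3] and newEntries=[e1,e3]. Only e2 is left over; colB is not the IN vertex
// of any surviving edge, so deleteReference() is called for e2 and, when the attribute is composite,
// colB itself is deleted (marked inactive or dropped, depending on the DeleteHandler in use).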
/******************************************** MAP **************************************************/
private void mapMapCollectionToVertex(ITypedInstance typedInstance, Vertex instanceVertex,
AttributeInfo attributeInfo, Operation operation) throws AtlasException {
LOG.debug("Mapping instance {} to vertex {} for name {}", typedInstance.getTypeName(), instanceVertex,
AttributeInfo attributeInfo, Operation operation) throws AtlasException {
LOG.debug("Mapping instance {} to vertex {} for attribute {}", typedInstance.toShortString(), string(instanceVertex),
attributeInfo.name);
@SuppressWarnings("unchecked") Map<Object, Object> collection =
(Map<Object, Object>) typedInstance.get(attributeInfo.name);
boolean empty = (collection == null || collection.isEmpty());
if (!empty || operation == Operation.UPDATE_FULL || operation == Operation.DELETE) {
@SuppressWarnings("unchecked") Map<Object, Object> newAttribute =
(Map<Object, Object>) typedInstance.get(attributeInfo.name);
String propertyName = GraphHelper.getQualifiedFieldName(typedInstance, attributeInfo);
IDataType elementType = ((DataTypes.MapType) attributeInfo.dataType()).getValueType();
boolean newAttributeEmpty = (newAttribute == null || newAttribute.isEmpty());
if (newAttributeEmpty && operation != Operation.UPDATE_FULL) {
return;
}
if (!empty) {
for (Map.Entry entry : collection.entrySet()) {
String myPropertyName = propertyName + "." + entry.getKey().toString();
IDataType elementType = ((DataTypes.MapType) attributeInfo.dataType()).getValueType();
String propertyName = GraphHelper.getQualifiedFieldName(typedInstance, attributeInfo);
List<String> currentElements = new ArrayList<>();
List<String> newElementsCreated = new ArrayList<>();
List<String> newKeysCreated = new ArrayList<>();
String currentEntry = instanceVertex.getProperty(myPropertyName);
String newEntry = addOrUpdateCollectionEntry(instanceVertex, attributeInfo, elementType,
entry.getValue(), currentEntry, myPropertyName, operation);
if (!newAttributeEmpty) {
for (Map.Entry entry : newAttribute.entrySet()) {
String propertyNameForKey = GraphHelper.getQualifiedNameForMapKey(propertyName, entry.getKey().toString());
newKeysCreated.add(entry.getKey().toString());
//Add/Update/Remove property value
GraphHelper.setProperty(instanceVertex, myPropertyName, newEntry);
}
}
//Remove unused key references
List<Object> origKeys = instanceVertex.getProperty(propertyName);
if (origKeys != null) {
if (collection != null) {
origKeys.removeAll(collection.keySet());
}
for (Object unusedKey : origKeys) {
String edgeLabel = GraphHelper.getEdgeLabel(typedInstance, attributeInfo) + "." + unusedKey;
if (instanceVertex.getEdges(Direction.OUT, edgeLabel).iterator().hasNext()) {
Edge edge = instanceVertex.getEdges(Direction.OUT, edgeLabel).iterator().next();
if (TypeCategory.STRUCT.equals(((DataTypes.MapType) attributeInfo.dataType()).getValueType().getTypeCategory())) {
removeUnusedStructReference(edge.getId().toString(), attributeInfo,
((DataTypes.MapType) attributeInfo.dataType()).getValueType());
} else if(TypeCategory.CLASS.equals(((DataTypes.MapType) attributeInfo.dataType()).getValueType().getTypeCategory())){
final Vertex targetVertex = edge.getVertex(Direction.OUT);
Id guid = GraphHelper.getIdFromVertex(elementType.getName(), targetVertex);
removeUnusedClassReference(edge.getId().toString(), attributeInfo, elementType, !referencedIds.contains(guid));
}
}
String currentEntry = instanceVertex.getProperty(propertyNameForKey);
if (currentEntry != null) {
currentElements.add(currentEntry);
}
}
// for dereference on way out
GraphHelper.setProperty(instanceVertex, propertyName, collection == null ? null : new ArrayList(collection.keySet()));
String newEntry = addOrUpdateCollectionEntry(instanceVertex, attributeInfo, elementType,
entry.getValue(), currentEntry, propertyNameForKey, operation);
//Add/Update/Remove property value
GraphHelper.setProperty(instanceVertex, propertyNameForKey, newEntry);
newElementsCreated.add(newEntry);
}
}
// for dereference on way out
GraphHelper.setProperty(instanceVertex, propertyName, newKeysCreated);
removeUnusedEntries(currentElements, newElementsCreated, elementType, attributeInfo);
}
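// Worked illustration, not part of this change: for a map attribute such as "columnsMap" with keys
// {"col1", "col2"}, the vertex stores the key list under the attribute's qualified property and one
// per-key property per entry (e.g. <type>.columnsMap = ["col1", "col2"] and <type>.columnsMap.col1
// holding the primitive value or edge id); non-primitive values additionally get an edge whose label
// is derived from the per-key property name, as built by addOrUpdateCollectionEntry() below.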
/******************************************** ARRAY & MAP **************************************************/
private String addOrUpdateCollectionEntry(Vertex instanceVertex, AttributeInfo attributeInfo,
IDataType elementType, Object newVal, String curVal, String propertyName,
Operation operation)
throws AtlasException {
IDataType elementType, Object newAttributeValue, String currentValue,
String propertyName, Operation operation)
throws AtlasException {
final String edgeLabel = GraphHelper.EDGE_LABEL_PREFIX + propertyName;
switch (elementType.getTypeCategory()) {
case PRIMITIVE:
case ENUM:
return newVal != null ? newVal.toString() : null;
return newAttributeValue != null ? newAttributeValue.toString() : null;
case ARRAY:
case MAP:
......@@ -505,67 +470,113 @@ public final class TypedInstanceToGraphMapper {
return null;
case STRUCT:
return addOrUpdateStruct(instanceVertex, attributeInfo, elementType, (ITypedStruct) newVal, curVal, edgeLabel, operation);
case CLASS:
return addOrUpdateClassVertex(instanceVertex, attributeInfo, elementType,
(ITypedReferenceableInstance) newVal, curVal, edgeLabel, operation);
final String edgeLabel = GraphHelper.EDGE_LABEL_PREFIX + propertyName;
Edge currentEdge = graphHelper.getEdgeById(currentValue);
return addOrUpdateReference(instanceVertex, attributeInfo, elementType, newAttributeValue, currentEdge,
edgeLabel, operation);
default:
throw new IllegalArgumentException("Unknown type category: " + elementType.getTypeCategory());
}
}
private String addOrUpdateStruct(Vertex instanceVertex, AttributeInfo attributeInfo, IDataType elementType,
ITypedStruct structAttr, String curVal,
private String addOrUpdateReference(Vertex instanceVertex, AttributeInfo attributeInfo,
IDataType attributeType, Object newAttributeValue, Edge currentEdge,
String edgeLabel, Operation operation) throws AtlasException {
switch (attributeType.getTypeCategory()) {
case STRUCT:
return addOrUpdateStruct(instanceVertex, attributeInfo, (ITypedStruct) newAttributeValue, currentEdge,
edgeLabel, operation);
case CLASS:
return addOrUpdateClassVertex(instanceVertex, currentEdge,
(ITypedReferenceableInstance) newAttributeValue, attributeInfo, edgeLabel);
default:
throw new IllegalArgumentException("Unknown type category: " + attributeType.getTypeCategory());
}
}
/******************************************** STRUCT **************************************************/
private String addOrUpdateStruct(Vertex instanceVertex, AttributeInfo attributeInfo,
ITypedStruct newAttributeValue, Edge currentEdge,
String edgeLabel, Operation operation) throws AtlasException {
TypeUtils.Pair<Vertex, Edge> vertexEdgePair = null;
if (curVal != null && structAttr == null) {
//remove edge
removeUnusedStructReference(curVal, attributeInfo, elementType);
} else if (curVal != null && structAttr != null) {
String newEdgeId = null;
if (currentEdge != null && newAttributeValue != null) {
//update
Edge edge = graphHelper.getOutGoingEdgeById(curVal);
vertexEdgePair = updateStructVertex(structAttr, edge, operation);
} else if (structAttr != null) {
updateStructVertex(newAttributeValue, currentEdge, operation);
newEdgeId = currentEdge.getId().toString();
} else if (currentEdge == null && newAttributeValue != null) {
//add
vertexEdgePair = addStructVertex(structAttr, instanceVertex, attributeInfo, edgeLabel);
Edge newEdge = addStructVertex(newAttributeValue, instanceVertex, attributeInfo, edgeLabel);
newEdgeId = newEdge.getId().toString();
}
return newEdgeId;
}
private Edge addStructVertex(ITypedStruct structInstance, Vertex instanceVertex,
AttributeInfo attributeInfo, String edgeLabel) throws AtlasException {
// add a new vertex for the struct or trait instance
Vertex structInstanceVertex = graphHelper.createVertexWithoutIdentity(structInstance.getTypeName(), null,
Collections.<String>emptySet()); // no super types for struct type
LOG.debug("created vertex {} for struct {} value {}", string(structInstanceVertex), attributeInfo.name,
structInstance.toShortString());
return (vertexEdgePair != null) ? vertexEdgePair.right.getId().toString() : null;
// map all the attributes to this new vertex
mapInstanceToVertex(structInstance, structInstanceVertex, structInstance.fieldMapping().fields, false,
Operation.CREATE);
// add an edge to the newly created vertex from the parent
Edge newEdge = graphHelper.addEdge(instanceVertex, structInstanceVertex, edgeLabel);
return newEdge;
}
private String addOrUpdateClassVertex(Vertex instanceVertex, AttributeInfo attributeInfo, IDataType elementType,
ITypedReferenceableInstance newVal, String curVal,
String edgeLabel, Operation operation) throws AtlasException {
Vertex toVertex = getClassVertex(newVal);
if(toVertex == null && newVal != null) {
LOG.error("Could not find vertex for Class Reference " + newVal);
throw new EntityNotFoundException("Could not find vertex for Class Reference " + newVal);
}
private void updateStructVertex(ITypedStruct newAttributeValue, Edge currentEdge,
Operation operation) throws AtlasException {
//Already existing vertex. Update
Vertex structInstanceVertex = currentEdge.getVertex(Direction.IN);
TypeUtils.Pair<Vertex, Edge> vertexEdgePair = null;
if (curVal != null && newVal == null) {
//remove edge
removeUnusedClassReference(curVal, attributeInfo, elementType, true);
} else if (curVal != null && newVal != null) {
Edge edge = graphHelper.getOutGoingEdgeById(curVal);
Id classRefId = getId(newVal);
vertexEdgePair = updateClassEdge(classRefId, newVal, instanceVertex, edge, toVertex, attributeInfo,
elementType, edgeLabel, operation);
} else if (newVal != null){
vertexEdgePair = addClassEdge(instanceVertex, toVertex, edgeLabel);
}
LOG.debug("Updating struct vertex {} with struct {}", string(structInstanceVertex), newAttributeValue.toShortString());
// Update attributes
final MessageDigest digester = MD5Utils.getDigester();
String newSignature = newAttributeValue.getSignatureHash(digester);
String curSignature = structInstanceVertex.getProperty(SIGNATURE_HASH_PROPERTY_KEY);
return (vertexEdgePair != null) ? vertexEdgePair.right.getId().toString() : null;
if (!newSignature.equals(curSignature)) {
//Update struct vertex instance only if there is a change
LOG.debug("Updating struct {} since signature has changed {} {} ", newAttributeValue, curSignature, newSignature);
mapInstanceToVertex(newAttributeValue, structInstanceVertex, newAttributeValue.fieldMapping().fields, false, operation);
GraphHelper.setProperty(structInstanceVertex, SIGNATURE_HASH_PROPERTY_KEY, String.valueOf(newSignature));
}
}
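// Note, illustration only: the MD5 signature hash acts as a cheap dirty-check. If an incoming struct
// (say Address{street="101 W 5th St", city="Austin"}) hashes to the value already stored under
// SIGNATURE_HASH_PROPERTY_KEY, the struct vertex is left untouched; only a changed hash triggers
// re-mapping its attributes onto the existing vertex.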
/******************************************** CLASS **************************************************/
private TypeUtils.Pair<Vertex, Edge> addClassEdge(Vertex instanceVertex, Vertex toVertex, String edgeLabel) throws AtlasException {
// add an edge to the class vertex from the instance
Edge edge = graphHelper.addEdge(instanceVertex, toVertex, edgeLabel);
return TypeUtils.Pair.of(toVertex, edge);
private String addOrUpdateClassVertex(Vertex instanceVertex, Edge currentEdge,
ITypedReferenceableInstance newAttributeValue, AttributeInfo attributeInfo,
String edgeLabel) throws AtlasException {
Vertex newReferenceVertex = getClassVertex(newAttributeValue);
if(newReferenceVertex == null && newAttributeValue != null) {
LOG.error("Could not find vertex for Class Reference " + newAttributeValue);
throw new EntityNotFoundException("Could not find vertex for Class Reference " + newAttributeValue);
}
String newEdgeId = null;
if (currentEdge != null && newAttributeValue != null) {
newEdgeId = updateClassEdge(instanceVertex, currentEdge, newAttributeValue, newReferenceVertex,
attributeInfo, edgeLabel);
} else if (currentEdge == null && newAttributeValue != null){
Edge newEdge = addClassEdge(instanceVertex, newReferenceVertex, edgeLabel);
newEdgeId = newEdge.getId().toString();
}
return newEdgeId;
}
private Edge addClassEdge(Vertex instanceVertex, Vertex toVertex, String edgeLabel) throws AtlasException {
// add an edge to the class vertex from the instance
return graphHelper.addEdge(instanceVertex, toVertex, edgeLabel);
}
private Vertex getClassVertex(ITypedReferenceableInstance typedReference) throws EntityNotFoundException {
......@@ -598,50 +609,36 @@ public final class TypedInstanceToGraphMapper {
}
private TypeUtils.Pair<Vertex, Edge> updateClassEdge(Id id, final ITypedReferenceableInstance typedInstance,
Vertex instanceVertex, Edge edge, Vertex toVertex,
AttributeInfo attributeInfo, IDataType dataType,
String edgeLabel, Operation operation) throws AtlasException {
TypeUtils.Pair<Vertex, Edge> result = TypeUtils.Pair.of(toVertex, edge);
Edge newEdge = edge;
private String updateClassEdge(Vertex instanceVertex, Edge currentEdge,
ITypedReferenceableInstance newAttributeValue,
Vertex newVertex, AttributeInfo attributeInfo,
String edgeLabel) throws AtlasException {
LOG.debug("Updating {} for reference attribute {}", string(currentEdge), attributeInfo.name);
// Update edge if it exists
Vertex invertex = edge.getVertex(Direction.IN);
String currentGUID = invertex.getProperty(Constants.GUID_PROPERTY_KEY);
Id currentId = new Id(currentGUID, 0, (String) invertex.getProperty(Constants.ENTITY_TYPE_PROPERTY_KEY));
if (!currentId.equals(id)) {
Vertex currentVertex = currentEdge.getVertex(Direction.IN);
String currentEntityId = GraphHelper.getIdFromVertex(currentVertex);
String newEntityId = getId(newAttributeValue).id;
String newEdgeId = currentEdge.getId().toString();
if (!currentEntityId.equals(newEntityId)) {
// add an edge to the class vertex from the instance
if (toVertex != null) {
newEdge = graphHelper.addEdge(instanceVertex, toVertex, edgeLabel);
result = TypeUtils.Pair.of(toVertex, newEdge);
}
//Delete vertex only if the idToVertexMap doesn't have it in future references
removeUnusedClassReference(edge.getId().toString(), attributeInfo, dataType, !referencedIds.contains(currentId));
}
if (attributeInfo.isComposite) {
//Update the attributes also if composite
if (typedInstance.fieldMapping() != null) {
//In case of Id instance, fieldMapping is null
mapInstanceToVertex(typedInstance, toVertex, typedInstance.fieldMapping().fields , false, operation);
//Update full text for the updated composite vertex
addFullTextProperty(new ArrayList<ITypedReferenceableInstance>() {{ add(typedInstance); }});
if (newVertex != null) {
Edge newEdge = graphHelper.getOrCreateEdge(instanceVertex, newVertex, edgeLabel);
newEdgeId = newEdge.getId().toString();
}
}
return result;
return newEdgeId;
}
/******************************************** TRAITS ****************************************************/
void mapTraitInstanceToVertex(ITypedStruct traitInstance, IDataType entityType, Vertex parentInstanceVertex)
throws AtlasException {
throws AtlasException {
// add a new vertex for the struct or trait instance
final String traitName = traitInstance.getTypeName();
Vertex traitInstanceVertex = graphHelper.createVertexWithoutIdentity(traitInstance.getTypeName(), null,
typeSystem.getDataType(TraitType.class, traitName).getAllSuperTypeNames());
LOG.debug("created vertex {} for trait {}", traitInstanceVertex, traitName);
LOG.debug("created vertex {} for trait {}", string(traitInstanceVertex), traitName);
// map all the attributes to this newly created vertex
mapInstanceToVertex(traitInstance, traitInstanceVertex, traitInstance.fieldMapping().fields, false, Operation.CREATE);
......@@ -692,314 +689,6 @@ public final class TypedInstanceToGraphMapper {
}
}
GraphHelper.setProperty(instanceVertex, vertexPropertyName, propertyValue);
}
private Edge removeUnusedClassReference(String edgeId, AttributeInfo attributeInfo, IDataType<?> elementType, boolean deleteReferredVertex) throws AtlasException {
// Remove edge to disconnect struct or class reference.
// For struct or composite class reference, also delete the target instance.
Edge removedRelation = null;
TypeUtils.Pair<Edge, Vertex> edgeAndVertex = graphHelper.getEdgeAndTargetVertex(edgeId);
if (attributeInfo.isComposite) {
// For uni-directional reference, remove the edge.
// For bi-directional reference, the edges are removed
// when the composite entity is deleted.
if (attributeInfo.reverseAttributeName == null) {
graphHelper.removeEdge(edgeAndVertex.left);
removedRelation = edgeAndVertex.left;
}
// Delete the contained entity.
if (deleteReferredVertex) {
if (LOG.isDebugEnabled()) {
Vertex sourceVertex = edgeAndVertex.left.getVertex(Direction.OUT);
String sourceTypeName = GraphHelper.getTypeName(sourceVertex);
LOG.debug("Deleting composite entity {}:{} contained by {}:{} through reference {}",
elementType.getName(), GraphHelper.getIdFromVertex(elementType.getName(), edgeAndVertex.right)._getId(),
sourceTypeName, GraphHelper.getIdFromVertex(sourceTypeName, sourceVertex)._getId(),
attributeInfo.name);
}
deleteEntity(elementType.getName(), edgeAndVertex.right);
}
}
else {
if (attributeInfo.reverseAttributeName != null) {
// Disconnect both ends of the bi-directional reference
removeReverseReference(edgeAndVertex, attributeInfo);
}
graphHelper.removeEdge(edgeAndVertex.left);
removedRelation = edgeAndVertex.left;
return removedRelation;
}
return removedRelation;
}
private Edge removeUnusedStructReference(String edgeId, AttributeInfo attributeInfo, IDataType<?> elementType) throws AtlasException {
// Remove edge to disconnect struct or class reference.
// For struct or composite class reference, also delete the target instance.
Edge removedRelation = null;
TypeUtils.Pair<Edge, Vertex> edgeAndVertex = graphHelper.getEdgeAndTargetVertex(edgeId);
graphHelper.removeEdge(edgeAndVertex.left);
removedRelation = edgeAndVertex.left;
// Create an empty instance to use for clearing all struct attributes.
StructType structType = (StructType) elementType;
ITypedStruct typedInstance = structType.createInstance();
// Delete target vertex and any underlying structs and composite entities owned by this struct.
mapInstanceToVertex(typedInstance, edgeAndVertex.right, structType.fieldMapping().fields, false, Operation.DELETE);
return removedRelation;
}
/**
* Remove the reverse reference value for the specified edge and vertex.
*
* @param edgeAndVertex
* @param attributeInfo
* @throws AtlasException
*/
private void removeReverseReference(TypeUtils.Pair<Edge, Vertex> edgeAndVertex,
AttributeInfo attributeInfo) throws AtlasException {
Vertex sourceVertex = edgeAndVertex.left.getVertex(Direction.OUT);
String inverseTypeName = GraphHelper.getTypeName(edgeAndVertex.right);
IConstructableType inverseType = typeSystem.getDataType(IConstructableType.class, inverseTypeName);
AttributeInfo inverseAttributeInfo = inverseType.fieldMapping().fields.get(attributeInfo.reverseAttributeName);
String inverseEdgeLabel = GraphHelper.getEdgeLabel(inverseType, inverseAttributeInfo);
TypeCategory inverseTypeCategory = inverseAttributeInfo.dataType().getTypeCategory();
// Find and remove the edge which represents the inverse reference value.
Iterable<Edge> inverseEdges = GraphHelper.getOutGoingEdgesByLabel(edgeAndVertex.right, inverseEdgeLabel);
Edge removedEdge = null;
// Search for the edge which references the source vertex.
for (Edge edge : inverseEdges) {
Vertex vertex = edge.getVertex(Direction.IN);
if (vertex.equals(sourceVertex)) {
// Found the edge which points back at source vertex.
// Disconnect the reference by removing the edge and
// removing the edge ID from the vertex property.
removeReferenceValue(edge, new AtlasEdgeLabel(edge.getLabel()), edgeAndVertex.right, inverseType, inverseTypeCategory);
removedEdge = edge;
break;
}
}
if (removedEdge != null) {
if (LOG.isDebugEnabled()) {
String sourceTypeName = GraphHelper.getTypeName(sourceVertex);
LOG.debug("Removed edge {} for reverse reference {} from {}:{} to {}:{} ", removedEdge,
GraphHelper.getQualifiedFieldName(inverseType, inverseAttributeInfo.name),
inverseTypeName, GraphHelper.getIdFromVertex(inverseTypeName, edgeAndVertex.right)._getId(),
sourceTypeName, GraphHelper.getIdFromVertex(sourceTypeName, sourceVertex)._getId());
}
}
else {
// We didn't find the edge for the inverse reference.
// Since Atlas currently does not automatically set
// the inverse reference when a reference value is updated,
// unbalanced references are not unexpected.
// The presence of inverse reference values depends on
// well-behaved client applications which explicitly set
// both ends of the reference.
// TODO: throw an exception as it indicates an unbalanced reference?
String sourceTypeName = GraphHelper.getTypeName(sourceVertex);
LOG.warn("No edge found for inverse reference {} on vertex {} for entity instance {}:{} which points back to vertex {} for {}:{}",
inverseAttributeInfo.name, edgeAndVertex.right,
inverseTypeName, GraphHelper.getIdFromVertex(inverseTypeName, edgeAndVertex.right)._getId(),
sourceVertex, sourceTypeName, GraphHelper.getIdFromVertex(sourceTypeName, sourceVertex)._getId());
}
}
/**
* Remove any unidirectional map or array reference to a class, struct, or trait vertex.
* This involves removing appropriate value from the vertex property which holds the
* reference values.
*
* @param targetVertex a vertex which represents a class, struct, or trait instance
* @throws AtlasException
*/
private void removeUnidirectionalReferences(Vertex targetVertex) throws AtlasException {
// Search for any remaining incoming edges that represent unidirectional references
// to the target vertex.
Iterable<Edge> incomingEdges = targetVertex.getEdges(Direction.IN);
for (Edge edge : incomingEdges) {
String label = edge.getLabel();
AtlasEdgeLabel atlasEdgeLabel = new AtlasEdgeLabel(label);
Vertex referencingVertex = edge.getVertex(Direction.OUT);
String typeName = atlasEdgeLabel.getTypeName();
IConstructableType referencingType = typeSystem.getDataType(IConstructableType.class, typeName);
AttributeInfo attributeInfo = referencingType.fieldMapping().fields.get(atlasEdgeLabel.getAttributeName());
if (attributeInfo == null) {
String instanceId = getInstanceName(referencingVertex, referencingType);
throw new AtlasException("Outgoing edge " + edge.getId().toString()
+ " for " + instanceId + "(vertex " + referencingVertex + "): label " + label
+ " has an attribute name " + atlasEdgeLabel.getAttributeName() + " that is undefined on "
+ referencingType.getTypeCategory() + " " + typeName);
}
// Remove the appropriate value from the vertex property for this reference.
removeReferenceValue(edge, atlasEdgeLabel, referencingVertex, referencingType, attributeInfo.dataType().getTypeCategory());
}
}
private Pair<String, Boolean> removeReferenceValue(Edge edge, AtlasEdgeLabel atlasEdgeLabel,
Vertex referencingVertex, IConstructableType referencingType, TypeCategory attrTypeCategory)
throws AtlasException {
graphHelper.removeEdge(edge);
if (attrTypeCategory != TypeCategory.ARRAY && attrTypeCategory != TypeCategory.MAP) {
// Multiplicity-one reference is represented by the edge,
// there is no vertex property to update. So just remove the edge.
return new Pair<String, Boolean>(edge.getId().toString(), Boolean.TRUE);
}
List<String> currentRefValues = referencingVertex.getProperty(atlasEdgeLabel.getQualifiedAttributeName());
List<String> newRefValues = new ArrayList<>(currentRefValues);
Pair<String, Boolean> refValueRemoved = null;
if (attrTypeCategory == TypeCategory.ARRAY) {
refValueRemoved = removeArrayReferenceValue(atlasEdgeLabel, referencingVertex, edge, newRefValues);
}
else {
refValueRemoved = removeMapReferenceValue(atlasEdgeLabel, referencingVertex, edge, newRefValues);
}
if (refValueRemoved.right) {
if (LOG.isDebugEnabled()) {
String instanceId = getInstanceName(referencingVertex, referencingType);
LOG.debug("Reference value {} removed from reference {} on vertex {} for instance of {} {}",
refValueRemoved.left, atlasEdgeLabel.getAttributeName(), referencingVertex,
referencingType.getTypeCategory(), instanceId);
}
// If the referencing instance is an entity, update the modification timestamp.
if (referencingType instanceof ClassType) {
GraphHelper.setProperty(referencingVertex, Constants.MODIFICATION_TIMESTAMP_PROPERTY_KEY, System.currentTimeMillis());
}
}
else {
// The expected value is missing from the reference property values - log a warning.
String instanceId = getInstanceName(referencingVertex, referencingType);
LOG.warn("Reference value {} expected but not found in array reference {} on vertex {} for instance of {} {}",
refValueRemoved.left, atlasEdgeLabel.getAttributeName(), referencingVertex,
referencingType.getTypeCategory(), instanceId);
}
return refValueRemoved;
}
private TypeUtils.Pair<String, Boolean> removeArrayReferenceValue(AtlasEdgeLabel atlasEdgeLabel, Vertex referencingVertex,
Edge edge, List<String> newRefValues) {
String refValueToRemove = edge.getId().toString();
boolean valueRemoved = newRefValues.remove(refValueToRemove);
if (valueRemoved) {
GraphHelper.setProperty(referencingVertex, atlasEdgeLabel.getQualifiedAttributeName(), newRefValues);
}
return new TypeUtils.Pair<String, Boolean>(refValueToRemove, Boolean.valueOf(valueRemoved));
}
private TypeUtils.Pair<String, Boolean> removeMapReferenceValue(AtlasEdgeLabel atlasEdgeLabel, Vertex referencingVertex,
Edge edge, List<String> newRefValues) throws AtlasException {
String refValueToRemove = atlasEdgeLabel.getMapKey();
if (refValueToRemove == null) {
// Edge label is missing the map key - throw an exception.
String typeName = atlasEdgeLabel.getTypeName();
throw new AtlasException("Outgoing edge " + edge.getId().toString()
+ " for vertex " + referencingVertex + "): label " + atlasEdgeLabel.getEdgeLabel()
+ " for map attribute " + atlasEdgeLabel.getAttributeName() + " on type "
+ typeName + " is missing the map key");
}
boolean valueRemoved = newRefValues.remove(refValueToRemove);
if (valueRemoved) {
GraphHelper.setProperty(referencingVertex, atlasEdgeLabel.getQualifiedAttributeName(), newRefValues);
// For maps, also remove the key-value pair property value.
GraphHelper.setProperty(referencingVertex, atlasEdgeLabel.getQualifiedMapKey(), null);
}
return new TypeUtils.Pair<String, Boolean>(refValueToRemove, Boolean.valueOf(valueRemoved));
}
void deleteEntity(String typeName, Vertex instanceVertex) throws AtlasException {
// Check if this entity has already been processed.
Id id = GraphHelper.getIdFromVertex(typeName, instanceVertex);
if (deletedEntityGuids.contains(id._getId())) {
return;
}
deletedEntityGuids.add(id._getId());
// Remove traits owned by this entity.
deleteAllTraits(instanceVertex);
// Create an empty instance to use for clearing all attributes.
ClassType classType = typeSystem.getDataType(ClassType.class, typeName);
ITypedReferenceableInstance typedInstance = classType.createInstance(id);
// Remove any underlying structs and composite entities owned by this entity.
mapInstanceToVertex(typedInstance, instanceVertex, classType.fieldMapping().fields, false, Operation.DELETE);
deletedEntities.add(typedInstance);
}
/**
* Delete all traits from the specified vertex.
*
* @param instanceVertex
* @throws AtlasException
*/
private void deleteAllTraits(Vertex instanceVertex) throws AtlasException {
List<String> traitNames = GraphHelper.getTraitNames(instanceVertex);
final String entityTypeName = GraphHelper.getTypeName(instanceVertex);
for (String traitNameToBeDeleted : traitNames) {
String relationshipLabel = GraphHelper.getTraitLabel(entityTypeName, traitNameToBeDeleted);
Iterator<Edge> results = instanceVertex.getEdges(Direction.OUT, relationshipLabel).iterator();
if (results.hasNext()) { // there should only be one edge for this label
final Edge traitEdge = results.next();
final Vertex traitVertex = traitEdge.getVertex(Direction.IN);
// remove the edge to the trait instance from the repository
graphHelper.removeEdge(traitEdge);
if (traitVertex != null) { // remove the trait instance from the repository
deleteTraitVertex(traitNameToBeDeleted, traitVertex);
}
}
}
}
void deleteTraitVertex(String traitName, final Vertex traitVertex) throws AtlasException {
TraitType traitType = typeSystem.getDataType(TraitType.class, traitName);
ITypedStruct traitStruct = traitType.createInstance();
// Remove trait vertex along with any struct and class attributes owned by this trait.
mapInstanceToVertex(traitStruct, traitVertex, traitType.fieldMapping().fields, false, Operation.DELETE);
}
/**
* Get the GUIDs of entities that have been deleted.
*
* @return
*/
List<String> getDeletedEntityGuids() {
if (deletedEntityGuids.size() == 0) {
return Collections.emptyList();
}
else {
return Collections.unmodifiableList(deletedEntityGuids);
}
}
/**
* Get the entities that have been deleted.
*
* @return
*/
List<ITypedReferenceableInstance> getDeletedEntities() {
if (deletedEntities.size() == 0) {
return Collections.emptyList();
}
else {
return Collections.unmodifiableList(deletedEntities);
}
}
}
......@@ -206,9 +206,10 @@ public class GraphBackedTypeStore implements ITypeStore {
}
private void addEdge(Vertex fromVertex, Vertex toVertex, String label) {
Iterable<Edge> edges = GraphHelper.getOutGoingEdgesByLabel(fromVertex, label);
Iterator<Edge> edges = GraphHelper.getOutGoingEdgesByLabel(fromVertex, label);
// ATLAS-474: Check if this type system edge already exists, to avoid duplicates.
for (Edge edge : edges) {
while (edges.hasNext()) {
Edge edge = edges.next();
if (edge.getVertex(Direction.IN).equals(toVertex)) {
LOG.debug("Edge from {} to {} with label {} already exists",
toString(fromVertex), toString(toVertex), label);
......
......@@ -69,6 +69,7 @@ public class BaseHiveRepositoryTest {
protected void setUp() throws Exception {
setUpTypes();
new GraphBackedSearchIndexer(graphProvider);
RequestContext.createContext();
setupInstances();
TestUtils.dumpGraph(graphProvider.get());
}
......
......@@ -21,14 +21,12 @@ package org.apache.atlas;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableSet;
import com.thinkaurelius.titan.core.TitanGraph;
import com.tinkerpop.blueprints.Edge;
import com.tinkerpop.blueprints.Vertex;
import com.tinkerpop.blueprints.util.io.graphson.GraphSONWriter;
import org.apache.atlas.repository.graph.GraphHelper;
import org.apache.atlas.typesystem.ITypedReferenceableInstance;
import org.apache.atlas.typesystem.Referenceable;
import org.apache.atlas.typesystem.TypesDef;
import org.apache.atlas.typesystem.persistence.Id;
import org.apache.atlas.typesystem.types.AttributeDefinition;
import org.apache.atlas.typesystem.types.ClassType;
import org.apache.atlas.typesystem.types.DataTypes;
......@@ -46,12 +44,14 @@ import org.testng.Assert;
import java.io.File;
import java.util.Collection;
import java.util.Date;
import static org.apache.atlas.typesystem.types.utils.TypesUtil.createClassTypeDef;
import static org.apache.atlas.typesystem.types.utils.TypesUtil.createOptionalAttrDef;
import static org.apache.atlas.typesystem.types.utils.TypesUtil.createRequiredAttrDef;
import static org.apache.atlas.typesystem.types.utils.TypesUtil.createStructTypeDef;
import static org.apache.atlas.typesystem.types.utils.TypesUtil.createTraitTypeDef;
import static org.apache.atlas.typesystem.types.utils.TypesUtil.createUniqueRequiredAttrDef;
/**
* Test utility class.
......@@ -75,16 +75,7 @@ public final class TestUtils {
System.out.println("tempFile.getPath() = " + tempFile.getPath());
GraphSONWriter.outputGraph(titanGraph, tempFile.getPath());
System.out.println("Vertices:");
for (Vertex vertex : titanGraph.getVertices()) {
System.out.println(GraphHelper.vertexString(vertex));
}
System.out.println("Edges:");
for (Edge edge : titanGraph.getEdges()) {
System.out.println(GraphHelper.edgeString(edge));
}
GraphHelper.dumpToLog(titanGraph);
return tempFile.getPath();
}
......@@ -106,9 +97,9 @@ public final class TestUtils {
createStructTypeDef("Address", "Address"+_description, createRequiredAttrDef("street", DataTypes.STRING_TYPE),
createRequiredAttrDef("city", DataTypes.STRING_TYPE));
HierarchicalTypeDefinition<ClassType> deptTypeDef = createClassTypeDef("Department", "Department"+_description, ImmutableSet.<String>of(),
HierarchicalTypeDefinition<ClassType> deptTypeDef = createClassTypeDef(DEPARTMENT_TYPE, "Department"+_description, ImmutableSet.<String>of(),
createRequiredAttrDef("name", DataTypes.STRING_TYPE),
new AttributeDefinition("employees", String.format("array<%s>", "Person"), Multiplicity.COLLECTION,
new AttributeDefinition("employees", String.format("array<%s>", "Person"), Multiplicity.OPTIONAL,
true, "department"));
HierarchicalTypeDefinition<ClassType> personTypeDef = createClassTypeDef("Person", "Person"+_description, ImmutableSet.<String>of(),
......@@ -132,9 +123,13 @@ public final class TestUtils {
ImmutableList.of(deptTypeDef, personTypeDef, managerTypeDef));
}
public static Referenceable createDeptEg1(TypeSystem ts) throws AtlasException {
Referenceable hrDept = new Referenceable(ENTITY_TYPE);
Referenceable john = new Referenceable("Person");
public static final String DEPARTMENT_TYPE = "Department";
public static final String PERSON_TYPE = "Person";
public static ITypedReferenceableInstance createDeptEg1(TypeSystem ts) throws AtlasException {
Referenceable hrDept = new Referenceable(DEPARTMENT_TYPE);
Referenceable john = new Referenceable(PERSON_TYPE);
Referenceable jane = new Referenceable("Manager", "SecurityClearance");
Referenceable johnAddr = new Referenceable("Address");
Referenceable janeAddr = new Referenceable("Address");
......@@ -183,13 +178,13 @@ public final class TestUtils {
ITypedReferenceableInstance hrDept2 = deptType.convert(hrDept, Multiplicity.REQUIRED);
Assert.assertNotNull(hrDept2);
return hrDept;
return hrDept2;
}
public static final String ENTITY_TYPE = "Department";
public static final String DATABASE_TYPE = "hive_database";
public static final String DATABASE_NAME = "foo";
public static final String TABLE_TYPE = "hive_table";
public static final String PROCESS_TYPE = "hive_process";
public static final String COLUMN_TYPE = "column_type";
public static final String TABLE_NAME = "bar";
public static final String CLASSIFICATION = "classification";
......@@ -200,6 +195,9 @@ public final class TestUtils {
public static final String PARTITION_CLASS_TYPE = "partition_class_type";
public static final String SERDE_TYPE = "serdeType";
public static final String COLUMNS_MAP = "columnsMap";
public static final String COLUMNS_ATTR_NAME = "columns";
public static final String NAME = "name";
public static TypesDef defineHiveTypes() {
String _description = "_description";
......@@ -211,7 +209,7 @@ public final class TestUtils {
HierarchicalTypeDefinition<ClassType> databaseTypeDefinition =
createClassTypeDef(DATABASE_TYPE, DATABASE_TYPE + _description,ImmutableSet.of(SUPER_TYPE_NAME),
TypesUtil.createUniqueRequiredAttrDef("name", DataTypes.STRING_TYPE),
TypesUtil.createUniqueRequiredAttrDef(NAME, DataTypes.STRING_TYPE),
createOptionalAttrDef("created", DataTypes.DATE_TYPE),
createRequiredAttrDef("description", DataTypes.STRING_TYPE));
......@@ -227,7 +225,7 @@ public final class TestUtils {
HierarchicalTypeDefinition<ClassType> columnsDefinition =
createClassTypeDef(COLUMN_TYPE, ImmutableSet.<String>of(),
createRequiredAttrDef("name", DataTypes.STRING_TYPE),
createUniqueRequiredAttrDef("name", DataTypes.STRING_TYPE),
createRequiredAttrDef("type", DataTypes.STRING_TYPE));
StructTypeDefinition partitionDefinition = new StructTypeDefinition("partition_struct_type", "partition_struct_type" + _description,
......@@ -268,6 +266,12 @@ public final class TestUtils {
new HierarchicalTypeDefinition<>(ClassType.class, "partition_class_type", "partition_class_type" + _description,
ImmutableSet.of(SUPER_TYPE_NAME), partClsAttributes);
HierarchicalTypeDefinition<ClassType> processClsType =
new HierarchicalTypeDefinition<>(ClassType.class, PROCESS_TYPE, PROCESS_TYPE + _description,
ImmutableSet.<String>of(), new AttributeDefinition[]{
new AttributeDefinition("outputs", "array<" + TABLE_TYPE + ">", Multiplicity.OPTIONAL, false, null)
});
HierarchicalTypeDefinition<ClassType> tableTypeDefinition =
createClassTypeDef(TABLE_TYPE, TABLE_TYPE + _description, ImmutableSet.of(SUPER_TYPE_NAME),
TypesUtil.createUniqueRequiredAttrDef("name", DataTypes.STRING_TYPE),
......@@ -322,7 +326,8 @@ public final class TestUtils {
return TypesUtil.getTypesDef(ImmutableList.of(enumTypeDefinition),
ImmutableList.of(structTypeDefinition, partitionDefinition),
ImmutableList.of(classificationTypeDefinition, fetlClassificationTypeDefinition, piiTypeDefinition),
ImmutableList.of(superTypeDefinition, databaseTypeDefinition, columnsDefinition, tableTypeDefinition, storageDescClsDef, partClsDef));
ImmutableList.of(superTypeDefinition, databaseTypeDefinition, columnsDefinition, tableTypeDefinition,
storageDescClsDef, partClsDef, processClsType));
}
public static Collection<IDataType> createHiveTypes(TypeSystem typeSystem) throws Exception {
......@@ -336,4 +341,31 @@ public final class TestUtils {
public static final String randomString() {
return RandomStringUtils.randomAlphanumeric(10);
}
public static Referenceable createDBEntity() {
Referenceable entity = new Referenceable(DATABASE_TYPE);
String dbName = RandomStringUtils.randomAlphanumeric(10);
entity.set(NAME, dbName);
entity.set("description", "us db");
return entity;
}
public static Referenceable createTableEntity(String dbId) {
Referenceable entity = new Referenceable(TABLE_TYPE);
String tableName = RandomStringUtils.randomAlphanumeric(10);
entity.set(NAME, tableName);
entity.set("description", "random table");
entity.set("type", "type");
entity.set("tableType", "MANAGED");
entity.set("database", new Id(dbId, 0, DATABASE_TYPE));
entity.set("created", new Date());
return entity;
}
public static Referenceable createColumnEntity() {
Referenceable entity = new Referenceable(COLUMN_TYPE);
entity.set(NAME, RandomStringUtils.randomAlphanumeric(10));
entity.set("type", "VARCHAR(32)");
return entity;
}
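// Usage sketch, illustration only: wiring the helpers above into a table that owns one composite
// column, in the shape the delete tests expect (dbGuid would come from a prior createEntities call):
public static Referenceable createTableWithColumn(String dbGuid) {
Referenceable table = createTableEntity(dbGuid);
table.set(COLUMNS_ATTR_NAME, ImmutableList.of(createColumnEntity()));
return table;
}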
}
......@@ -21,6 +21,7 @@ package org.apache.atlas.discovery;
import com.google.common.collect.ImmutableSet;
import org.apache.atlas.BaseHiveRepositoryTest;
import org.apache.atlas.RepositoryMetadataModule;
import org.apache.atlas.RequestContext;
import org.apache.atlas.TestUtils;
import org.apache.atlas.discovery.graph.GraphBackedDiscoveryService;
import org.apache.atlas.repository.Constants;
......@@ -38,6 +39,7 @@ import org.codehaus.jettison.json.JSONObject;
import org.testng.Assert;
import org.testng.annotations.AfterClass;
import org.testng.annotations.BeforeClass;
import org.testng.annotations.BeforeMethod;
import org.testng.annotations.DataProvider;
import org.testng.annotations.Guice;
import org.testng.annotations.Test;
......@@ -67,11 +69,8 @@ public class GraphBackedDiscoveryServiceTest extends BaseHiveRepositoryTest {
TypeSystem typeSystem = TypeSystem.getInstance();
TestUtils.defineDeptEmployeeTypes(typeSystem);
Referenceable hrDept = TestUtils.createDeptEg1(typeSystem);
ClassType deptType = typeSystem.getDataType(ClassType.class, "Department");
ITypedReferenceableInstance hrDept2 = deptType.convert(hrDept, Multiplicity.REQUIRED);
repositoryService.createEntities(hrDept2);
ITypedReferenceableInstance hrDept = TestUtils.createDeptEg1(typeSystem);
repositoryService.createEntities(hrDept);
ITypedReferenceableInstance jane = repositoryService.getEntityDefinition("Person", "name", "Jane");
Id janeGuid = jane.getId();
......@@ -81,6 +80,11 @@ public class GraphBackedDiscoveryServiceTest extends BaseHiveRepositoryTest {
repositoryService.updateEntities(instance);
}
@BeforeMethod
public void setupContext() {
RequestContext.createContext();
}
@AfterClass
public void tearDown() throws Exception {
super.tearDown();
......
......@@ -23,12 +23,13 @@ import com.google.common.collect.ImmutableSet;
import com.thinkaurelius.titan.core.TitanGraph;
import com.thinkaurelius.titan.core.util.TitanCleanup;
import com.tinkerpop.blueprints.Vertex;
import org.apache.atlas.AtlasClient;
import org.apache.atlas.RepositoryMetadataModule;
import org.apache.atlas.RequestContext;
import org.apache.atlas.TestUtils;
import org.apache.atlas.discovery.graph.GraphBackedDiscoveryService;
import org.apache.atlas.repository.Constants;
import org.apache.atlas.repository.RepositoryException;
import org.apache.atlas.typesystem.IReferenceableInstance;
import org.apache.atlas.typesystem.IStruct;
import org.apache.atlas.typesystem.ITypedReferenceableInstance;
import org.apache.atlas.typesystem.ITypedStruct;
......@@ -46,22 +47,37 @@ import org.apache.atlas.typesystem.types.Multiplicity;
import org.apache.atlas.typesystem.types.StructTypeDefinition;
import org.apache.atlas.typesystem.types.TraitType;
import org.apache.atlas.typesystem.types.TypeSystem;
import org.apache.atlas.typesystem.types.TypeUtils.Pair;
import org.apache.atlas.typesystem.types.TypeUtils;
import org.apache.atlas.typesystem.types.utils.TypesUtil;
import org.testng.Assert;
import org.testng.annotations.AfterClass;
import org.testng.annotations.BeforeClass;
import org.testng.annotations.BeforeMethod;
import org.testng.annotations.Guice;
import org.testng.annotations.Test;
import javax.inject.Inject;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
import java.util.List;
import java.util.Map;
import static org.apache.atlas.TestUtils.COLUMNS_ATTR_NAME;
import static org.apache.atlas.TestUtils.COLUMN_TYPE;
import static org.apache.atlas.TestUtils.NAME;
import static org.apache.atlas.TestUtils.PROCESS_TYPE;
import static org.apache.atlas.TestUtils.TABLE_TYPE;
import static org.apache.atlas.TestUtils.createColumnEntity;
import static org.apache.atlas.TestUtils.createDBEntity;
import static org.apache.atlas.TestUtils.createTableEntity;
import static org.testng.Assert.assertEquals;
import static org.testng.Assert.assertNotEquals;
import static org.testng.Assert.assertNotNull;
import static org.testng.Assert.assertNull;
import static org.testng.Assert.assertTrue;
import static org.testng.Assert.fail;
/**
* Test for GraphBackedMetadataRepository.deleteEntities
*
......@@ -69,16 +85,12 @@ import java.util.Map;
*
*/
@Guice(modules = RepositoryMetadataModule.class)
public class GraphBackedMetadataRepositoryDeleteEntitiesTest {
public abstract class GraphBackedMetadataRepositoryDeleteTestBase {
@Inject
private GraphProvider<TitanGraph> graphProvider;
@Inject
private GraphBackedMetadataRepository repositoryService;
@Inject
private GraphBackedDiscoveryService discoveryService;
protected GraphBackedMetadataRepository repositoryService;
private TypeSystem typeSystem;
......@@ -89,10 +101,18 @@ public class GraphBackedMetadataRepositoryDeleteEntitiesTest {
new GraphBackedSearchIndexer(graphProvider);
repositoryService = new GraphBackedMetadataRepository(graphProvider, getDeleteHandler(typeSystem));
TestUtils.defineDeptEmployeeTypes(typeSystem);
TestUtils.createHiveTypes(typeSystem);
}
abstract DeleteHandler getDeleteHandler(TypeSystem typeSystem);
@BeforeMethod
public void setupContext() {
RequestContext.createContext();
}
@AfterClass
public void tearDown() throws Exception {
......@@ -109,7 +129,124 @@ public class GraphBackedMetadataRepositoryDeleteEntitiesTest {
}
}
@Test
public void testDeleteAndCreate() throws Exception {
Referenceable entity = createDBEntity();
String id = createInstance(entity);
//get entity by unique attribute should return the created entity
ITypedReferenceableInstance instance =
repositoryService.getEntityDefinition(TestUtils.DATABASE_TYPE, "name", entity.get("name"));
assertEquals(instance.getId()._getId(), id);
//delete entity should mark it as deleted
List<String> results = deleteEntities(id);
assertEquals(results.get(0), id);
assertEntityDeleted(id);
//get entity by unique attribute should throw EntityNotFoundException
try {
repositoryService.getEntityDefinition(TestUtils.DATABASE_TYPE, "name", entity.get("name"));
fail("Expected EntityNotFoundException");
} catch(EntityNotFoundException e) {
//expected
}
//Create the same entity again, should create new entity
String newId = createInstance(entity);
assertNotEquals(id, newId);
//get by unique attribute should return the new entity
instance = repositoryService.getEntityDefinition(TestUtils.DATABASE_TYPE, "name", entity.get("name"));
assertEquals(instance.getId()._getId(), newId);
}
@Test
public void testDeleteReference() throws Exception {
//Deleting column should update table
Referenceable db = createDBEntity();
String dbId = createInstance(db);
Referenceable column = createColumnEntity();
String colId = createInstance(column);
Referenceable table = createTableEntity(dbId);
table.set(COLUMNS_ATTR_NAME, Arrays.asList(new Id(colId, 0, COLUMN_TYPE)));
String tableId = createInstance(table);
deleteEntities(colId);
assertEntityDeleted(colId);
ITypedReferenceableInstance tableInstance = repositoryService.getEntityDefinition(tableId);
List<ITypedReferenceableInstance> columns =
(List<ITypedReferenceableInstance>) tableInstance.get(COLUMNS_ATTR_NAME);
assertNull(columns);
//Deleting table should update process
Referenceable process = new Referenceable(PROCESS_TYPE);
process.set(AtlasClient.PROCESS_ATTRIBUTE_OUTPUTS, Arrays.asList(new Id(tableId, 0, TABLE_TYPE)));
String processId = createInstance(process);
ITypedReferenceableInstance processInstance = repositoryService.getEntityDefinition(processId);
deleteEntities(tableId);
assertEntityDeleted(tableId);
assertTestDeleteReference(processInstance);
}
protected abstract void assertTestDeleteReference(ITypedReferenceableInstance processInstance) throws Exception;
protected abstract void assertEntityDeleted(String id) throws Exception;
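// What "deleted" means is left to the subclasses: the hard-delete test expects
// getEntityDefinition(id) to throw EntityNotFoundException, while the soft-delete test
// expects the entity to remain retrievable with state Id.EntityState.DELETED.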
private List<String> deleteEntities(String... id) throws Exception {
RequestContext.createContext();
List<String> response = repositoryService.deleteEntities(Arrays.asList(id)).left;
assertNotNull(response);
return response;
}
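// Each helper call starts with RequestContext.createContext(), which resets the thread-local
// context, so the created/updated/deleted entity ids it accumulates reflect only this operation.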
private String createInstance(Referenceable entity) throws Exception {
ClassType dataType = typeSystem.getDataType(ClassType.class, entity.getTypeName());
ITypedReferenceableInstance instance = dataType.convert(entity, Multiplicity.REQUIRED);
List<String> results = repositoryService.createEntities(instance);
return results.get(results.size() - 1);
}
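// createEntities() may persist referenced entities as well and returns one guid per entity;
// the tests rely on the last guid in the list being the top-level instance that was passed in.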
@Test
public void testDeleteEntities() throws Exception {
// Create a table entity, with 3 composite column entities
Referenceable dbEntity = createDBEntity();
String dbGuid = createInstance(dbEntity);
Referenceable table1Entity = createTableEntity(dbGuid);
Referenceable col1 = createColumnEntity();
Referenceable col2 = createColumnEntity();
Referenceable col3 = createColumnEntity();
table1Entity.set(COLUMNS_ATTR_NAME, ImmutableList.of(col1, col2, col3));
createInstance(table1Entity);
// Retrieve the table entity from the repository,
// to get its guid and the composite column guids.
ITypedReferenceableInstance tableInstance = repositoryService.getEntityDefinition(TestUtils.TABLE_TYPE,
NAME, table1Entity.get(NAME));
List<IReferenceableInstance> table1Columns = (List<IReferenceableInstance>) tableInstance.get(COLUMNS_ATTR_NAME);
// Delete the table entity. The deletion should cascade
// to its composite columns.
List<String> deletedGuids = deleteEntities(tableInstance.getId()._getId());
// Verify that the deleteEntities() response has guids for the table and its composite columns.
Assert.assertTrue(deletedGuids.contains(tableInstance.getId()._getId()));
for (IReferenceableInstance column : table1Columns) {
Assert.assertTrue(deletedGuids.contains(column.getId()._getId()));
}
// Verify that the table and its composite columns have been deleted from the graph repository.
for (String guid : deletedGuids) {
assertEntityDeleted(guid);
}
assertTestDeleteEntities(tableInstance);
}
protected abstract void assertTestDeleteEntities(ITypedReferenceableInstance tableInstance) throws Exception;
/**
* Verify deleting entities with composite references to other entities.
......@@ -120,42 +257,38 @@ public class GraphBackedMetadataRepositoryDeleteEntitiesTest {
String hrDeptGuid = createHrDeptGraph();
ITypedReferenceableInstance hrDept = repositoryService.getEntityDefinition(hrDeptGuid);
Object refValue = hrDept.get("employees");
Assert.assertTrue(refValue instanceof List);
List<Object> employees = (List<Object>)refValue;
List<ITypedReferenceableInstance> employees = (List<ITypedReferenceableInstance>) hrDept.get("employees");
Assert.assertEquals(employees.size(), 4);
List<String> employeeGuids = new ArrayList<>(4);
for (Object listValue : employees) {
Assert.assertTrue(listValue instanceof ITypedReferenceableInstance);
ITypedReferenceableInstance employee = (ITypedReferenceableInstance) listValue;
for (ITypedReferenceableInstance employee : employees) {
employeeGuids.add(employee.getId()._getId());
}
// There should be 4 vertices for Address structs (one for each Person.address attribute value).
int vertexCount = countVertices(Constants.ENTITY_TYPE_PROPERTY_KEY, "Address");
int vertexCount = getVertices(Constants.ENTITY_TYPE_PROPERTY_KEY, "Address").size();
Assert.assertEquals(vertexCount, 4);
vertexCount = countVertices(Constants.ENTITY_TYPE_PROPERTY_KEY, "SecurityClearance");
vertexCount = getVertices(Constants.ENTITY_TYPE_PROPERTY_KEY, "SecurityClearance").size();
Assert.assertEquals(vertexCount, 1);
Pair<List<String>, List<ITypedReferenceableInstance>> deletedEntities = repositoryService.deleteEntities(Arrays.asList(hrDeptGuid));
Assert.assertTrue(deletedEntities.left.contains(hrDeptGuid));
List<String> deletedEntities = deleteEntities(hrDeptGuid);
assertTrue(deletedEntities.contains(hrDeptGuid));
// Verify Department entity and its contained Person entities were deleted.
verifyEntityDoesNotExist(hrDeptGuid);
assertEntityDeleted(hrDeptGuid);
for (String employeeGuid : employeeGuids) {
verifyEntityDoesNotExist(employeeGuid);
assertEntityDeleted(employeeGuid);
}
// Verify all Person.address struct vertices were removed.
vertexCount = countVertices(Constants.ENTITY_TYPE_PROPERTY_KEY, "Address");
Assert.assertEquals(vertexCount, 0);
assertVerticesDeleted(getVertices(Constants.ENTITY_TYPE_PROPERTY_KEY, "Address"));
// Verify all SecurityClearance trait vertices were removed.
vertexCount = countVertices(Constants.ENTITY_TYPE_PROPERTY_KEY, "SecurityClearance");
Assert.assertEquals(vertexCount, 0);
assertVerticesDeleted(getVertices(Constants.ENTITY_TYPE_PROPERTY_KEY, "SecurityClearance"));
}
protected abstract void assertVerticesDeleted(List<Vertex> vertices);
@Test
public void testDeleteEntitiesWithCompositeMapReference() throws Exception {
// Define type for map value.
......@@ -202,22 +335,95 @@ public class GraphBackedMetadataRepositoryDeleteEntitiesTest {
Vertex mapOwnerVertex = GraphHelper.getInstance().getVertexForGUID(mapOwnerGuid);
object = mapOwnerVertex.getProperty(atlasEdgeLabel.getQualifiedMapKey());
Assert.assertNotNull(object);
Pair<List<String>, List<ITypedReferenceableInstance>> deleteEntitiesResult =
repositoryService.deleteEntities(Arrays.asList(mapOwnerGuid));
Assert.assertEquals(deleteEntitiesResult.left.size(), 2);
Assert.assertTrue(deleteEntitiesResult.left.containsAll(guids));
verifyEntityDoesNotExist(mapOwnerGuid);
verifyEntityDoesNotExist(mapValueGuid);
List<String> deletedEntities = deleteEntities(mapOwnerGuid);
Assert.assertEquals(deletedEntities.size(), 2);
Assert.assertTrue(deletedEntities.containsAll(guids));
assertEntityDeleted(mapOwnerGuid);
assertEntityDeleted(mapValueGuid);
}
private TypeUtils.Pair<List<String>, List<String>> updatePartial(ITypedReferenceableInstance entity) throws RepositoryException {
RequestContext.createContext();
return repositoryService.updatePartial(entity);
}
@Test
public void testUpdateEntity_MultiplicityOneNonCompositeReference() throws Exception {
ITypedReferenceableInstance hrDept = TestUtils.createDeptEg1(typeSystem);
repositoryService.createEntities(hrDept);
ITypedReferenceableInstance john = repositoryService.getEntityDefinition("Person", "name", "John");
Id johnGuid = john.getId();
ITypedReferenceableInstance max = repositoryService.getEntityDefinition("Person", "name", "Max");
String maxGuid = max.getId()._getId();
Vertex vertex = GraphHelper.getInstance().getVertexForGUID(maxGuid);
Long creationTimestamp = vertex.getProperty(Constants.TIMESTAMP_PROPERTY_KEY);
Assert.assertNotNull(creationTimestamp);
Long modificationTimestampPreUpdate = vertex.getProperty(Constants.MODIFICATION_TIMESTAMP_PROPERTY_KEY);
Assert.assertNotNull(modificationTimestampPreUpdate);
ITypedReferenceableInstance jane = repositoryService.getEntityDefinition("Person", "name", "Jane");
Id janeGuid = jane.getId();
// Update max's mentor reference to john.
ClassType personType = typeSystem.getDataType(ClassType.class, "Person");
ITypedReferenceableInstance maxEntity = personType.createInstance(max.getId());
maxEntity.set("mentor", johnGuid);
updatePartial(maxEntity);
// Verify the update was applied correctly - john should now be max's mentor.
max = repositoryService.getEntityDefinition(maxGuid);
ITypedReferenceableInstance refTarget = (ITypedReferenceableInstance) max.get("mentor");
Assert.assertEquals(refTarget.getId()._getId(), johnGuid._getId());
// Verify modification timestamp was updated.
vertex = GraphHelper.getInstance().getVertexForGUID(maxGuid);
Long modificationTimestampPostUpdate = vertex.getProperty(Constants.MODIFICATION_TIMESTAMP_PROPERTY_KEY);
Assert.assertNotNull(modificationTimestampPostUpdate);
Assert.assertTrue(creationTimestamp < modificationTimestampPostUpdate);
// Update max's mentor reference to jane.
maxEntity.set("mentor", janeGuid);
updatePartial(maxEntity);
// Verify the update was applied correctly - jane should now be max's mentor.
max = repositoryService.getEntityDefinition(maxGuid);
refTarget = (ITypedReferenceableInstance) max.get("mentor");
Assert.assertEquals(refTarget.getId()._getId(), janeGuid._getId());
// Verify modification timestamp was updated.
vertex = GraphHelper.getInstance().getVertexForGUID(maxGuid);
Long modificationTimestampPost2ndUpdate = vertex.getProperty(Constants.MODIFICATION_TIMESTAMP_PROPERTY_KEY);
Assert.assertNotNull(modificationTimestampPost2ndUpdate);
Assert.assertTrue(modificationTimestampPostUpdate < modificationTimestampPost2ndUpdate);
ITypedReferenceableInstance julius = repositoryService.getEntityDefinition("Person", "name", "Julius");
Id juliusGuid = julius.getId();
maxEntity = personType.createInstance(max.getId());
maxEntity.set("manager", juliusGuid);
updatePartial(maxEntity);
// Verify the update was applied correctly - julius should now be max's manager.
max = repositoryService.getEntityDefinition(maxGuid);
refTarget = (ITypedReferenceableInstance) max.get("manager");
Assert.assertEquals(refTarget.getId()._getId(), juliusGuid._getId());
assertTestUpdateEntity_MultiplicityOneNonCompositeReference();
}
protected abstract void assertTestUpdateEntity_MultiplicityOneNonCompositeReference() throws Exception;
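// The subclasses assert opposite outcomes here: after max's manager is changed, hard delete
// removes the old subordinate edge so jane is left with a single subordinate, while soft delete
// retains it so jane still shows both.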
/**
* Verify deleting an entity which is contained by another
* entity through a bi-directional composite reference.
*
* @throws Exception
*/
@Test(dependsOnMethods = "testDeleteEntitiesWithCompositeArrayReference")
@Test
public void testDisconnectBidirectionalReferences() throws Exception {
String hrDeptGuid = createHrDeptGraph();
ITypedReferenceableInstance hrDept = repositoryService.getEntityDefinition(hrDeptGuid);
......@@ -225,15 +431,15 @@ public class GraphBackedMetadataRepositoryDeleteEntitiesTest {
Assert.assertTrue(refValue instanceof List);
List<Object> employees = (List<Object>)refValue;
Assert.assertEquals(employees.size(), 4);
String employeeGuid = null;
String maxGuid = null;
for (Object listValue : employees) {
Assert.assertTrue(listValue instanceof ITypedReferenceableInstance);
ITypedReferenceableInstance employee = (ITypedReferenceableInstance) listValue;
if (employee.get("name").equals("Max")) {
employeeGuid = employee.getId()._getId();
maxGuid = employee.getId()._getId();
}
}
Assert.assertNotNull(employeeGuid);
Assert.assertNotNull(maxGuid);
// Verify that Max is one of Jane's subordinates.
ITypedReferenceableInstance jane = repositoryService.getEntityDefinition("Manager", "name", "Jane");
......@@ -247,11 +453,11 @@ public class GraphBackedMetadataRepositoryDeleteEntitiesTest {
ITypedReferenceableInstance employee = (ITypedReferenceableInstance) listValue;
subordinateIds.add(employee.getId()._getId());
}
Assert.assertTrue(subordinateIds.contains(employeeGuid));
Pair<List<String>, List<ITypedReferenceableInstance>> deletedEntities = repositoryService.deleteEntities(Arrays.asList(employeeGuid));
Assert.assertTrue(deletedEntities.left.contains(employeeGuid));
verifyEntityDoesNotExist(employeeGuid);
Assert.assertTrue(subordinateIds.contains(maxGuid));
List<String> deletedEntities = deleteEntities(maxGuid);
Assert.assertTrue(deletedEntities.contains(maxGuid));
assertEntityDeleted(maxGuid);
// Verify that the Department.employees reference to the deleted employee
// was disconnected.
......@@ -263,37 +469,29 @@ public class GraphBackedMetadataRepositoryDeleteEntitiesTest {
for (Object listValue : employees) {
Assert.assertTrue(listValue instanceof ITypedReferenceableInstance);
ITypedReferenceableInstance employee = (ITypedReferenceableInstance) listValue;
Assert.assertNotEquals(employee.getId()._getId(), employeeGuid);
Assert.assertNotEquals(employee.getId()._getId(), maxGuid);
}
// Verify that the Manager.subordinates reference to the deleted employee
// Max was disconnected.
jane = repositoryService.getEntityDefinition("Manager", "name", "Jane");
refValue = jane.get("subordinates");
Assert.assertTrue(refValue instanceof List);
subordinates = (List<Object>)refValue;
Assert.assertEquals(subordinates.size(), 1);
Object listValue = subordinates.get(0);
Assert.assertTrue(listValue instanceof ITypedReferenceableInstance);
ITypedReferenceableInstance subordinate = (ITypedReferenceableInstance) listValue;
String subordinateGuid = subordinate.getId()._getId();
Assert.assertNotEquals(subordinateGuid, employeeGuid);
// Verify that max's Person.mentor unidirectional reference to john was disconnected.
ITypedReferenceableInstance john = repositoryService.getEntityDefinition("Manager", "name", "John");
refValue = john.get("mentor");
Assert.assertNull(refValue);
assertTestDisconnectBidirectionalReferences();
// Now delete jane - this should disconnect the manager reference from her
// subordinate.
String janeGuid = jane.getId()._getId();
deletedEntities = repositoryService.deleteEntities(Arrays.asList(janeGuid));
Assert.assertTrue(deletedEntities.left.contains(janeGuid));
verifyEntityDoesNotExist(janeGuid);
subordinate = repositoryService.getEntityDefinition(subordinateGuid);
Assert.assertNull(subordinate.get("manager"));
deletedEntities = deleteEntities(janeGuid);
Assert.assertTrue(deletedEntities.contains(janeGuid));
assertEntityDeleted(janeGuid);
john = repositoryService.getEntityDefinition("Person", "name", "John");
Assert.assertNull(john.get("manager"));
}
protected abstract void assertTestDisconnectBidirectionalReferences() throws Exception;
/**
* Verify deleting an entity that is the target of a unidirectional class array reference
* from a class instance.
......@@ -314,11 +512,9 @@ public class GraphBackedMetadataRepositoryDeleteEntitiesTest {
String columnGuid = column.getId()._getId();
// Delete the column.
Pair<List<String>, List<ITypedReferenceableInstance>> deletedEntities =
repositoryService.deleteEntities(Arrays.asList(columnGuid));
Assert.assertEquals(deletedEntities.left.size(), 1);
Assert.assertEquals(deletedEntities.right.size(), 1);
verifyEntityDoesNotExist(columnGuid);
List<String> deletedEntities = deleteEntities(columnGuid);
Assert.assertTrue(deletedEntities.contains(columnGuid));
assertEntityDeleted(columnGuid);
// Verify table.columns reference to the deleted column has been disconnected.
table = repositoryService.getEntityDefinition(tableGuid);
......@@ -427,45 +623,33 @@ public class GraphBackedMetadataRepositoryDeleteEntitiesTest {
Assert.assertEquals(refList.get(0).getId()._getId(), traitTargetGuid);
// Delete the entities that are targets of the struct and trait instances.
Pair<List<String>, List<ITypedReferenceableInstance>> deleteEntitiesResult =
repositoryService.deleteEntities(Arrays.asList(structTargetGuid, traitTargetGuid));
verifyEntityDoesNotExist(structTargetGuid);
verifyEntityDoesNotExist(traitTargetGuid);
Assert.assertEquals(deleteEntitiesResult.left.size(), 2);
Assert.assertTrue(deleteEntitiesResult.left.containsAll(Arrays.asList(structTargetGuid, traitTargetGuid)));
// Verify that the unidirectional references from the struct and trait instances
// to the deleted entities were disconnected.
structContainerConvertedEntity = repositoryService.getEntityDefinition(structContainerGuid);
object = structContainerConvertedEntity.get("struct");
Assert.assertNotNull(object);
Assert.assertTrue(object instanceof ITypedStruct);
struct = (ITypedStruct) object;
Assert.assertNull(struct.get("target"));
trait = structContainerConvertedEntity.getTrait("TestTrait");
Assert.assertNotNull(trait);
Assert.assertNull(trait.get("target"));
List<String> deletedEntities = deleteEntities(structTargetGuid, traitTargetGuid);
assertEntityDeleted(structTargetGuid);
assertEntityDeleted(traitTargetGuid);
Assert.assertEquals(deletedEntities.size(), 2);
Assert.assertTrue(deletedEntities.containsAll(Arrays.asList(structTargetGuid, traitTargetGuid)));
assertTestDisconnectUnidirectionalArrayReferenceFromStructAndTraitTypes(structContainerGuid);
// Delete the entity which contains nested structs and has the TestTrait trait.
deleteEntitiesResult =
repositoryService.deleteEntities(Arrays.asList(structContainerGuid));
verifyEntityDoesNotExist(structContainerGuid);
Assert.assertEquals(deleteEntitiesResult.left.size(), 1);
Assert.assertTrue(deleteEntitiesResult.left.contains(structContainerGuid));
deletedEntities = deleteEntities(structContainerGuid);
assertEntityDeleted(structContainerGuid);
Assert.assertEquals(deletedEntities.size(), 1);
Assert.assertTrue(deletedEntities.contains(structContainerGuid));
// Verify all TestStruct struct vertices were removed.
int vertexCount = countVertices(Constants.ENTITY_TYPE_PROPERTY_KEY, "TestStruct");
Assert.assertEquals(vertexCount, 0);
assertVerticesDeleted(getVertices(Constants.ENTITY_TYPE_PROPERTY_KEY, "TestStruct"));
// Verify all NestedStruct struct vertices were removed.
vertexCount = countVertices(Constants.ENTITY_TYPE_PROPERTY_KEY, "NestedStruct");
Assert.assertEquals(vertexCount, 0);
assertVerticesDeleted(getVertices(Constants.ENTITY_TYPE_PROPERTY_KEY, "NestedStruct"));
// Verify all TestTrait trait vertices were removed.
vertexCount = countVertices(Constants.ENTITY_TYPE_PROPERTY_KEY, "TestTrait");
Assert.assertEquals(vertexCount, 0);
assertVerticesDeleted(getVertices(Constants.ENTITY_TYPE_PROPERTY_KEY, "TestTrait"));
}
protected abstract void assertTestDisconnectUnidirectionalArrayReferenceFromStructAndTraitTypes(
String structContainerGuid) throws Exception;
/**
* Verify deleting entities that are the target of class map references.
*/
......@@ -504,14 +688,11 @@ public class GraphBackedMetadataRepositoryDeleteEntitiesTest {
List<String> guids = repositoryService.getEntityList("MapOwner");
Assert.assertEquals(guids.size(), 1);
String mapOwnerGuid = guids.get(0);
String edgeLabel = GraphHelper.getEdgeLabel(mapOwnerType, mapOwnerType.fieldMapping.fields.get("map"));
String mapEntryLabel = edgeLabel + "." + "value1";
AtlasEdgeLabel atlasEdgeLabel = new AtlasEdgeLabel(mapEntryLabel);
edgeLabel = GraphHelper.getEdgeLabel(mapOwnerType, mapOwnerType.fieldMapping.fields.get("biMap"));
mapEntryLabel = edgeLabel + "." + "value1";
AtlasEdgeLabel biMapAtlasEdgeLabel = new AtlasEdgeLabel(mapEntryLabel);
// Verify MapOwner.map attribute has expected value.
String mapValueGuid = null;
Vertex mapOwnerVertex = null;
......@@ -532,34 +713,22 @@ public class GraphBackedMetadataRepositoryDeleteEntitiesTest {
// Delete the map value instance.
// This should disconnect the references from the map owner instance.
Pair<List<String>, List<ITypedReferenceableInstance>> deleteEntitiesResult =
repositoryService.deleteEntities(Arrays.asList(mapValueGuid));
verifyEntityDoesNotExist(mapValueGuid);
// Verify map references from mapOwner were disconnected.
mapOwnerInstance = repositoryService.getEntityDefinition(mapOwnerGuid);
Assert.assertNull(mapOwnerInstance.get("map"));
Assert.assertNull(mapOwnerInstance.get("biMap"));
mapOwnerVertex = GraphHelper.getInstance().getVertexForGUID(mapOwnerGuid);
Object object = mapOwnerVertex.getProperty(atlasEdgeLabel.getQualifiedMapKey());
Assert.assertNull(object);
object = mapOwnerVertex.getProperty(biMapAtlasEdgeLabel.getQualifiedMapKey());
Assert.assertNull(object);
deleteEntities(mapValueGuid);
assertEntityDeleted(mapValueGuid);
assertTestDisconnectMapReferenceFromClassType(mapOwnerGuid);
}
protected abstract void assertTestDisconnectMapReferenceFromClassType(String mapOwnerGuid) throws Exception;
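// The two modes diverge again: the hard-delete subclass asserts that MapOwner.map and
// MapOwner.biMap come back null and their vertex properties are cleared, while the soft-delete
// subclass asserts that both maps still contain their single (soft-deleted) entry.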
private String createHrDeptGraph() throws Exception {
Referenceable deptEg1 = TestUtils.createDeptEg1(typeSystem);
ClassType deptType = typeSystem.getDataType(ClassType.class, "Department");
ITypedReferenceableInstance hrDept2 = deptType.convert(deptEg1, Multiplicity.REQUIRED);
ITypedReferenceableInstance hrDept = TestUtils.createDeptEg1(typeSystem);
List<String> guids = repositoryService.createEntities(hrDept2);
List<String> guids = repositoryService.createEntities(hrDept);
Assert.assertNotNull(guids);
Assert.assertEquals(guids.size(), 5);
List<String> entityList = repositoryService.getEntityList("Department");
Assert.assertNotNull(entityList);
Assert.assertEquals(entityList.size(), 1);
return entityList.get(0);
hrDept = repositoryService.getEntityDefinition("Department", "name", "hr");
return hrDept.getId()._getId();
}
private void createDbTableGraph() throws Exception {
......@@ -592,22 +761,12 @@ public class GraphBackedMetadataRepositoryDeleteEntitiesTest {
repositoryService.createEntities(db, table);
}
private int countVertices(String propertyName, Object value) {
protected List<Vertex> getVertices(String propertyName, Object value) {
Iterable<Vertex> vertices = graphProvider.get().getVertices(propertyName, value);
int vertexCount = 0;
List<Vertex> list = new ArrayList<>();
for (Vertex vertex : vertices) {
vertexCount++;
list.add(vertex);
}
return vertexCount;
return list;
}
private void verifyEntityDoesNotExist(String guid) throws RepositoryException {
try {
repositoryService.getEntityDefinition(guid);
Assert.fail("EntityNotFoundException was expected but none thrown");
} catch(EntityNotFoundException e) {
// good
}
}
}
......@@ -28,6 +28,7 @@ import com.tinkerpop.blueprints.Vertex;
import org.apache.atlas.GraphTransaction;
import org.apache.atlas.RepositoryMetadataModule;
import org.apache.atlas.RequestContext;
import org.apache.atlas.TestUtils;
import org.apache.atlas.discovery.graph.GraphBackedDiscoveryService;
import org.apache.atlas.repository.Constants;
......@@ -53,6 +54,7 @@ import org.codehaus.jettison.json.JSONObject;
import org.testng.Assert;
import org.testng.annotations.AfterClass;
import org.testng.annotations.BeforeClass;
import org.testng.annotations.BeforeMethod;
import org.testng.annotations.Guice;
import org.testng.annotations.Test;
......@@ -99,6 +101,10 @@ public class GraphBackedMetadataRepositoryTest {
TestUtils.createHiveTypes(typeSystem);
}
@BeforeMethod
public void setupContext() {
RequestContext.createContext();
}
@AfterClass
public void tearDown() throws Exception {
......@@ -116,14 +122,11 @@ public class GraphBackedMetadataRepositoryTest {
}
}
@Test
public void testSubmitEntity() throws Exception {
Referenceable hrDept = TestUtils.createDeptEg1(typeSystem);
ClassType deptType = typeSystem.getDataType(ClassType.class, "Department");
ITypedReferenceableInstance hrDept2 = deptType.convert(hrDept, Multiplicity.REQUIRED);
ITypedReferenceableInstance hrDept = TestUtils.createDeptEg1(typeSystem);
List<String> guids = repositoryService.createEntities(hrDept2);
List<String> guids = repositoryService.createEntities(hrDept);
Assert.assertNotNull(guids);
Assert.assertEquals(guids.size(), 5);
guid = guids.get(4);
......@@ -147,7 +150,7 @@ public class GraphBackedMetadataRepositoryTest {
@Test(dependsOnMethods = "testSubmitEntity")
public void testGetEntityList() throws Exception {
List<String> entityList = repositoryService.getEntityList(TestUtils.ENTITY_TYPE);
List<String> entityList = repositoryService.getEntityList(TestUtils.DEPARTMENT_TYPE);
System.out.println("entityList = " + entityList);
Assert.assertNotNull(entityList);
Assert.assertTrue(entityList.contains(guid));
......@@ -247,7 +250,7 @@ public class GraphBackedMetadataRepositoryTest {
final String aGUID = getGUID();
Vertex vertex = GraphHelper.getInstance().getVertexForGUID(aGUID);
Long modificationTimestampPreUpdate = vertex.getProperty(Constants.MODIFICATION_TIMESTAMP_PROPERTY_KEY);
Assert.assertNull(modificationTimestampPreUpdate);
Assert.assertNotNull(modificationTimestampPreUpdate);
List<String> traitNames = repositoryService.getTraitNames(aGUID);
System.out.println("traitNames = " + traitNames);
......@@ -499,85 +502,6 @@ public class GraphBackedMetadataRepositoryTest {
row = (JSONObject) results.get(0);
Assert.assertEquals(row.get("typeName"), "Person");
}
@Test(dependsOnMethods = "testSubmitEntity")
public void testUpdateEntity_MultiplicityOneNonCompositeReference() throws Exception {
ITypedReferenceableInstance john = repositoryService.getEntityDefinition("Person", "name", "John");
Id johnGuid = john.getId();
ITypedReferenceableInstance max = repositoryService.getEntityDefinition("Person", "name", "Max");
String maxGuid = max.getId()._getId();
Vertex vertex = GraphHelper.getInstance().getVertexForGUID(maxGuid);
Long creationTimestamp = vertex.getProperty(Constants.TIMESTAMP_PROPERTY_KEY);
Assert.assertNotNull(creationTimestamp);
Long modificationTimestampPreUpdate = vertex.getProperty(Constants.MODIFICATION_TIMESTAMP_PROPERTY_KEY);
Assert.assertNull(modificationTimestampPreUpdate);
ITypedReferenceableInstance jane = repositoryService.getEntityDefinition("Person", "name", "Jane");
Id janeGuid = jane.getId();
// Update max's mentor reference to john.
ClassType personType = typeSystem.getDataType(ClassType.class, "Person");
ITypedReferenceableInstance instance = personType.createInstance(max.getId());
instance.set("mentor", johnGuid);
repositoryService.updatePartial(instance);
// Verify the update was applied correctly - john should now be max's mentor.
max = repositoryService.getEntityDefinition(maxGuid);
Object object = max.get("mentor");
Assert.assertTrue(object instanceof ITypedReferenceableInstance);
ITypedReferenceableInstance refTarget = (ITypedReferenceableInstance) object;
Assert.assertEquals(refTarget.getId()._getId(), johnGuid._getId());
// Verify modification timestamp was updated.
vertex = GraphHelper.getInstance().getVertexForGUID(maxGuid);
Long modificationTimestampPostUpdate = vertex.getProperty(Constants.MODIFICATION_TIMESTAMP_PROPERTY_KEY);
Assert.assertNotNull(modificationTimestampPostUpdate);
Assert.assertTrue(creationTimestamp < modificationTimestampPostUpdate);
// Update max's mentor reference to jane.
instance.set("mentor", janeGuid);
repositoryService.updatePartial(instance);
// Verify the update was applied correctly - jane should now be max's mentor.
max = repositoryService.getEntityDefinition(maxGuid);
object = max.get("mentor");
Assert.assertTrue(object instanceof ITypedReferenceableInstance);
refTarget = (ITypedReferenceableInstance) object;
Assert.assertEquals(refTarget.getId()._getId(), janeGuid._getId());
// Verify modification timestamp was updated.
vertex = GraphHelper.getInstance().getVertexForGUID(maxGuid);
Long modificationTimestampPost2ndUpdate = vertex.getProperty(Constants.MODIFICATION_TIMESTAMP_PROPERTY_KEY);
Assert.assertNotNull(modificationTimestampPost2ndUpdate);
Assert.assertTrue(modificationTimestampPostUpdate < modificationTimestampPost2ndUpdate);
ITypedReferenceableInstance julius = repositoryService.getEntityDefinition("Person", "name", "Julius");
Id juliusGuid = julius.getId();
instance = personType.createInstance(max.getId());
instance.set("manager", juliusGuid);
repositoryService.updatePartial(instance);
// Verify the update was applied correctly - julius should now be max's manager.
max = repositoryService.getEntityDefinition(maxGuid);
object = max.get("manager");
Assert.assertTrue(object instanceof ITypedReferenceableInstance);
refTarget = (ITypedReferenceableInstance) object;
Assert.assertEquals(refTarget.getId()._getId(), juliusGuid._getId());
// Verify that max is no longer a subordinate of jane.
jane = repositoryService.getEntityDefinition(janeGuid._getId());
Object refValue = jane.get("subordinates");
Assert.assertTrue(refValue instanceof List);
List<Object> subordinates = (List<Object>)refValue;
Assert.assertEquals(subordinates.size(), 1);
Object listValue = subordinates.get(0);
Assert.assertTrue(listValue instanceof ITypedReferenceableInstance);
ITypedReferenceableInstance subordinate = (ITypedReferenceableInstance) listValue;
Assert.assertNotEquals(subordinate.getId()._getId(), maxGuid);
}
private ITypedReferenceableInstance createHiveTableInstance(Referenceable databaseInstance) throws Exception {
Referenceable tableInstance = new Referenceable(TestUtils.TABLE_TYPE, TestUtils.CLASSIFICATION);
......
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
* <p/>
* http://www.apache.org/licenses/LICENSE-2.0
* <p/>
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.atlas.repository.graph;
import com.tinkerpop.blueprints.Vertex;
import org.apache.atlas.AtlasClient;
import org.apache.atlas.TestUtils;
import org.apache.atlas.repository.Constants;
import org.apache.atlas.typesystem.IStruct;
import org.apache.atlas.typesystem.ITypedReferenceableInstance;
import org.apache.atlas.typesystem.ITypedStruct;
import org.apache.atlas.typesystem.exception.EntityNotFoundException;
import org.apache.atlas.typesystem.types.TypeSystem;
import org.testng.Assert;
import java.util.List;
import static org.testng.Assert.assertEquals;
import static org.testng.Assert.assertNull;
import static org.testng.Assert.fail;
import static org.testng.AssertJUnit.assertNotNull;
public class GraphBackedRepositoryHardDeleteTest extends GraphBackedMetadataRepositoryDeleteTestBase {
@Override
DeleteHandler getDeleteHandler(TypeSystem typeSystem) {
return new HardDeleteHandler(typeSystem);
}
@Override
protected void assertTestDeleteReference(ITypedReferenceableInstance processInstance) throws Exception {
//assert that the process outputs reference is now null
ITypedReferenceableInstance newProcess =
repositoryService.getEntityDefinition(processInstance.getId()._getId());
assertNull(newProcess.get(AtlasClient.PROCESS_ATTRIBUTE_OUTPUTS));
}
@Override
protected void assertEntityDeleted(String id) throws Exception {
try {
repositoryService.getEntityDefinition(id);
fail("Expected EntityNotFoundException");
} catch(EntityNotFoundException e) {
//expected
}
}
@Override
protected void assertTestDeleteEntities(ITypedReferenceableInstance tableInstance) {
int vertexCount = getVertices(Constants.ENTITY_TYPE_PROPERTY_KEY, TestUtils.TABLE_TYPE).size();
assertEquals(vertexCount, 0);
vertexCount = getVertices(Constants.ENTITY_TYPE_PROPERTY_KEY, TestUtils.COLUMN_TYPE).size();
assertEquals(vertexCount, 0);
}
@Override
protected void assertVerticesDeleted(List<Vertex> vertices) {
assertEquals(vertices.size(), 0);
}
@Override
protected void assertTestUpdateEntity_MultiplicityOneNonCompositeReference() throws Exception {
// Verify that max is no longer a subordinate of jane.
ITypedReferenceableInstance jane = repositoryService.getEntityDefinition("Manager", "name", "Jane");
List<ITypedReferenceableInstance> subordinates = (List<ITypedReferenceableInstance>) jane.get("subordinates");
Assert.assertEquals(subordinates.size(), 1);
}
@Override
protected void assertTestDisconnectBidirectionalReferences() throws Exception {
// Verify that the Manager.subordinates reference to the deleted employee
// Max was disconnected.
ITypedReferenceableInstance jane = repositoryService.getEntityDefinition("Manager", "name", "Jane");
List<ITypedReferenceableInstance> subordinates = (List<ITypedReferenceableInstance>) jane.get("subordinates");
assertEquals(subordinates.size(), 1);
}
@Override
protected void assertTestDisconnectUnidirectionalArrayReferenceFromStructAndTraitTypes(String structContainerGuid)
throws Exception {
// Verify that the unidirectional references from the struct and trait instances
// to the deleted entities were disconnected.
ITypedReferenceableInstance structContainerConvertedEntity =
repositoryService.getEntityDefinition(structContainerGuid);
ITypedStruct struct = (ITypedStruct) structContainerConvertedEntity.get("struct");
assertNull(struct.get("target"));
IStruct trait = structContainerConvertedEntity.getTrait("TestTrait");
assertNotNull(trait);
assertNull(trait.get("target"));
}
@Override
protected void assertTestDisconnectMapReferenceFromClassType(String mapOwnerGuid) throws Exception {
// Verify map references from mapOwner were disconnected.
ITypedReferenceableInstance mapOwnerInstance = repositoryService.getEntityDefinition(mapOwnerGuid);
assertNull(mapOwnerInstance.get("map"));
assertNull(mapOwnerInstance.get("biMap"));
Vertex mapOwnerVertex = GraphHelper.getInstance().getVertexForGUID(mapOwnerGuid);
Object object = mapOwnerVertex.getProperty("MapOwner.map.value1");
assertNull(object);
object = mapOwnerVertex.getProperty("MapOwner.biMap.value1");
assertNull(object);
}
}
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
* <p/>
* http://www.apache.org/licenses/LICENSE-2.0
* <p/>
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.atlas.repository.graph;
import com.tinkerpop.blueprints.Vertex;
import org.apache.atlas.AtlasClient;
import org.apache.atlas.TestUtils;
import org.apache.atlas.repository.Constants;
import org.apache.atlas.typesystem.IStruct;
import org.apache.atlas.typesystem.ITypedReferenceableInstance;
import org.apache.atlas.typesystem.ITypedStruct;
import org.apache.atlas.typesystem.persistence.Id;
import org.apache.atlas.typesystem.types.TypeSystem;
import org.testng.Assert;
import java.util.List;
import java.util.Map;
import static org.testng.Assert.assertEquals;
import static org.testng.Assert.assertNotNull;
public class GraphBackedRepositorySoftDeleteTest extends GraphBackedMetadataRepositoryDeleteTestBase {
@Override
DeleteHandler getDeleteHandler(TypeSystem typeSystem) {
return new SoftDeleteHandler(typeSystem);
}
@Override
protected void assertTestDeleteReference(ITypedReferenceableInstance expected) throws Exception {
ITypedReferenceableInstance process = repositoryService.getEntityDefinition(expected.getId()._getId());
List<ITypedReferenceableInstance> outputs =
(List<ITypedReferenceableInstance>) process.get(AtlasClient.PROCESS_ATTRIBUTE_OUTPUTS);
List<ITypedReferenceableInstance> expectedOutputs =
(List<ITypedReferenceableInstance>) expected.get(AtlasClient.PROCESS_ATTRIBUTE_OUTPUTS);
assertEquals(outputs.size(), expectedOutputs.size());
}
@Override
protected void assertEntityDeleted(String id) throws Exception {
ITypedReferenceableInstance entity = repositoryService.getEntityDefinition(id);
assertEquals(entity.getId().getState(), Id.EntityState.DELETED);
}
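// Not part of this patch: a minimal illustrative sketch of what a soft-delete handler is
// expected to do, based on the state assertion above. The method name and the timestamp
// update below are assumptions for illustration only.
//
//     void markVertexDeleted(Vertex instanceVertex) {
//         // keep the vertex, but flag its state as DELETED so lookups can still reconstruct it
//         instanceVertex.setProperty(Constants.STATE_PROPERTY_KEY, Id.EntityState.DELETED.name());
//         // record when the (soft) delete happened
//         instanceVertex.setProperty(Constants.MODIFICATION_TIMESTAMP_PROPERTY_KEY, System.currentTimeMillis());
//     }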
@Override
protected void assertTestDeleteEntities(ITypedReferenceableInstance expected) throws Exception {
//Assert that the soft-deleted table can still be fully reconstructed, including its columns
ITypedReferenceableInstance table = repositoryService.getEntityDefinition(expected.getId()._getId());
List<ITypedReferenceableInstance> columns =
(List<ITypedReferenceableInstance>) table.get(TestUtils.COLUMNS_ATTR_NAME);
List<ITypedReferenceableInstance> expectedColumns =
(List<ITypedReferenceableInstance>) expected.get(TestUtils.COLUMNS_ATTR_NAME);
assertEquals(columns.size(), expectedColumns.size());
}
@Override
protected void assertVerticesDeleted(List<Vertex> vertices) {
for (Vertex vertex : vertices) {
assertEquals(vertex.getProperty(Constants.STATE_PROPERTY_KEY), Id.EntityState.DELETED.name());
}
}
@Override
protected void assertTestUpdateEntity_MultiplicityOneNonCompositeReference() throws Exception {
// With soft delete, the disconnected subordinate edge is retained, so jane still shows both subordinates.
ITypedReferenceableInstance jane = repositoryService.getEntityDefinition("Manager", "name", "Jane");
List<ITypedReferenceableInstance> subordinates = (List<ITypedReferenceableInstance>) jane.get("subordinates");
Assert.assertEquals(subordinates.size(), 2);
}
@Override
protected void assertTestDisconnectBidirectionalReferences() throws Exception {
// Verify that Manager.subordinates still references the deleted employee Max.
ITypedReferenceableInstance jane = repositoryService.getEntityDefinition("Manager", "name", "Jane");
List<ITypedReferenceableInstance> subordinates = (List<ITypedReferenceableInstance>) jane.get("subordinates");
assertEquals(subordinates.size(), 2);
}
@Override
protected void assertTestDisconnectUnidirectionalArrayReferenceFromStructAndTraitTypes(String structContainerGuid)
throws Exception {
// With soft delete, the unidirectional references from the struct and trait instances
// to the deleted entities are retained.
ITypedReferenceableInstance structContainerConvertedEntity =
repositoryService.getEntityDefinition(structContainerGuid);
ITypedStruct struct = (ITypedStruct) structContainerConvertedEntity.get("struct");
assertNotNull(struct.get("target"));
IStruct trait = structContainerConvertedEntity.getTrait("TestTrait");
assertNotNull(trait);
assertNotNull(trait.get("target"));
}
@Override
protected void assertTestDisconnectMapReferenceFromClassType(String mapOwnerGuid) throws Exception {
ITypedReferenceableInstance mapOwnerInstance = repositoryService.getEntityDefinition(mapOwnerGuid);
Map<String, ITypedReferenceableInstance> map =
(Map<String, ITypedReferenceableInstance>) mapOwnerInstance.get("map");
assertNotNull(map);
assertEquals(map.size(), 1);
Map<String, ITypedReferenceableInstance> biMap =
(Map<String, ITypedReferenceableInstance>) mapOwnerInstance.get("biMap");
assertNotNull(biMap);
assertEquals(biMap.size(), 1);
}
}
......@@ -27,6 +27,7 @@ import com.tinkerpop.blueprints.Predicate;
import com.tinkerpop.blueprints.Vertex;
import org.apache.atlas.GraphTransaction;
import org.apache.atlas.RepositoryMetadataModule;
import org.apache.atlas.RequestContext;
import org.apache.atlas.TestUtils;
import org.apache.atlas.repository.Constants;
import org.apache.atlas.typesystem.ITypedReferenceableInstance;
......@@ -39,6 +40,7 @@ import org.apache.atlas.typesystem.types.TypeSystem;
import org.testng.Assert;
import org.testng.annotations.AfterClass;
import org.testng.annotations.BeforeClass;
import org.testng.annotations.BeforeMethod;
import org.testng.annotations.Guice;
import org.testng.annotations.Test;
......@@ -75,6 +77,11 @@ public class GraphRepoMapperScaleTest {
searchIndexer.onAdd(typesAdded);
}
@BeforeMethod
public void setupContext() {
RequestContext.createContext();
}
@AfterClass
public void tearDown() throws Exception {
TypeSystem.getInstance().reset();
......
......@@ -178,7 +178,7 @@ public class GraphBackedTypeStoreTest {
HierarchicalTypeDefinition<ClassType> deptTypeDef = createClassTypeDef("Department", "Department"+_description,
ImmutableSet.of(superTypeDef.typeName), createRequiredAttrDef("name", DataTypes.STRING_TYPE),
new AttributeDefinition("employees", String.format("array<%s>", "Person"), Multiplicity.COLLECTION,
new AttributeDefinition("employees", String.format("array<%s>", "Person"), Multiplicity.OPTIONAL,
true, "department"));
TypesDef typesDef = TypesUtil.getTypesDef(ImmutableList.of(orgLevelEnum), ImmutableList.of(addressDetails),
ImmutableList.<HierarchicalTypeDefinition<TraitType>>of(),
......@@ -227,7 +227,7 @@ public class GraphBackedTypeStoreTest {
createOptionalAttrDef("name", DataTypes.STRING_TYPE));
HierarchicalTypeDefinition<ClassType> deptTypeDef = createClassTypeDef("Department",
ImmutableSet.of("Division", superTypeDef2.typeName), createRequiredAttrDef("name", DataTypes.STRING_TYPE),
new AttributeDefinition("employees", String.format("array<%s>", "Person"), Multiplicity.COLLECTION,
new AttributeDefinition("employees", String.format("array<%s>", "Person"), Multiplicity.OPTIONAL,
true, "department"));
TypesDef typesDef = TypesUtil.getTypesDef(ImmutableList.<EnumTypeDefinition>of(), ImmutableList.<StructTypeDefinition>of(),
ImmutableList.<HierarchicalTypeDefinition<TraitType>>of(),
......@@ -269,9 +269,9 @@ public class GraphBackedTypeStoreTest {
private int countOutgoingEdges(Vertex typeVertex, String edgeLabel) {
Iterable<Edge> outGoingEdgesByLabel = GraphHelper.getOutGoingEdgesByLabel(typeVertex, edgeLabel);
Iterator<Edge> outGoingEdgesByLabel = GraphHelper.getOutGoingEdgesByLabel(typeVertex, edgeLabel);
int edgeCount = 0;
for (Iterator<Edge> iterator = outGoingEdgesByLabel.iterator(); iterator.hasNext();) {
for (Iterator<Edge> iterator = outGoingEdgesByLabel; iterator.hasNext();) {
iterator.next();
edgeCount++;
}
......
......@@ -24,22 +24,15 @@ import com.google.inject.Inject;
import com.thinkaurelius.titan.core.TitanGraph;
import com.thinkaurelius.titan.core.util.TitanCleanup;
import org.apache.atlas.AtlasClient;
import org.apache.atlas.repository.audit.EntityAuditRepository;
import org.apache.atlas.repository.audit.HBaseBasedAuditRepository;
import org.apache.atlas.repository.audit.HBaseTestUtils;
import org.apache.atlas.typesystem.exception.TypeNotFoundException;
import org.apache.atlas.typesystem.exception.EntityNotFoundException;
import org.apache.atlas.typesystem.types.ClassType;
import org.apache.atlas.typesystem.types.DataTypes;
import org.apache.atlas.typesystem.types.HierarchicalTypeDefinition;
import org.apache.atlas.typesystem.types.utils.TypesUtil;
import org.apache.atlas.utils.ParamChecker;
import org.apache.atlas.AtlasException;
import org.apache.atlas.EntityAuditEvent;
import org.apache.atlas.RepositoryMetadataModule;
import org.apache.atlas.RequestContext;
import org.apache.atlas.TestUtils;
import org.apache.atlas.listener.EntityChangeListener;
import org.apache.atlas.repository.audit.EntityAuditRepository;
import org.apache.atlas.repository.audit.HBaseBasedAuditRepository;
import org.apache.atlas.repository.audit.HBaseTestUtils;
import org.apache.atlas.repository.graph.GraphProvider;
import org.apache.atlas.services.MetadataService;
import org.apache.atlas.typesystem.IReferenceableInstance;
......@@ -48,12 +41,19 @@ import org.apache.atlas.typesystem.ITypedReferenceableInstance;
import org.apache.atlas.typesystem.Referenceable;
import org.apache.atlas.typesystem.Struct;
import org.apache.atlas.typesystem.TypesDef;
import org.apache.atlas.typesystem.exception.EntityNotFoundException;
import org.apache.atlas.typesystem.exception.TypeNotFoundException;
import org.apache.atlas.typesystem.json.InstanceSerialization;
import org.apache.atlas.typesystem.json.TypesSerialization;
import org.apache.atlas.typesystem.persistence.Id;
import org.apache.atlas.typesystem.types.ClassType;
import org.apache.atlas.typesystem.types.DataTypes;
import org.apache.atlas.typesystem.types.EnumValue;
import org.apache.atlas.typesystem.types.HierarchicalTypeDefinition;
import org.apache.atlas.typesystem.types.TypeSystem;
import org.apache.atlas.typesystem.types.ValueConversionException;
import org.apache.atlas.typesystem.types.utils.TypesUtil;
import org.apache.atlas.utils.ParamChecker;
import org.apache.commons.lang.RandomStringUtils;
import org.codehaus.jettison.json.JSONArray;
import org.codehaus.jettison.json.JSONException;
......@@ -67,15 +67,19 @@ import org.testng.annotations.Test;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collection;
import java.util.Date;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import static org.apache.atlas.TestUtils.COLUMNS_ATTR_NAME;
import static org.apache.atlas.TestUtils.COLUMN_TYPE;
import static org.apache.atlas.TestUtils.TABLE_TYPE;
import static org.apache.atlas.TestUtils.createColumnEntity;
import static org.apache.atlas.TestUtils.createDBEntity;
import static org.apache.atlas.TestUtils.createTableEntity;
import static org.testng.Assert.assertEquals;
import static org.testng.Assert.assertNotNull;
import static org.testng.Assert.assertNull;
import static org.testng.Assert.assertTrue;
import static org.testng.Assert.fail;
@Guice(modules = RepositoryMetadataModule.class)
......@@ -87,25 +91,22 @@ public class DefaultMetadataServiceTest {
private GraphProvider<TitanGraph> graphProvider;
@Inject
private EntityAuditRepository repository;
private EntityAuditRepository auditRepository;
private Referenceable db = createDBEntity();
private Id dbId;
private Referenceable table;
private Id tableId;
private final String NAME = "name";
private final String COLUMNS_ATTR_NAME = "columns";
@BeforeTest
public void setUp() throws Exception {
if (repository instanceof HBaseBasedAuditRepository) {
if (auditRepository instanceof HBaseBasedAuditRepository) {
HBaseTestUtils.startCluster();
((HBaseBasedAuditRepository) repository).start();
((HBaseBasedAuditRepository) auditRepository).start();
}
RequestContext.createContext();
RequestContext.get().setUser("testuser");
......@@ -118,9 +119,7 @@ public class DefaultMetadataServiceTest {
}
String dbGuid = createInstance(db);
dbId = new Id(dbGuid, 0, TestUtils.DATABASE_TYPE);
table = createTableEntity(dbId);
table = createTableEntity(dbGUid);
String tableGuid = createInstance(table);
String tableDefinitionJson =
metadataService.getEntityDefinition(TestUtils.TABLE_TYPE, NAME, (String) table.get(NAME));
......@@ -143,25 +142,28 @@ public class DefaultMetadataServiceTest {
e.printStackTrace();
}
if (repository instanceof HBaseBasedAuditRepository) {
((HBaseBasedAuditRepository) repository).stop();
if (auditRepository instanceof HBaseBasedAuditRepository) {
((HBaseBasedAuditRepository) auditRepository).stop();
HBaseTestUtils.stopCluster();
}
}
private String createInstance(Referenceable entity) throws Exception {
RequestContext.createContext();
String entityjson = InstanceSerialization.toJson(entity, true);
JSONArray entitiesJson = new JSONArray();
entitiesJson.put(entityjson);
String response = metadataService.createEntities(entitiesJson.toString());
JSONArray guids = new JSONArray(response);
if (guids != null && guids.length() > 0) {
return guids.getString(0);
return guids.getString(guids.length() - 1);
}
return null;
}
private String updateInstance(Referenceable entity) throws Exception {
RequestContext.createContext();
ParamChecker.notNull(entity, "Entity");
ParamChecker.notNull(entity.getId(), "Entity");
String entityjson = InstanceSerialization.toJson(entity, true);
......@@ -171,33 +173,6 @@ public class DefaultMetadataServiceTest {
return new JSONArray(response).getString(0);
}
private Referenceable createDBEntity() {
Referenceable entity = new Referenceable(TestUtils.DATABASE_TYPE);
String dbName = RandomStringUtils.randomAlphanumeric(10);
entity.set(NAME, dbName);
entity.set("description", "us db");
return entity;
}
private Referenceable createTableEntity(Id dbId) {
Referenceable entity = new Referenceable(TestUtils.TABLE_TYPE);
String tableName = RandomStringUtils.randomAlphanumeric(10);
entity.set(NAME, tableName);
entity.set("description", "random table");
entity.set("type", "type");
entity.set("tableType", "MANAGED");
entity.set("database", dbId);
entity.set("created", new Date());
return entity;
}
private Referenceable createColumnEntity() {
Referenceable entity = new Referenceable(TestUtils.COLUMN_TYPE);
entity.set(NAME, RandomStringUtils.randomAlphanumeric(10));
entity.set("type", "VARCHAR(32)");
return entity;
}
@Test(expectedExceptions = TypeNotFoundException.class)
public void testCreateEntityWithUnknownDatatype() throws Exception {
Referenceable entity = new Referenceable("Unknown datatype");
......@@ -246,8 +221,14 @@ public class DefaultMetadataServiceTest {
assertAuditEvents(id, EntityAuditEvent.EntityAuditAction.ENTITY_DELETE);
}
private List<String> deleteEntities(String... guids) throws AtlasException {
RequestContext.createContext();
return metadataService.deleteEntities(Arrays.asList(guids));
}
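// Like the repository tests, each delete runs in a fresh RequestContext; presumably this is what
// scopes the deleted-entity ids surfaced to the change listener and audit events asserted below.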
private void assertAuditEvents(String id, EntityAuditEvent.EntityAuditAction expectedAction) throws Exception {
List<EntityAuditEvent> events = repository.listEvents(id, null, (short) 10);
List<EntityAuditEvent> events =
auditRepository.listEvents(id, null, (short) 10);
for (EntityAuditEvent event : events) {
if (event.getAction() == expectedAction) {
return;
......@@ -257,7 +238,7 @@ public class DefaultMetadataServiceTest {
}
private void assertAuditEvents(String entityId, int numEvents) throws Exception {
List<EntityAuditEvent> events = repository.listEvents(entityId, null, (short)numEvents);
List<EntityAuditEvent> events = metadataService.getAuditEvents(entityId, null, (short) numEvents);
assertNotNull(events);
assertEquals(events.size(), numEvents);
}
......@@ -357,7 +338,7 @@ public class DefaultMetadataServiceTest {
Assert.assertTrue(partsMap.get("part2").equalsContents(((Map<String, Struct>)tableDefinition.get("partitionsMap")).get("part2")));
//update struct value for existing map key
Struct partition2 = (Struct)partsMap.get("part2");
Struct partition2 = partsMap.get("part2");
partition2.set(NAME, "test2Updated");
updateInstance(table);
tableDefinitionJson =
......@@ -457,6 +438,11 @@ public class DefaultMetadataServiceTest {
Assert.assertEquals(actualColumns, updatedColNameList);
}
private void updateEntityPartial(String guid, Referenceable entity) throws AtlasException {
RequestContext.createContext();
metadataService.updateEntityPartialByGuid(guid, entity);
}
@Test
public void testUpdateEntityArrayOfClass() throws Exception {
......@@ -470,7 +456,7 @@ public class DefaultMetadataServiceTest {
Referenceable tableUpdated = new Referenceable(TestUtils.TABLE_TYPE, new HashMap<String, Object>() {{
put(COLUMNS_ATTR_NAME, columns);
}});
metadataService.updateEntityPartialByGuid(tableId._getId(), tableUpdated);
updateEntityPartial(tableId._getId(), tableUpdated);
verifyArrayUpdates(TestUtils.TABLE_TYPE, NAME, (String) table.get(NAME), columns, COLUMNS_ATTR_NAME);
......@@ -488,7 +474,7 @@ public class DefaultMetadataServiceTest {
tableUpdated = new Referenceable(TestUtils.TABLE_TYPE, new HashMap<String, Object>() {{
put(COLUMNS_ATTR_NAME, updateColumns);
}});
metadataService.updateEntityPartialByGuid(tableId._getId(), tableUpdated);
updateEntityPartial(tableId._getId(), tableUpdated);
verifyArrayUpdates(TestUtils.TABLE_TYPE, NAME, (String) table.get(NAME), updateColumns, COLUMNS_ATTR_NAME);
......@@ -566,15 +552,6 @@ public class DefaultMetadataServiceTest {
}
}
private void assertReferenceables(Referenceable r1, Referenceable r2) {
assertEquals(r1.getTypeName(), r2.getTypeName());
assertTrue(r1.getTraits().equals(r2.getTraits()));
for (String attr : r1.getValuesMap().keySet()) {
assertTrue(r1.getValuesMap().get(attr).equals(r2.getValuesMap().get(attr)));
}
//TODO assert trait instances and complex attributes
}
@Test
public void testStructs() throws Exception {
Struct serdeInstance = new Struct(TestUtils.SERDE_TYPE);
......@@ -853,76 +830,48 @@ public class DefaultMetadataServiceTest {
//expected
}
}
@Test
public void testDeleteEntities() throws Exception {
// Create 2 table entities, each with 3 composite column entities
// Create a table entity, with 3 composite column entities
Referenceable dbEntity = createDBEntity();
String dbGuid = createInstance(dbEntity);
Id dbId = new Id(dbGuid, 0, TestUtils.DATABASE_TYPE);
Referenceable table1Entity = createTableEntity(dbId);
Referenceable table2Entity = createTableEntity(dbId);
Referenceable table1Entity = createTableEntity(dbGuid);
Referenceable col1 = createColumnEntity();
Referenceable col2 = createColumnEntity();
Referenceable col3 = createColumnEntity();
table1Entity.set(COLUMNS_ATTR_NAME, ImmutableList.of(col1, col2, col3));
table2Entity.set(COLUMNS_ATTR_NAME, ImmutableList.of(col1, col2, col3));
createInstance(table1Entity);
createInstance(table2Entity);
// Retrieve the table entities from the repository,
// to get their guids and the composite column guids.
String entityJson = metadataService.getEntityDefinition(TestUtils.TABLE_TYPE,
NAME, (String)table1Entity.get(NAME));
String entityJson = metadataService.getEntityDefinition(TestUtils.TABLE_TYPE,
NAME, (String)table1Entity.get(NAME));
Assert.assertNotNull(entityJson);
table1Entity = InstanceSerialization.fromJsonReferenceable(entityJson, true);
Object val = table1Entity.get(COLUMNS_ATTR_NAME);
Assert.assertTrue(val instanceof List);
List<IReferenceableInstance> table1Columns = (List<IReferenceableInstance>) val;
entityJson = metadataService.getEntityDefinition(TestUtils.TABLE_TYPE,
NAME, (String)table2Entity.get(NAME));
Assert.assertNotNull(entityJson);
table2Entity = InstanceSerialization.fromJsonReferenceable(entityJson, true);
val = table2Entity.get(COLUMNS_ATTR_NAME);
Assert.assertTrue(val instanceof List);
List<IReferenceableInstance> table2Columns = (List<IReferenceableInstance>) val;
List<IReferenceableInstance> table1Columns = (List<IReferenceableInstance>) table1Entity.get(COLUMNS_ATTR_NAME);
// Register an EntityChangeListener to verify the notification mechanism
// is working for deleteEntities().
DeleteEntitiesChangeListener listener = new DeleteEntitiesChangeListener();
metadataService.registerListener(listener);
// Delete the table entities. The deletion should cascade
// to their composite columns.
JSONArray deleteCandidateGuids = new JSONArray();
deleteCandidateGuids.put(table1Entity.getId()._getId());
deleteCandidateGuids.put(table2Entity.getId()._getId());
List<String> deletedGuids = metadataService.deleteEntities(
Arrays.asList(table1Entity.getId()._getId(), table2Entity.getId()._getId()));
List<String> deletedGuids = deleteEntities(table1Entity.getId()._getId());
// Verify that deleteEntities() response has guids for tables and their composite columns.
// Verify that deleteEntities() response has guids for tables and their composite columns.
Assert.assertTrue(deletedGuids.contains(table1Entity.getId()._getId()));
Assert.assertTrue(deletedGuids.contains(table2Entity.getId()._getId()));
for (IReferenceableInstance column : table1Columns) {
Assert.assertTrue(deletedGuids.contains(column.getId()._getId()));
}
for (IReferenceableInstance column : table2Columns) {
Assert.assertTrue(deletedGuids.contains(column.getId()._getId()));
}
// Verify that tables and their composite columns have been deleted from the repository.
for (String guid : deletedGuids) {
try {
metadataService.getEntityDefinition(guid);
Assert.fail(EntityNotFoundException.class.getSimpleName() +
" expected but not thrown. The entity with guid " + guid +
" still exists in the repository after being deleted." );
}
catch(EntityNotFoundException e) {
// The entity does not exist in the repository, so deletion was successful.
}
}
assertEntityDeleted(TABLE_TYPE, NAME, table1Entity.get(NAME));
assertEntityDeleted(COLUMN_TYPE, NAME, col1.get(NAME));
assertEntityDeleted(COLUMN_TYPE, NAME, col2.get(NAME));
assertEntityDeleted(COLUMN_TYPE, NAME, col3.get(NAME));
// Verify that the listener was notified about the deleted entities.
Collection<ITypedReferenceableInstance> deletedEntitiesFromListener = listener.getDeletedEntities();
Assert.assertNotNull(deletedEntitiesFromListener);
......@@ -935,13 +884,22 @@ public class DefaultMetadataServiceTest {
Assert.assertTrue(deletedGuidsFromListener.containsAll(deletedGuids));
}
private void assertEntityDeleted(String typeName, String attributeName, Object attributeValue)
throws AtlasException {
try {
metadataService.getEntityDefinition(typeName, attributeName, (String) attributeValue);
fail("Expected EntityNotFoundException");
} catch(EntityNotFoundException e) {
//expected
}
}
@Test
public void testDeleteEntityByUniqueAttribute() throws Exception {
// Create 2 table entities, each with 3 composite column entities
// Create a table entity, with 3 composite column entities
Referenceable dbEntity = createDBEntity();
String dbGuid = createInstance(dbEntity);
Id dbId = new Id(dbGuid, 0, TestUtils.DATABASE_TYPE);
Referenceable table1Entity = createTableEntity(dbId);
Referenceable table1Entity = createTableEntity(dbGuid);
Referenceable col1 = createColumnEntity();
Referenceable col2 = createColumnEntity();
Referenceable col3 = createColumnEntity();
......@@ -950,12 +908,10 @@ public class DefaultMetadataServiceTest {
// to get their guids and the composite column guids.
String entityJson = metadataService.getEntityDefinition(TestUtils.TABLE_TYPE,
NAME, (String)table1Entity.get(NAME));
NAME, (String) table1Entity.get(NAME));
Assert.assertNotNull(entityJson);
table1Entity = InstanceSerialization.fromJsonReferenceable(entityJson, true);
Object val = table1Entity.get(COLUMNS_ATTR_NAME);
Assert.assertTrue(val instanceof List);
List<IReferenceableInstance> table1Columns = (List<IReferenceableInstance>) val;
List<IReferenceableInstance> table1Columns = (List<IReferenceableInstance>) table1Entity.get(COLUMNS_ATTR_NAME);
// Register an EntityChangeListener to verify the notification mechanism
// is working for deleteEntityByUniqueAttribute().
......@@ -973,17 +929,11 @@ public class DefaultMetadataServiceTest {
}
// Verify that tables and their composite columns have been deleted from the repository.
for (String guid : deletedGuids) {
try {
metadataService.getEntityDefinition(guid);
Assert.fail(EntityNotFoundException.class.getSimpleName() +
" expected but not thrown. The entity with guid " + guid +
" still exists in the repository after being deleted." );
}
catch(EntityNotFoundException e) {
// The entity does not exist in the repository, so deletion was successful.
}
}
// Verify that tables and their composite columns have been deleted from the repository.
assertEntityDeleted(TABLE_TYPE, NAME, table1Entity.get(NAME));
assertEntityDeleted(COLUMN_TYPE, NAME, col1.get(NAME));
assertEntityDeleted(COLUMN_TYPE, NAME, col2.get(NAME));
assertEntityDeleted(COLUMN_TYPE, NAME, col3.get(NAME));
// Verify that the listener was notified about the deleted entities.
Collection<ITypedReferenceableInstance> deletedEntitiesFromListener = listener.getDeletedEntities();
......
......@@ -37,7 +37,7 @@ class GremlinTest extends BaseGremlinTest {
TypeSystem.getInstance().reset()
QueryTestsUtils.setupTypes
gProvider = new TitanGraphProvider()
gp = new DefaultGraphPersistenceStrategy(new GraphBackedMetadataRepository(gProvider))
gp = new DefaultGraphPersistenceStrategy(new GraphBackedMetadataRepository(gProvider, null))
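// Note (illustrative, not part of the original diff): the extra constructor argument above is assumed
// to be the delete handler this commit introduces; these query tests pass null since they never delete entities.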
g = QueryTestsUtils.setupTestGraph(gProvider)
}
......
......@@ -37,7 +37,7 @@ class GremlinTest2 extends BaseGremlinTest {
TypeSystem.getInstance().reset()
QueryTestsUtils.setupTypes
gProvider = new TitanGraphProvider();
gp = new DefaultGraphPersistenceStrategy(new GraphBackedMetadataRepository(gProvider))
gp = new DefaultGraphPersistenceStrategy(new GraphBackedMetadataRepository(gProvider, null))
g = QueryTestsUtils.setupTestGraph(gProvider)
}
......
......@@ -37,7 +37,7 @@ class LineageQueryTest extends BaseGremlinTest {
TypeSystem.getInstance().reset()
QueryTestsUtils.setupTypes
gProvider = new TitanGraphProvider();
gp = new DefaultGraphPersistenceStrategy(new GraphBackedMetadataRepository(gProvider))
gp = new DefaultGraphPersistenceStrategy(new GraphBackedMetadataRepository(gProvider, null))
g = QueryTestsUtils.setupTestGraph(gProvider)
}
......
......@@ -164,7 +164,7 @@ object QueryTestsUtils extends GraphUtils {
val bindings: Bindings = engine.createBindings
bindings.put("g", g)
val hiveGraphFile = FileUtils.getTempDirectory().getPath.toString + File.separator + System.nanoTime() + ".gson"
val hiveGraphFile = FileUtils.getTempDirectory().getPath + File.separator + System.nanoTime() + ".gson"
HiveTitanSample.writeGson(hiveGraphFile)
bindings.put("hiveGraphFile", hiveGraphFile)
......
......@@ -18,15 +18,33 @@
package org.apache.atlas;
import org.apache.atlas.typesystem.ITypedReferenceableInstance;
import org.apache.atlas.typesystem.persistence.Id;
import org.apache.atlas.typesystem.types.ClassType;
import org.apache.atlas.typesystem.types.TypeSystem;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.util.ArrayList;
import java.util.Collection;
import java.util.LinkedHashSet;
import java.util.List;
import java.util.Set;
public class RequestContext {
private static final Logger LOG = LoggerFactory.getLogger(RequestContext.class);
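// One RequestContext is bound per request thread; createContext() below installs it in this ThreadLocal.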
private static final ThreadLocal<RequestContext> CURRENT_CONTEXT = new ThreadLocal<>();
private Set<String> createdEntityIds = new LinkedHashSet<>();
private Set<String> updatedEntityIds = new LinkedHashSet<>();
private Set<String> deletedEntityIds = new LinkedHashSet<>();
private List<ITypedReferenceableInstance> deletedEntities = new ArrayList<>();
private String user;
private long requestTime;
TypeSystem typeSystem = TypeSystem.getInstance();
private RequestContext() {
}
......@@ -37,6 +55,7 @@ public class RequestContext {
public static RequestContext createContext() {
RequestContext context = new RequestContext();
context.requestTime = System.currentTimeMillis();
CURRENT_CONTEXT.set(context);
return context;
}
......@@ -52,4 +71,40 @@ public class RequestContext {
public void setUser(String user) {
this.user = user;
}
public void recordCreatedEntities(Collection<String> createdEntityIds) {
this.createdEntityIds.addAll(createdEntityIds);
}
public void recordUpdatedEntities(Collection<String> updatedEntityIds) {
this.updatedEntityIds.addAll(updatedEntityIds);
}
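// Builds a lightweight typed instance carrying only the id, and records it at most once per guid
// so that interested callers (e.g. change listeners) can later retrieve the deleted entities.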
public void recordDeletedEntity(String entityId, String typeName) throws AtlasException {
ClassType type = typeSystem.getDataType(ClassType.class, typeName);
ITypedReferenceableInstance entity = type.createInstance(new Id(entityId, 0, typeName));
if (deletedEntityIds.add(entityId)) {
deletedEntities.add(entity);
}
}
public List<String> getCreatedEntityIds() {
return new ArrayList<>(createdEntityIds);
}
public List<String> getUpdatedEntityIds() {
return new ArrayList<>(updatedEntityIds);
}
public List<String> getDeletedEntityIds() {
return new ArrayList<>(deletedEntityIds);
}
public List<ITypedReferenceableInstance> getDeletedEntities() {
return deletedEntities;
}
public long getRequestTime() {
return requestTime;
}
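// Illustrative usage (a sketch, not part of this commit; the "hive_table" type name is hypothetical):
//   RequestContext ctx = RequestContext.createContext();                  // bind a context to the current thread
//   ctx.recordDeletedEntity(guid, "hive_table");                          // remember the soft-deleted entity once per guid
//   List<ITypedReferenceableInstance> deleted = ctx.getDeletedEntities(); // e.g. handed to change listeners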
}
......@@ -37,4 +37,5 @@ public interface IInstance {
Map<String, Object> getValuesMap() throws AtlasException;
String toShortString();
}
......@@ -166,6 +166,11 @@ public class Referenceable extends Struct implements IReferenceableInstance {
'}';
}
@Override
public String toShortString() {
return String.format("entity[type=%s guid=%s]", typeName, id._getId());
}
public void replaceWithNewId(Id id) {
this.id = id;
}
......
......@@ -78,6 +78,11 @@ public class Struct implements IStruct {
}
@Override
public String toShortString() {
return String.format("struct[type=%s]", typeName);
}
@Override
public int hashCode() {
int result = typeName.hashCode();
result = 31 * result + values.hashCode();
......
......@@ -70,6 +70,11 @@ public class DownCastStructInstance implements IStruct {
}
return m;
}
@Override
public String toShortString() {
return toString();
}
}
......@@ -58,20 +58,20 @@ public class Id implements ITypedReferenceableInstance {
}
}
public Id(String id, int version, String className) {
this(id, version, className, null);
public Id(String id, int version, String typeName) {
this(id, version, typeName, null);
}
public Id(long id, int version, String className) {
this("" + id, version, className);
public Id(long id, int version, String typeName) {
this("" + id, version, typeName);
}
public Id(long id, int version, String className, String state) {
this("" + id, version, className, state);
public Id(long id, int version, String typeName, String state) {
this("" + id, version, typeName, state);
}
public Id(String className) {
this("" + (-System.nanoTime()), 0, className);
public Id(String typeName) {
this("" + (-System.nanoTime()), 0, typeName);
}
public boolean isUnassigned() {
......@@ -93,10 +93,16 @@ public class Id implements ITypedReferenceableInstance {
return true;
}
@Override
public String toString() {
return String.format("(type: %s, id: %s)", typeName, isUnassigned() ? "<unassigned>" : "" + id);
}
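// Unlike toString(), the short form includes the id's state so soft-deleted entities are distinguishable in logs.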
@Override
public String toShortString() {
return String.format("id[type=%s guid=%s state=%s]", typeName, id, state);
}
public String getClassName() {
return typeName;
}
......
......@@ -98,6 +98,19 @@ public class ReferenceableInstance extends StructInstance implements ITypedRefer
}
@Override
public String toShortString() {
String name = null;
if (fieldMapping().fields.containsKey("name")) {
try {
name = getString("name");
} catch (AtlasException e) {
//ignore if the name attribute is not set
}
}
return String.format("entity[type=%s guid=%s name=%s]", getTypeName(), getId()._getId(), name);
}
@Override
public String getSignatureHash(MessageDigest digester) throws AtlasException {
ClassType classType = TypeSystem.getInstance().getDataType(ClassType.class, getTypeName());
classType.updateSignatureHash(digester, this);
......
......@@ -764,4 +764,9 @@ public class StructInstance implements ITypedStruct {
byte[] digest = digester.digest();
return MD5Utils.toString(digest);
}
@Override
public String toShortString() {
return String.format("struct[type=%s]", dataTypeName);
}
}
......@@ -17,10 +17,12 @@
package org.apache.atlas;
import org.apache.atlas.typesystem.types.TypeSystem;
import org.apache.commons.configuration.Configuration;
import org.testng.Assert;
import org.testng.annotations.Test;
import static org.testng.Assert.assertEquals;
public class ApplicationPropertiesTest {
@Test
......@@ -28,17 +30,17 @@ public class ApplicationPropertiesTest {
Configuration properties = ApplicationProperties.get(ApplicationProperties.APPLICATION_PROPERTIES);
//plain property without variables
Assert.assertEquals(properties.getString("atlas.service"), "atlas");
assertEquals(properties.getString("atlas.service"), "atlas");
//property containing system property
String data = "/var/data/" + System.getProperty("user.name") + "/atlas";
Assert.assertEquals(properties.getString("atlas.data"), data);
assertEquals(properties.getString("atlas.data"), data);
//property referencing other property
Assert.assertEquals(properties.getString("atlas.graph.data"), data + "/graph");
assertEquals(properties.getString("atlas.graph.data"), data + "/graph");
//invalid system property - not substituted
Assert.assertEquals(properties.getString("atlas.db"), "${atlasdb}");
assertEquals(properties.getString("atlas.db"), "${atlasdb}");
}
@Test
......@@ -47,9 +49,20 @@ public class ApplicationPropertiesTest {
Configuration configuration = ApplicationProperties.get(ApplicationProperties.APPLICATION_PROPERTIES);
Configuration subConfiguration = configuration.subset("atlas");
Assert.assertEquals(subConfiguration.getString("service"), "atlas");
assertEquals(subConfiguration.getString("service"), "atlas");
String data = "/var/data/" + System.getProperty("user.name") + "/atlas";
Assert.assertEquals(subConfiguration.getString("data"), data);
Assert.assertEquals(subConfiguration.getString("graph.data"), data + "/graph");
assertEquals(subConfiguration.getString("data"), data);
assertEquals(subConfiguration.getString("graph.data"), data + "/graph");
}
@Test
public void testGetClass() throws Exception {
//read from atlas-application.properties
Class cls = ApplicationProperties.getClass("atlas.TypeSystem.impl", ApplicationProperties.class.getName());
assertEquals(cls.getName(), TypeSystem.class.getName());
//default value
cls = ApplicationProperties.getClass("atlas.TypeSystem2.impl", TypeSystem.class.getName());
assertEquals(cls.getName(), TypeSystem.class.getName());
}
}
......@@ -26,4 +26,6 @@ atlas.graph.data=${atlas.data}/graph
atlas.service=atlas
#invalid system property
atlas.db=${atlasdb}
\ No newline at end of file
atlas.db=${atlasdb}
atlas.TypeSystem.impl=org.apache.atlas.typesystem.types.TypeSystem
\ No newline at end of file
......@@ -21,6 +21,7 @@ package org.apache.atlas.notification;
import com.google.inject.Inject;
import org.apache.atlas.notification.hook.HookNotification;
import org.apache.atlas.typesystem.Referenceable;
import org.apache.atlas.typesystem.persistence.Id;
import org.apache.atlas.web.resources.BaseResourceIT;
import org.codehaus.jettison.json.JSONArray;
import org.testng.annotations.AfterClass;
......@@ -127,20 +128,19 @@ public class NotificationHookConsumerIT extends BaseResourceIT {
@Test
public void testDeleteByQualifiedName() throws Exception {
final Referenceable entity = new Referenceable(DATABASE_TYPE);
Referenceable entity = new Referenceable(DATABASE_TYPE);
final String dbName = "db" + randomString();
entity.set("name", dbName);
entity.set("description", randomString());
serviceClient.createEntity(entity);
final String dbId = serviceClient.createEntity(entity).getString(0);
sendHookMessage(
new HookNotification.EntityDeleteRequest(TEST_USER, DATABASE_TYPE, "name", dbName));
waitFor(MAX_WAIT_TIME, new Predicate() {
@Override
public boolean evaluate() throws Exception {
JSONArray results = serviceClient.searchByDSL(String.format("%s where name='%s'", DATABASE_TYPE,
dbName));
return results.length() == 0;
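// With soft delete, the entity is still retrievable after the delete request;
// verify that its state has transitioned to DELETED instead of expecting it to vanish from search results.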
Referenceable getEntity = serviceClient.getEntity(dbId);
return getEntity.getId().getState() == Id.EntityState.DELETED;
}
});
}
......
......@@ -810,15 +810,8 @@ public class EntityJerseyResourceIT extends BaseResourceIT {
// Verify entities were deleted from the repository.
for (String guid : deletedGuidsList) {
try {
serviceClient.getEntity(guid);
Assert.fail(AtlasServiceException.class.getSimpleName() +
" was expected but not thrown. The entity with guid " + guid +
" still exists in the repository after being deleted.");
}
catch (AtlasServiceException e) {
Assert.assertTrue(e.getMessage().contains(Integer.toString(Response.Status.NOT_FOUND.getStatusCode())));
}
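// With soft delete enabled, getEntity() still succeeds for a deleted guid;
// assert that the returned entity's state is DELETED rather than expecting a 404.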
Referenceable entity = serviceClient.getEntity(guid);
assertEquals(entity.getId().getState(), Id.EntityState.DELETED);
}
}
......@@ -844,15 +837,8 @@ public class EntityJerseyResourceIT extends BaseResourceIT {
// Verify entities were deleted from the repository.
for (String guid : deletedGuidsList) {
try {
serviceClient.getEntity(guid);
Assert.fail(AtlasServiceException.class.getSimpleName() +
" was expected but not thrown. The entity with guid " + guid +
" still exists in the repository after being deleted.");
}
catch (AtlasServiceException e) {
Assert.assertTrue(e.getMessage().contains(Integer.toString(Response.Status.NOT_FOUND.getStatusCode())));
}
Referenceable entity = serviceClient.getEntity(guid);
assertEquals(entity.getId().getState(), Id.EntityState.DELETED);
}
}
......@@ -874,15 +860,8 @@ public class EntityJerseyResourceIT extends BaseResourceIT {
// Verify entities were deleted from the repository.
for (String guid : deletedGuidsList) {
try {
serviceClient.getEntity(guid);
Assert.fail(AtlasServiceException.class.getSimpleName() +
" was expected but not thrown. The entity with guid " + guid +
" still exists in the repository after being deleted.");
}
catch (AtlasServiceException e) {
Assert.assertTrue(e.getMessage().contains(Integer.toString(Response.Status.NOT_FOUND.getStatusCode())));
}
Referenceable entity = serviceClient.getEntity(guid);
assertEquals(entity.getId().getState(), Id.EntityState.DELETED);
}
}
......