Commit 6b33bcf6 by Shwetha GS

ATLAS-3 Mixed Index creation fails with Date types (suma.shivaprasad via shwethags)

parent 901aca9d
...@@ -338,6 +338,7 @@ ...@@ -338,6 +338,7 @@
<spray.version>1.3.1</spray.version> <spray.version>1.3.1</spray.version>
<guava.version>14.0</guava.version> <guava.version>14.0</guava.version>
<fastutil.version>6.5.16</fastutil.version> <fastutil.version>6.5.16</fastutil.version>
<guice.version>4.0</guice.version>
<PermGen>64m</PermGen> <PermGen>64m</PermGen>
<MaxPermGen>512m</MaxPermGen> <MaxPermGen>512m</MaxPermGen>
...@@ -563,25 +564,25 @@ ...@@ -563,25 +564,25 @@
<dependency> <dependency>
<groupId>com.google.inject</groupId> <groupId>com.google.inject</groupId>
<artifactId>guice</artifactId> <artifactId>guice</artifactId>
<version>4.0</version> <version>${guice.version}</version>
</dependency> </dependency>
<dependency> <dependency>
<groupId>com.google.inject.extensions</groupId> <groupId>com.google.inject.extensions</groupId>
<artifactId>guice-throwingproviders</artifactId> <artifactId>guice-throwingproviders</artifactId>
<version>4.0</version> <version>${guice.version}</version>
</dependency> </dependency>
<dependency> <dependency>
<groupId>com.google.inject.extensions</groupId> <groupId>com.google.inject.extensions</groupId>
<artifactId>guice-multibindings</artifactId> <artifactId>guice-multibindings</artifactId>
<version>4.0</version> <version>${guice.version}</version>
</dependency> </dependency>
<dependency> <dependency>
<groupId>com.google.inject.extensions</groupId> <groupId>com.google.inject.extensions</groupId>
<artifactId>guice-servlet</artifactId> <artifactId>guice-servlet</artifactId>
<version>4.0</version> <version>${guice.version}</version>
</dependency> </dependency>
<dependency> <dependency>
...@@ -591,6 +592,13 @@ ...@@ -591,6 +592,13 @@
</dependency> </dependency>
<dependency> <dependency>
<groupId>org.skyscreamer</groupId>
<artifactId>jsonassert</artifactId>
<version>1.2.0</version>
<scope>test</scope>
</dependency>
<dependency>
<groupId>joda-time</groupId> <groupId>joda-time</groupId>
<artifactId>joda-time</artifactId> <artifactId>joda-time</artifactId>
<version>2.5</version> <version>2.5</version>
......
...@@ -4,6 +4,7 @@ Apache Atlas Release Notes ...@@ -4,6 +4,7 @@ Apache Atlas Release Notes
--trunk - unreleased --trunk - unreleased
INCOMPATIBLE CHANGES: INCOMPATIBLE CHANGES:
ATLAS-3 Mixed Index creation fails with Date types (suma.shivaprasad via shwethags)
ALL CHANGES: ALL CHANGES:
ATLAS-31 ATLAS build fails with clean repo (suma.shivaprasad via shwethags) ATLAS-31 ATLAS build fails with clean repo (suma.shivaprasad via shwethags)
......
...@@ -55,6 +55,11 @@ ...@@ -55,6 +55,11 @@
</dependency> </dependency>
<dependency> <dependency>
<groupId>joda-time</groupId>
<artifactId>joda-time</artifactId>
</dependency>
<dependency>
<groupId>com.google.inject</groupId> <groupId>com.google.inject</groupId>
<artifactId>guice</artifactId> <artifactId>guice</artifactId>
</dependency> </dependency>
...@@ -146,6 +151,11 @@ ...@@ -146,6 +151,11 @@
</dependency> </dependency>
<dependency> <dependency>
<groupId>org.skyscreamer</groupId>
<artifactId>jsonassert</artifactId>
</dependency>
<dependency>
<groupId>org.testng</groupId> <groupId>org.testng</groupId>
<artifactId>testng</artifactId> <artifactId>testng</artifactId>
</dependency> </dependency>
......
...@@ -857,7 +857,9 @@ public class GraphBackedMetadataRepository implements MetadataRepository { ...@@ -857,7 +857,9 @@ public class GraphBackedMetadataRepository implements MetadataRepository {
} else if (attributeInfo.dataType() == DataTypes.BIGDECIMAL_TYPE) { } else if (attributeInfo.dataType() == DataTypes.BIGDECIMAL_TYPE) {
propertyValue = typedInstance.getBigDecimal(attributeInfo.name); propertyValue = typedInstance.getBigDecimal(attributeInfo.name);
} else if (attributeInfo.dataType() == DataTypes.DATE_TYPE) { } else if (attributeInfo.dataType() == DataTypes.DATE_TYPE) {
propertyValue = typedInstance.getDate(attributeInfo.name); final Date dateVal = typedInstance.getDate(attributeInfo.name);
//Convert Property value to Long while persisting
propertyValue = dateVal.getTime();
} }
addProperty(instanceVertex, vertexPropertyName, propertyValue); addProperty(instanceVertex, vertexPropertyName, propertyValue);
} }
...@@ -1180,7 +1182,8 @@ public class GraphBackedMetadataRepository implements MetadataRepository { ...@@ -1180,7 +1182,8 @@ public class GraphBackedMetadataRepository implements MetadataRepository {
typedInstance typedInstance
.setBigDecimal(attributeInfo.name, instanceVertex.<BigDecimal>getProperty(vertexPropertyName)); .setBigDecimal(attributeInfo.name, instanceVertex.<BigDecimal>getProperty(vertexPropertyName));
} else if (attributeInfo.dataType() == DataTypes.DATE_TYPE) { } else if (attributeInfo.dataType() == DataTypes.DATE_TYPE) {
typedInstance.setDate(attributeInfo.name, instanceVertex.<Date>getProperty(vertexPropertyName)); final Long dateVal = instanceVertex.<Long>getProperty(vertexPropertyName);
typedInstance.setDate(attributeInfo.name, new Date(dateVal));
} }
} }
} }
......
...@@ -65,7 +65,6 @@ public class GraphBackedSearchIndexer implements SearchIndexer { ...@@ -65,7 +65,6 @@ public class GraphBackedSearchIndexer implements SearchIndexer {
this.titanGraph = graphProvider.get(); this.titanGraph = graphProvider.get();
/* Create the transaction for indexing. /* Create the transaction for indexing.
* Commit/rollback is expected to be called from the caller.
*/ */
management = titanGraph.getManagementSystem(); management = titanGraph.getManagementSystem();
initialize(); initialize();
...@@ -257,7 +256,8 @@ public class GraphBackedSearchIndexer implements SearchIndexer { ...@@ -257,7 +256,8 @@ public class GraphBackedSearchIndexer implements SearchIndexer {
} else if (dataType == DataTypes.BIGDECIMAL_TYPE) { } else if (dataType == DataTypes.BIGDECIMAL_TYPE) {
return BigDecimal.class; return BigDecimal.class;
} else if (dataType == DataTypes.DATE_TYPE) { } else if (dataType == DataTypes.DATE_TYPE) {
return Date.class; //Indexing with date converted to long as of now since Titan is yet to add support for Date type with mixed indexes
return Long.class;
} }
...@@ -333,9 +333,7 @@ public class GraphBackedSearchIndexer implements SearchIndexer { ...@@ -333,9 +333,7 @@ public class GraphBackedSearchIndexer implements SearchIndexer {
} }
private boolean checkIfMixedIndexApplicable(Class propertyClass) { private boolean checkIfMixedIndexApplicable(Class propertyClass) {
//TODO - Check why date types are failing in ES/Solr if (propertyClass == Boolean.class || propertyClass == BigDecimal.class || propertyClass == BigInteger.class) {
if (propertyClass == Boolean.class || propertyClass == BigDecimal.class || propertyClass == BigInteger.class
|| propertyClass == Date.class) {
return false; return false;
} }
return true; return true;
......
...@@ -490,6 +490,8 @@ object Expressions { ...@@ -490,6 +490,8 @@ object Expressions {
def string(rawValue: Any) = literal(DataTypes.STRING_TYPE, rawValue) def string(rawValue: Any) = literal(DataTypes.STRING_TYPE, rawValue)
def date(rawValue: Any) = literal(DataTypes.DATE_TYPE, rawValue)
case class ArithmeticExpression(symbol: String, case class ArithmeticExpression(symbol: String,
left: Expression, left: Expression,
right: Expression) right: Expression)
...@@ -598,7 +600,10 @@ object Expressions { ...@@ -598,7 +600,10 @@ object Expressions {
throw new UnresolvedException(this, throw new UnresolvedException(this,
s"datatype. Can not resolve due to unresolved children") s"datatype. Can not resolve due to unresolved children")
} }
if (left.dataType != DataTypes.STRING_TYPE || right.dataType != DataTypes.STRING_TYPE) {
if(left.dataType == DataTypes.DATE_TYPE) {
DataTypes.DATE_TYPE
} else if (left.dataType != DataTypes.STRING_TYPE || right.dataType != DataTypes.STRING_TYPE) {
TypeUtils.combinedType(left.dataType, right.dataType) TypeUtils.combinedType(left.dataType, right.dataType)
} }
DataTypes.BOOLEAN_TYPE DataTypes.BOOLEAN_TYPE
......
...@@ -18,6 +18,8 @@ ...@@ -18,6 +18,8 @@
package org.apache.atlas.query package org.apache.atlas.query
import java.util.Date
import com.thinkaurelius.titan.core.TitanVertex import com.thinkaurelius.titan.core.TitanVertex
import com.tinkerpop.blueprints.Direction import com.tinkerpop.blueprints.Direction
import org.apache.atlas.query.Expressions.{ComparisonExpression, ExpressionException} import org.apache.atlas.query.Expressions.{ComparisonExpression, ExpressionException}
...@@ -304,6 +306,10 @@ object GraphPersistenceStrategy1 extends GraphPersistenceStrategies { ...@@ -304,6 +306,10 @@ object GraphPersistenceStrategy1 extends GraphPersistenceStrategies {
case x: FloatType => i.setFloat(aInfo.name, v.getProperty[java.lang.Float](fName)) case x: FloatType => i.setFloat(aInfo.name, v.getProperty[java.lang.Float](fName))
case x: DoubleType => i.setDouble(aInfo.name, v.getProperty[java.lang.Double](fName)) case x: DoubleType => i.setDouble(aInfo.name, v.getProperty[java.lang.Double](fName))
case x: StringType => i.setString(aInfo.name, v.getProperty[java.lang.String](fName)) case x: StringType => i.setString(aInfo.name, v.getProperty[java.lang.String](fName))
case x: DateType => {
val dateVal = v.getProperty[java.lang.Long](fName)
i.setDate(aInfo.name, new Date(dateVal))
}
case _ => throw new UnsupportedOperationException(s"load for ${aInfo.dataType()} not supported") case _ => throw new UnsupportedOperationException(s"load for ${aInfo.dataType()} not supported")
} }
} }
......
...@@ -19,7 +19,9 @@ ...@@ -19,7 +19,9 @@
package org.apache.atlas.query package org.apache.atlas.query
import org.apache.atlas.query.Expressions._ import org.apache.atlas.query.Expressions._
import org.apache.atlas.typesystem.types.{TypeSystem, DataTypes}
import org.apache.atlas.typesystem.types.DataTypes.TypeCategory import org.apache.atlas.typesystem.types.DataTypes.TypeCategory
import org.joda.time.format.ISODateTimeFormat
import scala.collection.mutable import scala.collection.mutable
import scala.collection.mutable.ArrayBuffer import scala.collection.mutable.ArrayBuffer
...@@ -227,12 +229,29 @@ class GremlinTranslator(expr: Expression, ...@@ -227,12 +229,29 @@ class GremlinTranslator(expr: Expression,
} }
} }
case c@ComparisonExpression(symb, f@FieldExpression(fieldName, fInfo, ch), l) => { case c@ComparisonExpression(symb, f@FieldExpression(fieldName, fInfo, ch), l) => {
val QUOTE = "\"";
val fieldGremlinExpr = s"${gPersistenceBehavior.fieldNameInVertex(fInfo.dataType, fInfo.attrInfo)}" val fieldGremlinExpr = s"${gPersistenceBehavior.fieldNameInVertex(fInfo.dataType, fInfo.attrInfo)}"
ch match { ch match {
case Some(child) => { case Some(child) => {
s"""${genQuery(child, inSelect)}.has("$fieldGremlinExpr", ${gPersistenceBehavior.gremlinCompOp(c)}, $l)""" s"""${genQuery(child, inSelect)}.has("$fieldGremlinExpr", ${gPersistenceBehavior.gremlinCompOp(c)}, $l)"""
} }
case None => s"""has("$fieldGremlinExpr", ${gPersistenceBehavior.gremlinCompOp(c)}, $l)""" case None => {
if (fInfo.attrInfo.dataType == DataTypes.DATE_TYPE) {
try {
//Accepts both date, datetime formats
val dateStr = l.toString.stripPrefix(QUOTE).stripSuffix(QUOTE)
val dateVal = ISODateTimeFormat.dateOptionalTimeParser().parseDateTime(dateStr).getMillis
s"""has("$fieldGremlinExpr", ${gPersistenceBehavior.gremlinCompOp(c)},${dateVal})"""
} catch {
case pe: java.text.ParseException =>
throw new GremlinTranslationException(c,
"Date format " + l + " not supported. Should be of the format " + TypeSystem.getInstance().getDateFormat.toPattern);
}
}
else
s"""has("$fieldGremlinExpr", ${gPersistenceBehavior.gremlinCompOp(c)}, $l)"""
}
} }
} }
case fil@FilterExpression(child, condExpr) => { case fil@FilterExpression(child, condExpr) => {
......
...@@ -169,7 +169,8 @@ public class GraphBackedDiscoveryServiceTest { ...@@ -169,7 +169,8 @@ public class GraphBackedDiscoveryServiceTest {
@DataProvider(name = "dslQueriesProvider") @DataProvider(name = "dslQueriesProvider")
private Object[][] createDSLQueries() { private Object[][] createDSLQueries() {
return new String[][]{{"from DB"}, {"DB"}, {"DB where DB.name=\"Reporting\""}, {"DB DB.name = \"Reporting\""}, return new String[][]{
{"from DB"}, {"DB"}, {"DB where DB.name=\"Reporting\""}, {"DB DB.name = \"Reporting\""},
{"DB where DB.name=\"Reporting\" select name, owner"}, {"DB has name"}, {"DB, Table"}, {"DB where DB.name=\"Reporting\" select name, owner"}, {"DB has name"}, {"DB, Table"},
{"DB is JdbcAccess"}, {"DB is JdbcAccess"},
/* /*
...@@ -186,6 +187,8 @@ public class GraphBackedDiscoveryServiceTest { ...@@ -186,6 +187,8 @@ public class GraphBackedDiscoveryServiceTest {
{"DB where DB is JdbcAccess"}, {"DB where DB has name"}, {"DB Table"}, {"DB where DB has name"}, {"DB where DB is JdbcAccess"}, {"DB where DB has name"}, {"DB Table"}, {"DB where DB has name"},
{"DB as db1 Table where (db1.name = \"Reporting\")"}, {"DB as db1 Table where (db1.name = \"Reporting\")"},
{"DB where (name = \"Reporting\") select name as _col_0, (createTime + 1) as _col_1 "}, {"DB where (name = \"Reporting\") select name as _col_0, (createTime + 1) as _col_1 "},
{"Table where (name = \"sales_fact\" and created > \"2014-01-01\" ) select name as _col_0, created as _col_1 "},
{"Table where (name = \"sales_fact\" and created > \"2014-12-11T02:35:58.440Z\" ) select name as _col_0, created as _col_1 "},
/* /*
todo: does not work todo: does not work
{"DB where (name = \"Reporting\") and ((createTime + 1) > 0)"}, {"DB where (name = \"Reporting\") and ((createTime + 1) > 0)"},
......
...@@ -21,15 +21,20 @@ package org.apache.atlas.repository.graph; ...@@ -21,15 +21,20 @@ package org.apache.atlas.repository.graph;
import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableList;
import com.thinkaurelius.titan.core.TitanFactory; import com.thinkaurelius.titan.core.TitanFactory;
import com.thinkaurelius.titan.core.TitanGraph; import com.thinkaurelius.titan.core.TitanGraph;
import com.thinkaurelius.titan.core.TitanGraphQuery;
import com.thinkaurelius.titan.core.TitanIndexQuery; import com.thinkaurelius.titan.core.TitanIndexQuery;
import com.thinkaurelius.titan.core.schema.TitanGraphIndex;
import com.thinkaurelius.titan.diskstorage.BackendException; import com.thinkaurelius.titan.diskstorage.BackendException;
import com.thinkaurelius.titan.diskstorage.configuration.ReadConfiguration; import com.thinkaurelius.titan.diskstorage.configuration.ReadConfiguration;
import com.thinkaurelius.titan.diskstorage.configuration.backend.CommonsConfiguration; import com.thinkaurelius.titan.diskstorage.configuration.backend.CommonsConfiguration;
import com.thinkaurelius.titan.graphdb.configuration.GraphDatabaseConfiguration; import com.thinkaurelius.titan.graphdb.configuration.GraphDatabaseConfiguration;
import com.tinkerpop.blueprints.Compare; import com.tinkerpop.blueprints.Compare;
import com.tinkerpop.blueprints.Edge;
import com.tinkerpop.blueprints.GraphQuery; import com.tinkerpop.blueprints.GraphQuery;
import com.tinkerpop.blueprints.Predicate;
import com.tinkerpop.blueprints.Vertex; import com.tinkerpop.blueprints.Vertex;
import org.apache.atlas.GraphTransaction; import org.apache.atlas.GraphTransaction;
import org.apache.atlas.repository.BaseTest;
import org.apache.atlas.repository.Constants; import org.apache.atlas.repository.Constants;
import org.apache.atlas.typesystem.ITypedReferenceableInstance; import org.apache.atlas.typesystem.ITypedReferenceableInstance;
import org.apache.atlas.typesystem.Referenceable; import org.apache.atlas.typesystem.Referenceable;
...@@ -55,6 +60,7 @@ import org.testng.annotations.Test; ...@@ -55,6 +60,7 @@ import org.testng.annotations.Test;
import java.io.File; import java.io.File;
import java.io.IOException; import java.io.IOException;
import java.util.ArrayList; import java.util.ArrayList;
import java.util.Date;
import java.util.Map; import java.util.Map;
import java.util.Random; import java.util.Random;
...@@ -157,6 +163,7 @@ public class GraphRepoMapperScaleTest { ...@@ -157,6 +163,7 @@ public class GraphRepoMapperScaleTest {
searchWithOutIndex("hive_table_type.name", "bar-999"); searchWithOutIndex("hive_table_type.name", "bar-999");
searchWithIndex("hive_table_type.name", "bar-999"); searchWithIndex("hive_table_type.name", "bar-999");
searchWithIndex("hive_table_type.created", Compare.GREATER_THAN_EQUAL, BaseTest.TEST_DATE_IN_LONG);
for (int index = 500; index < 600; index++) { for (int index = 500; index < 600; index++) {
searchWithIndex("hive_table_type.name", "bar-" + index); searchWithIndex("hive_table_type.name", "bar-" + index);
...@@ -190,6 +197,21 @@ public class GraphRepoMapperScaleTest { ...@@ -190,6 +197,21 @@ public class GraphRepoMapperScaleTest {
} }
} finally { } finally {
System.out.println("Search on [" + key + "=" + value + "] returned results: " + count + ", took " + ( System.out.println("Search on [" + key + "=" + value + "] returned results: " + count + ", took " + (
System.currentTimeMillis() - start) + " ms");
}
}
// Overload added by this commit: runs an indexed Titan graph query with an explicit
// comparison predicate (e.g. Compare.GREATER_THAN_EQUAL) instead of plain equality,
// counts the matching vertices, and logs the elapsed time. Used above to verify that
// the "hive_table_type.created" Date attribute — now persisted as a Long — is queryable
// through the mixed index (see the BaseTest.TEST_DATE_IN_LONG search call).
// NOTE(review): the log message prints "key=value" and omits the predicate; presumably
// acceptable for a scale test, but confirm if this log is parsed anywhere.
private void searchWithIndex(String key, Predicate searchPredicate, Object value) {
TitanGraph graph = graphProvider.get();
long start = System.currentTimeMillis();
int count = 0;
try {
// Raw GraphQuery with a range/comparison predicate exercises the mixed (ES/Solr) index path.
GraphQuery query = graph.query().has(key, searchPredicate, value);
for (Vertex ignored : query.vertices()) {
count++;
}
} finally {
System.out.println("Search on [" + key + "=" + value + "] returned results: " + count + ", took " + (
System.currentTimeMillis() - start) + " ms"); System.currentTimeMillis() - start) + " ms");
} }
} }
...@@ -222,6 +244,7 @@ public class GraphRepoMapperScaleTest { ...@@ -222,6 +244,7 @@ public class GraphRepoMapperScaleTest {
TypesUtil.createUniqueRequiredAttrDef("name", DataTypes.STRING_TYPE), TypesUtil.createUniqueRequiredAttrDef("name", DataTypes.STRING_TYPE),
TypesUtil.createRequiredAttrDef("description", DataTypes.STRING_TYPE), TypesUtil.createRequiredAttrDef("description", DataTypes.STRING_TYPE),
TypesUtil.createRequiredAttrDef("type", DataTypes.STRING_TYPE), TypesUtil.createRequiredAttrDef("type", DataTypes.STRING_TYPE),
TypesUtil.createOptionalAttrDef("created", DataTypes.DATE_TYPE),
// enum // enum
new AttributeDefinition("tableType", "table_type", Multiplicity.REQUIRED, false, null), new AttributeDefinition("tableType", "table_type", Multiplicity.REQUIRED, false, null),
// array of strings // array of strings
...@@ -262,6 +285,7 @@ public class GraphRepoMapperScaleTest { ...@@ -262,6 +285,7 @@ public class GraphRepoMapperScaleTest {
tableInstance.set("name", TABLE_NAME + "-" + uberIndex); tableInstance.set("name", TABLE_NAME + "-" + uberIndex);
tableInstance.set("description", "bar table" + "-" + uberIndex); tableInstance.set("description", "bar table" + "-" + uberIndex);
tableInstance.set("type", "managed"); tableInstance.set("type", "managed");
tableInstance.set("created", new Date(BaseTest.TEST_DATE_IN_LONG));
tableInstance.set("tableType", 1); // enum tableInstance.set("tableType", 1); // enum
// refer to an existing class // refer to an existing class
......
...@@ -19,12 +19,14 @@ ...@@ -19,12 +19,14 @@
package org.apache.atlas.query package org.apache.atlas.query
import java.io.File import java.io.File
import java.util.UUID import java.util.{Date, UUID}
import java.util.concurrent.atomic.AtomicInteger import java.util.concurrent.atomic.AtomicInteger
import javax.script.{Bindings, ScriptEngine, ScriptEngineManager} import javax.script.{Bindings, ScriptEngine, ScriptEngineManager}
import com.thinkaurelius.titan.core.TitanGraph import com.thinkaurelius.titan.core.TitanGraph
import com.typesafe.config.ConfigFactory import com.typesafe.config.ConfigFactory
import org.apache.atlas.repository.BaseTest
import org.apache.atlas.typesystem.types.TypeSystem
import org.apache.commons.io.FileUtils import org.apache.commons.io.FileUtils
import scala.collection.mutable.ArrayBuffer import scala.collection.mutable.ArrayBuffer
...@@ -54,18 +56,19 @@ object HiveTitanSample { ...@@ -54,18 +56,19 @@ object HiveTitanSample {
this.getClass.getDeclaredFields filter (_.getName != "traits") foreach { f => this.getClass.getDeclaredFields filter (_.getName != "traits") foreach { f =>
f.setAccessible(true) f.setAccessible(true)
var fV = f.get(this) val fV = f.get(this)
fV = fV match { val convertedVal = fV match {
case _: String => s""""$fV"""" case _: String => s""""$fV""""
case d: Date => d.getTime
case _ => fV case _ => fV
} }
fV match { convertedVal match {
case x: Vertex => addEdge(x, s"${this.getClass.getSimpleName}.${f.getName}", edges) case x: Vertex => addEdge(x, s"${this.getClass.getSimpleName}.${f.getName}", edges)
case l: List[_] => l.foreach(x => addEdge(x.asInstanceOf[Vertex], case l: List[_] => l.foreach(x => addEdge(x.asInstanceOf[Vertex],
s"${this.getClass.getSimpleName}.${f.getName}", edges)) s"${this.getClass.getSimpleName}.${f.getName}", edges))
case _ => sb.append( s""", "${f.getName}" : $fV""") case _ => sb.append( s""", "${f.getName}" : $convertedVal""")
sb.append( s""", "${this.getClass.getSimpleName}.${f.getName}" : $fV""") sb.append( s""", "${this.getClass.getSimpleName}.${f.getName}" : $convertedVal""")
} }
} }
...@@ -127,17 +130,23 @@ object HiveTitanSample { ...@@ -127,17 +130,23 @@ object HiveTitanSample {
_id: String = "" + nextVertexId.incrementAndGet()) extends Instance _id: String = "" + nextVertexId.incrementAndGet()) extends Instance
case class Table(name: String, db: DB, sd: StorageDescriptor, case class Table(name: String, db: DB, sd: StorageDescriptor,
created: Date,
traits: Option[List[Trait]] = None, traits: Option[List[Trait]] = None,
_id: String = "" + nextVertexId.incrementAndGet()) extends Instance _id: String = "" + nextVertexId.incrementAndGet()) extends Instance
case class TableDef(name: String, db: DB, inputFormat: String, outputFormat: String, case class TableDef(name: String, db: DB, inputFormat: String, outputFormat: String,
columns: List[(String, String, Option[List[Trait]])], columns: List[(String, String, Option[List[Trait]])],
traits: Option[List[Trait]] = None) { traits: Option[List[Trait]] = None,
created: Option[Date] = None) {
val createdDate : Date = created match {
case Some(x) => x
case None => new Date(BaseTest.TEST_DATE_IN_LONG)
}
val sd = StorageDescriptor(inputFormat, outputFormat) val sd = StorageDescriptor(inputFormat, outputFormat)
val colDefs = columns map { c => val colDefs = columns map { c =>
Column(c._1, c._2, sd, c._3) Column(c._1, c._2, sd, c._3)
} }
val tablDef = Table(name, db, sd, traits) val tablDef = Table(name, db, sd, createdDate, traits)
def toGSon(vertices: ArrayBuffer[String], def toGSon(vertices: ArrayBuffer[String],
edges: ArrayBuffer[String]): Unit = { edges: ArrayBuffer[String]): Unit = {
...@@ -167,6 +176,7 @@ object HiveTitanSample { ...@@ -167,6 +176,7 @@ object HiveTitanSample {
("time_id", "int", None), ("time_id", "int", None),
("product_id", "int", None), ("product_id", "int", None),
("customer_id", "int", None), ("customer_id", "int", None),
("created", "date", None),
("sales", "double", Some(List(Metric()))) ("sales", "double", Some(List(Metric())))
)) ))
val productDim = TableDef("product_dim", val productDim = TableDef("product_dim",
...@@ -269,6 +279,7 @@ object HiveTitanSample { ...@@ -269,6 +279,7 @@ object HiveTitanSample {
FileUtils.writeStringToFile(new File(fileName), toGSon()) FileUtils.writeStringToFile(new File(fileName), toGSon())
} }
val GremlinQueries = List( val GremlinQueries = List(
// 1. List all DBs // 1. List all DBs
"""g.V.has("typeName", "DB")""", """g.V.has("typeName", "DB")""",
......
...@@ -28,7 +28,10 @@ import com.typesafe.config.{Config, ConfigFactory} ...@@ -28,7 +28,10 @@ import com.typesafe.config.{Config, ConfigFactory}
import org.apache.atlas.typesystem.types._ import org.apache.atlas.typesystem.types._
import org.apache.commons.configuration.{Configuration, ConfigurationException, MapConfiguration} import org.apache.commons.configuration.{Configuration, ConfigurationException, MapConfiguration}
import org.apache.commons.io.FileUtils import org.apache.commons.io.FileUtils
import org.json.JSONObject
import org.scalatest.{Assertions, BeforeAndAfterAll, FunSuite} import org.scalatest.{Assertions, BeforeAndAfterAll, FunSuite}
import org.skyscreamer.jsonassert.JSONAssert
trait GraphUtils { trait GraphUtils {
...@@ -96,7 +99,8 @@ object QueryTestsUtils extends GraphUtils { ...@@ -96,7 +99,8 @@ object QueryTestsUtils extends GraphUtils {
Array( Array(
attrDef("name", DataTypes.STRING_TYPE), attrDef("name", DataTypes.STRING_TYPE),
new AttributeDefinition("db", "DB", Multiplicity.REQUIRED, false, null), new AttributeDefinition("db", "DB", Multiplicity.REQUIRED, false, null),
new AttributeDefinition("sd", "StorageDesc", Multiplicity.REQUIRED, false, null) new AttributeDefinition("sd", "StorageDesc", Multiplicity.REQUIRED, false, null),
attrDef("created", DataTypes.DATE_TYPE)
)) ))
def loadProcessClsDef = new HierarchicalTypeDefinition[ClassType](classOf[ClassType], "LoadProcess", null, def loadProcessClsDef = new HierarchicalTypeDefinition[ClassType](classOf[ClassType], "LoadProcess", null,
...@@ -160,7 +164,9 @@ trait BaseGremlinTest { ...@@ -160,7 +164,9 @@ trait BaseGremlinTest {
if (expected != null) { if (expected != null) {
val a = STRUCT_NAME_REGEX.replaceAllIn(rJ, "") val a = STRUCT_NAME_REGEX.replaceAllIn(rJ, "")
val b = STRUCT_NAME_REGEX.replaceAllIn(expected, "") val b = STRUCT_NAME_REGEX.replaceAllIn(expected, "")
Assertions.assert(a == b) val actualjsonObj = new JSONObject(a)
val expectedjsonObj = new JSONObject(b)
JSONAssert.assertEquals(expectedjsonObj, actualjsonObj, false)
} else { } else {
println(rJ) println(rJ)
} }
......
...@@ -48,7 +48,7 @@ public class TypeSystem { ...@@ -48,7 +48,7 @@ public class TypeSystem {
private static ThreadLocal<SimpleDateFormat> dateFormat = new ThreadLocal() { private static ThreadLocal<SimpleDateFormat> dateFormat = new ThreadLocal() {
@Override @Override
public SimpleDateFormat initialValue() { public SimpleDateFormat initialValue() {
SimpleDateFormat dateFormat = new SimpleDateFormat("yyyy-MM-dd'T'HH:mm:ss.SSSX"); SimpleDateFormat dateFormat = new SimpleDateFormat("yyyy-MM-dd'T'HH:mm:ss.SSS'Z'");
dateFormat.setTimeZone(TimeZone.getTimeZone("UTC")); dateFormat.setTimeZone(TimeZone.getTimeZone("UTC"));
return dateFormat; return dateFormat;
} }
......
Markdown is supported
0% or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment