Commit fad5c8e6 by gss2002

Fixes HiveLineageInfo, HiveHook, and the hivetypes pom.xml to remove the

bad dependency on calcite-0.9.2-incubating-SNAPSHOT, which is no longer usable; the Hive POM at Maven Central needs to be updated for Hive 0.14 and Hive 1.0 to point at calcite-0.9.2-incubating. WHERE/GROUP BY parsing is removed for now, since its value is limited and it doesn't parse nested queries well. Handles CREATE TABLE/CREATE VIEW and prepares to handle ALTERs and INSERTs.
parent 2181803a
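
For context, a minimal sketch of how the reworked lineage parser can be exercised end to end. HiveLineageInfo, getLineageInfo(), and getHLBean() appear in the diff below; the sample query, the wrapper class, and the Gson serialization are illustrative assumptions (gson is a declared dependency in the pom, and the main method below is described as returning a json/gson feed):

// Minimal sketch: run the reworked parser against a CREATE TABLE statement.
// Assumes HiveLineageInfo is on the classpath (package import omitted).
import com.google.gson.Gson;

public class LineageSmokeTest {
    public static void main(String[] args) throws Exception {
        HiveLineageInfo lep = new HiveLineageInfo();
        lep.getLineageInfo("CREATE TABLE sales.orders (id INT, amount DOUBLE)");
        // Expect action=create_table plus the target db/table and the CREATE
        // column list populated by createTableDump().
        System.out.println(new Gson().toJson(lep.getHLBean()));
    }
}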
@@ -56,26 +56,86 @@
<groupId>org.apache.hive</groupId>
<artifactId>hive-metastore</artifactId>
<version>0.14.0</version>
+<exclusions>
+<exclusion>
+<groupId>org.apache.calcite</groupId>
+<artifactId>calcite-core</artifactId>
+</exclusion>
+<exclusion>
+<groupId>org.apache.calcite</groupId>
+<artifactId>calcite-avatica</artifactId>
+</exclusion>
+</exclusions>
</dependency>
<dependency>
<groupId>org.apache.hive</groupId>
<artifactId>hive-exec</artifactId>
<version>0.14.0</version>
+<exclusions>
+<exclusion>
+<groupId>org.apache.calcite</groupId>
+<artifactId>calcite-core</artifactId>
+</exclusion>
+<exclusion>
+<groupId>org.apache.calcite</groupId>
+<artifactId>calcite-avatica</artifactId>
+</exclusion>
+</exclusions>
</dependency>
<dependency>
<groupId>org.apache.hive</groupId>
<artifactId>hive-common</artifactId>
<version>0.14.0</version>
+<exclusions>
+<exclusion>
+<groupId>org.apache.calcite</groupId>
+<artifactId>calcite-core</artifactId>
+</exclusion>
+<exclusion>
+<groupId>org.apache.calcite</groupId>
+<artifactId>calcite-avatica</artifactId>
+</exclusion>
+</exclusions>
</dependency>
<dependency>
<groupId>org.apache.hive</groupId>
<artifactId>hive-cli</artifactId>
<version>0.14.0</version>
+<exclusions>
+<exclusion>
+<groupId>org.apache.calcite</groupId>
+<artifactId>calcite-core</artifactId>
+</exclusion>
+<exclusion>
+<groupId>org.apache.calcite</groupId>
+<artifactId>calcite-avatica</artifactId>
+</exclusion>
+</exclusions>
</dependency>
<dependency>
<groupId>org.apache.hive</groupId>
<artifactId>hive-jdbc</artifactId>
<version>0.14.0</version>
+<exclusions>
+<exclusion>
+<groupId>org.apache.calcite</groupId>
+<artifactId>calcite-core</artifactId>
+</exclusion>
+<exclusion>
+<groupId>org.apache.calcite</groupId>
+<artifactId>calcite-avatica</artifactId>
+</exclusion>
+</exclusions>
</dependency>
+<dependency>
+<groupId>org.apache.calcite</groupId>
+<artifactId>calcite-avatica</artifactId>
+<version>0.9.2-incubating</version>
+</dependency>
+<dependency>
+<groupId>org.apache.calcite</groupId>
+<artifactId>calcite-core</artifactId>
+<version>0.9.2-incubating</version>
+</dependency>
<dependency>
<groupId>com.google.code.gson</groupId>
...
@@ -96,20 +96,36 @@ public class HiveLineageInfo implements NodeProcessor {
switch (pt.getToken().getType()) {
case HiveParser.TOK_TABREF:
sourceTables = new ArrayList<SourceTables>();
case HiveParser.TOK_FROM:
LOG.debug("From Table Dump: "+pt.dump());
fromTableDump(pt);
this.hlb.setSourceTables(sourceTables);
break;
case HiveParser.TOK_SELECT:
queryColumns = new ArrayList<QueryColumns>();
LOG.debug("Column Dump: "+pt.dump());
columnTableDump(pt);
this.hlb.setQueryColumns(queryColumns);
break;
+case HiveParser.TOK_SELECTDI:
+LOG.debug("Column Dump: "+pt.dump());
+columnTableDump(pt);
+break;
+case HiveParser.TOK_CREATETABLE:
+createColumns = new ArrayList<CreateColumns>();
+LOG.debug("CREATETABLE DUMP: "+pt.dump());
+createTableDump(pt);
+break;
+case HiveParser.TOK_CREATEVIEW:
+createColumns = new ArrayList<CreateColumns>();
+LOG.debug("CREATEVIEW DUMP: "+pt.dump());
+createTableDump(pt);
+break;
+/*
+ * Currently disabling processing of WHERE and GROUPBY: no value right now
+ *
case HiveParser.TOK_WHERE:
whereClause = new ArrayList<WhereClause>();
LOG.debug("WHERE CLAUSE DUMP: "+pt.dump());
@@ -124,11 +140,7 @@ public class HiveLineageInfo implements NodeProcessor {
this.hlb.setGroupBy(groupBy);
break;
-case HiveParser.TOK_CREATETABLE:
-createColumns = new ArrayList<CreateColumns>();
-LOG.debug("CREATABLE DUMP: "+pt.dump());
-createTableDump(pt);
-break;
+*/
}
return null;
}
@@ -151,30 +163,30 @@ public class HiveLineageInfo implements NodeProcessor {
if (nodeIn.getParent().getText().equalsIgnoreCase(".")) {
ASTNode checkOrAnd = (ASTNode) nodeIn.getParent().getParent().getChild(1).getParent().getParent();
if (checkOrAnd.getType() == HiveParser.KW_AND || checkOrAnd.getType() == HiveParser.KW_OR) {
-LOG.info("WHERE:: "+checkOrAnd.getText());
+LOG.debug("WHERE:: "+checkOrAnd.getText());
whreClse.setColumnOperator(checkOrAnd.getText());
}
-LOG.info("Table Alias:: "+nodeIn.getChild(0).getText());
+LOG.debug("Table Alias:: "+nodeIn.getChild(0).getText());
whreClse.setTbAliasOrName(nodeIn.getChild(0).getText());
-LOG.info("Delimiter:: "+nodeIn.getParent().getText());
-LOG.info("Column:: "+nodeIn.getParent().getChild(1).getText());
+LOG.debug("Delimiter:: "+nodeIn.getParent().getText());
+LOG.debug("Column:: "+nodeIn.getParent().getChild(1).getText());
whreClse.setColumnName(nodeIn.getParent().getChild(1).getText());
-LOG.info("Column Qualifer:: "+nodeIn.getParent().getParent().getChild(1).getParent().getText());
+LOG.debug("Column Qualifier:: "+nodeIn.getParent().getParent().getChild(1).getParent().getText());
whreClse.setColumnOperator(nodeIn.getParent().getParent().getChild(1).getParent().getText());
-LOG.info("Column Value:: "+nodeIn.getParent().getParent().getChild(1).getText());
+LOG.debug("Column Value:: "+nodeIn.getParent().getParent().getChild(1).getText());
whreClse.setColumnValue(nodeIn.getParent().getParent().getChild(1).getText());
} else {
ASTNode checkOrAnd = (ASTNode) nodeIn.getParent().getParent().getChild(1).getParent();
if (checkOrAnd.getType() == HiveParser.KW_AND || checkOrAnd.getType() == HiveParser.KW_OR) {
-LOG.info("WHERE:: "+checkOrAnd.getText());
+LOG.debug("WHERE:: "+checkOrAnd.getText());
whreClse.setColumnOperator(checkOrAnd.getText());
}
-LOG.info("Column:: = "+nodeIn.getChild(0).getText());
+LOG.debug("Column:: = "+nodeIn.getChild(0).getText());
whreClse.setColumnName(nodeIn.getChild(0).getText());
//LOG.info("Delimiter "+nodeIn.getParent().getText());
-LOG.info("Column Qualifer:: "+nodeIn.getParent().getChild(1).getParent().getText());
+LOG.debug("Column Qualifier:: "+nodeIn.getParent().getChild(1).getParent().getText());
whreClse.setColumnOperator(nodeIn.getParent().getChild(1).getParent().getText());
-LOG.info("Column Value:: "+nodeIn.getParent().getChild(1).getText());
+LOG.debug("Column Value:: "+nodeIn.getParent().getChild(1).getText());
whreClse.setColumnValue(nodeIn.getParent().getChild(1).getText());
}
whereClause.add(whreClse);
@@ -206,12 +218,12 @@ public class HiveLineageInfo implements NodeProcessor {
GroupBy grpBy = hlb.new GroupBy();
ASTNode parentNode = (ASTNode) nodeIn.getParent();
if (parentNode.getText().equalsIgnoreCase(".")) {
-LOG.info("GroupBy TableAlias: "+nodeIn.getChild(0).getText());
+LOG.debug("GroupBy TableAlias: "+nodeIn.getChild(0).getText());
grpBy.setTbAliasOrName(nodeIn.getChild(0).getText());
-LOG.info("GroupBy Column:: "+parentNode.getChild(1).getText());
+LOG.debug("GroupBy Column:: "+parentNode.getChild(1).getText());
grpBy.setColumnName(parentNode.getChild(1).getText());
} else {
-LOG.info("GroupBy Column: "+nodeIn.getChild(0).getText());
+LOG.debug("GroupBy Column: "+nodeIn.getChild(0).getText());
grpBy.setColumnName(nodeIn.getChild(0).getText());
}
groupBy.add(grpBy);
@@ -232,15 +244,23 @@ public class HiveLineageInfo implements NodeProcessor {
public void createTableDump(ASTNode nodeIn) {
counter = 0;
-if (nodeIn.getFirstChildWithType(HiveParser.TOK_TABNAME) != null) {
+if (nodeIn.getFirstChildWithType(HiveParser.TOK_TABNAME) != null && nodeIn.getAncestor(HiveParser.TOK_WHERE) == null) {
-LOG.info("Create TableName:: "+nodeIn.getFirstChildWithType(HiveParser.TOK_TABNAME).getText());
+if (nodeIn.getFirstChildWithType(HiveParser.TOK_TABNAME).getChildCount() == 2) {
+LOG.debug("To DataBaseName:: "+nodeIn.getFirstChildWithType(HiveParser.TOK_TABNAME).getChild(0).getText());
+hlb.setDatabaseName(nodeIn.getFirstChildWithType(HiveParser.TOK_TABNAME).getChild(0).getText());
+LOG.debug("To TableName:: "+nodeIn.getFirstChildWithType(HiveParser.TOK_TABNAME).getChild(1).getText());
+hlb.setTableName(nodeIn.getFirstChildWithType(HiveParser.TOK_TABNAME).getChild(1).getText());
+} else {
+LOG.debug("To TableName:: "+nodeIn.getFirstChildWithType(HiveParser.TOK_TABNAME).getChild(0).getText());
+hlb.setTableName(nodeIn.getFirstChildWithType(HiveParser.TOK_TABNAME).getChild(0).getText());
+}
-if (nodeIn.getFirstChildWithType(HiveParser.TOK_TABLELOCATION) != null) {
-LOG.info("Create Table Location:: "+nodeIn.getFirstChildWithType(HiveParser.TOK_TABLELOCATION).getText());
-}
+if (nodeIn.getFirstChildWithType(HiveParser.TOK_TABLELOCATION) != null && nodeIn.getAncestor(HiveParser.TOK_WHERE) == null) {
+LOG.debug("Create Table Location:: "+nodeIn.getFirstChildWithType(HiveParser.TOK_TABLELOCATION).getText());
+hlb.setTableLocation(nodeIn.getFirstChildWithType(HiveParser.TOK_TABLELOCATION).getChild(0).getText());
+}
-if (nodeIn.getFirstChildWithType(HiveParser.TOK_TABCOLLIST) != null ) {
+if (nodeIn.getFirstChildWithType(HiveParser.TOK_TABCOLLIST) != null && nodeIn.getAncestor(HiveParser.TOK_WHERE) == null) {
ctdump((ASTNode)nodeIn.getFirstChildWithType(HiveParser.TOK_TABCOLLIST).getParent());
hlb.setCreateColumns(createColumns);
}
@@ -253,9 +273,9 @@ public class HiveLineageInfo implements NodeProcessor {
boolean parseChild = true;
if (nodeIn.getType() == HiveParser.TOK_TABCOL) {
CreateColumns crtClmns = hlb.new CreateColumns();
-LOG.info("Create Column Name:: "+nodeIn.getChild(0).getText());
+LOG.debug("Create Column Name:: "+nodeIn.getChild(0).getText());
crtClmns.setColumnName(nodeIn.getChild(0).getText());
-LOG.info("Create Column Type:: "+nodeIn.getChild(1).getText());
+LOG.debug("Create Column Type:: "+nodeIn.getChild(1).getText());
crtClmns.setColumnType(nodeIn.getChild(1).getText());
createColumns.add(crtClmns);
}
@@ -276,27 +296,27 @@
public void fromTableDump(ASTNode nodeIn) {
counter = 0;
ftdump(nodeIn);
}
/**
* Walks the fromTable Tree called by fromTableDump
*/
private void ftdump(ASTNode nodeIn) {
boolean parseChild = true;
-if (nodeIn.getType() == HiveParser.TOK_TABNAME && nodeIn.getParent().getType() == HiveParser.TOK_TABREF) {
+if (nodeIn.getType() == HiveParser.TOK_TABNAME && nodeIn.getParent().getType() == HiveParser.TOK_TABREF && nodeIn.getAncestor(HiveParser.TOK_WHERE) == null) {
SourceTables hlbSbls = hlb.new SourceTables();
if (nodeIn.getChildCount() == 2) {
-LOG.info("From DBName:: "+nodeIn.getChild(0).getText());
+LOG.debug("From DBName:: "+nodeIn.getChild(0).getText());
hlbSbls.setDatabaseName(nodeIn.getChild(0).getText());
-LOG.info("From TableName:: "+nodeIn.getChild(1).getText());
+LOG.debug("From TableName:: "+nodeIn.getChild(1).getText());
hlbSbls.setTableName(nodeIn.getChild(1).getText());
} else {
-LOG.info("From TableName:: "+nodeIn.getChild(0).getText());
+LOG.debug("From TableName:: "+nodeIn.getChild(0).getText());
hlbSbls.setTableName(nodeIn.getChild(0).getText());
}
if (nodeIn.getType() == HiveParser.TOK_TABNAME && nodeIn.getParent().getChild(1) != null) {
-LOG.info("From DB/Table Alias:: "+nodeIn.getParent().getChild(1).getText());
+LOG.debug("From DB/Table Alias:: "+nodeIn.getParent().getChild(1).getText());
hlbSbls.setTableAlias(nodeIn.getParent().getChild(1).getText());
}
sourceTables.add(hlbSbls);
@@ -309,7 +329,7 @@ private void ftdump(ASTNode nodeIn) {
}
}
}
}
/**
* Walks the column Tree called by processWalker
@@ -318,32 +338,37 @@
public void columnTableDump(ASTNode nodeIn) {
counter = 0;
clmnTdump(nodeIn);
}
/**
* Walks the columnDump Tree called by columnTableDump
*/
private void clmnTdump(ASTNode nodeIn) {
boolean parseChild = true;
-if (nodeIn.getType() == HiveParser.TOK_TABLE_OR_COL && nodeIn.getAncestor(HiveParser.TOK_SELEXPR) != null ) {
+if (nodeIn.getType() == HiveParser.TOK_TABLE_OR_COL && nodeIn.getAncestor(HiveParser.TOK_SELEXPR) != null && !(nodeIn.hasAncestor(HiveParser.TOK_WHERE))) {
QueryColumns qclmns = hlb.new QueryColumns();
if (nodeIn.getAncestor(HiveParser.TOK_FUNCTION) != null && nodeIn.getAncestor(HiveParser.TOK_SELEXPR) != null) {
-LOG.info("Function Query:: "+nodeIn.getAncestor(HiveParser.TOK_FUNCTION).getChild(0).getText());
+LOG.debug("Function Query:: "+nodeIn.getAncestor(HiveParser.TOK_FUNCTION).getChild(0).getText());
qclmns.setColumnFunction(nodeIn.getAncestor(HiveParser.TOK_FUNCTION).getChild(0).getText());
}
+if (nodeIn.getAncestor(HiveParser.TOK_FUNCTIONDI) != null && nodeIn.getAncestor(HiveParser.TOK_SELEXPR) != null) {
+LOG.debug("Function Distinct Query:: "+nodeIn.getAncestor(HiveParser.TOK_FUNCTIONDI).getChild(0).getText());
+qclmns.setColumnDistinctFunction(nodeIn.getAncestor(HiveParser.TOK_FUNCTIONDI).getChild(0).getText());
+}
if (nodeIn.getParent().getText().equalsIgnoreCase(".")) {
-LOG.info("Table Name/Alias:: "+nodeIn.getChild(0).getText());
+LOG.debug("Table Name/Alias:: "+nodeIn.getChild(0).getText());
qclmns.setTbAliasOrName(nodeIn.getChild(0).getText());
-LOG.info("Column:: "+nodeIn.getParent().getChild(1).getText());
+LOG.debug("Column:: "+nodeIn.getParent().getChild(1).getText());
qclmns.setColumnName(nodeIn.getParent().getChild(1).getText());
if (nodeIn.getAncestor(HiveParser.TOK_SELEXPR).getChild(1) != null) {
-LOG.info("Column Alias:: "+nodeIn.getAncestor(HiveParser.TOK_SELEXPR).getChild(1).getText());
+LOG.debug("Column Alias:: "+nodeIn.getAncestor(HiveParser.TOK_SELEXPR).getChild(1).getText());
qclmns.setColumnAlias(nodeIn.getAncestor(HiveParser.TOK_SELEXPR).getChild(1).getText());
}
} else {
-LOG.info("Column:: "+nodeIn.getChild(0).getText());
+LOG.debug("Column:: "+nodeIn.getChild(0).getText());
qclmns.setColumnName(nodeIn.getChild(0).getText());
-if (nodeIn.getParent().getChild(1) != null) {
-LOG.info("Column Alias:: "+nodeIn.getParent().getChild(1).getText());
+if ((nodeIn.getParent().getChild(1) != null && nodeIn.getParent().getChild(1).getType() != HiveParser.TOK_TABLE_OR_COL)) {
+LOG.debug("Column Alias:: "+nodeIn.getParent().getChild(1).getText());
qclmns.setColumnAlias(nodeIn.getParent().getChild(1).getText());
}
}
@@ -376,9 +401,55 @@
ParseDriver pd = new ParseDriver();
ASTNode tree = pd.parse(query);
LOG.info("DUMP TREE: "+tree.dump());
+if (tree.getChild(0).getType() == HiveParser.TOK_DROPDATABASE) {
+hlb.setAction("drop_database");
+}
+if (tree.getChild(0).getType() == HiveParser.TOK_CREATEDATABASE) {
+hlb.setAction("create_database");
+}
+if (tree.getChild(0).getType() == HiveParser.TOK_CREATETABLE) {
+hlb.setAction("create_table");
+}
+if (tree.getChild(0).getType() == HiveParser.TOK_CREATEVIEW) {
+hlb.setAction("create_view");
+}
+if (tree.getChild(0).getType() == HiveParser.TOK_DROPTABLE) {
+hlb.setAction("drop_table");
+}
+if (tree.getChild(0).getType() == HiveParser.TOK_INSERT) {
+hlb.setAction("insert");
+}
+if (tree.getChild(0).getType() == HiveParser.TOK_INSERT_INTO) {
+hlb.setAction("insert_into");
+}
+if (tree.getChild(0).getType() == HiveParser.TOK_DROPVIEW) {
+hlb.setAction("drop_view");
+}
+if (tree.getChild(0).getType() == HiveParser.TOK_SHOWDATABASES) {
+hlb.setAction("show_databases");
+}
+if (tree.getChild(0).getType() == HiveParser.TOK_SHOWTABLES) {
+hlb.setAction("show_tables");
+}
+if (tree.getChild(0).getType() == HiveParser.TOK_ALTERVIEW_RENAME) {
+hlb.setAction("alter_view_rename");
+}
+if (tree.getChild(0).getType() == HiveParser.TOK_ALTERTABLE_RENAME) {
+hlb.setAction("alter_table_rename");
+}
+if (tree.getChild(0).getType() == HiveParser.TOK_ANALYZE) {
+hlb.setAction("analyze");
+}
+if (tree.getChild(0).getType() == HiveParser.TOK_QUERY) {
+hlb.setAction("select");
+}
while ((tree.getToken() == null) && (tree.getChildCount() > 0)) {
tree = (ASTNode) tree.getChild(0);
}
+sourceTables = new ArrayList<SourceTables>();
+queryColumns = new ArrayList<QueryColumns>();
/*
* initialize Event Processor and dispatcher.
@@ -396,8 +467,15 @@
ArrayList<Node> topNodes = new ArrayList<Node>();
topNodes.add(tree);
ogw.startWalking(topNodes, null);
+if (!(sourceTables.isEmpty())) {
+this.hlb.setSourceTables(sourceTables);
+}
+if (!(queryColumns.isEmpty())) {
+this.hlb.setQueryColumns(queryColumns);
+}
}
// Main method to run tests and return json/gson feed from a query
public static void main(String[] args) throws IOException, ParseException,
...
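
The statement-type detection added to getLineageInfo() above is a run of independent if blocks against the same root token. Since HiveParser's TOK_* constants are compile-time ints, the mapping could equally be a single switch; a sketch only, not the committed code:

// Sketch: equivalent, more compact form of the action-detection chain,
// reusing the tree and hlb variables already in scope in getLineageInfo().
int rootType = tree.getChild(0).getType();
switch (rootType) {
    case HiveParser.TOK_CREATEDATABASE: hlb.setAction("create_database"); break;
    case HiveParser.TOK_DROPDATABASE:   hlb.setAction("drop_database");   break;
    case HiveParser.TOK_CREATETABLE:    hlb.setAction("create_table");    break;
    case HiveParser.TOK_CREATEVIEW:     hlb.setAction("create_view");     break;
    case HiveParser.TOK_QUERY:          hlb.setAction("select");          break;
    // ...the remaining TOK_* constants map exactly as in the if chain above.
    default: break;
}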
@@ -87,25 +87,29 @@ public class Hook implements ExecuteWithHookContext {
executionEngine="mr";
}
hiveId = sess.getSessionId();
+String defaultdb = null;
switch(hookContext.getHookType()) {
case PRE_EXEC_HOOK:
-Set<ReadEntity> db = hookContext.getInputs();
-for (Object o : db) {
+Set<ReadEntity> db_pre = hookContext.getInputs();
+for (Object o : db_pre) {
LOG.debug("DB:Table="+o.toString());
+defaultdb = o.toString().split("@")[0];
}
currentTime = System.currentTimeMillis();
HiveLineageInfo lep_pre = new HiveLineageInfo();
lep_pre.getLineageInfo(query);
hlb=lep_pre.getHLBean();
+hlb.setDatabaseName(defaultdb);
hlb.setQueryEndTime(Long.toString(currentTime));
hlb.setQueryId(queryId);
hlb.setQuery(query);
hlb.setUser(user);
hlb.setHiveId(hiveId);
hlb.setSuccess(false);
+if (executionEngine != null ) {
if (executionEngine.equalsIgnoreCase("mr")) {
hlb.setExecutionEngine("mapreduce");
}
@@ -115,15 +119,24 @@ public class Hook implements ExecuteWithHookContext {
if (executionEngine.equalsIgnoreCase("spark")) {
hlb.setExecutionEngine("spark");
}
+} else {
+hlb.setExecutionEngine("local");
+}
hlb.setQueryStartTime(queryStartTime);
fireAndForget(hookContext.getConf(), hlb, queryId);
break;
case POST_EXEC_HOOK:
+Set<ReadEntity> db_post = hookContext.getInputs();
+for (Object o : db_post) {
+LOG.debug("DB:Table="+o.toString());
+defaultdb = o.toString().split("@")[0];
+}
currentTime = System.currentTimeMillis();
HiveLineageInfo lep_post = new HiveLineageInfo();
lep_post.getLineageInfo(query);
hlb=lep_post.getHLBean();
+hlb.setDatabaseName(defaultdb);
hlb.setQueryEndTime(Long.toString(currentTime));
hlb.setQueryId(queryId);
hlb.setQuery(query);
@@ -131,6 +144,7 @@ public class Hook implements ExecuteWithHookContext {
hlb.setQueryStartTime(queryStartTime);
hlb.setSuccess(true);
hlb.setHiveId(hiveId);
+if (executionEngine != null ) {
if (executionEngine.equalsIgnoreCase("mr")) {
hlb.setExecutionEngine("mapreduce");
}
@@ -140,13 +154,22 @@ public class Hook implements ExecuteWithHookContext {
if (executionEngine.equalsIgnoreCase("spark")) {
hlb.setExecutionEngine("spark");
}
+} else {
+hlb.setExecutionEngine("local");
+}
fireAndForget(hookContext.getConf(), hlb, queryId);
break;
case ON_FAILURE_HOOK:
+Set<ReadEntity> db_fail = hookContext.getInputs();
+for (Object o : db_fail) {
+LOG.debug("DB:Table="+o.toString());
+defaultdb = o.toString().split("@")[0];
+}
HiveLineageInfo lep_failed = new HiveLineageInfo();
lep_failed.getLineageInfo(query);
hlb=lep_failed.getHLBean();
+hlb.setDatabaseName(defaultdb);
hlb.setQueryEndTime(Long.toString(currentTime));
hlb.setQueryId(queryId);
hlb.setQuery(query);
@@ -155,6 +178,7 @@ public class Hook implements ExecuteWithHookContext {
hlb.setSuccess(false);
hlb.setFailed(true);
hlb.setHiveId(hiveId);
+if (executionEngine != null ) {
if (executionEngine.equalsIgnoreCase("mr")) {
hlb.setExecutionEngine("mapreduce");
}
@@ -164,6 +188,9 @@ public class Hook implements ExecuteWithHookContext {
if (executionEngine.equalsIgnoreCase("spark")) {
hlb.setExecutionEngine("spark");
}
+} else {
+hlb.setExecutionEngine("local");
+}
fireAndForget(hookContext.getConf(), hlb, queryId);
break;
default:
...
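
The engine normalization (mr to mapreduce, otherwise pass through, local when unset) now appears in all three hook branches. A small private helper would keep the branches from drifting; a sketch only, assuming the tez branch elided from this view mirrors the visible spark one:

// Sketch: shared execution-engine normalization for the three hook branches.
private static String normalizeEngine(String executionEngine) {
    if (executionEngine == null) {
        return "local"; // matches the new else branches in this commit
    }
    if (executionEngine.equalsIgnoreCase("mr")) {
        return "mapreduce";
    }
    // tez and spark pass through under their own names
    return executionEngine.toLowerCase();
}

Each branch would then reduce to hlb.setExecutionEngine(normalizeEngine(executionEngine));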